Added new method to get a recent score for a dataset
--- a/README.rst
+++ b/README.rst
@@ -33,10 +33,9 @@
googleanalytics.id = UA-1010101-1
googleanalytics.account = Account name (e.g. data.gov.uk, see top level item at https://www.google.com/analytics)
ga-report.period = monthly
- ga-report.bounce_url = /data
+ ga-report.bounce_url = /
- The ga-report.bounce_url specifies the path to use when calculating bounces. For DGU this is /data
- but you may want to set this to /.
+ The ga-report.bounce_url specifies a particular path to record the bounce rate for. Typically it is / (the home page).
3. Set up this extension's database tables using a paster command. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file)::
--- a/ckanext/ga_report/command.py
+++ b/ckanext/ga_report/command.py
@@ -80,6 +80,11 @@
default=False,
dest='delete_first',
help='Delete data for the period first')
+ self.parser.add_option('-s', '--skip_url_stats',
+ action='store_true',
+ default=False,
+ dest='skip_url_stats',
+ help='Skip the download of URL data - just do site-wide stats')
def command(self):
self._load_config()
@@ -95,7 +100,8 @@
return
downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc),
- delete_first=self.options.delete_first)
+ delete_first=self.options.delete_first,
+ skip_url_stats=self.options.skip_url_stats)
time_period = self.args[1] if self.args and len(self.args) > 1 \
else 'latest'
--- a/ckanext/ga_report/controller.py
+++ b/ckanext/ga_report/controller.py
@@ -71,13 +71,13 @@
entries = q.order_by('ga_stat.key').all()
def clean_key(key, val):
- if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounces']:
+ if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']:
val = "%.2f" % round(float(val), 2)
if key == 'Average time on site':
mins, secs = divmod(float(val), 60)
hours, mins = divmod(mins, 60)
val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
- if key in ['New visits','Bounces']:
+ if key in ['New visits','Bounce rate (home page)']:
val = "%s%%" % val
if key in ['Total page views', 'Total visits']:
val = int(val)
@@ -244,11 +244,11 @@
if publisher:
q = q.filter(GA_Url.department_id==publisher.name)
q = q.filter(GA_Url.period_name==month)
- q = q.order_by('ga_url.visitors::int desc')
+ q = q.order_by('ga_url.visits::int desc')
top_packages = []
for entry,package in q.limit(count):
if package:
- top_packages.append((package, entry.pageviews, entry.visitors))
+ top_packages.append((package, entry.pageviews, entry.visits))
else:
log.warning('Could not find package associated package')
@@ -306,11 +306,13 @@
month = c.month or 'All'
connection = model.Session.connection()
q = """
- select department_id, sum(pageviews::int) views, sum(visitors::int) visits
+ select department_id, sum(pageviews::int) views, sum(visits::int) visits
from ga_url
where department_id <> ''
+ and package_id <> ''
+ and url like '/dataset/%%'
and period_name=%s
- group by department_id order by visits desc
+ group by department_id order by views desc
"""
if limit:
q = q + " limit %s;" % (limit)
--- a/ckanext/ga_report/download_analytics.py
+++ b/ckanext/ga_report/download_analytics.py
@@ -17,11 +17,13 @@
class DownloadAnalytics(object):
'''Downloads and stores analytics info'''
- def __init__(self, service=None, profile_id=None, delete_first=False):
+ def __init__(self, service=None, profile_id=None, delete_first=False,
+ skip_url_stats=False):
self.period = config['ga-report.period']
self.service = service
self.profile_id = profile_id
self.delete_first = delete_first
+ self.skip_url_stats = skip_url_stats
def specific_month(self, date):
import calendar
@@ -96,31 +98,32 @@
self.get_full_period_name(period_name, period_complete_day),
start_date.strftime('%Y-%m-%d'),
end_date.strftime('%Y-%m-%d'))
-
+
if self.delete_first:
log.info('Deleting existing Analytics for this period "%s"',
period_name)
ga_model.delete(period_name)
- # Clean up the entries before we run this
- ga_model.pre_update_url_stats(period_name)
-
- accountName = config.get('googleanalytics.account')
-
- log.info('Downloading analytics for dataset views')
- data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)
-
- log.info('Storing dataset views (%i rows)', len(data.get('url')))
- self.store(period_name, period_complete_day, data, )
-
- log.info('Downloading analytics for publisher views')
- data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)
-
- log.info('Storing publisher views (%i rows)', len(data.get('url')))
- self.store(period_name, period_complete_day, data,)
-
- log.info('Aggregating datasets by publisher')
- ga_model.update_publisher_stats(period_name) # about 30 seconds.
+ if not self.skip_url_stats:
+ # Clean out old url data before storing the new
+ ga_model.pre_update_url_stats(period_name)
+
+ accountName = config.get('googleanalytics.account')
+
+ log.info('Downloading analytics for dataset views')
+ data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)
+
+ log.info('Storing dataset views (%i rows)', len(data.get('url')))
+ self.store(period_name, period_complete_day, data, )
+
+ log.info('Downloading analytics for publisher views')
+ data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)
+
+ log.info('Storing publisher views (%i rows)', len(data.get('url')))
+ self.store(period_name, period_complete_day, data,)
+
+ log.info('Aggregating datasets by publisher')
+ ga_model.update_publisher_stats(period_name) # about 30 seconds.
log.info('Downloading and storing analytics for site-wide stats')
self.sitewide_stats( period_name )
@@ -159,8 +162,8 @@
start_date = start_date.strftime('%Y-%m-%d')
end_date = end_date.strftime('%Y-%m-%d')
query = 'ga:pagePath=%s$' % path
- metrics = 'ga:uniquePageviews, ga:visits'
- sort = '-ga:uniquePageviews'
+ metrics = 'ga:pageviews, ga:visits'
+ sort = '-ga:pageviews'
# Supported query params at
# https://developers.google.com/analytics/devguides/reporting/core/v3/reference
@@ -177,8 +180,12 @@
packages = []
for entry in results.get('rows'):
(loc,pageviews,visits) = entry
- url = _normalize_url('http:/' + loc)
+ url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk
+
if not url.startswith('/dataset/') and not url.startswith('/publisher/'):
+ # filter out strays like:
+ # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open
+ # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate
continue
packages.append( (url, pageviews, visits,) ) # Temporary hack
return dict(url=packages)
@@ -212,8 +219,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
max_results=10000,
end_date=end_date).execute()
result_data = results.get('rows')
@@ -234,25 +241,27 @@
}
ga_model.update_sitewide_stats(period_name, "Totals", data)
- # Bounces from /data. This url is specified in configuration because
- # for DGU we don't want /.
- path = config.get('ga-report.bounce_url','/')
- print path
- results = self.service.data().ga().get(
- ids='ga:' + self.profile_id,
- filters='ga:pagePath=~%s$' % (path,),
- start_date=start_date,
- metrics='ga:bounces,ga:uniquePageviews',
+ # Bounces from / or another configurable page.
+ path = '/%s%s' % (config.get('googleanalytics.account'),
+ config.get('ga-report.bounce_url', '/'))
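+        # e.g. path is '/data.gov.uk/' with the sample config from the README
+        # (googleanalytics.account = data.gov.uk, ga-report.bounce_url = /)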
+ results = self.service.data().ga().get(
+ ids='ga:' + self.profile_id,
+ filters='ga:pagePath==%s' % (path,),
+ start_date=start_date,
+ metrics='ga:bounces,ga:pageviews',
dimensions='ga:pagePath',
max_results=10000,
end_date=end_date).execute()
result_data = results.get('rows')
- for results in result_data:
- if results[0] == path:
- bounce, total = [float(x) for x in results[1:]]
- pct = 100 * bounce/total
- print "%d bounces from %d total == %s" % (bounce, total, pct)
- ga_model.update_sitewide_stats(period_name, "Totals", {'Bounces': pct})
+ if not result_data or len(result_data) != 1:
+ log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
+ path, result_data)
+ return
+        results = result_data[0]
+        bounces, total = [float(x) for x in results[1:]]
+ pct = 100 * bounces/total
+ log.info('%d bounces from %d total == %s', bounces, total, pct)
+ ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': pct})
def _locale_stats(self, start_date, end_date, period_name):
@@ -260,8 +269,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:language,ga:country",
max_results=10000,
end_date=end_date).execute()
@@ -284,8 +293,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:socialNetwork,ga:referralPath",
max_results=10000,
end_date=end_date).execute()
@@ -303,8 +312,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:operatingSystem,ga:operatingSystemVersion",
max_results=10000,
end_date=end_date).execute()
@@ -328,8 +337,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:browser,ga:browserVersion",
max_results=10000,
end_date=end_date).execute()
@@ -377,8 +386,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
max_results=10000,
end_date=end_date).execute()
--- a/ckanext/ga_report/ga_model.py
+++ b/ckanext/ga_report/ga_model.py
@@ -27,7 +27,7 @@
Column('period_name', types.UnicodeText),
Column('period_complete_day', types.Integer),
Column('pageviews', types.UnicodeText),
- Column('visitors', types.UnicodeText),
+ Column('visits', types.UnicodeText),
Column('url', types.UnicodeText),
Column('department_id', types.UnicodeText),
Column('package_id', types.UnicodeText),
@@ -63,7 +63,7 @@
Column('period_name', types.UnicodeText),
Column('publisher_name', types.UnicodeText),
Column('views', types.UnicodeText),
- Column('visitors', types.UnicodeText),
+ Column('visits', types.UnicodeText),
Column('toplevel', types.Boolean, default=False),
Column('subpublishercount', types.Integer, default=0),
Column('parent', types.UnicodeText),
@@ -111,12 +111,10 @@
>>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
'/dataset/weekly_fuel_prices'
'''
- # Deliberately leaving a /
- url = url.replace('http:/','')
- return '/' + '/'.join(url.split('/')[2:])
-
-
-def _get_department_id_of_url(url):
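+    # e.g. 'http://data.gov.uk/dataset/name'.split('/') gives
+    # ['http:', '', 'data.gov.uk', 'dataset', 'name'], so keep from index 3 onwards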
+ return '/' + '/'.join(url.split('/')[3:])
+
+
+def _get_package_and_publisher(url):
# e.g. /dataset/fuel_prices
# e.g. /dataset/fuel_prices/resource/e63380d4
dataset_match = re.match('/dataset/([^/]+)(/.*)?', url)
@@ -126,12 +124,13 @@
if dataset:
publisher_groups = dataset.get_groups('publisher')
if publisher_groups:
- return publisher_groups[0].name
+ return dataset_ref,publisher_groups[0].name
+ return dataset_ref, None
else:
publisher_match = re.match('/publisher/([^/]+)(/.*)?', url)
if publisher_match:
- return publisher_match.groups()[0]
-
+ return None, publisher_match.groups()[0]
+ return None, None
def update_sitewide_stats(period_name, stat_name, data):
for k,v in data.iteritems():
@@ -156,25 +155,6 @@
model.Session.commit()
-def update_url_stat_totals(period_name):
-
- """
- items = model.Session.query(GA_Url).\
- filter(GA_Url.period_name != "All").\
- filter(GA_Url.url==url).all()
- values = {'id': make_uuid(),
- 'period_name': "All",
- 'period_complete_day': "0",
- 'url': url,
- 'pageviews': sum([int(x.pageviews) for x in items]),
- 'visitors': sum([int(x.visitors) for x in items]),
- 'department_id': department_id,
- 'package_id': package
- }
- model.Session.add(GA_Url(**values))
- model.Session.commit()
- """
-
def pre_update_url_stats(period_name):
model.Session.query(GA_Url).\
filter(GA_Url.period_name==period_name).delete()
@@ -188,26 +168,42 @@
stores them in GA_Url under the period and recalculates the totals for
the 'All' period.
'''
- for url, views, visitors in url_data:
- department_id = _get_department_id_of_url(url)
-
- package = None
- if url.startswith('/dataset/'):
- package = url[len('/dataset/'):]
-
- values = {'id': make_uuid(),
- 'period_name': period_name,
- 'period_complete_day': period_complete_day,
- 'url': url,
- 'pageviews': views,
- 'visitors': visitors,
- 'department_id': department_id,
- 'package_id': package
- }
- model.Session.add(GA_Url(**values))
+ for url, views, visits in url_data:
+ package, publisher = _get_package_and_publisher(url)
+
+
+ item = model.Session.query(GA_Url).\
+ filter(GA_Url.period_name==period_name).\
+ filter(GA_Url.url==url).first()
+ if item:
+            # convert to int before accumulating - the columns store numbers as text
+            item.pageviews = int(item.pageviews) + int(views)
+            item.visits = int(item.visits) + int(visits)
+ if not item.package_id:
+ item.package_id = package
+ if not item.department_id:
+ item.department_id = publisher
+ model.Session.add(item)
+ else:
+ values = {'id': make_uuid(),
+ 'period_name': period_name,
+ 'period_complete_day': period_complete_day,
+ 'url': url,
+ 'pageviews': views,
+ 'visits': visits,
+ 'department_id': publisher,
+ 'package_id': package
+ }
+ model.Session.add(GA_Url(**values))
model.Session.commit()
if package:
+ old_pageviews, old_visits = 0, 0
+ old = model.Session.query(GA_Url).\
+ filter(GA_Url.period_name=='All').\
+ filter(GA_Url.url==url).all()
+ old_pageviews = sum([int(o.pageviews) for o in old])
+ old_visits = sum([int(o.visits) for o in old])
+
entries = model.Session.query(GA_Url).\
filter(GA_Url.period_name!='All').\
filter(GA_Url.url==url).all()
@@ -215,14 +211,14 @@
'period_name': 'All',
'period_complete_day': 0,
'url': url,
- 'pageviews': sum([int(e.pageviews) for e in entries]),
- 'visitors': sum([int(e.visitors) for e in entries]),
- 'department_id': department_id,
+ 'pageviews': sum([int(e.pageviews) for e in entries]) + old_pageviews,
+ 'visits': sum([int(e.visits) for e in entries]) + old_visits,
+ 'department_id': publisher,
'package_id': package
}
+
model.Session.add(GA_Url(**values))
model.Session.commit()
-
@@ -266,7 +262,7 @@
filter(model.Group.type=='publisher').\
filter(model.Group.state=='active').all()
for publisher in publishers:
- views, visitors, subpub = update_publisher(period_name, publisher, publisher.name)
+ views, visits, subpub = update_publisher(period_name, publisher, publisher.name)
parent, parents = '', publisher.get_groups('publisher')
if parents:
parent = parents[0].name
@@ -275,7 +271,7 @@
filter(GA_Publisher.publisher_name==publisher.name).first()
if item:
item.views = views
- item.visitors = visitors
+ item.visits = visits
item.publisher_name = publisher.name
item.toplevel = publisher in toplevel
item.subpublishercount = subpub
@@ -287,7 +283,7 @@
'period_name': period_name,
'publisher_name': publisher.name,
'views': views,
- 'visitors': visitors,
+ 'visits': visits,
'toplevel': publisher in toplevel,
'subpublishercount': subpub,
'parent': parent
@@ -297,7 +293,7 @@
def update_publisher(period_name, pub, part=''):
- views,visitors,subpub = 0, 0, 0
+ views,visits,subpub = 0, 0, 0
for publisher in go_down_tree(pub):
subpub = subpub + 1
items = model.Session.query(GA_Url).\
@@ -305,9 +301,9 @@
filter(GA_Url.department_id==publisher.name).all()
for item in items:
views = views + int(item.pageviews)
- visitors = visitors + int(item.visitors)
-
- return views, visitors, (subpub-1)
+ visits = visits + int(item.visits)
+
+ return views, visits, (subpub-1)
def get_top_level():
@@ -347,3 +343,21 @@
q.delete()
model.Session.commit()
+def get_score_for_dataset(dataset_name):
+    '''Returns a recent popularity score for a dataset: pageviews this month
+    plus half of last month's pageviews.'''
+    import datetime
+    now = datetime.datetime.now()
+    last_month = now.replace(day=1) - datetime.timedelta(days=1)
+    period_names = ['%s-%02d' % (now.year, now.month),
+                    '%s-%02d' % (last_month.year, last_month.month)]
+
+    entry = model.Session.query(GA_Url)\
+        .filter(GA_Url.period_name==period_names[0])\
+        .filter(GA_Url.package_id==dataset_name).first()
+    score = int(entry.pageviews) if entry else 0
+
+    entry = model.Session.query(GA_Url)\
+        .filter(GA_Url.period_name==period_names[1])\
+        .filter(GA_Url.package_id==dataset_name).first()
+    score += int(entry.pageviews) / 2 if entry else 0
+    return score
--- a/ckanext/ga_report/helpers.py
+++ b/ckanext/ga_report/helpers.py
@@ -106,7 +106,7 @@
if not p in datasets:
datasets[p] = {'views':0, 'visits': 0}
datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews)
- datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visitors)
+ datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits)
results = []
for k, v in datasets.iteritems():
--- a/ckanext/ga_report/templates/ga_report/notes.html
+++ b/ckanext/ga_report/templates/ga_report/notes.html
@@ -8,7 +8,6 @@
<ul>
<li>"Views" is the number of sessions during which the page was viewed one or more times (technically known as "unique pageviews").</li>
<li>"Visits" is the number of unique user visits to a page, counted once for each visitor for each session.</li>
-<!--! <li>"Visitors" is the number of unique users visiting the site (whether once or more times).</li> -->
<li>These usage statistics are confined to users with javascript enabled, which excludes web crawlers and API calls.</li>
<li>The results are not shown when the number of views/visits is tiny. Where these relate to site pages, results are available in full in the CSV download. Where these relate to users' web browser information, results are not disclosed, for privacy reasons.</li>
</ul>
--- a/ckanext/ga_report/templates/ga_report/publisher/index.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/index.html
@@ -41,14 +41,14 @@
<table class="table table-condensed table-bordered table-striped">
<tr>
<th>Publisher</th>
- <th>Dataset Visits</th>
+<!-- <th>Dataset Visits</th>-->
<th>Dataset Views</th>
</tr>
<py:for each="publisher, views, visits in c.top_publishers">
<tr>
<td>${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name))}
</td>
- <td>${visits}</td>
+<!-- <td>${visits}</td> -->
<td>${views}</td>
</tr>
</py:for>
--- a/ckanext/ga_report/templates/ga_report/publisher/read.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/read.html
@@ -47,14 +47,14 @@
<table py:if="c.top_packages" class="table table-condensed table-bordered table-striped">
<tr>
<th>Dataset</th>
- <th>Visits</th>
+<!-- <th>Visits</th> -->
<th>Views</th>
</tr>
<py:for each="package, views, visits in c.top_packages">
<tr>
<td>${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))}
</td>
- <td>${visits}</td>
+<!-- <td>${visits}</td> -->
<td>${views}</td>
</tr>
</py:for>
--- /dev/null
+++ b/ckanext/ga_report/tests/test_model.py
@@ -0,0 +1,18 @@
+from nose.tools import assert_equal
+from ckanext.ga_report.ga_model import _normalize_url
+
+class TestNormalizeUrl:
+ def test_normal(self):
+ assert_equal(_normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices'),
+ '/dataset/weekly_fuel_prices')
+
+ def test_www_dot(self):
+ assert_equal(_normalize_url('http://www.data.gov.uk/dataset/weekly_fuel_prices'),
+ '/dataset/weekly_fuel_prices')
+
+ def test_https(self):
+ assert_equal(_normalize_url('https://data.gov.uk/dataset/weekly_fuel_prices'),
+ '/dataset/weekly_fuel_prices')
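+
+    # Illustrative extra check: paths below a dataset (e.g. resource pages)
+    # are preserved by the normalisation
+    def test_dataset_resource(self):
+        assert_equal(_normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices/resource/123'),
+            '/dataset/weekly_fuel_prices/resource/123')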
+
+