From: David Read
Date: Thu, 08 Nov 2012 10:08:08 +0000
Subject: Sort the dataset report by views, since we only display that now.
X-Git-Url: http://maxious.lambdacomplex.org/git/?p=ckanext-ga-report.git&a=commitdiff&h=8c5081dedadf6c12309f05584952f9d36494e2f9
---
Sort the dataset report by views, since we only display that now.
---
--- a/README.rst
+++ b/README.rst
@@ -26,15 +26,16 @@
1. Activate your CKAN Python environment and install this extension's software::
$ pyenv/bin/activate
- $ pip install -e git+https://github.com/okfn/ckanext-ga-report.git#egg=ckanext-ga-report
+ $ pip install -e git+https://github.com/datagovuk/ckanext-ga-report.git#egg=ckanext-ga-report
2. Ensure your development.ini (or similar) contains the details of your Google Analytics account and configuration::
googleanalytics.id = UA-1010101-1
- googleanalytics.account = Account name (i.e. data.gov.uk, see top level item at https://www.google.com/analytics)
+ googleanalytics.account = Account name (e.g. data.gov.uk, see top level item at https://www.google.com/analytics)
ga-report.period = monthly
+ ga-report.bounce_url = /
- Note that your credentials will be readable by system administrators on your server. Rather than use sensitive account details, it is suggested you give access to the GA account to a new Google account that you create just for this purpose.
+ The ``ga-report.bounce_url`` option specifies the path for which the bounce rate is recorded; typically this is ``/`` (the home page).
3. Set up this extension's database tables using a paster command. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file)::
@@ -43,6 +44,12 @@
4. Enable the extension in your CKAN config file by adding it to ``ckan.plugins``::
ckan.plugins = ga-report
+
+Troubleshooting
+----------------
+
+* ``(ProgrammingError) relation "ga_url" does not exist``
+ This means that the ``paster initdb`` step has not been run successfully. Refer to the installation instructions for this extension.
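
  For example, re-running that step takes the same form as the other paster commands in this README (adjust the ``--config`` path to your site config file)::

      $ paster initdb --config=../ckan/development.ini
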
Authorization
@@ -79,7 +86,7 @@
Tutorial
--------
-Download some GA data and store it in CKAN's db. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file) and specifying the name of your auth file (token.dat by default) from the previous step::
+Download some GA data and store it in CKAN's database. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, and alter the ``--config`` option to point to your site config file.) Specify the name of your auth file (token.dat by default) from the previous step::
$ paster loadanalytics token.dat latest --config=../ckan/development.ini
--- a/ckanext/ga_report/command.py
+++ b/ckanext/ga_report/command.py
@@ -66,13 +66,25 @@
    And where <time-period> is:
all - data for all time
latest - (default) just the 'latest' data
- YYYY-MM-DD - just data for all time periods going
- back to (and including) this date
+ YYYY-MM - just data for the specific month
"""
summary = __doc__.split('\n')[0]
usage = __doc__
max_args = 2
min_args = 1
+
+ def __init__(self, name):
+ super(LoadAnalytics, self).__init__(name)
+ self.parser.add_option('-d', '--delete-first',
+ action='store_true',
+ default=False,
+ dest='delete_first',
+ help='Delete data for the period first')
+ self.parser.add_option('-s', '--skip_url_stats',
+ action='store_true',
+ default=False,
+ dest='skip_url_stats',
+ help='Skip the download of URL data - just do site-wide stats')
def command(self):
self._load_config()
@@ -84,10 +96,12 @@
svc = init_service(self.args[0], None)
except TypeError:
print ('Have you correctly run the getauthtoken task and '
- 'specified the correct file here')
+ 'specified the correct token file?')
return
- downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc))
+ downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc),
+ delete_first=self.options.delete_first,
+ skip_url_stats=self.options.skip_url_stats)
time_period = self.args[1] if self.args and len(self.args) > 1 \
else 'latest'
@@ -96,6 +110,7 @@
elif time_period == 'latest':
downloader.latest()
else:
- since_date = datetime.datetime.strptime(time_period, '%Y-%m-%d')
- downloader.since_date(since_date)
+ # The month to use
+ for_date = datetime.datetime.strptime(time_period, '%Y-%m')
+ downloader.specific_month(for_date)
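
With the options above, a single month can be reloaded in isolation; for example (the token file and config path follow this extension's README, while the month and the new ``--delete-first`` flag are illustrative)::

    $ paster loadanalytics token.dat 2012-10 --delete-first --config=../ckan/development.ini
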
--- a/ckanext/ga_report/controller.py
+++ b/ckanext/ga_report/controller.py
@@ -1,10 +1,345 @@
+import re
+import csv
+import sys
import logging
-from ckan.lib.base import BaseController, c, render
-import report_model
+import operator
+import collections
+from ckan.lib.base import (BaseController, c, g, render, request, response, abort)
+
+import sqlalchemy
+from sqlalchemy import func, cast, Integer
+import ckan.model as model
+from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher
log = logging.getLogger('ckanext.ga-report')
+
+def _get_month_name(strdate):
+ import calendar
+ from time import strptime
+ d = strptime(strdate, '%Y-%m')
+ return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year)
+
+
+def _month_details(cls):
+ '''Returns a list of all the month names'''
+ months = []
+ vals = model.Session.query(cls.period_name).filter(cls.period_name!='All').distinct().all()
+ for m in vals:
+ months.append( (m[0], _get_month_name(m[0])))
+ return sorted(months, key=operator.itemgetter(0), reverse=True)
+
+
class GaReport(BaseController):
+
+ def csv(self, month):
+ import csv
+
+ q = model.Session.query(GA_Stat)
+ if month != 'all':
+ q = q.filter(GA_Stat.period_name==month)
+ entries = q.order_by('GA_Stat.period_name, GA_Stat.stat_name, GA_Stat.key').all()
+
+ response.headers['Content-Type'] = "text/csv; charset=utf-8"
+ response.headers['Content-Disposition'] = str('attachment; filename=stats_%s.csv' % (month,))
+
+ writer = csv.writer(response)
+ writer.writerow(["Period", "Statistic", "Key", "Value"])
+
+ for entry in entries:
+ writer.writerow([entry.period_name.encode('utf-8'),
+ entry.stat_name.encode('utf-8'),
+ entry.key.encode('utf-8'),
+ entry.value.encode('utf-8')])
+
def index(self):
- return render('index.html')
-
+
+ # Get the month details by fetching distinct values and determining the
+ # month names from the values.
+ c.months = _month_details(GA_Stat)
+
+ # Work out which month to show, based on query params of the first item
+ c.month_desc = 'all months'
+ c.month = request.params.get('month', '')
+ if c.month:
+ c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
+
+ q = model.Session.query(GA_Stat).\
+ filter(GA_Stat.stat_name=='Totals')
+ if c.month:
+ q = q.filter(GA_Stat.period_name==c.month)
+ entries = q.order_by('ga_stat.key').all()
+
+ def clean_key(key, val):
+ if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']:
+ val = "%.2f" % round(float(val), 2)
+ if key == 'Average time on site':
+ mins, secs = divmod(float(val), 60)
+ hours, mins = divmod(mins, 60)
+ val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
+ if key in ['New visits','Bounce rate (home page)']:
+ val = "%s%%" % val
+ if key in ['Total page views', 'Total visits']:
+ val = int(val)
+
+ return key, val
+
+ c.global_totals = []
+ if c.month:
+ for e in entries:
+ key, val = clean_key(e.key, e.value)
+ c.global_totals.append((key, val))
+ else:
+ d = collections.defaultdict(list)
+ for e in entries:
+ d[e.key].append(float(e.value))
+ for k, v in d.iteritems():
+ if k in ['Total page views', 'Total visits']:
+ v = sum(v)
+ else:
+ v = float(sum(v))/len(v)
+ key, val = clean_key(k,v)
+
+ c.global_totals.append((key, val))
+ c.global_totals = sorted(c.global_totals, key=operator.itemgetter(0))
+
+ keys = {
+ 'Browser versions': 'browser_versions',
+ 'Browsers': 'browsers',
+ 'Operating Systems versions': 'os_versions',
+ 'Operating Systems': 'os',
+ 'Social sources': 'social_networks',
+ 'Languages': 'languages',
+ 'Country': 'country'
+ }
+
+ def shorten_name(name, length=60):
+        return (name[:length] + '..') if len(name) > length else name
+
+ def fill_out_url(url):
+ import urlparse
+ return urlparse.urljoin(g.site_url, url)
+
+ c.social_referrer_totals, c.social_referrers = [], []
+ q = model.Session.query(GA_ReferralStat)
+ q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
+ q = q.order_by('ga_referrer.count::int desc')
+ for entry in q.all():
+ c.social_referrers.append((shorten_name(entry.url), fill_out_url(entry.url),
+ entry.source,entry.count))
+
+ q = model.Session.query(GA_ReferralStat.url,
+ func.sum(GA_ReferralStat.count).label('count'))
+ q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
+ q = q.order_by('count desc').group_by(GA_ReferralStat.url)
+ for entry in q.all():
+ c.social_referrer_totals.append((shorten_name(entry[0]), fill_out_url(entry[0]),'',
+ entry[1]))
+
+ for k, v in keys.iteritems():
+ q = model.Session.query(GA_Stat).\
+ filter(GA_Stat.stat_name==k)
+ if c.month:
+ entries = []
+ q = q.filter(GA_Stat.period_name==c.month).\
+ order_by('ga_stat.value::int desc')
+
+ d = collections.defaultdict(int)
+ for e in q.all():
+ d[e.key] += int(e.value)
+ entries = []
+ for key, val in d.iteritems():
+ entries.append((key,val,))
+ entries = sorted(entries, key=operator.itemgetter(1), reverse=True)
+
+ # Get the total for each set of values and then set the value as
+ # a percentage of the total
+ if k == 'Social sources':
+ total = sum([x for n,x in c.global_totals if n == 'Total visits'])
+ else:
+ total = sum([num for _,num in entries])
+ setattr(c, v, [(k,_percent(v,total)) for k,v in entries ])
+
+ return render('ga_report/site/index.html')
+
+
+class GaDatasetReport(BaseController):
+ """
+ Displays the pageview and visit count for datasets
+ with options to filter by publisher and time period.
+ """
+ def publisher_csv(self, month):
+ '''
+ Returns a CSV of each publisher with the total number of dataset
+ views & visits.
+ '''
+ c.month = month if not month == 'all' else ''
+ response.headers['Content-Type'] = "text/csv; charset=utf-8"
+ response.headers['Content-Disposition'] = str('attachment; filename=publishers_%s.csv' % (month,))
+
+ writer = csv.writer(response)
+ writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"])
+
+ for publisher,view,visit in _get_top_publishers(None):
+ writer.writerow([publisher.title.encode('utf-8'),
+ publisher.name.encode('utf-8'),
+ view,
+ visit,
+ month])
+
+ def dataset_csv(self, id='all', month='all'):
+ '''
+ Returns a CSV with the number of views & visits for each dataset.
+
+        :param id: A publisher name, or 'all' for every publisher
+ :param month: The time period, or 'all'
+ '''
+ c.month = month if not month == 'all' else ''
+ if id != 'all':
+ c.publisher = model.Group.get(id)
+ if not c.publisher:
+ abort(404, 'A publisher with that name could not be found')
+
+ packages = self._get_packages(c.publisher)
+ response.headers['Content-Type'] = "text/csv; charset=utf-8"
+ response.headers['Content-Disposition'] = \
+ str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,))
+
+ writer = csv.writer(response)
+ writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Period Name"])
+
+ for package,view,visit in packages:
+ writer.writerow([package.title.encode('utf-8'),
+ package.name.encode('utf-8'),
+ view,
+ visit,
+ month])
+
+ def publishers(self):
+ '''A list of publishers and the number of views/visits for each'''
+
+ # Get the month details by fetching distinct values and determining the
+ # month names from the values.
+ c.months = _month_details(GA_Url)
+
+ # Work out which month to show, based on query params of the first item
+ c.month = request.params.get('month', '')
+ c.month_desc = 'all months'
+ if c.month:
+ c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
+
+ c.top_publishers = _get_top_publishers()
+ return render('ga_report/publisher/index.html')
+
+ def _get_packages(self, publisher=None, count=-1):
+ '''Returns the datasets in order of views'''
+ if count == -1:
+ count = sys.maxint
+
+ month = c.month or 'All'
+
+ q = model.Session.query(GA_Url,model.Package)\
+ .filter(model.Package.name==GA_Url.package_id)\
+ .filter(GA_Url.url.like('/dataset/%'))
+ if publisher:
+ q = q.filter(GA_Url.department_id==publisher.name)
+ q = q.filter(GA_Url.period_name==month)
+ q = q.order_by('ga_url.pageviews::int desc')
+ top_packages = []
+ for entry,package in q.limit(count):
+ if package:
+ top_packages.append((package, entry.pageviews, entry.visits))
+ else:
+                log.warning('Could not find the package associated with this GA_Url entry')
+
+ return top_packages
+
+ def read(self):
+ '''
+ Lists the most popular datasets across all publishers
+ '''
+ return self.read_publisher(None)
+
+ def read_publisher(self, id):
+ '''
+ Lists the most popular datasets for a publisher (or across all publishers)
+ '''
+ count = 20
+
+ c.publishers = _get_publishers()
+
+ id = request.params.get('publisher', id)
+ if id and id != 'all':
+ c.publisher = model.Group.get(id)
+ if not c.publisher:
+ abort(404, 'A publisher with that name could not be found')
+ c.publisher_name = c.publisher.name
+ c.top_packages = [] # package, dataset_views in c.top_packages
+
+ # Get the month details by fetching distinct values and determining the
+ # month names from the values.
+ c.months = _month_details(GA_Url)
+
+ # Work out which month to show, based on query params of the first item
+ c.month = request.params.get('month', '')
+ if not c.month:
+ c.month_desc = 'all months'
+ else:
+ c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
+
+ month = c.month or 'All'
+ c.publisher_page_views = 0
+ q = model.Session.query(GA_Url).\
+ filter(GA_Url.url=='/publisher/%s' % c.publisher_name)
+ entry = q.filter(GA_Url.period_name==c.month).first()
+ c.publisher_page_views = entry.pageviews if entry else 0
+
+ c.top_packages = self._get_packages(c.publisher, 20)
+
+ return render('ga_report/publisher/read.html')
+
+def _get_top_publishers(limit=20):
+ '''
+    Returns a list of the top publishers by dataset views
+    (20 by default; the number to show can be varied with 'limit').
+ '''
+ month = c.month or 'All'
+ connection = model.Session.connection()
+ q = """
+ select department_id, sum(pageviews::int) views, sum(visits::int) visits
+ from ga_url
+ where department_id <> ''
+ and package_id <> ''
+ and url like '/dataset/%%'
+ and period_name=%s
+ group by department_id order by views desc
+ """
+ if limit:
+ q = q + " limit %s;" % (limit)
+
+ top_publishers = []
+ res = connection.execute(q, month)
+ for row in res:
+ g = model.Group.get(row[0])
+ if g:
+ top_publishers.append((g, row[1], row[2]))
+ return top_publishers
+
+
+def _get_publishers():
+ '''
+ Returns a list of all publishers. Each item is a tuple:
+ (name, title)
+ '''
+ publishers = []
+ for pub in model.Session.query(model.Group).\
+ filter(model.Group.type=='publisher').\
+ filter(model.Group.state=='active').\
+ order_by(model.Group.name):
+ publishers.append((pub.name, pub.title))
+ return publishers
+
+def _percent(num, total):
+ p = 100 * float(num)/float(total)
+ return "%.2f%%" % round(p, 2)
+
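
For reference, the ``ga_url.pageviews::int desc`` ordering used in this controller relies on casting the text column to an integer in SQL. A minimal sketch of the same idea using SQLAlchemy's ``cast`` (already imported above), assuming the ``GA_Url`` mapping and an open ``model.Session``; the helper name is hypothetical::

    from sqlalchemy import Integer, cast, desc

    def _top_dataset_urls(session, month='All', limit=20):
        '''Dataset URLs for one period, most viewed first (pageviews is stored as text).'''
        return (session.query(GA_Url)
                       .filter(GA_Url.period_name == month)
                       .filter(GA_Url.url.like('/dataset/%'))
                       .order_by(desc(cast(GA_Url.pageviews, Integer)))
                       .limit(limit)
                       .all())
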
--- a/ckanext/ga_report/download_analytics.py
+++ b/ckanext/ga_report/download_analytics.py
@@ -1,9 +1,9 @@
import os
import logging
import datetime
-
+import collections
from pylons import config
-
+from ga_model import _normalize_url
import ga_model
#from ga_client import GA
@@ -11,18 +11,31 @@
log = logging.getLogger('ckanext.ga-report')
FORMAT_MONTH = '%Y-%m'
+MIN_VIEWS = 50
+MIN_VISITS = 20
class DownloadAnalytics(object):
'''Downloads and stores analytics info'''
- def __init__(self, service=None, profile_id=None):
+ def __init__(self, service=None, profile_id=None, delete_first=False,
+ skip_url_stats=False):
self.period = config['ga-report.period']
self.service = service
self.profile_id = profile_id
-
-
- def all_(self):
- self.since_date(datetime.datetime(2010, 1, 1))
+ self.delete_first = delete_first
+ self.skip_url_stats = skip_url_stats
+
+ def specific_month(self, date):
+ import calendar
+
+ first_of_this_month = datetime.datetime(date.year, date.month, 1)
+ _, last_day_of_month = calendar.monthrange(int(date.year), int(date.month))
+ last_of_this_month = datetime.datetime(date.year, date.month, last_day_of_month)
+ periods = ((date.strftime(FORMAT_MONTH),
+ last_day_of_month,
+ first_of_this_month, last_of_this_month),)
+ self.download_and_store(periods)
+
def latest(self):
if self.period == 'monthly':
@@ -37,13 +50,13 @@
self.download_and_store(periods)
- def since_date(self, since_date):
+ def for_date(self, for_date):
        assert isinstance(for_date, datetime.datetime)
periods = [] # (period_name, period_complete_day, start_date, end_date)
if self.period == 'monthly':
first_of_the_months_until_now = []
- year = since_date.year
- month = since_date.month
+ year = for_date.year
+ month = for_date.month
now = datetime.datetime.now()
first_of_this_month = datetime.datetime(now.year, now.month, 1)
while True:
@@ -81,32 +94,76 @@
def download_and_store(self, periods):
for period_name, period_complete_day, start_date, end_date in periods:
- log.info('Downloading Analytics for period "%s" (%s - %s)',
+ log.info('Period "%s" (%s - %s)',
self.get_full_period_name(period_name, period_complete_day),
- start_date.strftime('%Y %m %d'),
- end_date.strftime('%Y %m %d'))
-
- data = self.download(start_date, end_date, '~/dataset/[a-z0-9-_]+')
- log.info('Storing Dataset Analytics for period "%s"',
- self.get_full_period_name(period_name, period_complete_day))
- self.store(period_name, period_complete_day, data, )
-
- data = self.download(start_date, end_date, '~/publisher/[a-z0-9-_]+')
- log.info('Storing Publisher Analytics for period "%s"',
- self.get_full_period_name(period_name, period_complete_day))
- self.store(period_name, period_complete_day, data,)
- ga_model.update_publisher_stats(period_name)
-
+ start_date.strftime('%Y-%m-%d'),
+ end_date.strftime('%Y-%m-%d'))
+
+ if self.delete_first:
+ log.info('Deleting existing Analytics for this period "%s"',
+ period_name)
+ ga_model.delete(period_name)
+
+ if not self.skip_url_stats:
+ # Clean out old url data before storing the new
+ ga_model.pre_update_url_stats(period_name)
+
+ accountName = config.get('googleanalytics.account')
+
+ log.info('Downloading analytics for dataset views')
+ data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)
+
+ log.info('Storing dataset views (%i rows)', len(data.get('url')))
+ self.store(period_name, period_complete_day, data, )
+
+ log.info('Downloading analytics for publisher views')
+ data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)
+
+ log.info('Storing publisher views (%i rows)', len(data.get('url')))
+ self.store(period_name, period_complete_day, data,)
+
+ log.info('Aggregating datasets by publisher')
+ ga_model.update_publisher_stats(period_name) # about 30 seconds.
+
+ log.info('Downloading and storing analytics for site-wide stats')
self.sitewide_stats( period_name )
-
- def download(self, start_date, end_date, path='~/dataset/[a-z0-9-_]+'):
+ log.info('Downloading and storing analytics for social networks')
+ self.update_social_info(period_name, start_date, end_date)
+
+
+ def update_social_info(self, period_name, start_date, end_date):
+ start_date = start_date.strftime('%Y-%m-%d')
+ end_date = end_date.strftime('%Y-%m-%d')
+ query = 'ga:hasSocialSourceReferral=~Yes$'
+ metrics = 'ga:entrances'
+ sort = '-ga:entrances'
+
+ # Supported query params at
+ # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
+ results = self.service.data().ga().get(
+ ids='ga:' + self.profile_id,
+ filters=query,
+ start_date=start_date,
+ metrics=metrics,
+ sort=sort,
+ dimensions="ga:landingPagePath,ga:socialNetwork",
+ max_results=10000,
+ end_date=end_date).execute()
+ data = collections.defaultdict(list)
+ rows = results.get('rows',[])
+ for row in rows:
+ data[_normalize_url(row[0])].append( (row[1], int(row[2]),) )
+ ga_model.update_social(period_name, data)
+
+
+ def download(self, start_date, end_date, path=None):
'''Get data from GA for a given time period'''
start_date = start_date.strftime('%Y-%m-%d')
end_date = end_date.strftime('%Y-%m-%d')
query = 'ga:pagePath=%s$' % path
- metrics = 'ga:uniquePageviews, ga:visits'
- sort = '-ga:uniquePageviews'
+ metrics = 'ga:pageviews, ga:visits'
+ sort = '-ga:pageviews'
# Supported query params at
# https://developers.google.com/analytics/devguides/reporting/core/v3/reference
@@ -120,21 +177,22 @@
max_results=10000,
end_date=end_date).execute()
- if os.getenv('DEBUG'):
- import pprint
- pprint.pprint(results)
- print 'Total results: %s' % results.get('totalResults')
-
packages = []
for entry in results.get('rows'):
(loc,pageviews,visits) = entry
- packages.append( ('http:/' + loc, pageviews, visits,) ) # Temporary hack
+ url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk
+
+ if not url.startswith('/dataset/') and not url.startswith('/publisher/'):
+ # filter out strays like:
+ # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open
+ # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate
+ continue
+ packages.append( (url, pageviews, visits,) ) # Temporary hack
return dict(url=packages)
def store(self, period_name, period_complete_day, data):
if 'url' in data:
ga_model.update_url_stats(period_name, period_complete_day, data['url'])
-
def sitewide_stats(self, period_name):
import calendar
@@ -143,50 +201,76 @@
start_date = '%s-01' % period_name
end_date = '%s-%s' % (period_name, last_day_of_month)
- print 'Sitewide_stats for %s (%s -> %s)' % (period_name, start_date, end_date)
-
funcs = ['_totals_stats', '_social_stats', '_os_stats',
'_locale_stats', '_browser_stats', '_mobile_stats']
for f in funcs:
- print ' + Fetching %s stats' % f.split('_')[1]
+ log.info('Downloading analytics for %s' % f.split('_')[1])
getattr(self, f)(start_date, end_date, period_name)
+ def _get_results(result_data, f):
+ data = {}
+ for result in result_data:
+ key = f(result)
+ data[key] = data.get(key,0) + result[1]
+ return data
def _totals_stats(self, start_date, end_date, period_name):
""" Fetches distinct totals, total pageviews etc """
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
- max_results=10000,
- end_date=end_date).execute()
- result_data = results.get('rows')
- ga_model.update_sitewide_stats(period_name, "Totals", {'Total pageviews': result_data[0][0]})
-
- results = self.service.data().ga().get(
- ids='ga:' + self.profile_id,
- start_date=start_date,
- metrics='ga:pageviewsPerVisit,ga:bounces,ga:avgTimeOnSite,ga:percentNewVisits',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
+ max_results=10000,
+ end_date=end_date).execute()
+ result_data = results.get('rows')
+ ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]})
+
+ results = self.service.data().ga().get(
+ ids='ga:' + self.profile_id,
+ start_date=start_date,
+ metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits',
max_results=10000,
end_date=end_date).execute()
result_data = results.get('rows')
data = {
'Pages per visit': result_data[0][0],
- 'Bounces': result_data[0][1],
- 'Average time on site': result_data[0][2],
- 'Percent new visits': result_data[0][3],
+ 'Average time on site': result_data[0][1],
+ 'New visits': result_data[0][2],
+ 'Total visits': result_data[0][3],
}
ga_model.update_sitewide_stats(period_name, "Totals", data)
+ # Bounces from / or another configurable page.
+ path = '/%s%s' % (config.get('googleanalytics.account'),
+ config.get('ga-report.bounce_url', '/'))
+ results = self.service.data().ga().get(
+ ids='ga:' + self.profile_id,
+ filters='ga:pagePath==%s' % (path,),
+ start_date=start_date,
+ metrics='ga:bounces,ga:pageviews',
+ dimensions='ga:pagePath',
+ max_results=10000,
+ end_date=end_date).execute()
+ result_data = results.get('rows')
+ if not result_data or len(result_data) != 1:
+ log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
+ path, result_data)
+ return
+ results = result_data[0]
+ bounces, total = [float(x) for x in result_data[0][1:]]
+ pct = 100 * bounces/total
+ log.info('%d bounces from %d total == %s', bounces, total, pct)
+ ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': pct})
+
def _locale_stats(self, start_date, end_date, period_name):
""" Fetches stats about language and country """
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:language,ga:country",
max_results=10000,
end_date=end_date).execute()
@@ -194,11 +278,13 @@
data = {}
for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2])
+ self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Languages", data)
data = {}
for result in result_data:
data[result[1]] = data.get(result[1], 0) + int(result[2])
+ self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Country", data)
@@ -207,19 +293,17 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:socialNetwork,ga:referralPath",
max_results=10000,
end_date=end_date).execute()
result_data = results.get('rows')
- twitter_links = []
data = {}
for result in result_data:
if not result[0] == '(not set)':
data[result[0]] = data.get(result[0], 0) + int(result[2])
- if result[0] == 'Twitter':
- twitter_links.append(result[1])
+ self._filter_out_long_tail(data, 3)
ga_model.update_sitewide_stats(period_name, "Social sources", data)
@@ -228,8 +312,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:operatingSystem,ga:operatingSystemVersion",
max_results=10000,
end_date=end_date).execute()
@@ -237,12 +321,14 @@
data = {}
for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2])
+ self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Operating Systems", data)
data = {}
for result in result_data:
- key = "%s (%s)" % (result[0],result[1])
- data[key] = result[2]
+ if int(result[2]) >= MIN_VIEWS:
+ key = "%s %s" % (result[0],result[1])
+ data[key] = result[2]
ga_model.update_sitewide_stats(period_name, "Operating Systems versions", data)
@@ -251,23 +337,48 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:browser,ga:browserVersion",
max_results=10000,
end_date=end_date).execute()
result_data = results.get('rows')
+ # e.g. [u'Firefox', u'19.0', u'20']
+
data = {}
for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2])
+ self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Browsers", data)
data = {}
for result in result_data:
- key = "%s (%s)" % (result[0],result[1])
- data[key] = result[2]
+ key = "%s %s" % (result[0], self._filter_browser_version(result[0], result[1]))
+ data[key] = data.get(key, 0) + int(result[2])
+ self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Browser versions", data)
+ @classmethod
+ def _filter_browser_version(cls, browser, version_str):
+ '''
+        Simplifies a detailed browser version string,
+        e.g. grouping Firefox 3.5.1 and 3.5.2 together as just 3.
+        This makes the stats easier to read and helps protect privacy.
+ '''
+ ver = version_str
+ parts = ver.split('.')
+ if len(parts) > 1:
+ if parts[1][0] == '0':
+ ver = parts[0]
+ else:
+ ver = "%s" % (parts[0])
+ # Special case complex version nums
+ if browser in ['Safari', 'Android Browser']:
+ ver = parts[0]
+ if len(ver) > 2:
+ num_hidden_digits = len(ver) - 2
+ ver = ver[0] + ver[1] + 'X' * num_hidden_digits
+ return ver
def _mobile_stats(self, start_date, end_date, period_name):
""" Info about mobile devices """
@@ -275,8 +386,8 @@
results = self.service.data().ga().get(
ids='ga:' + self.profile_id,
start_date=start_date,
- metrics='ga:uniquePageviews',
- sort='-ga:uniquePageviews',
+ metrics='ga:pageviews',
+ sort='-ga:pageviews',
dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
max_results=10000,
end_date=end_date).execute()
@@ -285,10 +396,23 @@
data = {}
for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2])
+ self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Mobile brands", data)
data = {}
for result in result_data:
data[result[1]] = data.get(result[1], 0) + int(result[2])
+ self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Mobile devices", data)
+ @classmethod
+ def _filter_out_long_tail(cls, data, threshold=10):
+ '''
+        Given data which is a frequency distribution, filter out
+        results whose count is below the threshold. This helps protect
+        privacy.
+ '''
+ for key, value in data.items():
+ if value < threshold:
+ del data[key]
+
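
As a quick, hypothetical check of the two privacy filters added above (the inputs are made up; the expected results follow the logic of ``_filter_browser_version`` and ``_filter_out_long_tail``)::

    from ckanext.ga_report.download_analytics import DownloadAnalytics, MIN_VIEWS

    # Browser versions collapse to their major version; long Safari /
    # Android Browser build numbers are additionally masked after two digits.
    assert DownloadAnalytics._filter_browser_version('Firefox', '3.5.1') == '3'
    assert DownloadAnalytics._filter_browser_version('Safari', '534.55.3') == '53X'

    # Entries below the threshold are dropped from the distribution in place.
    data = {'Firefox': 500, 'ELinks': 2}
    DownloadAnalytics._filter_out_long_tail(data, MIN_VIEWS)
    assert data == {'Firefox': 500}
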
--- a/ckanext/ga_report/ga_auth.py
+++ b/ckanext/ga_report/ga_auth.py
@@ -53,7 +53,11 @@
return None
accountName = config.get('googleanalytics.account')
+ if not accountName:
+ raise Exception('googleanalytics.account needs to be configured')
webPropertyId = config.get('googleanalytics.id')
+ if not webPropertyId:
+ raise Exception('googleanalytics.id needs to be configured')
for acc in accounts.get('items'):
if acc.get('name') == accountName:
accountId = acc.get('id')
--- a/ckanext/ga_report/ga_model.py
+++ b/ckanext/ga_report/ga_model.py
@@ -1,10 +1,10 @@
import re
import uuid
-from sqlalchemy import Table, Column, MetaData
+from sqlalchemy import Table, Column, MetaData, ForeignKey
from sqlalchemy import types
from sqlalchemy.sql import select
-from sqlalchemy.orm import mapper
+from sqlalchemy.orm import mapper, relation
from sqlalchemy import func
import ckan.model as model
@@ -13,7 +13,7 @@
def make_uuid():
return unicode(uuid.uuid4())
-
+metadata = MetaData()
class GA_Url(object):
@@ -21,20 +21,6 @@
for k,v in kwargs.items():
setattr(self, k, v)
-class GA_Stat(object):
-
- def __init__(self, **kwargs):
- for k,v in kwargs.items():
- setattr(self, k, v)
-
-class GA_Publisher(object):
-
- def __init__(self, **kwargs):
- for k,v in kwargs.items():
- setattr(self, k, v)
-
-
-metadata = MetaData()
url_table = Table('ga_url', metadata,
Column('id', types.UnicodeText, primary_key=True,
default=make_uuid),
@@ -44,8 +30,16 @@
Column('visits', types.UnicodeText),
Column('url', types.UnicodeText),
Column('department_id', types.UnicodeText),
+ Column('package_id', types.UnicodeText),
)
mapper(GA_Url, url_table)
+
+
+class GA_Stat(object):
+
+ def __init__(self, **kwargs):
+ for k,v in kwargs.items():
+ setattr(self, k, v)
stat_table = Table('ga_stat', metadata,
Column('id', types.UnicodeText, primary_key=True,
@@ -57,6 +51,12 @@
mapper(GA_Stat, stat_table)
+class GA_Publisher(object):
+
+ def __init__(self, **kwargs):
+ for k,v in kwargs.items():
+ setattr(self, k, v)
+
pub_table = Table('ga_publisher', metadata,
Column('id', types.UnicodeText, primary_key=True,
default=make_uuid),
@@ -64,8 +64,29 @@
Column('publisher_name', types.UnicodeText),
Column('views', types.UnicodeText),
Column('visits', types.UnicodeText),
+ Column('toplevel', types.Boolean, default=False),
+ Column('subpublishercount', types.Integer, default=0),
+ Column('parent', types.UnicodeText),
)
mapper(GA_Publisher, pub_table)
+
+
+class GA_ReferralStat(object):
+
+ def __init__(self, **kwargs):
+ for k,v in kwargs.items():
+ setattr(self, k, v)
+
+referrer_table = Table('ga_referrer', metadata,
+ Column('id', types.UnicodeText, primary_key=True,
+ default=make_uuid),
+ Column('period_name', types.UnicodeText),
+ Column('source', types.UnicodeText),
+ Column('url', types.UnicodeText),
+ Column('count', types.Integer),
+ )
+mapper(GA_ReferralStat, referrer_table)
+
def init_tables():
@@ -90,11 +111,10 @@
>>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
'/dataset/weekly_fuel_prices'
'''
- url = re.sub('https?://(www\.)?data.gov.uk', '', url)
- return url
-
-
-def _get_department_id_of_url(url):
+ return '/' + '/'.join(url.split('/')[3:])
+
+
+def _get_package_and_publisher(url):
# e.g. /dataset/fuel_prices
# e.g. /dataset/fuel_prices/resource/e63380d4
dataset_match = re.match('/dataset/([^/]+)(/.*)?', url)
@@ -104,12 +124,13 @@
if dataset:
publisher_groups = dataset.get_groups('publisher')
if publisher_groups:
- return publisher_groups[0].name
+ return dataset_ref,publisher_groups[0].name
+ return dataset_ref, None
else:
publisher_match = re.match('/publisher/([^/]+)(/.*)?', url)
if publisher_match:
- return publisher_match.groups()[0]
-
+ return None, publisher_match.groups()[0]
+ return None, None
def update_sitewide_stats(period_name, stat_name, data):
for k,v in data.iteritems():
@@ -134,41 +155,117 @@
model.Session.commit()
+def pre_update_url_stats(period_name):
+ model.Session.query(GA_Url).\
+ filter(GA_Url.period_name==period_name).delete()
+ model.Session.query(GA_Url).\
+ filter(GA_Url.period_name=='All').delete()
+
def update_url_stats(period_name, period_complete_day, url_data):
+ '''
+ Given a list of urls and number of hits for each during a given period,
+ stores them in GA_Url under the period and recalculates the totals for
+ the 'All' period.
+ '''
for url, views, visits in url_data:
- url = _normalize_url(url)
- department_id = _get_department_id_of_url(url)
-
- # see if the row for this url & month is in the table already
+ package, publisher = _get_package_and_publisher(url)
+
+
item = model.Session.query(GA_Url).\
filter(GA_Url.period_name==period_name).\
filter(GA_Url.url==url).first()
if item:
- item.period_name = period_name
- item.pageviews = views
- item.visits = visits
- item.department_id = department_id
+ item.pageviews = item.pageviews + views
+ item.visits = item.visits + visits
+ if not item.package_id:
+ item.package_id = package
+ if not item.department_id:
+ item.department_id = publisher
model.Session.add(item)
else:
- # create the row
values = {'id': make_uuid(),
'period_name': period_name,
'period_complete_day': period_complete_day,
'url': url,
'pageviews': views,
'visits': visits,
- 'department_id': department_id
+ 'department_id': publisher,
+ 'package_id': package
}
model.Session.add(GA_Url(**values))
model.Session.commit()
-
+ if package:
+ old_pageviews, old_visits = 0, 0
+ old = model.Session.query(GA_Url).\
+ filter(GA_Url.period_name=='All').\
+ filter(GA_Url.url==url).all()
+ old_pageviews = sum([int(o.pageviews) for o in old])
+ old_visits = sum([int(o.visits) for o in old])
+
+ entries = model.Session.query(GA_Url).\
+ filter(GA_Url.period_name!='All').\
+ filter(GA_Url.url==url).all()
+ values = {'id': make_uuid(),
+ 'period_name': 'All',
+ 'period_complete_day': 0,
+ 'url': url,
+ 'pageviews': sum([int(e.pageviews) for e in entries]) + old_pageviews,
+ 'visits': sum([int(e.visits) for e in entries]) + old_visits,
+ 'department_id': publisher,
+ 'package_id': package
+ }
+
+ model.Session.add(GA_Url(**values))
+ model.Session.commit()
+
+
+
+
+def update_social(period_name, data):
+ # Clean up first.
+ model.Session.query(GA_ReferralStat).\
+ filter(GA_ReferralStat.period_name==period_name).delete()
+
+ for url,data in data.iteritems():
+ for entry in data:
+ source = entry[0]
+ count = entry[1]
+
+ item = model.Session.query(GA_ReferralStat).\
+ filter(GA_ReferralStat.period_name==period_name).\
+ filter(GA_ReferralStat.source==source).\
+ filter(GA_ReferralStat.url==url).first()
+ if item:
+ item.count = item.count + count
+ model.Session.add(item)
+ else:
+ # create the row
+ values = {'id': make_uuid(),
+ 'period_name': period_name,
+ 'source': source,
+ 'url': url,
+ 'count': count,
+ }
+ model.Session.add(GA_ReferralStat(**values))
+ model.Session.commit()
def update_publisher_stats(period_name):
- publishers = get_top_level()
+ """
+ Updates the publisher stats from the data retrieved for /dataset/*
+    and /publisher/*. Runs against each publisher and generates the
+    totals for the entire tree beneath it.
+ """
+ toplevel = get_top_level()
+ publishers = model.Session.query(model.Group).\
+ filter(model.Group.type=='publisher').\
+ filter(model.Group.state=='active').all()
for publisher in publishers:
- views, visits = update_publisher(period_name, publisher, publisher.name)
+ views, visits, subpub = update_publisher(period_name, publisher, publisher.name)
+ parent, parents = '', publisher.get_groups('publisher')
+ if parents:
+ parent = parents[0].name
item = model.Session.query(GA_Publisher).\
filter(GA_Publisher.period_name==period_name).\
filter(GA_Publisher.publisher_name==publisher.name).first()
@@ -176,6 +273,9 @@
item.views = views
item.visits = visits
item.publisher_name = publisher.name
+ item.toplevel = publisher in toplevel
+ item.subpublishercount = subpub
+ item.parent = parent
model.Session.add(item)
else:
# create the row
@@ -184,22 +284,26 @@
'publisher_name': publisher.name,
'views': views,
'visits': visits,
+ 'toplevel': publisher in toplevel,
+ 'subpublishercount': subpub,
+ 'parent': parent
}
model.Session.add(GA_Publisher(**values))
model.Session.commit()
def update_publisher(period_name, pub, part=''):
- views,visits = 0, 0
+ views,visits,subpub = 0, 0, 0
for publisher in go_down_tree(pub):
- f = model.Session.query(GA_Url).\
+ subpub = subpub + 1
+ items = model.Session.query(GA_Url).\
filter(GA_Url.period_name==period_name).\
- filter(GA_Url.url=='/publisher/' + publisher.name).first()
- if f:
- views = views + int(f.pageviews)
- visits = visits + int(f.visits)
-
- return views, visits
+ filter(GA_Url.department_id==publisher.name).all()
+ for item in items:
+ views = views + int(item.pageviews)
+ visits = visits + int(item.visits)
+
+ return views, visits, (subpub-1)
def get_top_level():
@@ -227,3 +331,15 @@
for grandchild in go_down_tree(child):
yield grandchild
+def delete(period_name):
+ '''
+    Deletes table data for the specified period; pass 'all' to delete
+    data for every period.
+ '''
+ for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat):
+ q = model.Session.query(object_type)
+ if period_name != 'all':
+ q = q.filter_by(period_name=period_name)
+ q.delete()
+ model.Session.commit()
+
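
The intended call order for one monthly load, as driven by ``download_and_store`` in download_analytics.py (assumes a configured CKAN environment; the period, day count and URL data below are illustrative)::

    from ckanext.ga_report import ga_model

    period = '2012-10'
    ga_model.pre_update_url_stats(period)    # clear this month's rows and the 'All' rollup
    ga_model.update_url_stats(period, 31,    # period_complete_day, then (url, views, visits) tuples
                              [('/dataset/example-dataset', 120, 80)])
    ga_model.update_publisher_stats(period)  # re-aggregate totals down the publisher tree
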
--- /dev/null
+++ b/ckanext/ga_report/helpers.py
@@ -1,1 +1,116 @@
+import logging
+import operator
+import ckan.lib.base as base
+import ckan.model as model
+from ckan.logic import get_action
+
+from ckanext.ga_report.ga_model import GA_Url, GA_Publisher
+from ckanext.ga_report.controller import _get_publishers
+_log = logging.getLogger(__name__)
+
+def popular_datasets(count=10):
+ import random
+
+ publisher = None
+    publishers = _get_publishers()
+ total = len(publishers)
+ while not publisher or not datasets:
+ rand = random.randrange(0, total)
+ publisher = publishers[rand][0]
+ if not publisher.state == 'active':
+ publisher = None
+ continue
+ datasets = _datasets_for_publisher(publisher, 10)[:count]
+
+ ctx = {
+ 'datasets': datasets,
+ 'publisher': publisher
+ }
+ return base.render_snippet('ga_report/ga_popular_datasets.html', **ctx)
+
+def single_popular_dataset(top=20):
+ '''Returns a random dataset from the most popular ones.
+
+ :param top: the number of top datasets to select from
+ '''
+ import random
+
+ top_datasets = model.Session.query(GA_Url).\
+ filter(GA_Url.url.like('/dataset/%')).\
+ order_by('ga_url.pageviews::int desc')
+ num_top_datasets = top_datasets.count()
+
+ dataset = None
+ if num_top_datasets:
+ count = 0
+ while not dataset:
+ rand = random.randrange(0, min(top, num_top_datasets))
+ ga_url = top_datasets[rand]
+ dataset = model.Package.get(ga_url.url[len('/dataset/'):])
+ if dataset and not dataset.state == 'active':
+ dataset = None
+ count += 1
+ if count > 10:
+ break
+ if not dataset:
+ # fallback
+ dataset = model.Session.query(model.Package)\
+ .filter_by(state='active').first()
+ if not dataset:
+ return None
+ dataset_dict = get_action('package_show')({'model': model,
+ 'session': model.Session,
+ 'validate': False},
+ {'id':dataset.id})
+ return dataset_dict
+
+def single_popular_dataset_html(top=20):
+ dataset_dict = single_popular_dataset(top)
+    groups = dataset_dict.get('groups', [])
+ publishers = [ g for g in groups if g.get('type') == 'publisher' ]
+ publisher = publishers[0] if publishers else {'name':'', 'title': ''}
+ context = {
+ 'dataset': dataset_dict,
+        'publisher': publisher
+ }
+ return base.render_snippet('ga_report/ga_popular_single.html', **context)
+
+
+def most_popular_datasets(publisher, count=20):
+
+ if not publisher:
+ _log.error("No valid publisher passed to 'most_popular_datasets'")
+ return ""
+
+ results = _datasets_for_publisher(publisher, count)
+
+ ctx = {
+ 'dataset_count': len(results),
+ 'datasets': results,
+
+ 'publisher': publisher
+ }
+
+ return base.render_snippet('ga_report/publisher/popular.html', **ctx)
+
+def _datasets_for_publisher(publisher, count):
+ datasets = {}
+ entries = model.Session.query(GA_Url).\
+ filter(GA_Url.department_id==publisher.name).\
+ filter(GA_Url.url.like('/dataset/%')).\
+ order_by('ga_url.pageviews::int desc').all()
+ for entry in entries:
+ if len(datasets) < count:
+ p = model.Package.get(entry.url[len('/dataset/'):])
+ if not p in datasets:
+ datasets[p] = {'views':0, 'visits': 0}
+ datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews)
+ datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits)
+
+ results = []
+ for k, v in datasets.iteritems():
+ results.append((k,v['views'],v['visits']))
+
+ return sorted(results, key=operator.itemgetter(1), reverse=True)
+
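
These helpers become available to templates once the plugin below registers them via ``ITemplateHelpers``; illustrative usage, in the same ``${...}`` Genshi syntax as the templates in this commit::

    ${h.most_popular_datasets(c.publisher)}
    ${h.popular_datasets(count=10)}
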
--- a/ckanext/ga_report/plugin.py
+++ b/ckanext/ga_report/plugin.py
@@ -1,25 +1,73 @@
import logging
import ckan.lib.helpers as h
+import ckan.plugins as p
from ckan.plugins import implements, toolkit
-import gasnippet
-import commands
-import dbutil
+
+from ckanext.ga_report.helpers import (most_popular_datasets,
+ popular_datasets,
+ single_popular_dataset)
log = logging.getLogger('ckanext.ga-report')
-class GoogleAnalyticsPlugin(p.SingletonPlugin):
+class GAReportPlugin(p.SingletonPlugin):
implements(p.IConfigurer, inherit=True)
implements(p.IRoutes, inherit=True)
+ implements(p.ITemplateHelpers, inherit=True)
def update_config(self, config):
toolkit.add_template_directory(config, 'templates')
toolkit.add_public_directory(config, 'public')
+ def get_helpers(self):
+ """
+ A dictionary of extra helpers that will be available to provide
+ ga report info to templates.
+ """
+ return {
+ 'ga_report_installed': lambda: True,
+ 'popular_datasets': popular_datasets,
+ 'most_popular_datasets': most_popular_datasets,
+ 'single_popular_dataset': single_popular_dataset
+ }
+
def after_map(self, map):
+ # GaReport
map.connect(
- '/data/analytics/index',
- controller='ckanext.ga-report.controller:GaReport',
+ '/data/site-usage',
+ controller='ckanext.ga_report.controller:GaReport',
action='index'
+ )
+ map.connect(
+ '/data/site-usage/data_{month}.csv',
+ controller='ckanext.ga_report.controller:GaReport',
+ action='csv'
+ )
+
+ # GaDatasetReport
+ map.connect(
+ '/data/site-usage/publisher',
+ controller='ckanext.ga_report.controller:GaDatasetReport',
+ action='publishers'
+ )
+ map.connect(
+ '/data/site-usage/publishers_{month}.csv',
+ controller='ckanext.ga_report.controller:GaDatasetReport',
+ action='publisher_csv'
+ )
+ map.connect(
+ '/data/site-usage/dataset/datasets_{id}_{month}.csv',
+ controller='ckanext.ga_report.controller:GaDatasetReport',
+ action='dataset_csv'
+ )
+ map.connect(
+ '/data/site-usage/dataset',
+ controller='ckanext.ga_report.controller:GaDatasetReport',
+ action='read'
+ )
+ map.connect(
+ '/data/site-usage/dataset/{id}',
+ controller='ckanext.ga_report.controller:GaDatasetReport',
+ action='read_publisher'
)
return map
--- a/ckanext/ga_report/report_model.py
+++ /dev/null
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/ga_popular_datasets.html
@@ -1,1 +1,27 @@
+[Genshi template markup lost in this extract. Recoverable content: a "Popular datasets"
+ heading, ${publisher.title}, and a list item per dataset made of
+ ${h.link_to(dataset.title, h.url_for(controller='package', action='read', id=dataset.name))}
+ and ${h.truncate(dataset.notes, length=80, whole_word=True)}.]
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/ga_popular_single.html
@@ -1,1 +1,31 @@
+[Genshi template markup lost in this extract. Recoverable content: a "Featured dataset"
+ heading and ${h.truncate(dataset['notes_rendered'], length=200, whole_word=True)}.]
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/ga_util.html
@@ -1,1 +1,57 @@
+[Genshi template markup lost in this extract. Recoverable content: two table snippets,
+ one with columns Name | Source | Visits and rows of ${name} | ${source} | ${count},
+ and one with columns Name | % ${title} and rows of ${name} | ${value}.]
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/notes.html
@@ -1,1 +1,16 @@
+    Notes
+
+      - "Views" is the number of times a page was loaded in users' browsers.
+      - "Visits" is the number of unique user visits to a page, counted once for each visitor for each of their browsing sessions.
+      - These usage statistics are confined to users with JavaScript enabled, which excludes web crawlers and API calls.
+      - The results are not shown when the number of views/visits is tiny. Where these relate to site pages, results are available in full in the CSV download. Where these relate to users' web browser information, results are not disclosed, for privacy reasons.
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/publisher/index.html
@@ -1,1 +1,73 @@
+[Genshi template markup lost in this extract. Recoverable content: a "Usage by Publisher"
+ page under the "Site Usage" heading with ${usage_nav('Publishers')}, a "Download as CSV"
+ link, and a table of Publisher | Dataset Views rows built from
+ ${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name))}
+ and ${views}.]
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/publisher/popular.html
@@ -1,1 +1,25 @@
+[Genshi template markup lost in this extract. Recoverable content: the message
+ "We do not currently have usage data for ${publisher.title}".]