# Last fix for missing sidebar on index (global) page

import datetime
import logging
import os

from pylons import config

from ckan.lib.cli import CkanCommand
# No other CKAN imports allowed until _load_config is run,
# or logging is disabled
   
   
class InitDB(CkanCommand):
    """Initialise the extension's database tables
    """
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = 0
    min_args = 0

    def command(self):
        """Create the ga_model tables in the configured CKAN database."""
        self._load_config()

        # Imported here because CKAN imports are only safe after _load_config.
        import ckan.model as model
        model.Session.remove()
        model.Session.configure(bind=model.meta.engine)
        log = logging.getLogger('ckanext.ga_report')

        import ga_model
        ga_model.init_tables()
        log.info("DB tables are setup")
   
   
class GetAuthToken(CkanCommand):
    """ Gets the Google auth token

    Usage: paster getauthtoken <credentials_file>

    Where <credentials_file> is the file name containing the details
    for the service (obtained from https://code.google.com/apis/console).
    By default this is set to credentials.json
    """
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = 0
    min_args = 0

    def command(self):
        """
        In this case we don't want a valid service, but rather just to
        force the user through the auth flow. We allow this to complete to
        act as a form of verification instead of just getting the token and
        assuming it is correct.
        """
        from ga_auth import init_service
        # Writes the obtained token to token.dat; credentials file defaults
        # to credentials.json when no argument is given.
        init_service('token.dat',
                     self.args[0] if self.args
                     else 'credentials.json')
   
class FixTimePeriods(CkanCommand):
    """
    Fixes the 'All' records for GA_Urls

    It is possible that older urls that haven't recently been visited
    do not have All records. This command will traverse through those
    records and generate valid All records for them.
    """
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = 0
    min_args = 0

    def __init__(self, name):
        super(FixTimePeriods, self).__init__(name)

    def command(self):
        """Regenerate the aggregate 'All' period rows for every URL."""
        import ckan.model as model
        from ga_model import post_update_url_stats
        self._load_config()
        model.Session.remove()
        model.Session.configure(bind=model.meta.engine)

        log = logging.getLogger('ckanext.ga_report')

        log.info("Updating 'All' records for old URLs")
        post_update_url_stats()
        log.info("Processing complete")
   
   
   
class LoadAnalytics(CkanCommand): class LoadAnalytics(CkanCommand):
"""Get data from Google Analytics API and save it """Get data from Google Analytics API and save it
in the ga_model in the ga_model
   
Usage: paster loadanalytics <time-period> Usage: paster loadanalytics <time-period>
   
Where <time-period> is: Where <time-period> is:
all - data for all time all - data for all time
latest - (default) just the 'latest' data latest - (default) just the 'latest' data
YYYY-MM - just data for the specific month YYYY-MM - just data for the specific month
""" """
summary = __doc__.split('\n')[0] summary = __doc__.split('\n')[0]
usage = __doc__ usage = __doc__
max_args = 1 max_args = 1
min_args = 0 min_args = 0
   
def __init__(self, name): def __init__(self, name):
super(LoadAnalytics, self).__init__(name) super(LoadAnalytics, self).__init__(name)
self.parser.add_option('-d', '--delete-first', self.parser.add_option('-d', '--delete-first',
action='store_true', action='store_true',
default=False, default=False,
dest='delete_first', dest='delete_first',
help='Delete data for the period first') help='Delete data for the period first')
self.parser.add_option('-s', '--skip_url_stats', self.parser.add_option('-s', '--skip_url_stats',
action='store_true', action='store_true',
default=False, default=False,
dest='skip_url_stats', dest='skip_url_stats',
help='Skip the download of URL data - just do site-wide stats') help='Skip the download of URL data - just do site-wide stats')
  self.token = ""
   
def command(self): def command(self):
self._load_config() self._load_config()
   
from download_analytics import DownloadAnalytics from download_analytics import DownloadAnalytics
from ga_auth import (init_service, get_profile_id) from ga_auth import (init_service, get_profile_id)
   
ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', '')) ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', ''))
if not ga_token_filepath: if not ga_token_filepath:
print 'ERROR: In the CKAN config you need to specify the filepath of the ' \ print 'ERROR: In the CKAN config you need to specify the filepath of the ' \
'Google Analytics token file under key: googleanalytics.token.filepath' 'Google Analytics token file under key: googleanalytics.token.filepath'
return return
   
try: try:
svc = init_service(ga_token_filepath, None) self.token, svc = init_service(ga_token_filepath, None)
except TypeError: except TypeError:
print ('Have you correctly run the getauthtoken task and ' print ('Have you correctly run the getauthtoken task and '
'specified the correct token file in the CKAN config under ' 'specified the correct token file in the CKAN config under '
'"googleanalytics.token.filepath"?') '"googleanalytics.token.filepath"?')
return return
   
downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc), downloader = DownloadAnalytics(svc, self.token, profile_id=get_profile_id(svc),
delete_first=self.options.delete_first, delete_first=self.options.delete_first,
skip_url_stats=self.options.skip_url_stats) skip_url_stats=self.options.skip_url_stats)
   
time_period = self.args[0] if self.args else 'latest' time_period = self.args[0] if self.args else 'latest'
if time_period == 'all': if time_period == 'all':
downloader.all_() downloader.all_()
elif time_period == 'latest': elif time_period == 'latest':
downloader.latest() downloader.latest()
else: else:
# The month to use # The month to use
for_date = datetime.datetime.strptime(time_period, '%Y-%m') for_date = datetime.datetime.strptime(time_period, '%Y-%m')
downloader.specific_month(for_date) downloader.specific_month(for_date)
   
import re
import csv
import sys
import json
import logging
import operator
import collections

from ckan.lib.base import (BaseController, c, g, render, request, response, abort)

import sqlalchemy
from sqlalchemy import func, cast, Integer
import ckan.model as model
from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher

log = logging.getLogger('ckanext.ga-report')

# Resource download stats only exist in GA data from this period onwards.
DOWNLOADS_AVAILABLE_FROM = '2012-12'
   
def _get_month_name(strdate): def _get_month_name(strdate):
import calendar import calendar
from time import strptime from time import strptime
d = strptime(strdate, '%Y-%m') d = strptime(strdate, '%Y-%m')
return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year) return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year)
   
def _get_unix_epoch(strdate): def _get_unix_epoch(strdate):
from time import strptime,mktime from time import strptime,mktime
d = strptime(strdate, '%Y-%m') d = strptime(strdate, '%Y-%m')
return int(mktime(d)) return int(mktime(d))
   
def _month_details(cls, stat_key=None):
    '''
    Returns a list of all the periods for which we have data, unfortunately
    knows too much about the type of the cls being passed as GA_Url has a
    more complex query

    This may need extending if we add a period_name to the stats

    :param cls: a ga_model mapped class with period_name and
        period_complete_day columns (GA_Url or GA_Stat)
    :param stat_key: optional stat_name filter (GA_Stat only)
    :returns: ([(period_name, month_display_name), ...] newest first,
               ordinal day string like '14th' or None)
    '''
    months = []
    day = None

    q = model.Session.query(cls.period_name, cls.period_complete_day)\
        .filter(cls.period_name != 'All').distinct(cls.period_name)
    if stat_key:
        q = q.filter(cls.stat_name == stat_key)

    vals = q.order_by("period_name desc").all()

    if vals and vals[0][1]:
        # Most recent period first: turn its completed-day count into an
        # ordinal ('1st', '2nd', ... '11th'-'13th' are special-cased).
        day = int(vals[0][1])
        ordinal = 'th' if 11 <= day <= 13 \
            else {1: 'st', 2: 'nd', 3: 'rd'}.get(day % 10, 'th')
        day = "{day}{ordinal}".format(day=day, ordinal=ordinal)

    for m in vals:
        months.append((m[0], _get_month_name(m[0])))

    return months, day
   
   
class GaReport(BaseController):
    """Site-wide Google Analytics report pages (totals, browsers, OS,
    languages, social sources, countries)."""

    def csv(self, month):
        """Stream all non-Download stats for `month` ('all' for every
        period) as a CSV attachment."""
        import csv

        q = model.Session.query(GA_Stat).filter(GA_Stat.stat_name != 'Downloads')
        if month != 'all':
            q = q.filter(GA_Stat.period_name == month)
        entries = q.order_by('GA_Stat.period_name, GA_Stat.stat_name, GA_Stat.key').all()

        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = str('attachment; filename=stats_%s.csv' % (month,))

        writer = csv.writer(response)
        writer.writerow(["Period", "Statistic", "Key", "Value"])

        for entry in entries:
            writer.writerow([entry.period_name.encode('utf-8'),
                             entry.stat_name.encode('utf-8'),
                             entry.key.encode('utf-8'),
                             entry.value.encode('utf-8')])

    def index(self):
        """Render the site-wide stats page, optionally filtered by the
        'month' query parameter."""

        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Stat)

        # Work out which month to show, based on query params of the first item
        c.month_desc = 'all months'
        c.month = request.params.get('month', '')
        if c.month:
            c.month_desc = ''.join([m[1] for m in c.months if m[0] == c.month])

        q = model.Session.query(GA_Stat).\
            filter(GA_Stat.stat_name == 'Totals')
        if c.month:
            q = q.filter(GA_Stat.period_name == c.month)
        entries = q.order_by('ga_stat.key').all()

        def clean_key(key, val):
            # Format a raw stat value for display depending on its key.
            if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']:
                val = "%.2f" % round(float(val), 2)
                if key == 'Average time on site':
                    mins, secs = divmod(float(val), 60)
                    hours, mins = divmod(mins, 60)
                    val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
                if key in ['New visits', 'Bounce rate (home page)']:
                    val = "%s%%" % val
            if key in ['Total page views', 'Total visits']:
                val = int(val)

            return key, val

        # Query historic values for sparkline rendering
        sparkline_query = model.Session.query(GA_Stat)\
            .filter(GA_Stat.stat_name == 'Totals')\
            .order_by(GA_Stat.period_name)
        sparkline_data = {}
        for x in sparkline_query:
            sparkline_data[x.key] = sparkline_data.get(x.key, [])
            key, val = clean_key(x.key, float(x.value))
            tooltip = '%s: %s' % (_get_month_name(x.period_name), val)
            sparkline_data[x.key].append((tooltip, x.value))
        # Trim the latest month, as it looks like a huge dropoff
        for key in sparkline_data:
            sparkline_data[key] = sparkline_data[key][:-1]

        c.global_totals = []
        if c.month:
            for e in entries:
                key, val = clean_key(e.key, e.value)
                sparkline = sparkline_data[e.key]
                c.global_totals.append((key, val, sparkline))
        else:
            # No month selected: aggregate across all periods (sum for
            # counts, mean for the rate/ratio stats).
            d = collections.defaultdict(list)
            for e in entries:
                d[e.key].append(float(e.value))
            for k, v in d.iteritems():
                if k in ['Total page views', 'Total visits']:
                    v = sum(v)
                else:
                    v = float(sum(v)) / float(len(v))
                sparkline = sparkline_data[k]
                key, val = clean_key(k, v)

                c.global_totals.append((key, val, sparkline))

        # Sort the global totals into a more pleasant order
        def sort_func(x):
            key = x[0]
            total_order = ['Total page views', 'Total visits', 'Pages per visit']
            if key in total_order:
                return total_order.index(key)
            return 999
        c.global_totals = sorted(c.global_totals, key=sort_func)

        # Maps a GA_Stat.stat_name to the template-context attribute name.
        keys = {
            'Browser versions': 'browser_versions',
            'Browsers': 'browsers',
            'Operating Systems versions': 'os_versions',
            'Operating Systems': 'os',
            'Social sources': 'social_networks',
            'Languages': 'languages',
            'Country': 'country'
        }

        def shorten_name(name, length=60):
            return (name[:length] + '..') if len(name) > 60 else name

        def fill_out_url(url):
            import urlparse
            return urlparse.urljoin(g.site_url, url)

        c.social_referrer_totals, c.social_referrers = [], []
        q = model.Session.query(GA_ReferralStat)
        q = q.filter(GA_ReferralStat.period_name == c.month) if c.month else q
        q = q.order_by('ga_referrer.count::int desc')
        for entry in q.all():
            c.social_referrers.append((shorten_name(entry.url), fill_out_url(entry.url),
                                       entry.source, entry.count))

        q = model.Session.query(GA_ReferralStat.url,
                                func.sum(GA_ReferralStat.count).label('count'))
        q = q.filter(GA_ReferralStat.period_name == c.month) if c.month else q
        q = q.order_by('count desc').group_by(GA_ReferralStat.url)
        for entry in q.all():
            c.social_referrer_totals.append((shorten_name(entry[0]), fill_out_url(entry[0]), '',
                                             entry[1]))

        for k, v in keys.iteritems():
            q = model.Session.query(GA_Stat).\
                filter(GA_Stat.stat_name == k).\
                order_by(GA_Stat.period_name)
            # Buffer the tabular data
            if c.month:
                entries = []
                q = q.filter(GA_Stat.period_name == c.month).\
                    order_by('ga_stat.value::int desc')
            d = collections.defaultdict(int)
            for e in q.all():
                d[e.key] += int(e.value)
            entries = []
            for key, val in d.iteritems():
                entries.append((key, val,))
            entries = sorted(entries, key=operator.itemgetter(1), reverse=True)

            # Run a query on all months to gather graph data
            graph_query = model.Session.query(GA_Stat).\
                filter(GA_Stat.stat_name == k).\
                order_by(GA_Stat.period_name)
            graph_dict = {}
            for stat in graph_query:
                graph_dict[stat.key] = graph_dict.get(stat.key, {
                    'name': stat.key,
                    'raw': {}
                })
                graph_dict[stat.key]['raw'][stat.period_name] = float(stat.value)
            stats_in_table = [x[0] for x in entries]
            stats_not_in_table = set(graph_dict.keys()) - set(stats_in_table)
            stats = stats_in_table + sorted(list(stats_not_in_table))
            graph = [graph_dict[x] for x in stats]
            setattr(c, v + '_graph', json.dumps(_to_rickshaw(graph, percentageMode=True)))

            # Get the total for each set of values and then set the value as
            # a percentage of the total
            if k == 'Social sources':
                total = sum([x for n, x, graph in c.global_totals if n == 'Total visits'])
            else:
                total = sum([num for _, num in entries])
            setattr(c, v, [(k, _percent(v, total)) for k, v in entries])

        return render('ga_report/site/index.html')
   
   
class GaDatasetReport(BaseController):
    """
    Displays the pageview and visit count for datasets
    with options to filter by publisher and time period.
    """
    def publisher_csv(self, month):
        '''
        Returns a CSV of each publisher with the total number of dataset
        views & visits.
        '''
        c.month = month if not month == 'all' else ''
        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = str('attachment; filename=publishers_%s.csv' % (month,))

        writer = csv.writer(response)
        writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"])

        top_publishers = _get_top_publishers(limit=None)

        for publisher, view, visit in top_publishers:
            writer.writerow([publisher.title.encode('utf-8'),
                             publisher.name.encode('utf-8'),
                             view,
                             visit,
                             month])

    def dataset_csv(self, id='all', month='all'):
        '''
        Returns a CSV with the number of views & visits for each dataset.

        :param id: A Publisher ID or None if you want for all
        :param month: The time period, or 'all'
        '''
        c.month = month if not month == 'all' else ''
        if id != 'all':
            c.publisher = model.Group.get(id)
            if not c.publisher:
                abort(404, 'A publisher with that name could not be found')

        # NOTE(review): when id == 'all', c.publisher / c.publisher_name are
        # never set here — presumably the Pylons context returns an empty
        # value for unset attributes; confirm before relying on it.
        packages = self._get_packages(publisher=c.publisher, month=c.month)
        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = \
            str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,))

        writer = csv.writer(response)
        writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Resource downloads", "Period Name"])

        for package, view, visit, downloads in packages:
            writer.writerow([package.title.encode('utf-8'),
                             package.name.encode('utf-8'),
                             view,
                             visit,
                             downloads,
                             month])

    def publishers(self):
        '''A list of publishers and the number of views/visits for each'''

        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Url)

        # Work out which month to show, based on query params of the first item
        c.month = request.params.get('month', '')
        c.month_desc = 'all months'
        if c.month:
            c.month_desc = ''.join([m[1] for m in c.months if m[0] == c.month])

        c.top_publishers = _get_top_publishers()
        graph_data = _get_top_publishers_graph()
        c.top_publishers_graph = json.dumps(_to_rickshaw(graph_data))

        return render('ga_report/publisher/index.html')

    def _get_packages(self, publisher=None, month='', count=-1):
        '''Returns the datasets in order of views

        :param publisher: optional Group whose datasets to restrict to
        :param month: period name ('' means the aggregate 'All' period)
        :param count: max rows to return, or -1 for all
        :returns: list of (package, pageviews, visits, downloads) where
            downloads is an int or the string 'No data' for periods before
            DOWNLOADS_AVAILABLE_FROM
        '''
        have_download_data = True
        month = month or 'All'
        if month != 'All':
            have_download_data = month >= DOWNLOADS_AVAILABLE_FROM

        q = model.Session.query(GA_Url, model.Package)\
            .filter(model.Package.name == GA_Url.package_id)\
            .filter(GA_Url.url.like('/dataset/%'))
        if publisher:
            q = q.filter(GA_Url.department_id == publisher.name)
        q = q.filter(GA_Url.period_name == month)
        q = q.order_by('ga_url.pageviews::int desc')
        top_packages = []
        if count == -1:
            entries = q.all()
        else:
            entries = q.limit(count)

        for entry, package in entries:
            if package:
                # Downloads ....
                if have_download_data:
                    dls = model.Session.query(GA_Stat).\
                        filter(GA_Stat.stat_name == 'Downloads').\
                        filter(GA_Stat.key == package.name)
                    if month != 'All':  # Fetch everything unless the month is specific
                        dls = dls.filter(GA_Stat.period_name == month)
                    downloads = 0
                    for x in dls:
                        downloads += int(x.value)
                else:
                    downloads = 'No data'
                top_packages.append((package, entry.pageviews, entry.visits, downloads))
            else:
                log.warning('Could not find package associated package')

        return top_packages

    def read(self):
        '''
        Lists the most popular datasets across all publishers
        '''
        return self.read_publisher(None)

    def read_publisher(self, id):
        '''
        Lists the most popular datasets for a publisher (or across all publishers)
        '''
        count = 20

        c.publishers = _get_publishers()

        id = request.params.get('publisher', id)
        if id and id != 'all':
            c.publisher = model.Group.get(id)
            if not c.publisher:
                abort(404, 'A publisher with that name could not be found')
            c.publisher_name = c.publisher.name
        c.top_packages = []  # package, dataset_views in c.top_packages

        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Url)

        # Work out which month to show, based on query params of the first item
        c.month = request.params.get('month', '')
        if not c.month:
            c.month_desc = 'all months'
        else:
            c.month_desc = ''.join([m[1] for m in c.months if m[0] == c.month])

        c.publisher_page_views = 0
        q = model.Session.query(GA_Url).\
            filter(GA_Url.url == '/publisher/%s' % c.publisher_name)
        entry = q.filter(GA_Url.period_name == c.month).first()
        c.publisher_page_views = entry.pageviews if entry else 0

        c.top_packages = self._get_packages(publisher=c.publisher, count=count, month=c.month)

        # Graph query
        top_packages_all_time = self._get_packages(publisher=c.publisher, count=count, month='All')
        top_package_names = [x[0].name for x in top_packages_all_time]
        graph_query = model.Session.query(GA_Url, model.Package)\
            .filter(model.Package.name == GA_Url.package_id)\
            .filter(GA_Url.url.like('/dataset/%'))\
            .filter(GA_Url.package_id.in_(top_package_names))
        all_series = {}
        for entry, package in graph_query:
            if not package: continue
            if entry.period_name == 'All': continue
            all_series[package.name] = all_series.get(package.name, {
                'name': package.title,
                'raw': {}
            })
            all_series[package.name]['raw'][entry.period_name] = int(entry.pageviews)
        graph = [all_series[series_name] for series_name in top_package_names]
        c.graph_data = json.dumps(_to_rickshaw(graph))

        return render('ga_report/publisher/read.html')
   
def _to_rickshaw(data, percentageMode=False):
    '''Restructure chart data for the Rickshaw graphing library.

    `data` is a list of series dicts of the form
    {'name': ..., 'raw': {period_name: value}}.  Each series gains a
    'data' list of {'x': unix_epoch, 'y': value} points covering every
    month in c.months (oldest first, with the latest month dropped).
    With percentageMode=True each y becomes a percentage of that month's
    total, and series that never exceed THRESHOLD percent are rolled
    into a single catch-all 'Other' series.
    '''
    if data==[]:
        return data
    # x-axis is every month in c.months. Note that data might not exist
    # for entire history, eg. for recently-added datasets
    x_axis = [x[0] for x in c.months]
    x_axis.reverse() # Ascending order
    x_axis = x_axis[:-1] # Remove latest month
    totals = {}
    for series in data:
        series['data'] = []
        for x_string in x_axis:
            x = _get_unix_epoch( x_string )
            y = series['raw'].get(x_string,0)
            series['data'].append({'x':x,'y':y})
            totals[x] = totals.get(x,0)+y
    if not percentageMode:
        return data
    # Turn all data into percentages
    # Roll insignificant series into a catch-all
    THRESHOLD = 1
    raw_data = data
    data = []
    for series in raw_data:
        for point in series['data']:
            # NOTE(review): assumes totals[point['x']] is never zero for a
            # charted month -- TODO confirm no divide-by-zero is possible.
            percentage = (100*float(point['y'])) / totals[point['x']]
            if not (series in data) and percentage>THRESHOLD:
                data.append(series)
            point['y'] = percentage
    # Sum the remaining (insignificant) series into one 'Other' series.
    others = [ x for x in raw_data if not (x in data) ]
    if len(others):
        data_other = []
        for i in range(len(x_axis)):
            x = _get_unix_epoch(x_axis[i])
            y = 0
            for series in others:
                y += series['data'][i]['y']
            data_other.append({'x':x,'y':y})
        data.append({
            'name':'Other',
            'data': data_other
        })
    return data
   
   
def _get_top_publishers(limit=20):
    '''
    Returns a list of the top publishers by dataset visits, as tuples of
    (Group, pageviews, visits).

    The number of publishers returned can be varied with 'limit' (a falsy
    limit means no limit).  The month reported on is taken from c.month,
    defaulting to 'All'.
    '''
    month = c.month or 'All'
    connection = model.Session.connection()
    q = """
        select department_id, sum(pageviews::int) views, sum(visits::int) visits
        from ga_url
        where department_id <> ''
          and package_id <> ''
          and url like '/dataset/%%'
          and period_name=%s
        group by department_id order by views desc
        """
    if limit:
        # Coerce to int so a bad 'limit' value can never be interpolated
        # into the SQL as anything other than a number.
        q = q + " limit %d;" % int(limit)

    top_publishers = []
    res = connection.execute(q, month)
    for row in res:
        g = model.Group.get(row[0])
        # Skip department ids that no longer match a Group.
        if g:
            top_publishers.append((g, row[1], row[2]))
    return top_publishers
   
   
def _get_top_publishers_graph(limit=20):
    '''
    Returns graph data for the top publishers by all-time dataset visits,
    as a list of {'name': group_title, 'raw': {period_name: views}} dicts
    suitable for _to_rickshaw().

    The number of publishers returned can be varied with 'limit' (a falsy
    limit means no limit).
    '''
    connection = model.Session.connection()
    q = """
        select department_id, sum(pageviews::int) views
        from ga_url
        where department_id <> ''
          and package_id <> ''
          and url like '/dataset/%%'
          and period_name='All'
        group by department_id order by views desc
        """
    if limit:
        # Coerce to int so a bad 'limit' value can never be interpolated
        # into the SQL as anything other than a number.
        q = q + " limit %d;" % int(limit)

    res = connection.execute(q)
    department_ids = [row[0] for row in res]

    # Query for a history graph of these department ids
    q = model.Session.query(
            GA_Url.department_id,
            GA_Url.period_name,
            func.sum(cast(GA_Url.pageviews, sqlalchemy.types.INT)))\
        .filter(GA_Url.department_id.in_(department_ids))\
        .filter(GA_Url.url.like('/dataset/%'))\
        .filter(GA_Url.package_id != '')\
        .group_by(GA_Url.department_id, GA_Url.period_name)
    graph_dict = {}
    for dept_id, period_name, views in q:
        # Guard against department ids with no matching Group (mirrors the
        # 'if g:' check in _get_top_publishers) instead of crashing on
        # Group.get() returning None.
        group = model.Group.get(dept_id)
        if not group:
            continue
        graph_dict[dept_id] = graph_dict.get(dept_id, {
            'name': group.title,
            'raw': {}
            })
        graph_dict[dept_id]['raw'][period_name] = views
    return [graph_dict[id] for id in department_ids if id in graph_dict]
   
   
def _get_publishers():
    '''Return every active publisher as a (name, title) tuple, ordered by name.'''
    query = (model.Session.query(model.Group)
             .filter(model.Group.type == 'organization')
             .filter(model.Group.state == 'active')
             .order_by(model.Group.name))
    return [(group.name, group.title) for group in query]
   
def _percent(num, total):
    '''Format num as a percentage of total, e.g. _percent(1, 2) -> "50.00%".'''
    numerator = 100 * float(num)
    return "%.2f%%" % round(numerator / float(total), 2)
   
import os import os
import logging import logging
import datetime import datetime
  import httplib
import collections import collections
  import requests
  import json
from pylons import config from pylons import config
from ga_model import _normalize_url from ga_model import _normalize_url
import ga_model import ga_model
   
#from ga_client import GA #from ga_client import GA
   
log = logging.getLogger('ckanext.ga-report') log = logging.getLogger('ckanext.ga-report')
   
FORMAT_MONTH = '%Y-%m' FORMAT_MONTH = '%Y-%m'
MIN_VIEWS = 50 MIN_VIEWS = 50
MIN_VISITS = 20 MIN_VISITS = 20
MIN_DOWNLOADS = 10 MIN_DOWNLOADS = 10
   
class DownloadAnalytics(object): class DownloadAnalytics(object):
'''Downloads and stores analytics info''' '''Downloads and stores analytics info'''
   
def __init__(self, service=None, profile_id=None, delete_first=False, def __init__(self, service=None, token=None, profile_id=None, delete_first=False,
skip_url_stats=False): skip_url_stats=False):
self.period = config['ga-report.period'] self.period = config['ga-report.period']
self.service = service self.service = service
self.profile_id = profile_id self.profile_id = profile_id
self.delete_first = delete_first self.delete_first = delete_first
self.skip_url_stats = skip_url_stats self.skip_url_stats = skip_url_stats
  self.token = token
   
def specific_month(self, date): def specific_month(self, date):
import calendar import calendar
   
first_of_this_month = datetime.datetime(date.year, date.month, 1) first_of_this_month = datetime.datetime(date.year, date.month, 1)
_, last_day_of_month = calendar.monthrange(int(date.year), int(date.month)) _, last_day_of_month = calendar.monthrange(int(date.year), int(date.month))
last_of_this_month = datetime.datetime(date.year, date.month, last_day_of_month) last_of_this_month = datetime.datetime(date.year, date.month, last_day_of_month)
# if this is the latest month, note that it is only up until today # if this is the latest month, note that it is only up until today
now = datetime.datetime.now() now = datetime.datetime.now()
if now.year == date.year and now.month == date.month: if now.year == date.year and now.month == date.month:
last_day_of_month = now.day last_day_of_month = now.day
last_of_this_month = now last_of_this_month = now
periods = ((date.strftime(FORMAT_MONTH), periods = ((date.strftime(FORMAT_MONTH),
last_day_of_month, last_day_of_month,
first_of_this_month, last_of_this_month),) first_of_this_month, last_of_this_month),)
self.download_and_store(periods) self.download_and_store(periods)
   
   
def latest(self): def latest(self):
if self.period == 'monthly': if self.period == 'monthly':
# from first of this month to today # from first of this month to today
now = datetime.datetime.now() now = datetime.datetime.now()
first_of_this_month = datetime.datetime(now.year, now.month, 1) first_of_this_month = datetime.datetime(now.year, now.month, 1)
periods = ((now.strftime(FORMAT_MONTH), periods = ((now.strftime(FORMAT_MONTH),
now.day, now.day,
first_of_this_month, now),) first_of_this_month, now),)
else: else:
raise NotImplementedError raise NotImplementedError
self.download_and_store(periods) self.download_and_store(periods)
   
   
def for_date(self, for_date): def for_date(self, for_date):
assert isinstance(since_date, datetime.datetime) assert isinstance(since_date, datetime.datetime)
periods = [] # (period_name, period_complete_day, start_date, end_date) periods = [] # (period_name, period_complete_day, start_date, end_date)
if self.period == 'monthly': if self.period == 'monthly':
first_of_the_months_until_now = [] first_of_the_months_until_now = []
year = for_date.year year = for_date.year
month = for_date.month month = for_date.month
now = datetime.datetime.now() now = datetime.datetime.now()
first_of_this_month = datetime.datetime(now.year, now.month, 1) first_of_this_month = datetime.datetime(now.year, now.month, 1)
while True: while True:
first_of_the_month = datetime.datetime(year, month, 1) first_of_the_month = datetime.datetime(year, month, 1)
if first_of_the_month == first_of_this_month: if first_of_the_month == first_of_this_month:
periods.append((now.strftime(FORMAT_MONTH), periods.append((now.strftime(FORMAT_MONTH),
now.day, now.day,
first_of_this_month, now)) first_of_this_month, now))
break break
elif first_of_the_month < first_of_this_month: elif first_of_the_month < first_of_this_month:
in_the_next_month = first_of_the_month + datetime.timedelta(40) in_the_next_month = first_of_the_month + datetime.timedelta(40)
last_of_the_month = datetime.datetime(in_the_next_month.year, last_of_the_month = datetime.datetime(in_the_next_month.year,
in_the_next_month.month, 1)\ in_the_next_month.month, 1)\
- datetime.timedelta(1) - datetime.timedelta(1)
periods.append((now.strftime(FORMAT_MONTH), 0, periods.append((now.strftime(FORMAT_MONTH), 0,
first_of_the_month, last_of_the_month)) first_of_the_month, last_of_the_month))
else: else:
# first_of_the_month has got to the future somehow # first_of_the_month has got to the future somehow
break break
month += 1 month += 1
if month > 12: if month > 12:
year += 1 year += 1
month = 1 month = 1
else: else:
raise NotImplementedError raise NotImplementedError
self.download_and_store(periods) self.download_and_store(periods)
   
@staticmethod @staticmethod
def get_full_period_name(period_name, period_complete_day): def get_full_period_name(period_name, period_complete_day):
if period_complete_day: if period_complete_day:
return period_name + ' (up to %ith)' % period_complete_day return period_name + ' (up to %ith)' % period_complete_day
else: else:
return period_name return period_name
   
   
    def download_and_store(self, periods):
        '''Run the full download pipeline for each period.

        `periods` is a sequence of (period_name, period_complete_day,
        start_date, end_date) tuples.  For each one: optionally delete the
        period's existing records, download and store dataset/publisher
        page stats (unless skip_url_stats), then site-wide stats and
        social-network referral stats.
        '''
        for period_name, period_complete_day, start_date, end_date in periods:
            log.info('Period "%s" (%s - %s)',
                     self.get_full_period_name(period_name, period_complete_day),
                     start_date.strftime('%Y-%m-%d'),
                     end_date.strftime('%Y-%m-%d'))

            if self.delete_first:
                log.info('Deleting existing Analytics for this period "%s"',
                         period_name)
                ga_model.delete(period_name)

            if not self.skip_url_stats:
                # Clean out old url data before storing the new
                ga_model.pre_update_url_stats(period_name)

                accountName = config.get('googleanalytics.account')

                log.info('Downloading analytics for dataset views')
                # '~' prefix makes the GA pagePath filter a regex match.
                data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)

                log.info('Storing dataset views (%i rows)', len(data.get('url')))
                self.store(period_name, period_complete_day, data, )

                log.info('Downloading analytics for publisher views')
                data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)

                log.info('Storing publisher views (%i rows)', len(data.get('url')))
                self.store(period_name, period_complete_day, data,)

                # Make sure the All records are correct.
                ga_model.post_update_url_stats()

                log.info('Associating datasets with their publisher')
                ga_model.update_publisher_stats(period_name) # about 30 seconds.


            log.info('Downloading and storing analytics for site-wide stats')
            self.sitewide_stats( period_name, period_complete_day )

            log.info('Downloading and storing analytics for social networks')
            self.update_social_info(period_name, start_date, end_date)
   
   
def update_social_info(self, period_name, start_date, end_date): def update_social_info(self, period_name, start_date, end_date):
start_date = start_date.strftime('%Y-%m-%d') start_date = start_date.strftime('%Y-%m-%d')
end_date = end_date.strftime('%Y-%m-%d') end_date = end_date.strftime('%Y-%m-%d')
query = 'ga:hasSocialSourceReferral=~Yes$' query = 'ga:hasSocialSourceReferral=~Yes$'
metrics = 'ga:entrances' metrics = 'ga:entrances'
sort = '-ga:entrances' sort = '-ga:entrances'
   
# Supported query params at try:
# https://developers.google.com/analytics/devguides/reporting/core/v3/reference # Because of issues of invalid responses, we are going to make these requests
results = self.service.data().ga().get( # ourselves.
ids='ga:' + self.profile_id, headers = {'authorization': 'Bearer ' + self.token}
filters=query,  
start_date=start_date, args = dict(ids='ga:' + self.profile_id,
metrics=metrics, filters=query,
sort=sort, metrics=metrics,
dimensions="ga:landingPagePath,ga:socialNetwork", sort=sort,
max_results=10000, dimensions="ga:landingPagePath,ga:socialNetwork",
end_date=end_date).execute() max_results=10000)
   
  args['start-date'] = start_date
  args['end-date'] = end_date
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
   
data = collections.defaultdict(list) data = collections.defaultdict(list)
rows = results.get('rows',[]) rows = results.get('rows',[])
for row in rows: for row in rows:
url = _normalize_url('http:/' + row[0]) url = _normalize_url('http:/' + row[0])
data[url].append( (row[1], int(row[2]),) ) data[url].append( (row[1], int(row[2]),) )
ga_model.update_social(period_name, data) ga_model.update_social(period_name, data)
   
   
def download(self, start_date, end_date, path=None): def download(self, start_date, end_date, path=None):
'''Get data from GA for a given time period''' '''Get data from GA for a given time period'''
start_date = start_date.strftime('%Y-%m-%d') start_date = start_date.strftime('%Y-%m-%d')
end_date = end_date.strftime('%Y-%m-%d') end_date = end_date.strftime('%Y-%m-%d')
query = 'ga:pagePath=%s$' % path query = 'ga:pagePath=%s$' % path
metrics = 'ga:pageviews, ga:visits' metrics = 'ga:pageviews, ga:visits'
sort = '-ga:pageviews' sort = '-ga:pageviews'
   
# Supported query params at # Supported query params at
# https://developers.google.com/analytics/devguides/reporting/core/v3/reference # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
results = self.service.data().ga().get( try:
ids='ga:' + self.profile_id, # Because of issues of invalid responses, we are going to make these requests
filters=query, # ourselves.
start_date=start_date, headers = {'authorization': 'Bearer ' + self.token}
metrics=metrics,  
sort=sort, args = {}
dimensions="ga:pagePath", args["sort"] = "-ga:pageviews"
max_results=10000, args["max-results"] = 100000
end_date=end_date).execute() args["dimensions"] = "ga:pagePath"
  args["start-date"] = start_date
  args["end-date"] = end_date
  args["metrics"] = metrics
  args["ids"] = "ga:" + self.profile_id
  args["filters"] = query
  args["alt"] = "json"
   
  r = requests.get("https://www.googleapis.com/analytics/v3/data/ga", params=args, headers=headers)
  if r.status_code != 200:
  raise Exception("Request with params: %s failed" % args)
   
  results = json.loads(r.content)
  print len(results.keys())
  except Exception, e:
  log.exception(e)
  #return dict(url=[])
  raise e
   
packages = [] packages = []
log.info("There are %d results" % results['totalResults']) log.info("There are %d results" % results['totalResults'])
for entry in results.get('rows'): for entry in results.get('rows'):
(loc,pageviews,visits) = entry (loc,pageviews,visits) = entry
url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk
   
if not url.startswith('/dataset/') and not url.startswith('/publisher/'): if not url.startswith('/dataset/') and not url.startswith('/publisher/'):
# filter out strays like: # filter out strays like:
# /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open
# /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate
continue continue
packages.append( (url, pageviews, visits,) ) # Temporary hack packages.append( (url, pageviews, visits,) ) # Temporary hack
return dict(url=packages) return dict(url=packages)
   
def store(self, period_name, period_complete_day, data): def store(self, period_name, period_complete_day, data):
if 'url' in data: if 'url' in data:
ga_model.update_url_stats(period_name, period_complete_day, data['url']) ga_model.update_url_stats(period_name, period_complete_day, data['url'])
   
def sitewide_stats(self, period_name, period_complete_day): def sitewide_stats(self, period_name, period_complete_day):
import calendar import calendar
year, month = period_name.split('-') year, month = period_name.split('-')
_, last_day_of_month = calendar.monthrange(int(year), int(month)) _, last_day_of_month = calendar.monthrange(int(year), int(month))
   
start_date = '%s-01' % period_name start_date = '%s-01' % period_name
end_date = '%s-%s' % (period_name, last_day_of_month) end_date = '%s-%s' % (period_name, last_day_of_month)
funcs = ['_totals_stats', '_social_stats', '_os_stats', funcs = ['_totals_stats', '_social_stats', '_os_stats',
'_locale_stats', '_browser_stats', '_mobile_stats', '_download_stats'] '_locale_stats', '_browser_stats', '_mobile_stats', '_download_stats']
for f in funcs: for f in funcs:
log.info('Downloading analytics for %s' % f.split('_')[1]) log.info('Downloading analytics for %s' % f.split('_')[1])
getattr(self, f)(start_date, end_date, period_name, period_complete_day) getattr(self, f)(start_date, end_date, period_name, period_complete_day)
   
def _get_results(result_data, f): def _get_results(result_data, f):
data = {} data = {}
for result in result_data: for result in result_data:
key = f(result) key = f(result)
data[key] = data.get(key,0) + result[1] data[key] = data.get(key,0) + result[1]
return data return data
   
  def _get_json(self, params, prev_fail=False):
  if prev_fail:
  import os
  ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', ''))
  if not ga_token_filepath:
  print 'ERROR: In the CKAN config you need to specify the filepath of the ' \
  'Google Analytics token file under key: googleanalytics.token.filepath'
  return
   
  try:
  self.token, svc = init_service(ga_token_filepath, None)
  except TypeError:
  print ('Have you correctly run the getauthtoken task and '
  'specified the correct token file in the CKAN config under '
  '"googleanalytics.token.filepath"?')
   
  try:
  # Because of issues of invalid responses, we are going to make these requests
  # ourselves.
  headers = {'authorization': 'Bearer ' + self.token}
  r = requests.get("https://www.googleapis.com/analytics/v3/data/ga", params=params, headers=headers)
  if r.status_code != 200:
  log.info("STATUS: %s" % (r.status_code,))
  log.info("CONTENT: %s" % (r.content,))
  raise Exception("Request with params: %s failed" % params)
   
  return json.loads(r.content)
  except Exception, e:
  if not prev_fail:
  print e
  results = self._get_json(self, params, prev_fail=True)
  else:
  log.exception(e)
   
  return dict(url=[])
   
def _totals_stats(self, start_date, end_date, period_name, period_complete_day): def _totals_stats(self, start_date, end_date, period_name, period_complete_day):
""" Fetches distinct totals, total pageviews etc """ """ Fetches distinct totals, total pageviews etc """
results = self.service.data().ga().get( try:
ids='ga:' + self.profile_id, args = {}
start_date=start_date, args["max-results"] = 100000
metrics='ga:pageviews', args["start-date"] = start_date
sort='-ga:pageviews', args["end-date"] = end_date
max_results=10000, args["ids"] = "ga:" + self.profile_id
end_date=end_date).execute()  
  args["metrics"] = "ga:pageviews"
  args["sort"] = "-ga:pageviews"
  args["alt"] = "json"
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
result_data = results.get('rows') result_data = results.get('rows')
ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]}, ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]},
period_complete_day) period_complete_day)
   
results = self.service.data().ga().get( try:
ids='ga:' + self.profile_id, # Because of issues of invalid responses, we are going to make these requests
start_date=start_date, # ourselves.
metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits', headers = {'authorization': 'Bearer ' + self.token}
max_results=10000,  
end_date=end_date).execute() args = {}
  args["max-results"] = 100000
  args["start-date"] = start_date
  args["end-date"] = end_date
  args["ids"] = "ga:" + self.profile_id
   
  args["metrics"] = "ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits"
  args["alt"] = "json"
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
result_data = results.get('rows') result_data = results.get('rows')
data = { data = {
'Pages per visit': result_data[0][0], 'Pages per visit': result_data[0][0],
'Average time on site': result_data[0][1], 'Average time on site': result_data[0][1],
'New visits': result_data[0][2], 'New visits': result_data[0][2],
'Total visits': result_data[0][3], 'Total visits': result_data[0][3],
} }
ga_model.update_sitewide_stats(period_name, "Totals", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Totals", data, period_complete_day)
   
# Bounces from / or another configurable page. # Bounces from / or another configurable page.
path = '/%s%s' % (config.get('googleanalytics.account'), path = '/%s%s' % (config.get('googleanalytics.account'),
config.get('ga-report.bounce_url', '/')) config.get('ga-report.bounce_url', '/'))
results = self.service.data().ga().get(  
ids='ga:' + self.profile_id, try:
filters='ga:pagePath==%s' % (path,), # Because of issues of invalid responses, we are going to make these requests
start_date=start_date, # ourselves.
metrics='ga:visitBounceRate', headers = {'authorization': 'Bearer ' + self.token}
dimensions='ga:pagePath',  
max_results=10000, args = {}
end_date=end_date).execute() args["max-results"] = 100000
  args["start-date"] = start_date
  args["end-date"] = end_date
  args["ids"] = "ga:" + self.profile_id
   
  args["filters"] = 'ga:pagePath==%s' % (path,)
  args["dimensions"] = 'ga:pagePath'
  args["metrics"] = "ga:visitBounceRate"
  args["alt"] = "json"
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
result_data = results.get('rows') result_data = results.get('rows')
if not result_data or len(result_data) != 1: if not result_data or len(result_data) != 1:
log.error('Could not pinpoint the bounces for path: %s. Got results: %r', log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
path, result_data) path, result_data)
return return
results = result_data[0] results = result_data[0]
bounces = float(results[1]) bounces = float(results[1])
# visitBounceRate is already a % # visitBounceRate is already a %
log.info('Google reports visitBounceRate as %s', bounces) log.info('Google reports visitBounceRate as %s', bounces)
ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': float(bounces)}, ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': float(bounces)},
period_complete_day) period_complete_day)
   
   
def _locale_stats(self, start_date, end_date, period_name, period_complete_day): def _locale_stats(self, start_date, end_date, period_name, period_complete_day):
""" Fetches stats about language and country """ """ Fetches stats about language and country """
results = self.service.data().ga().get(  
ids='ga:' + self.profile_id, try:
start_date=start_date, # Because of issues of invalid responses, we are going to make these requests
metrics='ga:pageviews', # ourselves.
sort='-ga:pageviews', headers = {'authorization': 'Bearer ' + self.token}
dimensions="ga:language,ga:country",  
max_results=10000, args = {}
end_date=end_date).execute() args["max-results"] = 100000
  args["start-date"] = start_date
  args["end-date"] = end_date
  args["ids"] = "ga:" + self.profile_id
   
  args["dimensions"] = "ga:language,ga:country"
  args["metrics"] = "ga:pageviews"
  args["sort"] = "-ga:pageviews"
  args["alt"] = "json"
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
result_data = results.get('rows') result_data = results.get('rows')
data = {} data = {}
for result in result_data: for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2]) data[result[0]] = data.get(result[0], 0) + int(result[2])
self._filter_out_long_tail(data, MIN_VIEWS) self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Languages", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Languages", data, period_complete_day)
   
data = {} data = {}
for result in result_data: for result in result_data:
data[result[1]] = data.get(result[1], 0) + int(result[2]) data[result[1]] = data.get(result[1], 0) + int(result[2])
self._filter_out_long_tail(data, MIN_VIEWS) self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Country", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Country", data, period_complete_day)
   
   
def _download_stats(self, start_date, end_date, period_name, period_complete_day): def _download_stats(self, start_date, end_date, period_name, period_complete_day):
""" Fetches stats about data downloads """ """ Fetches stats about data downloads """
import ckan.model as model import ckan.model as model
   
data = {} data = {}
   
results = self.service.data().ga().get( try:
ids='ga:' + self.profile_id, # Because of issues of invalid responses, we are going to make these requests
start_date=start_date, # ourselves.
filters='ga:eventAction==download', headers = {'authorization': 'Bearer ' + self.token}
metrics='ga:totalEvents',  
sort='-ga:totalEvents', args = {}
dimensions="ga:eventLabel", args["max-results"] = 100000
max_results=10000, args["start-date"] = start_date
end_date=end_date).execute() args["end-date"] = end_date
  args["ids"] = "ga:" + self.profile_id
   
  args["filters"] = 'ga:eventAction==download'
  args["dimensions"] = "ga:eventLabel"
  args["metrics"] = "ga:totalEvents"
  args["alt"] = "json"
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
result_data = results.get('rows') result_data = results.get('rows')
if not result_data: if not result_data:
# We may not have data for this time period, so we need to bail # We may not have data for this time period, so we need to bail
# early. # early.
log.info("There is no download data for this time period") log.info("There is no download data for this time period")
return return
   
def process_result_data(result_data, cached=False): def process_result_data(result_data, cached=False):
progress_total = len(result_data) progress_total = len(result_data)
progress_count = 0 progress_count = 0
resources_not_matched = [] resources_not_matched = []
for result in result_data: for result in result_data:
progress_count += 1 progress_count += 1
if progress_count % 100 == 0: if progress_count % 100 == 0:
log.debug('.. %d/%d done so far', progress_count, progress_total) log.debug('.. %d/%d done so far', progress_count, progress_total)
   
url = result[0].strip() url = result[0].strip()
   
# Get package id associated with the resource that has this URL. # Get package id associated with the resource that has this URL.
q = model.Session.query(model.Resource) q = model.Session.query(model.Resource)
if cached: if cached:
r = q.filter(model.Resource.cache_url.like("%s%%" % url)).first() r = q.filter(model.Resource.cache_url.like("%s%%" % url)).first()
else: else:
r = q.filter(model.Resource.url.like("%s%%" % url)).first() r = q.filter(model.Resource.url.like("%s%%" % url)).first()
   
package_name = r.resource_group.package.name if r else "" package_name = r.resource_group.package.name if r else ""
if package_name: if package_name:
data[package_name] = data.get(package_name, 0) + int(result[1]) data[package_name] = data.get(package_name, 0) + int(result[1])
else: else:
resources_not_matched.append(url) resources_not_matched.append(url)
continue continue
if resources_not_matched: if resources_not_matched:
log.debug('Could not match %i or %i resource URLs to datasets. e.g. %r', log.debug('Could not match %i or %i resource URLs to datasets. e.g. %r',
len(resources_not_matched), progress_total, resources_not_matched[:3]) len(resources_not_matched), progress_total, resources_not_matched[:3])
   
log.info('Associating downloads of resource URLs with their respective datasets') log.info('Associating downloads of resource URLs with their respective datasets')
process_result_data(results.get('rows')) process_result_data(results.get('rows'))
   
results = self.service.data().ga().get( try:
ids='ga:' + self.profile_id, # Because of issues of invalid responses, we are going to make these requests
start_date=start_date, # ourselves.
filters='ga:eventAction==download-cache', headers = {'authorization': 'Bearer ' + self.token}
metrics='ga:totalEvents',  
sort='-ga:totalEvents', args = dict( ids='ga:' + self.profile_id,
dimensions="ga:eventLabel", filters='ga:eventAction==download-cache',
max_results=10000, metrics='ga:totalEvents',
end_date=end_date).execute() sort='-ga:totalEvents',
  dimensions="ga:eventLabel",
  max_results=10000)
  args['start-date'] = start_date
  args['end-date'] = end_date
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
log.info('Associating downloads of cache resource URLs with their respective datasets') log.info('Associating downloads of cache resource URLs with their respective datasets')
process_result_data(results.get('rows'), cached=False) process_result_data(results.get('rows'), cached=False)
   
self._filter_out_long_tail(data, MIN_DOWNLOADS) self._filter_out_long_tail(data, MIN_DOWNLOADS)
ga_model.update_sitewide_stats(period_name, "Downloads", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Downloads", data, period_complete_day)
   
def _social_stats(self, start_date, end_date, period_name, period_complete_day): def _social_stats(self, start_date, end_date, period_name, period_complete_day):
""" Finds out which social sites people are referred from """ """ Finds out which social sites people are referred from """
results = self.service.data().ga().get(  
ids='ga:' + self.profile_id, try:
start_date=start_date, # Because of issues of invalid responses, we are going to make these requests
metrics='ga:pageviews', # ourselves.
sort='-ga:pageviews', headers = {'authorization': 'Bearer ' + self.token}
dimensions="ga:socialNetwork,ga:referralPath",  
max_results=10000, args = dict( ids='ga:' + self.profile_id,
end_date=end_date).execute() metrics='ga:pageviews',
  sort='-ga:pageviews',
  dimensions="ga:socialNetwork,ga:referralPath",
  max_results=10000)
  args['start-date'] = start_date
  args['end-date'] = end_date
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
result_data = results.get('rows') result_data = results.get('rows')
data = {} data = {}
for result in result_data: for result in result_data:
if not result[0] == '(not set)': if not result[0] == '(not set)':
data[result[0]] = data.get(result[0], 0) + int(result[2]) data[result[0]] = data.get(result[0], 0) + int(result[2])
self._filter_out_long_tail(data, 3) self._filter_out_long_tail(data, 3)
ga_model.update_sitewide_stats(period_name, "Social sources", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Social sources", data, period_complete_day)
   
   
def _os_stats(self, start_date, end_date, period_name, period_complete_day): def _os_stats(self, start_date, end_date, period_name, period_complete_day):
""" Operating system stats """ """ Operating system stats """
results = self.service.data().ga().get( try:
ids='ga:' + self.profile_id, # Because of issues of invalid responses, we are going to make these requests
start_date=start_date, # ourselves.
metrics='ga:pageviews', headers = {'authorization': 'Bearer ' + self.token}
sort='-ga:pageviews',  
dimensions="ga:operatingSystem,ga:operatingSystemVersion", args = dict( ids='ga:' + self.profile_id,
max_results=10000, metrics='ga:pageviews',
end_date=end_date).execute() sort='-ga:pageviews',
  dimensions="ga:operatingSystem,ga:operatingSystemVersion",
  max_results=10000)
  args['start-date'] = start_date
  args['end-date'] = end_date
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
result_data = results.get('rows') result_data = results.get('rows')
data = {} data = {}
for result in result_data: for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2]) data[result[0]] = data.get(result[0], 0) + int(result[2])
self._filter_out_long_tail(data, MIN_VIEWS) self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Operating Systems", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Operating Systems", data, period_complete_day)
   
data = {} data = {}
for result in result_data: for result in result_data:
if int(result[2]) >= MIN_VIEWS: if int(result[2]) >= MIN_VIEWS:
key = "%s %s" % (result[0],result[1]) key = "%s %s" % (result[0],result[1])
data[key] = result[2] data[key] = result[2]
ga_model.update_sitewide_stats(period_name, "Operating Systems versions", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Operating Systems versions", data, period_complete_day)
   
   
def _browser_stats(self, start_date, end_date, period_name, period_complete_day): def _browser_stats(self, start_date, end_date, period_name, period_complete_day):
""" Information about browsers and browser versions """ """ Information about browsers and browser versions """
results = self.service.data().ga().get(  
ids='ga:' + self.profile_id, try:
start_date=start_date, # Because of issues of invalid responses, we are going to make these requests
metrics='ga:pageviews', # ourselves.
sort='-ga:pageviews', headers = {'authorization': 'Bearer ' + self.token}
dimensions="ga:browser,ga:browserVersion",  
max_results=10000, args = dict( ids='ga:' + self.profile_id,
end_date=end_date).execute() metrics='ga:pageviews',
  sort='-ga:pageviews',
  dimensions="ga:browser,ga:browserVersion",
  max_results=10000)
   
  args['start-date'] = start_date
  args['end-date'] = end_date
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
   
result_data = results.get('rows') result_data = results.get('rows')
# e.g. [u'Firefox', u'19.0', u'20'] # e.g. [u'Firefox', u'19.0', u'20']
   
data = {} data = {}
for result in result_data: for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2]) data[result[0]] = data.get(result[0], 0) + int(result[2])
self._filter_out_long_tail(data, MIN_VIEWS) self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Browsers", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Browsers", data, period_complete_day)
   
data = {} data = {}
for result in result_data: for result in result_data:
key = "%s %s" % (result[0], self._filter_browser_version(result[0], result[1])) key = "%s %s" % (result[0], self._filter_browser_version(result[0], result[1]))
data[key] = data.get(key, 0) + int(result[2]) data[key] = data.get(key, 0) + int(result[2])
self._filter_out_long_tail(data, MIN_VIEWS) self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Browser versions", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Browser versions", data, period_complete_day)
   
@classmethod @classmethod
def _filter_browser_version(cls, browser, version_str): def _filter_browser_version(cls, browser, version_str):
''' '''
Simplifies a browser version string if it is detailed. Simplifies a browser version string if it is detailed.
i.e. groups together Firefox 3.5.1 and 3.5.2 to be just 3. i.e. groups together Firefox 3.5.1 and 3.5.2 to be just 3.
This is helpful when viewing stats and good to protect privacy. This is helpful when viewing stats and good to protect privacy.
''' '''
ver = version_str ver = version_str
parts = ver.split('.') parts = ver.split('.')
if len(parts) > 1: if len(parts) > 1:
if parts[1][0] == '0': if parts[1][0] == '0':
ver = parts[0] ver = parts[0]
else: else:
ver = "%s" % (parts[0]) ver = "%s" % (parts[0])
# Special case complex version nums # Special case complex version nums
if browser in ['Safari', 'Android Browser']: if browser in ['Safari', 'Android Browser']:
ver = parts[0] ver = parts[0]
if len(ver) > 2: if len(ver) > 2:
num_hidden_digits = len(ver) - 2 num_hidden_digits = len(ver) - 2
ver = ver[0] + ver[1] + 'X' * num_hidden_digits ver = ver[0] + ver[1] + 'X' * num_hidden_digits
return ver return ver
   
def _mobile_stats(self, start_date, end_date, period_name, period_complete_day): def _mobile_stats(self, start_date, end_date, period_name, period_complete_day):
""" Info about mobile devices """ """ Info about mobile devices """
   
results = self.service.data().ga().get( try:
ids='ga:' + self.profile_id, # Because of issues of invalid responses, we are going to make these requests
start_date=start_date, # ourselves.
metrics='ga:pageviews', headers = {'authorization': 'Bearer ' + self.token}
sort='-ga:pageviews',  
dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo", args = dict( ids='ga:' + self.profile_id,
max_results=10000, metrics='ga:pageviews',
end_date=end_date).execute() sort='-ga:pageviews',
  dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
  max_results=10000)
  args['start-date'] = start_date
  args['end-date'] = end_date
   
  results = self._get_json(args)
  except Exception, e:
  log.exception(e)
  results = dict(url=[])
   
   
result_data = results.get('rows') result_data = results.get('rows')
data = {} data = {}
for result in result_data: for result in result_data:
data[result[0]] = data.get(result[0], 0) + int(result[2]) data[result[0]] = data.get(result[0], 0) + int(result[2])
self._filter_out_long_tail(data, MIN_VIEWS) self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Mobile brands", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Mobile brands", data, period_complete_day)
   
data = {} data = {}
for result in result_data: for result in result_data:
data[result[1]] = data.get(result[1], 0) + int(result[2]) data[result[1]] = data.get(result[1], 0) + int(result[2])
self._filter_out_long_tail(data, MIN_VIEWS) self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Mobile devices", data, period_complete_day) ga_model.update_sitewide_stats(period_name, "Mobile devices", data, period_complete_day)
   
@classmethod @classmethod
def _filter_out_long_tail(cls, data, threshold=10): def _filter_out_long_tail(cls, data, threshold=10):
''' '''
Given data which is a frequency distribution, filter out Given data which is a frequency distribution, filter out
results which are below a threshold count. This is good to protect results which are below a threshold count. This is good to protect
privacy. privacy.
''' '''
for key, value in data.items(): for key, value in data.items():
if value < threshold: if value < threshold:
del data[key] del data[key]
   
import os import os
import httplib2 import httplib2
from apiclient.discovery import build from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage from oauth2client.file import Storage
from oauth2client.tools import run from oauth2client.tools import run
   
from pylons import config from pylons import config
   
   
def _prepare_credentials(token_filename, credentials_filename):
    """
    Either returns the user's oauth credentials or uses the credentials
    file to generate a token (by forcing the user to login in the browser)
    """
    storage = Storage(token_filename)
    credentials = storage.get()

    # Re-run the browser-based oauth flow when we have no usable token.
    needs_token = credentials is None or credentials.invalid
    if needs_token:
        flow = flow_from_clientsecrets(
            credentials_filename,
            scope='https://www.googleapis.com/auth/analytics.readonly',
            message="Can't find the credentials file")
        credentials = run(flow, storage)

    return credentials
   
   
def init_service(token_file, credentials_file):
    """
    Given a file containing the user's oauth token (and another with
    credentials in case we need to generate the token) will return the
    access token together with a service object representing the
    analytics API.
    """
    credentials = _prepare_credentials(token_file, credentials_file)
    # Authorize a fresh http object so every API call carries the token.
    http = credentials.authorize(httplib2.Http())
    service = build('analytics', 'v3', http=http)
    return credentials.access_token, service
   
   
def get_profile_id(service):
    """
    Get the profile ID for this user and the service specified by the
    'googleanalytics.id' configuration option. This function iterates
    over all of the accounts available to the user who invoked the
    service to find one where the account name matches (in case the
    user has several).

    Returns the profile id string, or None when no account/profile
    matches.  Raises Exception when 'googleanalytics.account' or
    'googleanalytics.id' is not configured.
    """
    accounts = service.management().accounts().list().execute()

    if not accounts.get('items'):
        return None

    accountName = config.get('googleanalytics.account')
    if not accountName:
        raise Exception('googleanalytics.account needs to be configured')
    webPropertyId = config.get('googleanalytics.id')
    if not webPropertyId:
        raise Exception('googleanalytics.id needs to be configured')

    # Last matching account wins, as in the original loop.  Previously a
    # missing match fell through to an unbound `accountId` (NameError);
    # treat that as "not found" instead.  An unused webproperties().list()
    # request was also removed.
    accountId = None
    for acc in accounts.get('items'):
        if acc.get('name') == accountName:
            accountId = acc.get('id')
    if accountId is None:
        return None

    profiles = service.management().profiles().list(
        accountId=accountId, webPropertyId=webPropertyId).execute()

    if profiles.get('items'):
        return profiles.get('items')[0].get('id')

    return None
   
import re import re
import uuid import uuid
   
from sqlalchemy import Table, Column, MetaData, ForeignKey from sqlalchemy import Table, Column, MetaData, ForeignKey
from sqlalchemy import types from sqlalchemy import types
from sqlalchemy.sql import select from sqlalchemy.sql import select
from sqlalchemy.orm import mapper, relation from sqlalchemy.orm import mapper, relation
from sqlalchemy import func from sqlalchemy import func
   
import ckan.model as model import ckan.model as model
from ckan.lib.base import * from ckan.lib.base import *
   
log = __import__('logging').getLogger(__name__) log = __import__('logging').getLogger(__name__)
   
def make_uuid():
    # Fresh random UUID, rendered as a unicode string for the
    # UnicodeText primary-key columns below.
    new_id = uuid.uuid4()
    return unicode(new_id)
   
metadata = MetaData() metadata = MetaData()
   
class GA_Url(object):
    """Per-URL traffic record: pageviews/visits for one URL in one period."""

    def __init__(self, **kwargs):
        # Accept column values as keyword arguments, e.g.
        # GA_Url(url=..., pageviews=..., visits=...).
        for k,v in kwargs.items():
            setattr(self, k, v)

# NOTE: pageviews/visits are stored as text, not integers; callers cast
# with int() when aggregating.
url_table = Table('ga_url', metadata,
                      Column('id', types.UnicodeText, primary_key=True,
                             default=make_uuid),
                      Column('period_name', types.UnicodeText),
                      Column('period_complete_day', types.Integer),
                      Column('pageviews', types.UnicodeText),
                      Column('visits', types.UnicodeText),
                      Column('url', types.UnicodeText),
                      Column('department_id', types.UnicodeText),
                      Column('package_id', types.UnicodeText),
                )
mapper(GA_Url, url_table)
   
   
class GA_Stat(object):
    """Sitewide statistic: one (stat_name, key, value) triple per period."""

    def __init__(self, **kwargs):
        # Accept column values as keyword arguments, e.g.
        # GA_Stat(stat_name=..., key=..., value=...).
        for k,v in kwargs.items():
            setattr(self, k, v)

stat_table = Table('ga_stat', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  Column('period_complete_day', types.UnicodeText),
                  Column('stat_name', types.UnicodeText),
                  Column('key', types.UnicodeText),
                  Column('value', types.UnicodeText), )
mapper(GA_Stat, stat_table)
   
   
class GA_Publisher(object):
    """Aggregated views/visits for a publisher (and its sub-publishers)."""

    def __init__(self, **kwargs):
        # Accept column values as keyword arguments.
        for k,v in kwargs.items():
            setattr(self, k, v)

pub_table = Table('ga_publisher', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  Column('publisher_name', types.UnicodeText),
                  Column('views', types.UnicodeText),
                  Column('visits', types.UnicodeText),
                  # toplevel/subpublishercount/parent describe the publisher
                  # hierarchy for rendering the report tree.
                  Column('toplevel', types.Boolean, default=False),
                  Column('subpublishercount', types.Integer, default=0),
                  Column('parent', types.UnicodeText),
               )
mapper(GA_Publisher, pub_table)
   
   
class GA_ReferralStat(object):
    """Referral count from an external source URL to a site URL, per period."""

    def __init__(self, **kwargs):
        # Accept column values as keyword arguments.
        for k,v in kwargs.items():
            setattr(self, k, v)

referrer_table = Table('ga_referrer', metadata,
                      Column('id', types.UnicodeText, primary_key=True,
                             default=make_uuid),
                      Column('period_name', types.UnicodeText),
                      Column('source', types.UnicodeText),
                      Column('url', types.UnicodeText),
                      Column('count', types.Integer),
                )
mapper(GA_ReferralStat, referrer_table)
   
   
   
def init_tables():
    """Create all the ga_* tables defined above in the CKAN database."""
    metadata.create_all(model.meta.engine)
   
   
# Module-level cache of reflected SQLAlchemy Table objects, keyed by name.
cached_tables = {}


def get_table(name):
    """Return the reflected Table for `name`, caching the result.

    NOTE(review): a cache miss reflects the entire database schema, not
    just the requested table — fine for occasional use, slow if hot.
    """
    if name not in cached_tables:
        meta = MetaData()
        meta.reflect(bind=model.meta.engine)
        table = meta.tables[name]
        cached_tables[name] = table
    return cached_tables[name]
   
   
def _normalize_url(url):
    '''Strip off the hostname etc. Do this before storing it.

    >>> _normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
    '/dataset/weekly_fuel_prices'
    '''
    # 'http://host/path...' splits on '/' into ['http:', '', 'host', ...];
    # everything from index 3 onwards is the path.
    return '/' + '/'.join(url.split('/')[3:])
   
   
def _get_package_and_publisher(url):
    """Map a normalized site URL to (dataset_name, publisher_name).

    e.g. /dataset/fuel_prices            -> ('fuel_prices', <publisher or None>)
         /dataset/fuel_prices/resource/x -> ('fuel_prices', <publisher or None>)
         /publisher/barnet               -> (None, 'barnet')
         anything else                   -> (None, None)
    """
    dataset_match = re.match('/dataset/([^/]+)(/.*)?', url)
    if not dataset_match:
        # Not a dataset page; maybe a publisher page.
        publisher_match = re.match('/publisher/([^/]+)(/.*)?', url)
        if publisher_match:
            return None, publisher_match.groups()[0]
        return None, None

    dataset_ref = dataset_match.groups()[0]
    dataset = model.Package.get(dataset_ref)
    if dataset:
        publisher_groups = dataset.get_groups('organization')
        if publisher_groups:
            return dataset_ref, publisher_groups[0].name
    return dataset_ref, None
   
def update_sitewide_stats(period_name, stat_name, data, period_complete_day):
    """Upsert GA_Stat rows for one sitewide stat in one period.

    `data` maps a key (e.g. a browser name) to its value for the period;
    each (period_name, stat_name, key) combination gets exactly one row.
    Commits after each key.
    """
    # .items() instead of py2-only .iteritems(); on py2 this just makes a
    # list copy, which is behaviour-neutral here.
    for k, v in data.items():
        item = model.Session.query(GA_Stat).\
            filter(GA_Stat.period_name==period_name).\
            filter(GA_Stat.key==k).\
            filter(GA_Stat.stat_name==stat_name).first()
        if item:
            # Update in place.  (period_name and key already equal the
            # filter values, so re-assigning them was redundant.)
            item.value = v
            item.period_complete_day = period_complete_day
            model.Session.add(item)
        else:
            # create the row
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'period_complete_day': period_complete_day,
                      'key': k,
                      'value': v,
                      'stat_name': stat_name
                      }
            model.Session.add(GA_Stat(**values))
        model.Session.commit()
   
   
def pre_update_url_stats(period_name):
    """Delete GA_Url rows for `period_name` and for the aggregate 'All'
    period, so they can be re-written from fresh analytics data."""
    period_q = model.Session.query(GA_Url).\
        filter(GA_Url.period_name==period_name)
    log.debug("Deleting %d '%s' records" % (period_q.count(), period_name))
    period_q.delete()

    all_q = model.Session.query(GA_Url).\
        filter(GA_Url.period_name == 'All')
    log.debug("Deleting %d 'All' records..." % all_q.count())
    all_q.delete()

    model.Session.flush()
    model.Session.commit()
    model.repo.commit_and_remove()
    log.debug('...done')
   
def post_update_url_stats():

    """ Check the distinct url field in ga_url and make sure
        it has an All record.  If not then create one.

        After running this then every URL should have an All
        record regardless of whether the URL has an entry for
        the month being currently processed.
    """
    log.debug('Post-processing "All" records...')
    # Select rows for URLs that do not yet have an 'All' record.
    query = """select url, pageviews::int, visits::int
                from ga_url
                where url not in (select url from ga_url where period_name ='All')"""
    connection = model.Session.connection()
    res = connection.execute(query)

    views, visits = {}, {}
    # url, views, visits
    # Sum pageviews and visits across all periods for each URL.
    for row in res:
        views[row[0]] = views.get(row[0], 0) + row[1]
        visits[row[0]] = visits.get(row[0], 0) + row[2]

    progress_total = len(views.keys())
    progress_count = 0
    for key in views.keys():
        progress_count += 1
        if progress_count % 100 == 0:
            log.debug('.. %d/%d done so far', progress_count, progress_total)

        # Re-derive the dataset/publisher from the URL for the new row.
        package, publisher = _get_package_and_publisher(key)

        values = {'id': make_uuid(),
                  'period_name': "All",
                  'period_complete_day': 0,
                  'url': key,
                  'pageviews': views[key],
                  'visits': visits[key],
                  'department_id': publisher,
                  'package_id': package
                  }
        model.Session.add(GA_Url(**values))
    model.Session.commit()
    log.debug('..done')
   
   
def update_url_stats(period_name, period_complete_day, url_data):
    '''
    Given a list of urls and number of hits for each during a given period,
    stores them in GA_Url under the period and recalculates the totals for
    the 'All' period.

    `url_data` is an iterable of (url, views, visits) tuples.  Commits
    after every URL.
    '''
    progress_total = len(url_data)
    progress_count = 0
    for url, views, visits in url_data:
        progress_count += 1
        if progress_count % 100 == 0:
            log.debug('.. %d/%d done so far', progress_count, progress_total)

        package, publisher = _get_package_and_publisher(url)

        # Upsert this period's row for the URL.
        item = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.url==url).first()
        if item:
            # NOTE(review): the pageviews/visits columns are UnicodeText, so
            # if existing values are strings this '+' concatenates rather
            # than sums — presumably callers pass ints; confirm.
            item.pageviews = item.pageviews + views
            item.visits = item.visits + visits
            if not item.package_id:
                item.package_id = package
            if not item.department_id:
                item.department_id = publisher
            model.Session.add(item)
        else:
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'period_complete_day': period_complete_day,
                      'url': url,
                      'pageviews': views,
                      'visits': visits,
                      'department_id': publisher,
                      'package_id': package
                      }
            model.Session.add(GA_Url(**values))
        model.Session.commit()

        if package:
            # Recompute the 'All'-period aggregate for this URL: existing
            # 'All' rows plus every per-period row.  (Old 'All' rows are
            # expected to have been cleared by pre_update_url_stats; adding
            # without clearing would duplicate them.)
            old_pageviews, old_visits = 0, 0
            old = model.Session.query(GA_Url).\
                filter(GA_Url.period_name=='All').\
                filter(GA_Url.url==url).all()
            old_pageviews = sum([int(o.pageviews) for o in old])
            old_visits = sum([int(o.visits) for o in old])

            entries = model.Session.query(GA_Url).\
                filter(GA_Url.period_name!='All').\
                filter(GA_Url.url==url).all()
            values = {'id': make_uuid(),
                      'period_name': 'All',
                      'period_complete_day': 0,
                      'url': url,
                      'pageviews': sum([int(e.pageviews) for e in entries]) + int(old_pageviews),
                      'visits': sum([int(e.visits or 0) for e in entries]) + int(old_visits),
                      'department_id': publisher,
                      'package_id': package
                      }

            model.Session.add(GA_Url(**values))
            model.Session.commit()
   
   
   
   
def update_social(period_name, data):
    '''Replace the referral statistics recorded for ``period_name``.

    ``data`` maps a landing-page URL to a list of ``(source, count)``
    entries.  All existing rows for the period are removed first; each
    entry is then inserted, or merged into an existing row with the same
    period/source/url by adding to its count.
    '''
    # Clean up first.
    model.Session.query(GA_ReferralStat).\
        filter(GA_ReferralStat.period_name==period_name).delete()

    for url, entries in data.iteritems():
        for entry in entries:
            source = entry[0]
            count = entry[1]

            existing = model.Session.query(GA_ReferralStat).\
                filter(GA_ReferralStat.period_name==period_name).\
                filter(GA_ReferralStat.source==source).\
                filter(GA_ReferralStat.url==url).first()
            if existing:
                existing.count = existing.count + count
                model.Session.add(existing)
            else:
                # create the row
                model.Session.add(GA_ReferralStat(
                    id=make_uuid(),
                    period_name=period_name,
                    source=source,
                    url=url,
                    count=count))
    model.Session.commit()
   
def update_publisher_stats(period_name):
    """
    Updates the publisher stats from the data retrieved for /dataset/*
    and /publisher/*. Will run against each dataset and generates the
    totals for the entire tree beneath each publisher.
    """
    top_level = get_top_level()
    active_publishers = model.Session.query(model.Group).\
        filter(model.Group.type=='organization').\
        filter(model.Group.state=='active').all()
    for pub in active_publishers:
        views, visits, subpub = update_publisher(period_name, pub, pub.name)

        # Record the name of the immediate parent publisher, if there is one.
        parent_groups = pub.get_groups('organization')
        parent = parent_groups[0].name if parent_groups else ''

        row = model.Session.query(GA_Publisher).\
            filter(GA_Publisher.period_name==period_name).\
            filter(GA_Publisher.publisher_name==pub.name).first()
        if row:
            # Refresh the existing row in place.
            row.views = views
            row.visits = visits
            row.publisher_name = pub.name
            row.toplevel = pub in top_level
            row.subpublishercount = subpub
            row.parent = parent
            model.Session.add(row)
        else:
            # create the row
            model.Session.add(GA_Publisher(
                id=make_uuid(),
                period_name=period_name,
                publisher_name=pub.name,
                views=views,
                visits=visits,
                toplevel=pub in top_level,
                subpublishercount=subpub,
                parent=parent))
        model.Session.commit()
   
   
def update_publisher(period_name, pub, part=''):
    '''Aggregate pageviews/visits for ``pub`` and every publisher beneath it.

    :param period_name: the GA period (e.g. '2012-06') to aggregate
    :param pub: the publisher (Group) at the root of the subtree
    :param part: unused; kept for backward compatibility with callers
    :returns: (views, visits, subpublisher_count) where the count
              excludes ``pub`` itself
    '''
    views, visits, subpub = 0, 0, 0
    for publisher in go_down_tree(pub):
        subpub = subpub + 1
        items = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.department_id==publisher.name).all()
        for item in items:
            # Guard against NULL/empty counts: visits is treated as optional
            # elsewhere in this file (`int(e.visits or 0)`), so mirror that
            # here instead of crashing on int(None).
            views = views + int(item.pageviews or 0)
            visits = visits + int(item.visits or 0)

    return views, visits, (subpub-1)
   
   
def get_top_level():
    '''Returns the top level publishers.

    A "top level" publisher is one with no active group membership row,
    i.e. it appears in no other publisher's hierarchy.
    '''
    # NOTE(review): Python's `and` between SQLAlchemy clause elements does NOT
    # build a SQL AND -- it evaluates clause truthiness and keeps only the
    # final clause, so the outerjoin condition below likely reduces to just
    # `Member.state == 'active'`.  This probably wants sqlalchemy.and_(...);
    # confirm against the intended hierarchy semantics before changing.
    return model.Session.query(model.Group).\
           outerjoin(model.Member, model.Member.table_id == model.Group.id and \
                     model.Member.table_name == 'group' and \
                     model.Member.state == 'active').\
           filter(model.Member.id==None).\
           filter(model.Group.type=='organization').\
           order_by(model.Group.name).all()
   
def get_children(publisher):
    '''Finds child publishers for the given publisher (object). (Not recursive i.e. returns one level)'''
    return publisher.get_children_groups(type='organization')
   
def go_down_tree(publisher):
    '''Provided with a publisher object, it walks down the hierarchy and yields each publisher,
    including the one you supply.

    Traversal is depth-first, pre-order (a publisher is yielded before its
    descendants), matching the previous recursive implementation.'''
    pending = [publisher]
    while pending:
        node = pending.pop()
        yield node
        # Push children reversed so popping from the end of the stack
        # yields them in their original order.
        pending.extend(reversed(list(get_children(node))))
   
def delete(period_name):
    '''
    Deletes table data for the specified period, or specify 'all'
    for all periods.
    '''
    for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat):
        q = model.Session.query(object_type)
        # The docstring advertises 'all' but the code previously only
        # recognised 'All'; accept either case so e.g. 'all' does not
        # silently filter on a literal period named 'all'.
        if period_name.lower() != 'all':
            q = q.filter_by(period_name=period_name)
        q.delete()
    model.repo.commit_and_remove()
   
def get_score_for_dataset(dataset_name):
    '''
    Returns a "current popularity" score for a dataset,
    based on how many views it has had recently.

    The score is views-per-day summed over the current month and the
    previous month, with earlier months discounted by 50%, then scaled
    by 100 and truncated to an int.
    '''
    import datetime
    now = datetime.datetime.now()
    last_month = now - datetime.timedelta(days=30)
    # Period names match the 'YYYY-MM' format used by GA_Url rows,
    # e.g. ['2012-05', '2012-06'] -- oldest first so the discount below
    # halves the older month's contribution.
    period_names = ['%s-%02d' % (last_month.year, last_month.month),
                    '%s-%02d' % (now.year, now.month),
                    ]

    score = 0
    for period_name in period_names:
        score /= 2 # previous periods are discounted by 50%
        entry = model.Session.query(GA_Url)\
                .filter(GA_Url.period_name==period_name)\
                .filter(GA_Url.package_id==dataset_name).first()
        # score
        if entry:
            views = float(entry.pageviews)
            if entry.period_complete_day:
                views_per_day = views / entry.period_complete_day
            else:
                views_per_day = views / 15 # guess
            score += views_per_day

    score = int(score * 100)
    # NOTE(review): this references a module-level `log`, which is not
    # defined in the visible part of this file (sibling helpers use `_log`).
    # Confirm a module logger named `log` exists, otherwise this line
    # raises NameError.
    log.debug('Popularity %s: %s', score, dataset_name)
    return score
   
import logging import logging
import operator import operator
   
import ckan.lib.base as base import ckan.lib.base as base
import ckan.model as model import ckan.model as model
from ckan.logic import get_action from ckan.logic import get_action
   
from ckanext.ga_report.ga_model import GA_Url, GA_Publisher from ckanext.ga_report.ga_model import GA_Url, GA_Publisher
from ckanext.ga_report.controller import _get_publishers from ckanext.ga_report.controller import _get_publishers
_log = logging.getLogger(__name__) _log = logging.getLogger(__name__)
   
def popular_datasets(count=10):
    '''Render a snippet listing up to ``count`` popular datasets taken
    from a randomly chosen active publisher.

    :param count: maximum number of datasets to include in the snippet
    '''
    import random

    publisher = None
    # Bug fix: `datasets` was previously referenced in the `while` condition
    # before ever being assigned, raising NameError on the first iteration.
    datasets = None
    publishers = _get_publishers(30)
    total = len(publishers)
    while not publisher or not datasets:
        rand = random.randrange(0, total)
        publisher = publishers[rand][0]
        if not publisher.state == 'active':
            # Skip inactive publishers and pick again.
            publisher = None
            continue
        datasets = _datasets_for_publisher(publisher, 10)[:count]

    ctx = {
        'datasets': datasets,
        'publisher': publisher
    }
    return base.render_snippet('ga_report/ga_popular_datasets.html', **ctx)
   
def single_popular_dataset(top=20):
    '''Returns a random dataset from the most popular ones.

    :param top: the number of top datasets to select from
    :returns: a package dict (via the `package_show` action), or None if
              no active dataset exists at all
    '''
    import random

    # Rank dataset pages by pageviews; the '::int' cast in the order_by is
    # raw SQL because pageviews is presumably stored as text -- see the
    # int(...) conversions elsewhere in this module.
    top_datasets = model.Session.query(GA_Url).\
                   filter(GA_Url.url.like('/dataset/%')).\
                   order_by('ga_url.pageviews::int desc')
    num_top_datasets = top_datasets.count()

    dataset = None
    if num_top_datasets:
        count = 0
        while not dataset:
            # Pick a random row from the top `top` entries (fewer if the
            # table is small) and resolve it to a Package.
            rand = random.randrange(0, min(top, num_top_datasets))
            ga_url = top_datasets[rand]
            dataset = model.Package.get(ga_url.url[len('/dataset/'):])
            if dataset and not dataset.state == 'active':
                dataset = None
            # When testing, it is possible that top datasets are not available
            # so only go round this loop a few times before falling back on
            # a random dataset.
            count += 1
            if count > 10:
                break
    if not dataset:
        # fallback
        dataset = model.Session.query(model.Package)\
                  .filter_by(state='active').first()
        if not dataset:
            return None
    dataset_dict = get_action('package_show')({'model': model,
                                               'session': model.Session,
                                               'validate': False},
                                              {'id':dataset.id})
    return dataset_dict
   
def single_popular_dataset_html(top=20):
    '''Render the "single popular dataset" snippet.

    :param top: the number of top datasets to select from
    '''
    dataset_dict = single_popular_dataset(top)
    # Bug fix: this previously read `package.get(...)` and passed
    # `publisher_dict` to the template context -- both undefined names
    # (NameError).  Use the dataset dict returned above and the publisher
    # selected below.
    groups = dataset_dict.get('groups', [])
    publishers = [ g for g in groups if g.get('type') == 'organization' ]
    publisher = publishers[0] if publishers else {'name':'', 'title': ''}
    context = {
        'dataset': dataset_dict,
        'publisher': publisher
    }
    return base.render_snippet('ga_report/ga_popular_single.html', **context)
   
   
def most_popular_datasets(publisher, count=20, preview_image=None):
    '''Render the "most popular datasets" snippet for ``publisher``.

    Logs an error and returns an empty string when no publisher is supplied.
    '''
    if not publisher:
        _log.error("No valid publisher passed to 'most_popular_datasets'")
        return ""

    dataset_rows = _datasets_for_publisher(publisher, count)

    template_ctx = {
        'dataset_count': len(dataset_rows),
        'datasets': dataset_rows,
        'publisher': publisher,
        'preview_image': preview_image
    }

    return base.render_snippet('ga_report/publisher/popular.html', **template_ctx)
   
def _datasets_for_publisher(publisher, count):
    '''Return up to ``count`` (package, views, visits) tuples for the
    publisher's datasets, most-viewed first.

    Views/visits are summed across all matching GA_Url rows for each
    distinct package; rows whose package is missing or inactive are
    skipped with a warning.
    '''
    datasets = {}
    entries = model.Session.query(GA_Url).\
        filter(GA_Url.department_id==publisher.name).\
        filter(GA_Url.url.like('/dataset/%')).\
        order_by('ga_url.pageviews::int desc').all()
    for entry in entries:
        if len(datasets) >= count:
            # Once `count` distinct datasets have been collected, the old
            # code skipped every remaining entry anyway -- stop scanning.
            break
        p = model.Package.get(entry.url[len('/dataset/'):])

        if not p:
            _log.warning("Could not find Package for {url}".format(url=entry.url))
            continue

        if not p.state == 'active':
            _log.warning("Package {0} is not active, it is {1}".format(p.name, p.state))
            continue

        if p not in datasets:
            datasets[p] = {'views':0, 'visits': 0}

        datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews)
        datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits)

    results = []
    for k, v in datasets.iteritems():
        results.append((k, v['views'], v['visits']))

    return sorted(results, key=operator.itemgetter(1), reverse=True)
   
def month_option_title(month_iso, months, day):
    '''Return the display title for a month drop-down option.

    :param month_iso: ISO code of the month to look up (e.g. '2012-06')
    :param months: list of (iso_code, display_name) pairs, newest first
    :param day: day of the month data is complete up to (shown for the
                first, still-in-progress month)
    '''
    iso_codes = [code for (code, _name) in months]
    try:
        position = iso_codes.index(month_iso)
    except ValueError:
        _log.error('Month "%s" not found in list of months.' % month_iso)
        return month_iso
    title = months[position][1]
    # The first entry is the current (incomplete) month, so flag the
    # cut-off day; every other month is shown by name alone.
    return title + (' (up to %s)' % day) if position == 0 else title
   
   
   
  /* Modernizr 2.6.2 (Custom Build) | MIT & BSD
  * Build: http://modernizr.com/download/#-fontface-backgroundsize-borderimage-borderradius-boxshadow-flexbox-hsla-multiplebgs-opacity-rgba-textshadow-cssanimations-csscolumns-generatedcontent-cssgradients-cssreflections-csstransforms-csstransforms3d-csstransitions-applicationcache-canvas-canvastext-draganddrop-hashchange-history-audio-video-indexeddb-input-inputtypes-localstorage-postmessage-sessionstorage-websockets-websqldatabase-webworkers-geolocation-inlinesvg-smil-svg-svgclippaths-touch-webgl-shiv-cssclasses-addtest-prefixed-teststyles-testprop-testallprops-hasevent-prefixes-domprefixes-load
  */
  ;
 
 
 
  window.Modernizr = (function( window, document, undefined ) {
 
  var version = '2.6.2',
 
  Modernizr = {},
 
  enableClasses = true,
 
  docElement = document.documentElement,
 
  mod = 'modernizr',
  modElem = document.createElement(mod),
  mStyle = modElem.style,
 
  inputElem = document.createElement('input') ,
 
  smile = ':)',
 
  toString = {}.toString,
 
  prefixes = ' -webkit- -moz- -o- -ms- '.split(' '),
 
 
 
  omPrefixes = 'Webkit Moz O ms',
 
  cssomPrefixes = omPrefixes.split(' '),
 
  domPrefixes = omPrefixes.toLowerCase().split(' '),
 
  ns = {'svg': 'http://www.w3.org/2000/svg'},
 
  tests = {},
  inputs = {},
  attrs = {},
 
  classes = [],
 
  slice = classes.slice,
 
  featureName,
 
 
  injectElementWithStyles = function( rule, callback, nodes, testnames ) {
 
  var style, ret, node, docOverflow,
  div = document.createElement('div'),
  body = document.body,
  fakeBody = body || document.createElement('body');
 
  if ( parseInt(nodes, 10) ) {
  while ( nodes-- ) {
  node = document.createElement('div');
  node.id = testnames ? testnames[nodes] : mod + (nodes + 1);
  div.appendChild(node);
  }
  }
 
  style = ['&#173;','<style id="s', mod, '">', rule, '</style>'].join('');
  div.id = mod;
  (body ? div : fakeBody).innerHTML += style;
  fakeBody.appendChild(div);
  if ( !body ) {
  fakeBody.style.background = '';
  fakeBody.style.overflow = 'hidden';
  docOverflow = docElement.style.overflow;
  docElement.style.overflow = 'hidden';
  docElement.appendChild(fakeBody);
  }
 
  ret = callback(div, rule);
  if ( !body ) {
  fakeBody.parentNode.removeChild(fakeBody);
  docElement.style.overflow = docOverflow;
  } else {
  div.parentNode.removeChild(div);
  }
 
  return !!ret;
 
  },
 
 
 
  isEventSupported = (function() {
 
  var TAGNAMES = {
  'select': 'input', 'change': 'input',
  'submit': 'form', 'reset': 'form',
  'error': 'img', 'load': 'img', 'abort': 'img'
  };
 
  function isEventSupported( eventName, element ) {
 
  element = element || document.createElement(TAGNAMES[eventName] || 'div');
  eventName = 'on' + eventName;
 
  var isSupported = eventName in element;
 
  if ( !isSupported ) {
  if ( !element.setAttribute ) {
  element = document.createElement('div');
  }
  if ( element.setAttribute && element.removeAttribute ) {
  element.setAttribute(eventName, '');
  isSupported = is(element[eventName], 'function');
 
  if ( !is(element[eventName], 'undefined') ) {
  element[eventName] = undefined;
  }
  element.removeAttribute(eventName);
  }
  }
 
  element = null;
  return isSupported;
  }
  return isEventSupported;
  })(),
 
 
  _hasOwnProperty = ({}).hasOwnProperty, hasOwnProp;
 
  if ( !is(_hasOwnProperty, 'undefined') && !is(_hasOwnProperty.call, 'undefined') ) {
  hasOwnProp = function (object, property) {
  return _hasOwnProperty.call(object, property);
  };
  }
  else {
  hasOwnProp = function (object, property) {
  return ((property in object) && is(object.constructor.prototype[property], 'undefined'));
  };
  }
 
 
  if (!Function.prototype.bind) {
  Function.prototype.bind = function bind(that) {
 
  var target = this;
 
  if (typeof target != "function") {
  throw new TypeError();
  }
 
  var args = slice.call(arguments, 1),
  bound = function () {
 
  if (this instanceof bound) {
 
  var F = function(){};
  F.prototype = target.prototype;
  var self = new F();
 
  var result = target.apply(
  self,
  args.concat(slice.call(arguments))
  );
  if (Object(result) === result) {
  return result;
  }
  return self;
 
  } else {
 
  return target.apply(
  that,
  args.concat(slice.call(arguments))
  );
 
  }
 
  };
 
  return bound;
  };
  }
 
  function setCss( str ) {
  mStyle.cssText = str;
  }
 
  function setCssAll( str1, str2 ) {
  return setCss(prefixes.join(str1 + ';') + ( str2 || '' ));
  }
 
  function is( obj, type ) {
  return typeof obj === type;
  }
 
  function contains( str, substr ) {
  return !!~('' + str).indexOf(substr);
  }
 
  function testProps( props, prefixed ) {
  for ( var i in props ) {
  var prop = props[i];
  if ( !contains(prop, "-") && mStyle[prop] !== undefined ) {
  return prefixed == 'pfx' ? prop : true;
  }
  }
  return false;
  }
 
  function testDOMProps( props, obj, elem ) {
  for ( var i in props ) {
  var item = obj[props[i]];
  if ( item !== undefined) {
 
  if (elem === false) return props[i];
 
  if (is(item, 'function')){
  return item.bind(elem || obj);
  }
 
  return item;
  }
  }
  return false;
  }
 
  function testPropsAll( prop, prefixed, elem ) {
 
  var ucProp = prop.charAt(0).toUpperCase() + prop.slice(1),
  props = (prop + ' ' + cssomPrefixes.join(ucProp + ' ') + ucProp).split(' ');
 
  if(is(prefixed, "string") || is(prefixed, "undefined")) {
  return testProps(props, prefixed);
 
  } else {
  props = (prop + ' ' + (domPrefixes).join(ucProp + ' ') + ucProp).split(' ');
  return testDOMProps(props, prefixed, elem);
  }
  } tests['flexbox'] = function() {
  return testPropsAll('flexWrap');
  }; tests['canvas'] = function() {
  var elem = document.createElement('canvas');
  return !!(elem.getContext && elem.getContext('2d'));
  };
 
  tests['canvastext'] = function() {
  return !!(Modernizr['canvas'] && is(document.createElement('canvas').getContext('2d').fillText, 'function'));
  };
 
 
 
  tests['webgl'] = function() {
  return !!window.WebGLRenderingContext;
  };
 
 
  tests['touch'] = function() {
  var bool;
 
  if(('ontouchstart' in window) || window.DocumentTouch && document instanceof DocumentTouch) {
  bool = true;
  } else {
  injectElementWithStyles(['@media (',prefixes.join('touch-enabled),('),mod,')','{#modernizr{top:9px;position:absolute}}'].join(''), function( node ) {
  bool = node.offsetTop === 9;
  });
  }
 
  return bool;
  };
 
 
 
  tests['geolocation'] = function() {
  return 'geolocation' in navigator;
  };
 
 
  tests['postmessage'] = function() {
  return !!window.postMessage;
  };
 
 
  tests['websqldatabase'] = function() {
  return !!window.openDatabase;
  };
 
  tests['indexedDB'] = function() {
  return !!testPropsAll("indexedDB", window);
  };
 
  tests['hashchange'] = function() {
  return isEventSupported('hashchange', window) && (document.documentMode === undefined || document.documentMode > 7);
  };
 
  tests['history'] = function() {
  return !!(window.history && history.pushState);
  };
 
  tests['draganddrop'] = function() {
  var div = document.createElement('div');
  return ('draggable' in div) || ('ondragstart' in div && 'ondrop' in div);
  };
 
  tests['websockets'] = function() {
  return 'WebSocket' in window || 'MozWebSocket' in window;
  };
 
 
  tests['rgba'] = function() {
  setCss('background-color:rgba(150,255,150,.5)');
 
  return contains(mStyle.backgroundColor, 'rgba');
  };
 
  tests['hsla'] = function() {
  setCss('background-color:hsla(120,40%,100%,.5)');
 
  return contains(mStyle.backgroundColor, 'rgba') || contains(mStyle.backgroundColor, 'hsla');
  };
 
  tests['multiplebgs'] = function() {
  setCss('background:url(https://),url(https://),red url(https://)');
 
  return (/(url\s*\(.*?){3}/).test(mStyle.background);
  }; tests['backgroundsize'] = function() {
  return testPropsAll('backgroundSize');
  };
 
  tests['borderimage'] = function() {
  return testPropsAll('borderImage');
  };
 
 
 
  tests['borderradius'] = function() {
  return testPropsAll('borderRadius');
  };
 
  tests['boxshadow'] = function() {
  return testPropsAll('boxShadow');
  };
 
  tests['textshadow'] = function() {
  return document.createElement('div').style.textShadow === '';
  };
 
 
  tests['opacity'] = function() {
  setCssAll('opacity:.55');
 
  return (/^0.55$/).test(mStyle.opacity);
  };
 
 
  tests['cssanimations'] = function() {
  return testPropsAll('animationName');
  };
 
 
  tests['csscolumns'] = function() {
  return testPropsAll('columnCount');
  };
 
 
  tests['cssgradients'] = function() {
  var str1 = 'background-image:',
  str2 = 'gradient(linear,left top,right bottom,from(#9f9),to(white));',
  str3 = 'linear-gradient(left top,#9f9, white);';
 
  setCss(
  (str1 + '-webkit- '.split(' ').join(str2 + str1) +
  prefixes.join(str3 + str1)).slice(0, -str1.length)
  );
 
  return contains(mStyle.backgroundImage, 'gradient');
  };
 
 
  tests['cssreflections'] = function() {
  return testPropsAll('boxReflect');
  };
 
 
  tests['csstransforms'] = function() {
  return !!testPropsAll('transform');
  };
 
 
  tests['csstransforms3d'] = function() {
 
  var ret = !!testPropsAll('perspective');
 
  if ( ret && 'webkitPerspective' in docElement.style ) {
 
  injectElementWithStyles('@media (transform-3d),(-webkit-transform-3d){#modernizr{left:9px;position:absolute;height:3px;}}', function( node, rule ) {
  ret = node.offsetLeft === 9 && node.offsetHeight === 3;
  });
  }
  return ret;
  };
 
 
  tests['csstransitions'] = function() {
  return testPropsAll('transition');
  };
 
 
 
  tests['fontface'] = function() {
  var bool;
 
  injectElementWithStyles('@font-face {font-family:"font";src:url("https://")}', function( node, rule ) {
  var style = document.getElementById('smodernizr'),
  sheet = style.sheet || style.styleSheet,
  cssText = sheet ? (sheet.cssRules && sheet.cssRules[0] ? sheet.cssRules[0].cssText : sheet.cssText || '') : '';
 
  bool = /src/i.test(cssText) && cssText.indexOf(rule.split(' ')[0]) === 0;
  });
 
  return bool;
  };
 
  tests['generatedcontent'] = function() {
  var bool;
 
  injectElementWithStyles(['#',mod,'{font:0/0 a}#',mod,':after{content:"',smile,'";visibility:hidden;font:3px/1 a}'].join(''), function( node ) {
  bool = node.offsetHeight >= 3;
  });
 
  return bool;
  };
  tests['video'] = function() {
  var elem = document.createElement('video'),
  bool = false;
 
  try {
  if ( bool = !!elem.canPlayType ) {
  bool = new Boolean(bool);
  bool.ogg = elem.canPlayType('video/ogg; codecs="theora"') .replace(/^no$/,'');
 
  bool.h264 = elem.canPlayType('video/mp4; codecs="avc1.42E01E"') .replace(/^no$/,'');
 
  bool.webm = elem.canPlayType('video/webm; codecs="vp8, vorbis"').replace(/^no$/,'');
  }
 
  } catch(e) { }
 
  return bool;
  };
 
  tests['audio'] = function() {
  var elem = document.createElement('audio'),
  bool = false;
 
  try {
  if ( bool = !!elem.canPlayType ) {
  bool = new Boolean(bool);
  bool.ogg = elem.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,'');
  bool.mp3 = elem.canPlayType('audio/mpeg;') .replace(/^no$/,'');
 
  bool.wav = elem.canPlayType('audio/wav; codecs="1"') .replace(/^no$/,'');
  bool.m4a = ( elem.canPlayType('audio/x-m4a;') ||
  elem.canPlayType('audio/aac;')) .replace(/^no$/,'');
  }
  } catch(e) { }
 
  return bool;
  };
 
 
  tests['localstorage'] = function() {
  try {
  localStorage.setItem(mod, mod);
  localStorage.removeItem(mod);
  return true;
  } catch(e) {
  return false;
  }
  };
 
  tests['sessionstorage'] = function() {
  try {
  sessionStorage.setItem(mod, mod);
  sessionStorage.removeItem(mod);
  return true;
  } catch(e) {
  return false;
  }
  };
 
 
  tests['webworkers'] = function() {
  return !!window.Worker;
  };
 
 
  tests['applicationcache'] = function() {
  return !!window.applicationCache;
  };
 
 
  tests['svg'] = function() {
  return !!document.createElementNS && !!document.createElementNS(ns.svg, 'svg').createSVGRect;
  };
 
  tests['inlinesvg'] = function() {
  var div = document.createElement('div');
  div.innerHTML = '<svg/>';
  return (div.firstChild && div.firstChild.namespaceURI) == ns.svg;
  };
 
  tests['smil'] = function() {
  return !!document.createElementNS && /SVGAnimate/.test(toString.call(document.createElementNS(ns.svg, 'animate')));
  };
 
 
  tests['svgclippaths'] = function() {
  return !!document.createElementNS && /SVGClipPath/.test(toString.call(document.createElementNS(ns.svg, 'clipPath')));
  };
 
  function webforms() {
  Modernizr['input'] = (function( props ) {
  for ( var i = 0, len = props.length; i < len; i++ ) {
  attrs[ props[i] ] = !!(props[i] in inputElem);
  }
  if (attrs.list){
  attrs.list = !!(document.createElement('datalist') && window.HTMLDataListElement);
  }
  return attrs;
  })('autocomplete autofocus list placeholder max min multiple pattern required step'.split(' '));
  Modernizr['inputtypes'] = (function(props) {
 
  for ( var i = 0, bool, inputElemType, defaultView, len = props.length; i < len; i++ ) {
 
  inputElem.setAttribute('type', inputElemType = props[i]);
  bool = inputElem.type !== 'text';
 
  if ( bool ) {
 
  inputElem.value = smile;
  inputElem.style.cssText = 'position:absolute;visibility:hidden;';
 
  if ( /^range$/.test(inputElemType) && inputElem.style.WebkitAppearance !== undefined ) {
 
  docElement.appendChild(inputElem);
  defaultView = document.defaultView;
 
  bool = defaultView.getComputedStyle &&
  defaultView.getComputedStyle(inputElem, null).WebkitAppearance !== 'textfield' &&
  (inputElem.offsetHeight !== 0);
 
  docElement.removeChild(inputElem);
 
  } else if ( /^(search|tel)$/.test(inputElemType) ){
  } else if ( /^(url|email)$/.test(inputElemType) ) {
  bool = inputElem.checkValidity && inputElem.checkValidity() === false;
 
  } else {
  bool = inputElem.value != smile;
  }
  }
 
  inputs[ props[i] ] = !!bool;
  }
  return inputs;
  })('search tel url email datetime date month week time datetime-local number range color'.split(' '));
  }
  for ( var feature in tests ) {
  if ( hasOwnProp(tests, feature) ) {
  featureName = feature.toLowerCase();
  Modernizr[featureName] = tests[feature]();
 
  classes.push((Modernizr[featureName] ? '' : 'no-') + featureName);
  }
  }
 
  Modernizr.input || webforms();
 
 
  Modernizr.addTest = function ( feature, test ) {
  if ( typeof feature == 'object' ) {
  for ( var key in feature ) {
  if ( hasOwnProp( feature, key ) ) {
  Modernizr.addTest( key, feature[ key ] );
  }
  }
  } else {
 
  feature = feature.toLowerCase();
 
  if ( Modernizr[feature] !== undefined ) {
  return Modernizr;
  }
 
  test = typeof test == 'function' ? test() : test;
 
  if (typeof enableClasses !== "undefined" && enableClasses) {
  docElement.className += ' ' + (test ? '' : 'no-') + feature;
  }
  Modernizr[feature] = test;
 
  }
 
  return Modernizr;
  };
 
 
  setCss('');
  modElem = inputElem = null;
 
  ;(function(window, document) {
  var options = window.html5 || {};
 
  var reSkip = /^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i;
 
  var saveClones = /^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i;
 
  var supportsHtml5Styles;
 
  var expando = '_html5shiv';
 
  var expanID = 0;
 
  var expandoData = {};
 
  var supportsUnknownElements;
 
  (function() {
  try {
  var a = document.createElement('a');
  a.innerHTML = '<xyz></xyz>';
  supportsHtml5Styles = ('hidden' in a);
 
  supportsUnknownElements = a.childNodes.length == 1 || (function() {
  (document.createElement)('a');
  var frag = document.createDocumentFragment();
  return (
  typeof frag.cloneNode == 'undefined' ||
  typeof frag.createDocumentFragment == 'undefined' ||
  typeof frag.createElement == 'undefined'
  );
  }());
  } catch(e) {
  supportsHtml5Styles = true;
  supportsUnknownElements = true;
  }
 
  }()); function addStyleSheet(ownerDocument, cssText) {
  var p = ownerDocument.createElement('p'),
  parent = ownerDocument.getElementsByTagName('head')[0] || ownerDocument.documentElement;
 
  p.innerHTML = 'x<style>' + cssText + '</style>';
  return parent.insertBefore(p.lastChild, parent.firstChild);
  }
 
  function getElements() {
  var elements = html5.elements;
  return typeof elements == 'string' ? elements.split(' ') : elements;
  }
 
  function getExpandoData(ownerDocument) {
  var data = expandoData[ownerDocument[expando]];
  if (!data) {
  data = {};
  expanID++;
  ownerDocument[expando] = expanID;
  expandoData[expanID] = data;
  }
  return data;
  }
 
  function createElement(nodeName, ownerDocument, data){
  if (!ownerDocument) {
  ownerDocument = document;
  }
  if(supportsUnknownElements){
  return ownerDocument.createElement(nodeName);
  }
  if (!data) {
  data = getExpandoData(ownerDocument);
  }
  var node;
 
  if (data.cache[nodeName]) {
  node = data.cache[nodeName].cloneNode();
  } else if (saveClones.test(nodeName)) {
  node = (data.cache[nodeName] = data.createElem(nodeName)).cloneNode();
  } else {
  node = data.createElem(nodeName);
  }
 
  return node.canHaveChildren && !reSkip.test(nodeName) ? data.frag.appendChild(node) : node;
  }
 
  function createDocumentFragment(ownerDocument, data){
  if (!ownerDocument) {
  ownerDocument = document;
  }
  if(supportsUnknownElements){
  return ownerDocument.createDocumentFragment();
  }
  data = data || getExpandoData(ownerDocument);
  var clone = data.frag.cloneNode(),
  i = 0,
  elems = getElements(),
  l = elems.length;
  for(;i<l;i++){
  clone.createElement(elems[i]);
  }
  return clone;
  }
 
  function shivMethods(ownerDocument, data) {
  if (!data.cache) {
  data.cache = {};
  data.createElem = ownerDocument.createElement;
  data.createFrag = ownerDocument.createDocumentFragment;
  data.frag = data.createFrag();
  }
 
 
  ownerDocument.createElement = function(nodeName) {
  if (!html5.shivMethods) {
  return data.createElem(nodeName);
  }
  return createElement(nodeName, ownerDocument, data);
  };
 
  ownerDocument.createDocumentFragment = Function('h,f', 'return function(){' +
  'var n=f.cloneNode(),c=n.createElement;' +
  'h.shivMethods&&(' +
  getElements().join().replace(/\w+/g, function(nodeName) {
  data.createElem(nodeName);
  data.frag.createElement(nodeName);
  return 'c("' + nodeName + '")';
  }) +
  ');return n}'
  )(html5, data.frag);
  } function shivDocument(ownerDocument) {
  if (!ownerDocument) {
  ownerDocument = document;
  }
  var data = getExpandoData(ownerDocument);
 
  if (html5.shivCSS && !supportsHtml5Styles && !data.hasCSS) {
  data.hasCSS = !!addStyleSheet(ownerDocument,
  'article,aside,figcaption,figure,footer,header,hgroup,nav,section{display:block}' +
  'mark{background:#FF0;color:#000}'
  );
  }
  if (!supportsUnknownElements) {
  shivMethods(ownerDocument, data);
  }
  return ownerDocument;
  } var html5 = {
 
  'elements': options.elements || 'abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video',
 
  'shivCSS': (options.shivCSS !== false),
 
  'supportsUnknownElements': supportsUnknownElements,
 
  'shivMethods': (options.shivMethods !== false),
 
  'type': 'default',
 
  'shivDocument': shivDocument,
 
  createElement: createElement,
 
  createDocumentFragment: createDocumentFragment
  }; window.html5 = html5;
 
  shivDocument(document);
 
  }(this, document));
 
    // Re-export build metadata and the low-level testing helpers (defined
    // earlier in this build) on the public Modernizr object.
    Modernizr._version = version;

    Modernizr._prefixes = prefixes;
    Modernizr._domPrefixes = domPrefixes;
    Modernizr._cssomPrefixes = cssomPrefixes;


    Modernizr.hasEvent = isEventSupported;

    // Test a single (un-prefixed) CSS property name.
    Modernizr.testProp = function(prop){
        return testProps([prop]);
    };

    Modernizr.testAllProps = testPropsAll;


    Modernizr.testStyles = injectElementWithStyles;
    // prefixed(prop) returns the supported, possibly vendor-prefixed form of
    // the property; with an object (and optional element) it delegates to
    // testPropsAll with those extra arguments instead.
    Modernizr.prefixed = function(prop, obj, elem){
        if(!obj) {
            return testPropsAll(prop, 'pfx');
        } else {
            return testPropsAll(prop, obj, elem);
        }
    };


    // Strip the `no-js` class and, when class output is enabled, append `js`
    // plus one class per detected feature.
    docElement.className = docElement.className.replace(/(^|\s)no-js(\s|$)/, '$1$2') +

                                                        (enableClasses ? ' js ' + classes.join(' ') : '');

    return Modernizr;
 
  })(this, this.document);
/*yepnope1.5.4|WTFPL*/
/* yepnope 1.5.4: conditional script/CSS loader consumed by Modernizr.load
   below. This is the upstream minified build, kept byte-for-byte — do not
   hand-edit; upgrade by replacing the whole blob. */
(function(a,b,c){function d(a){return"[object Function]"==o.call(a)}function e(a){return"string"==typeof a}function f(){}function g(a){return!a||"loaded"==a||"complete"==a||"uninitialized"==a}function h(){var a=p.shift();q=1,a?a.t?m(function(){("c"==a.t?B.injectCss:B.injectJs)(a.s,0,a.a,a.x,a.e,1)},0):(a(),h()):q=0}function i(a,c,d,e,f,i,j){function k(b){if(!o&&g(l.readyState)&&(u.r=o=1,!q&&h(),l.onload=l.onreadystatechange=null,b)){"img"!=a&&m(function(){t.removeChild(l)},50);for(var d in y[c])y[c].hasOwnProperty(d)&&y[c][d].onload()}}var j=j||B.errorTimeout,l=b.createElement(a),o=0,r=0,u={t:d,s:c,e:f,a:i,x:j};1===y[c]&&(r=1,y[c]=[]),"object"==a?l.data=c:(l.src=c,l.type=a),l.width=l.height="0",l.onerror=l.onload=l.onreadystatechange=function(){k.call(this,r)},p.splice(e,0,u),"img"!=a&&(r||2===y[c]?(t.insertBefore(l,s?null:n),m(k,j)):y[c].push(l))}function j(a,b,c,d,f){return q=0,b=b||"j",e(a)?i("c"==b?v:u,a,b,this.i++,c,d,f):(p.splice(this.i++,0,a),1==p.length&&h()),this}function k(){var a=B;return a.loader={load:j,i:0},a}var l=b.documentElement,m=a.setTimeout,n=b.getElementsByTagName("script")[0],o={}.toString,p=[],q=0,r="MozAppearance"in l.style,s=r&&!!b.createRange().compareNode,t=s?l:n.parentNode,l=a.opera&&"[object Opera]"==o.call(a.opera),l=!!b.attachEvent&&!l,u=r?"object":l?"script":"img",v=l?"script":u,w=Array.isArray||function(a){return"[object Array]"==o.call(a)},x=[],y={},z={timeout:function(a,b){return b.length&&(a.timeout=b[0]),a}},A,B;B=function(a){function b(a){var a=a.split("!"),b=x.length,c=a.pop(),d=a.length,c={url:c,origUrl:c,prefixes:a},e,f,g;for(f=0;f<d;f++)g=a[f].split("="),(e=z[g.shift()])&&(c=e(c,g));for(f=0;f<b;f++)c=x[f](c);return c}function g(a,e,f,g,h){var 
i=b(a),j=i.autoCallback;i.url.split(".").pop().split("?").shift(),i.bypass||(e&&(e=d(e)?e:e[a]||e[g]||e[a.split("/").pop().split("?")[0]]),i.instead?i.instead(a,e,f,g,h):(y[i.url]?i.noexec=!0:y[i.url]=1,f.load(i.url,i.forceCSS||!i.forceJS&&"css"==i.url.split(".").pop().split("?").shift()?"c":c,i.noexec,i.attrs,i.timeout),(d(e)||d(j))&&f.load(function(){k(),e&&e(i.origUrl,h,g),j&&j(i.origUrl,h,g),y[i.url]=2})))}function h(a,b){function c(a,c){if(a){if(e(a))c||(j=function(){var a=[].slice.call(arguments);k.apply(this,a),l()}),g(a,j,b,0,h);else if(Object(a)===a)for(n in m=function(){var b=0,c;for(c in a)a.hasOwnProperty(c)&&b++;return b}(),a)a.hasOwnProperty(n)&&(!c&&!--m&&(d(j)?j=function(){var a=[].slice.call(arguments);k.apply(this,a),l()}:j[n]=function(a){return function(){var b=[].slice.call(arguments);a&&a.apply(this,b),l()}}(k[n])),g(a[n],j,b,n,h))}else!c&&l()}var h=!!a.test,i=a.load||a.both,j=a.callback||f,k=j,l=a.complete||f,m,n;c(h?a.yep:a.nope,!!i),i&&c(i)}var i,j,l=this.yepnope.loader;if(e(a))g(a,0,l,0);else if(w(a))for(i=0;i<a.length;i++)j=a[i],e(j)?g(j,0,l,0):w(j)?B(j):Object(j)===j&&h(j,l);else Object(a)===a&&h(a,l)},B.addPrefix=function(a,b){z[a]=b},B.addFilter=function(a){x.push(a)},B.errorTimeout=1e4,null==b.readyState&&b.addEventListener&&(b.readyState="loading",b.addEventListener("DOMContentLoaded",A=function(){b.removeEventListener("DOMContentLoaded",A,0),b.readyState="complete"},0)),a.yepnope=k(),a.yepnope.executeStack=h,a.yepnope.injectJs=function(a,c,d,e,i,j){var k=b.createElement("script"),l,o,e=e||B.errorTimeout;k.src=a;for(o in d)k.setAttribute(o,d[o]);c=j?h:c||f,k.onreadystatechange=k.onload=function(){!l&&g(k.readyState)&&(l=1,c(),k.onload=k.onreadystatechange=null)},m(function(){l||(l=1,c(1))},e),i?k.onload():n.parentNode.insertBefore(k,n)},a.yepnope.injectCss=function(a,c,d,e,g,i){var e=b.createElement("link"),j,c=i?h:c||f;e.href=a,e.rel="stylesheet",e.type="text/css";for(j in 
d)e.setAttribute(j,d[j]);g||(n.parentNode.insertBefore(e,n),m(c,0))}})(this,document);
  Modernizr.load=function(){yepnope.apply(window,[].slice.call(arguments,0));};
  ;
<html xmlns:py="http://genshi.edgewall.org/"
      xmlns:i18n="http://genshi.edgewall.org/i18n"
      xmlns:xi="http://www.w3.org/2001/XInclude"
      py:strip="">
  <!--! GA report: "Usage by Publisher" page. Renders a Rickshaw graph of top
        publishers plus a per-month dataset-views table; the sidebar carries a
        CSV download of the same data. -->

  <xi:include href="../ga_util.html" />

  <py:def function="page_title">Usage by Publisher</py:def>



  <!--! Replace the layout's sidebar with the shared GA sidebar (CSV link). -->
  <py:match path="primarysidebar">
    ${ga_sidebar(download_link=h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='publisher_csv',month=c.month or 'all'))}
  </py:match>


  <py:def function="optional_head">
    <link rel="stylesheet" type="text/css" href="/scripts/vendor/rickshaw.min.css"/>
    <link rel="stylesheet" type="text/css" href="/css/ga_report.css?1"/>
    <!--! Modernizr is loaded ahead of the graph scripts (feature detection
          for the IE7 shim below). -->
    <script type="text/javascript" src="/scripts/modernizr-2.6.2.custom.js"></script>
    <script type="text/javascript" src="/scripts/ckanext_ga_reports.js?1"></script>
    <script type="text/javascript" src="/scripts/vendor/jquery.sparkline.modified.js"></script>
    <script type="text/javascript" src="/scripts/rickshaw_ie7_shim.js"></script>
    <script type="text/javascript" src="/scripts/vendor/d3.v2.js"></script>
    <script type="text/javascript" src="/scripts/vendor/d3.layout.min.js"></script>
    <script type="text/javascript" src="/scripts/vendor/rickshaw.min.js"></script>
  </py:def>
  <py:def function="page_heading">Site Usage ${usage_nav('Publishers')}</py:def>

  <div py:match="content">


    <div class="boxed">

      ${rickshaw_graph(c.top_publishers_graph,'publishers')}

      <hr/>
      <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='publishers')}" method="get">
        <div class="controls">
          <h4 class="ga-reports-heading">Statistics for</h4>
          ${month_selector(c.month, c.months, c.day)}
        </div>
      </form>

      <table class="ga-reports-table table table-condensed table-bordered table-striped">
        <tr>
          <th>Publisher</th>
          <th class="td-numeric">Dataset Views</th>
        </tr>
        <!--! NOTE(review): `visits` is unpacked but never rendered here —
              confirm whether the controller still needs to supply it. -->
        <py:for each="publisher, views, visits in c.top_publishers">
        <tr>
          <td>
            ${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name) + (("?month=" + c.month) if c.month else ''))}
          </td>
          <td class="td-numeric">${views}</td>
        </tr>
        </py:for>
      </table>
    </div><!--/boxed-->
  </div><!--/content-->

  <!--! Wire the month selector so changing it submits the form above. -->
  <py:def function="optional_footer">
    <script type="text/javascript">
      $(function() {
        CKAN.GA_Reports.bind_month_selector();
      });
    </script>
  </py:def>
  <xi:include href="../../layout.html" />

</html>
   
   
<html xmlns:py="http://genshi.edgewall.org/"
      xmlns:i18n="http://genshi.edgewall.org/i18n"
      xmlns:xi="http://www.w3.org/2001/XInclude"
      py:strip="">
  <!--! GA report: "Usage by Dataset" page. Views/downloads per dataset,
        filterable by month and publisher; the sidebar carries a CSV download
        scoped to the selected publisher (or all). -->

  <xi:include href="../ga_util.html" />

  <py:def function="page_title">Usage by Dataset</py:def>

  <py:def function="optional_head">
    <link rel="stylesheet" type="text/css" href="/scripts/vendor/rickshaw.min.css"/>
    <link rel="stylesheet" type="text/css" href="/css/ga_report.css?1"/>
    <script type="text/javascript" src="/scripts/modernizr-2.6.2.custom.js"></script>
    <script type="text/javascript" src="/scripts/ckanext_ga_reports.js?1"></script>
    <script type="text/javascript" src="/scripts/vendor/jquery.sparkline.modified.js"></script>
    <script type="text/javascript" src="/scripts/rickshaw_ie7_shim.js"></script>
    <script type="text/javascript" src="/scripts/vendor/d3.v2.js"></script>
    <script type="text/javascript" src="/scripts/vendor/d3.layout.min.js"></script>
    <script type="text/javascript" src="/scripts/vendor/rickshaw.min.js"></script>
  </py:def>

  <!--! Replace the layout's sidebar with the shared GA sidebar (CSV link). -->
  <py:match path="primarysidebar">
    ${ga_sidebar(download_link=h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='dataset_csv',id=c.publisher_name or 'all',month=c.month or 'all'))}
  </py:match>
  <py:def function="page_heading">Site Usage ${usage_nav('Datasets')}</py:def>

  <div py:match="content">

    <div class="boxed">
      <h3 py:if="c.publisher"><a href="${h.url_for(controller='ckanext.dgu.controllers.publisher:PublisherController',action='read',id=c.publisher.name)}">${c.publisher.title}</a></h3>

      <py:if test="c.graph_data">
        <!--! NOTE(review): debug=True looks like a leftover development flag —
              the other report pages call rickshaw_graph without it; confirm
              before removing. -->
        ${rickshaw_graph(c.graph_data,'dataset-downloads',debug=True)}
      </py:if>
      <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='read')}" method="get">
        <div class="controls">

          ${month_selector(c.month, c.months, c.day)}

          <select name="publisher">
            <option value='' py:attrs="{'selected': 'selected' if not c.publisher else None}">All publishers</option>
            <py:for each="val,desc in c.publishers">
              <option value='${val}' py:attrs="{'selected': 'selected' if c.publisher_name == val else None}">${desc}</option>
            </py:for>
          </select>
          <input class="btn button btn-primary" type='submit' value="Update"/>
        </div>
      </form>
      <py:if test="c.month">
        <h4>Statistics for ${h.month_option_title(c.month,c.months,c.day)}:</h4>
      </py:if>
      <py:if test="not c.month">
        <h4>Statistics for all months:</h4>
      </py:if>
      <div class="alert alert-info" py:if="not c.top_packages">No page views in this period.</div>
      <py:if test="c.top_packages">
        <table class="ga-reports-table table table-condensed table-bordered table-striped">
          <tr>
            <th>Dataset</th>
            <th>Views</th>
            <th>Downloads</th>
          </tr>
          <!--! NOTE(review): `visits` is unpacked but never rendered. -->
          <py:for each="package, views, visits,downloads in c.top_packages">
          <tr>
            <td>
              ${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))}
            </td>
            <td class="td-numeric">${views}</td>
            <td class="td-numeric">${downloads}</td>
          </tr>
          </py:for>
        </table>
      </py:if>

    </div>

  </div>

  <!--! NOTE(review): unlike the publisher/index pages there is no
        optional_footer binding CKAN.GA_Reports.bind_month_selector() here;
        the explicit Update button covers submission — confirm intended. -->
  <xi:include href="../../layout.html" />
</html>
   
   
   
   
<html xmlns:py="http://genshi.edgewall.org/"
      xmlns:i18n="http://genshi.edgewall.org/i18n"
      xmlns:xi="http://www.w3.org/2001/XInclude"
      py:strip="">
  <!--! GA report: site-wide usage (index) page. Tabbed panes for totals,
        browsers, operating systems, social referrals, languages and country;
        the sidebar carries a CSV download of the monthly figures. -->

  <xi:include href="../ga_util.html" />

  <py:def function="page_title">Site usage</py:def>

  <py:def function="optional_head">
    <link rel="stylesheet" type="text/css" href="/scripts/vendor/rickshaw.min.css"/>
    <link rel="stylesheet" type="text/css" href="/css/ga_report.css?1"/>
    <script type="text/javascript" src="/scripts/modernizr-2.6.2.custom.js"></script>
    <script type="text/javascript" src="/scripts/ckanext_ga_reports.js?1"></script>
    <script type="text/javascript" src="/scripts/vendor/jquery.sparkline.modified.js"></script>
    <script type="text/javascript" src="/scripts/rickshaw_ie7_shim.js"></script>
    <script type="text/javascript" src="/scripts/vendor/d3.v2.js"></script>
    <script type="text/javascript" src="/scripts/vendor/d3.layout.min.js"></script>
    <script type="text/javascript" src="/scripts/vendor/rickshaw.min.js"></script>
  </py:def>

  <!--! Replace the layout's sidebar with the shared GA sidebar (CSV link). -->
  <py:match path="primarysidebar">
    ${ga_sidebar(download_link=h.url_for(controller='ckanext.ga_report.controller:GaReport',action='csv',month=c.month or 'all'))}
  </py:match>

  <py:def function="page_heading">Site Usage ${usage_nav('Site-wide')}</py:def>

  <div py:match="content">

    <div class="boxed">
      <div class="tabbable">
        <!--! Tab switching uses the data-toggle="hashtab" handler; each pane
              below carries its own month-selector form posting back to the
              index action. -->
        <ul class="nav nav-tabs">
          <li class="active"><a href="#totals" data-toggle="hashtab">Totals</a></li>
          <li class="dropdown">
            <a href="#" class="dropdown-toggle" data-toggle="dropdown">Browsers
            <b class="caret"></b></a>
            <ul class="dropdown-menu">
              <li><a href="#browsers_names" data-toggle="hashtab">Browsers</a></li>
              <li><a href="#browsers_versions" data-toggle="hashtab">Versions</a></li>
            </ul>
          </li>
          <li class="dropdown">
            <a href="#" class="dropdown-toggle" data-toggle="dropdown">Operating Systems
            <b class="caret"></b></a>
            <ul class="dropdown-menu">
              <li><a href="#os" data-toggle="hashtab">Operating Systems</a></li>
              <li><a href="#os_versions" data-toggle="hashtab">Versions</a></li>
            </ul>
          </li>
          <li class="dropdown">
            <a href="#" class="dropdown-toggle" data-toggle="dropdown">Social
            <b class="caret"></b></a>
            <ul class="dropdown-menu">
              <li><a href="#social_networks" data-toggle="hashtab">All networks</a></li>
              <li><a href="#social_referrals_totals" data-toggle="hashtab">Referral links</a></li>
            </ul>
          </li>
          <li><a href="#languages" data-toggle="hashtab">Languages</a></li>
          <li><a href="#country" data-toggle="hashtab">Country</a></li>
        </ul>
        <div class="tab-content">
          <!--! Totals pane: one row per metric with a sparkline history
                (bound in optional_footer below). -->
          <div class="tab-pane active" id="totals">
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            <table class="ga-reports-table table table-condensed table-bordered table-striped">
              <tr>
                <th>Name</th>
                <th class="td-numeric">Value</th>
                <th>History</th>
              </tr>
              <py:for each="name, value, graph in c.global_totals">
              <tr>
                <td>${name}</td>
                <td class="td-numeric">${value}</td>
                <td class="sparkline-cell">
                  <span class="sparkline" sparkTooltips="${','.join([x for x,y in graph])}">
                    ${','.join([y for x,y in graph])}
                  </span>
                </td>
              </tr>
              </py:for>
            </table>
          </div>
          <div class="tab-pane" id="browsers_versions">
            ${rickshaw_graph(c.browser_versions_graph,'browser-versions',mode='stack')}
            <hr/>
            <p>Note: Where a browser has a large number of versions, these have been grouped together.</p>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${stat_table(c.browser_versions)}
          </div>
          <div class="tab-pane" id="browsers_names">
            ${rickshaw_graph(c.browsers_graph,'browsers',mode='stack')}
            <hr/>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${stat_table(c.browsers)}
          </div>
          <div class="tab-pane" id="os">
            ${rickshaw_graph(c.os_graph,'os',mode='stack')}
            <hr/>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${stat_table(c.os)}
          </div>
          <div class="tab-pane" id="os_versions">
            ${rickshaw_graph(c.os_versions_graph,'os_versions',mode='stack')}
            <hr/>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${stat_table(c.os_versions)}
          </div>
          <div class="tab-pane" id="social_referrals_totals">
            <p>Number of visits that were referred from social networks</p>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${social_table(c.social_referrer_totals)}
          </div>
          <div class="tab-pane" id="social_networks">
            ${rickshaw_graph(c.social_networks_graph, 'social_networks',mode='stack')}
            <hr/>
            <p>Percentage of visits that were referred from these social networks</p>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${stat_table(c.social_networks, 'Visits')}
          </div>
          <div class="tab-pane" id="languages">
            ${rickshaw_graph(c.languages_graph,'languages',mode='stack')}
            <hr/>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${stat_table(c.languages)}
          </div>
          <div class="tab-pane" id="country">
            ${rickshaw_graph(c.country_graph,'country',mode='stack')}
            <hr/>
            <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaReport',action='index')}" method="get">
              <h4 class="ga-reports-heading">Show stats table for:</h4>
              ${month_selector(c.month, c.months, c.day)}
            </form>
            ${stat_table(c.country)}
          </div>
        </div>
      </div>
    </div>
  </div>


  <!--! Bind sparklines, the (GA) sidebar behaviour and the auto-submitting
        month selectors once the DOM is ready. -->
  <py:def function="optional_footer">
    <script type="text/javascript">
      $(function() {
        CKAN.GA_Reports.bind_sparklines();
        CKAN.GA_Reports.bind_sidebar();
        CKAN.GA_Reports.bind_month_selector();
      });
    </script>
  </py:def>

  <xi:include href="../../layout.html" />
</html>