import re
import csv
import sys
import json
import logging
import operator
import collections
from ckan.lib.base import (BaseController, c, g, render, request, response, abort)

import sqlalchemy
from sqlalchemy import func, cast, Integer
import ckan.model as model
from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher

log = logging.getLogger('ckanext.ga-report')

DOWNLOADS_AVAILABLE_FROM = '2012-12'

def _get_month_name(strdate):
    import calendar
    from time import strptime
    d = strptime(strdate, '%Y-%m')
    return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year)

def _get_unix_epoch(strdate):
    from time import strptime,mktime
    d = strptime(strdate, '%Y-%m')
    return int(mktime(d))
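# e.g. _get_month_name('2013-01') returns 'January 2013', and
# _get_unix_epoch('2013-01') returns the Unix timestamp for local midnight
# on 1 January 2013 (time.mktime works in local time).
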
def _month_details(cls, stat_key=None):
    '''
    Returns a list of all the periods for which we have data. Unfortunately
    this knows too much about the type of the cls being passed, as GA_Url
    has a more complex query.

    This may need extending if we add a period_name to the stats
    '''
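    # Illustrative return value (assuming the latest period is complete to day 14):
    #   ([('2013-02', 'February 2013'), ('2013-01', 'January 2013'), ...], '14th')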
    months = []
    day = None

    q = model.Session.query(cls.period_name,cls.period_complete_day)\
        .filter(cls.period_name!='All').distinct(cls.period_name)
    if stat_key:
        q = q.filter(cls.stat_name==stat_key)

    vals = q.order_by("period_name desc").all()

    if vals and vals[0][1]:
        day = int(vals[0][1])
        ordinal = 'th' if 11 <= day <= 13 \
            else {1:'st',2:'nd',3:'rd'}.get(day % 10, 'th')
        day = "{day}{ordinal}".format(day=day, ordinal=ordinal)
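        # e.g. 1 -> '1st', 2 -> '2nd', 3 -> '3rd', 4 -> '4th', 11 -> '11th', 21 -> '21st'
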
    for m in vals:
        months.append( (m[0], _get_month_name(m[0])))

    return months, day


class GaReport(BaseController):

    def csv(self, month):
        import csv

        q = model.Session.query(GA_Stat).filter(GA_Stat.stat_name!='Downloads')
        if month != 'all':
            q = q.filter(GA_Stat.period_name==month)
        entries = q.order_by('GA_Stat.period_name, GA_Stat.stat_name, GA_Stat.key').all()

        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = str('attachment; filename=stats_%s.csv' % (month,))

        writer = csv.writer(response)
        writer.writerow(["Period", "Statistic", "Key", "Value"])

        for entry in entries:
            writer.writerow([entry.period_name.encode('utf-8'),
                             entry.stat_name.encode('utf-8'),
                             entry.key.encode('utf-8'),
                             entry.value.encode('utf-8')])
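        # Illustrative output row (actual values depend on the stored stats):
        #   2013-01,Browsers,Firefox,1234

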
    def index(self):

        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Stat)

        # Work out which month to show, based on the month query parameter
        c.month_desc = 'all months'
        c.month = request.params.get('month', '')
        if c.month:
            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])

        q = model.Session.query(GA_Stat).\
            filter(GA_Stat.stat_name=='Totals')
        if c.month:
            q = q.filter(GA_Stat.period_name==c.month)
        entries = q.order_by('ga_stat.key').all()

        def clean_key(key, val):
            if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']:
                val = "%.2f" % round(float(val), 2)
                if key == 'Average time on site':
                    mins, secs = divmod(float(val), 60)
                    hours, mins = divmod(mins, 60)
                    val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
                if key in ['New visits','Bounce rate (home page)']:
                    val = "%s%%" % val
            if key in ['Total page views', 'Total visits']:
                val = int(val)

            return key, val
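        # e.g. clean_key('Average time on site', '127.0') -> ('Average time on site', '00:02:07 (127.00 seconds) ')
        #      clean_key('New visits', '61.456') -> ('New visits', '61.46%')
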
        # Query historic values for sparkline rendering
        sparkline_query = model.Session.query(GA_Stat)\
            .filter(GA_Stat.stat_name=='Totals')\
            .order_by(GA_Stat.period_name)
        sparkline_data = {}
        for x in sparkline_query:
            sparkline_data[x.key] = sparkline_data.get(x.key,[])
            key, val = clean_key(x.key,float(x.value))
            tooltip = '%s: %s' % (_get_month_name(x.period_name), val)
            sparkline_data[x.key].append( (tooltip,x.value) )
        # Trim the latest month, as its partial data looks like a huge dropoff
        for key in sparkline_data:
            sparkline_data[key] = sparkline_data[key][:-1]
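        # sparkline_data now maps each Totals key to (tooltip, value) pairs in
        # period order, e.g. {'Total visits': [('January 2013: 1234', '1234'), ...]}
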
        c.global_totals = []
        if c.month:
            for e in entries:
                key, val = clean_key(e.key, e.value)
                sparkline = sparkline_data[e.key]
                c.global_totals.append((key, val, sparkline))
        else:
            d = collections.defaultdict(list)
            for e in entries:
                d[e.key].append(float(e.value))
            for k, v in d.iteritems():
                if k in ['Total page views', 'Total visits']:
                    v = sum(v)
                else:
                    v = float(sum(v))/float(len(v))
                sparkline = sparkline_data[k]
                key, val = clean_key(k,v)

                c.global_totals.append((key, val, sparkline))
        # Sort the global totals into a more pleasant order
        def sort_func(x):
            key = x[0]
            total_order = ['Total page views','Total visits','Pages per visit']
            if key in total_order:
                return total_order.index(key)
            return 999
        c.global_totals = sorted(c.global_totals, key=sort_func)

        keys = {
            'Browser versions': 'browser_versions',
            'Browsers': 'browsers',
            'Operating Systems versions': 'os_versions',
            'Operating Systems': 'os',
            'Social sources': 'social_networks',
            'Languages': 'languages',
            'Country': 'country'
        }

        def shorten_name(name, length=60):
            return (name[:length] + '..') if len(name) > length else name

        def fill_out_url(url):
            import urlparse
            return urlparse.urljoin(g.site_url, url)

        c.social_referrer_totals, c.social_referrers = [], []
        q = model.Session.query(GA_ReferralStat)
        q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
        q = q.order_by('ga_referrer.count::int desc')
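        # The '::int' cast is PostgreSQL-specific syntax; the count column appears
        # to be stored as text, so the cast makes the ordering numeric rather than
        # lexicographic.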
        for entry in q.all():
            c.social_referrers.append((shorten_name(entry.url), fill_out_url(entry.url),
                                       entry.source,entry.count))

        q = model.Session.query(GA_ReferralStat.url,
                                func.sum(GA_ReferralStat.count).label('count'))
        q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
        q = q.order_by('count desc').group_by(GA_ReferralStat.url)
        for entry in q.all():
            c.social_referrer_totals.append((shorten_name(entry[0]), fill_out_url(entry[0]),'',
                                             entry[1]))

        for k, v in keys.iteritems():
            q = model.Session.query(GA_Stat).\
                filter(GA_Stat.stat_name==k).\
                order_by(GA_Stat.period_name)
            # Run the query on all months to gather graph data
            graph = {}
            for stat in q:
                graph[ stat.key ] = graph.get(stat.key,{
                    'name':stat.key,
                    'data': []
                })
                graph[ stat.key ]['data'].append({
                    'x':_get_unix_epoch(stat.period_name),
                    'y':float(stat.value)
                })
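            # graph now maps each key to a Rickshaw-style series, e.g.
            #   {'Firefox': {'name': 'Firefox', 'data': [{'x': <unix epoch>, 'y': 12.0}, ...]}}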
            setattr(c, v+'_graph', json.dumps( _to_rickshaw(graph.values(),percentageMode=True) ))

            # Buffer the tabular data
            if c.month:
                entries = []
                q = q.filter(GA_Stat.period_name==c.month).\
                    order_by('ga_stat.value::int desc')

            d = collections.defaultdict(int)
            for e in q.all():
                d[e.key] += int(e.value)
            entries = []
            for key, val in d.iteritems():
                entries.append((key,val,))
            entries = sorted(entries, key=operator.itemgetter(1), reverse=True)

            # Get the total for each set of values and then set the value as
            # a percentage of the total
            if k == 'Social sources':
                total = sum([x for n,x,graph in c.global_totals if n == 'Total visits'])
            else:
                total = sum([num for _,num in entries])
            setattr(c, v, [(k,_percent(v,total)) for k,v in entries ])
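        # Each attribute set above (e.g. c.browsers) is a list of
        # (key, percentage-of-total) pairs as computed by the _percent helper.
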
        return render('ga_report/site/index.html')


class GaDatasetReport(BaseController):
    """
    Displays the pageview and visit count for datasets
    with options to filter by publisher and time period.
    """
    def publisher_csv(self, month):
        '''
        Returns a CSV of each publisher with the total number of dataset
        views & visits.
        '''
        c.month = month if not month == 'all' else ''
        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = str('attachment; filename=publishers_%s.csv' % (month,))

        writer = csv.writer(response)
        writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"])

        top_publishers, top_publishers_graph = _get_top_publishers(None)

        for publisher,view,visit in top_publishers:
            writer.writerow([publisher.title.encode('utf-8'),
                             publisher.name.encode('utf-8'),
                             view,
                             visit,
                             month])

    def dataset_csv(self, id='all', month='all'):
        '''
        Returns a CSV with the number of views & visits for each dataset.

        :param id: A publisher ID, or 'all' for every publisher
        :param month: The time period, or 'all'
        '''
        c.month = month if not month == 'all' else ''
        if id != 'all':
            c.publisher = model.Group.get(id)
            if not c.publisher:
                abort(404, 'A publisher with that name could not be found')

        packages = self._get_packages(c.publisher)
        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = \
            str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,))

        writer = csv.writer(response)
        writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Resource downloads", "Period Name"])

        for package,view,visit,downloads in packages:
            writer.writerow([package.title.encode('utf-8'),
                             package.name.encode('utf-8'),
                             view,
                             visit,
                             downloads,
                             month])

    def publishers(self):
        '''A list of publishers and the number of views/visits for each'''

        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Url)

        # Work out which month to show, based on the month query parameter
        c.month = request.params.get('month', '')
        c.month_desc = 'all months'
        if c.month:
            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])

        c.top_publishers, graph_data = _get_top_publishers()
        c.top_publishers_graph = json.dumps( _to_rickshaw(graph_data.values()) )

        return render('ga_report/publisher/index.html')

    def _get_packages(self, publisher=None, count=-1):
        '''Returns the datasets in order of views'''
        have_download_data = True
        month = c.month or 'All'
        if month != 'All':
            have_download_data = month >= DOWNLOADS_AVAILABLE_FROM
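        # The string comparison works because 'YYYY-MM' period names sort
        # lexicographically in chronological order.
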
        q = model.Session.query(GA_Url,model.Package)\
            .filter(model.Package.name==GA_Url.package_id)\
            .filter(GA_Url.url.like('/dataset/%'))
        if publisher:
            q = q.filter(GA_Url.department_id==publisher.name)
        q = q.filter(GA_Url.period_name==month)
        q = q.order_by('ga_url.pageviews::int desc')
        top_packages = []
        if count == -1:
            entries = q.all()
        else:
            entries = q.limit(count)

        for entry,package in entries:
            if package:
                # Downloads ....
                if have_download_data:
                    dls = model.Session.query(GA_Stat).\
                        filter(GA_Stat.stat_name=='Downloads').\
                        filter(GA_Stat.key==package.name)