import re | import re |
import csv | import csv |
import sys | import sys |
import json | import json |
import logging | import logging |
import operator | import operator |
import collections | import collections |
from ckan.lib.base import (BaseController, c, g, render, request, response, abort) | from ckan.lib.base import (BaseController, c, g, render, request, response, abort) |
import sqlalchemy | import sqlalchemy |
from sqlalchemy import func, cast, Integer | from sqlalchemy import func, cast, Integer |
import ckan.model as model | import ckan.model as model |
from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher | from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher |
# Module-level logger for this extension's controllers.
log = logging.getLogger('ckanext.ga-report')
# Resource-download statistics only exist from this period ('YYYY-MM') onwards;
# _get_packages() reports 'No data' for earlier months.
DOWNLOADS_AVAILABLE_FROM = '2012-12'
def _get_month_name(strdate): | def _get_month_name(strdate): |
import calendar | import calendar |
from time import strptime | from time import strptime |
d = strptime(strdate, '%Y-%m') | d = strptime(strdate, '%Y-%m') |
return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year) | return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year) |
def _get_unix_epoch(strdate): | def _get_unix_epoch(strdate): |
from time import strptime,mktime | from time import strptime,mktime |
d = strptime(strdate, '%Y-%m') | d = strptime(strdate, '%Y-%m') |
return int(mktime(d)) | return int(mktime(d)) |
def _month_details(cls, stat_key=None):
    '''
    Returns a list of all the periods for which we have data, unfortunately
    knows too much about the type of the cls being passed as GA_Url has a
    more complex query

    This may need extending if we add a period_name to the stats

    :param cls: the mapped model class to inspect (GA_Stat or GA_Url)
    :param stat_key: optional stat_name to restrict the periods to
    :returns: tuple of (months, day) where months is a list of
              (period_name, display name) pairs, newest first, and day is
              an ordinal string like '14th' for the most recent period's
              completeness day, or None if unknown
    '''
    months = []
    day = None

    # Distinct period names, ignoring the 'All' roll-up row.
    # NOTE(review): .distinct(<column>) emits DISTINCT ON, and the string
    # order_by below is raw SQL — presumably this assumes a PostgreSQL
    # backend; confirm before porting to another database.
    q = model.Session.query(cls.period_name,cls.period_complete_day)\
        .filter(cls.period_name!='All').distinct(cls.period_name)
    if stat_key:
        q= q.filter(cls.stat_name==stat_key)

    vals = q.order_by("period_name desc").all()

    # The first row is the latest period; turn its "complete day" count into
    # an English ordinal ('1st', '2nd', ..., with 11-13 always 'th').
    if vals and vals[0][1]:
        day = int(vals[0][1])
        ordinal = 'th' if 11 <= day <= 13 \
            else {1:'st',2:'nd',3:'rd'}.get(day % 10, 'th')
        day = "{day}{ordinal}".format(day=day, ordinal=ordinal)

    for m in vals:
        months.append( (m[0], _get_month_name(m[0])))

    return months, day
class GaReport(BaseController):
    """Controller serving the site-wide Google Analytics report pages."""

    def csv(self, month):
        '''
        Serve every site statistic (except the per-dataset 'Downloads'
        stats) as a CSV attachment.

        :param month: a period name such as '2012-12', or 'all'
        '''
        # NOTE: the redundant function-local ``import csv`` was removed;
        # the module is already imported at the top of the file.
        q = model.Session.query(GA_Stat).filter(GA_Stat.stat_name!='Downloads')
        if month != 'all':
            q = q.filter(GA_Stat.period_name==month)
        entries = q.order_by('GA_Stat.period_name, GA_Stat.stat_name, GA_Stat.key').all()

        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = str('attachment; filename=stats_%s.csv' % (month,))

        writer = csv.writer(response)
        writer.writerow(["Period", "Statistic", "Key", "Value"])

        for entry in entries:
            writer.writerow([entry.period_name.encode('utf-8'),
                             entry.stat_name.encode('utf-8'),
                             entry.key.encode('utf-8'),
                             entry.value.encode('utf-8')])

    def index(self):
        '''Render the site-wide analytics index page, populating the
        template context (c) with totals, sparklines and breakdown tables.'''
        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Stat)

        # Work out which month to show, based on query params of the first item
        c.month_desc = 'all months'
        c.month = request.params.get('month', '')
        if c.month:
            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])

        q = model.Session.query(GA_Stat).\
            filter(GA_Stat.stat_name=='Totals')
        if c.month:
            q = q.filter(GA_Stat.period_name==c.month)
        entries = q.order_by('ga_stat.key').all()

        def clean_key(key, val):
            # Format a raw statistic value for display: round averages,
            # render 'Average time on site' as HH:MM:SS, append '%' to
            # percentage stats, and cast count stats to int.
            if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']:
                val = "%.2f" % round(float(val), 2)
                if key == 'Average time on site':
                    mins, secs = divmod(float(val), 60)
                    hours, mins = divmod(mins, 60)
                    val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
                if key in ['New visits','Bounce rate (home page)']:
                    val = "%s%%" % val
            if key in ['Total page views', 'Total visits']:
                val = int(val)
            return key, val

        # Query historic values for sparkline rendering
        sparkline_query = model.Session.query(GA_Stat)\
                    .filter(GA_Stat.stat_name=='Totals')\
                    .order_by(GA_Stat.period_name)
        sparkline_data = {}
        for x in sparkline_query:
            sparkline_data[x.key] = sparkline_data.get(x.key,[])
            key, val = clean_key(x.key,float(x.value))
            tooltip = '%s: %s' % (_get_month_name(x.period_name), val)
            sparkline_data[x.key].append( (tooltip,x.value) )
        # Trim the latest month, as it looks like a huge dropoff
        for key in sparkline_data:
            sparkline_data[key] = sparkline_data[key][:-1]

        c.global_totals = []
        if c.month:
            # A specific month: show its values directly.
            for e in entries:
                key, val = clean_key(e.key, e.value)
                sparkline = sparkline_data[e.key]
                c.global_totals.append((key, val, sparkline))
        else:
            # All months: sum the count stats, average everything else.
            d = collections.defaultdict(list)
            for e in entries:
                d[e.key].append(float(e.value))
            for k, v in d.iteritems():
                if k in ['Total page views', 'Total visits']:
                    v = sum(v)
                else:
                    v = float(sum(v))/float(len(v))
                sparkline = sparkline_data[k]
                key, val = clean_key(k,v)

                c.global_totals.append((key, val, sparkline))
        # Sort the global totals into a more pleasant order
        def sort_func(x):
            key = x[0]
            total_order = ['Total page views','Total visits','Pages per visit']
            if key in total_order:
                return total_order.index(key)
            return 999  # everything else keeps its relative order at the end
        c.global_totals = sorted(c.global_totals, key=sort_func)

        # Maps each stat_name to the template-context attribute it fills.
        keys = {
            'Browser versions': 'browser_versions',
            'Browsers': 'browsers',
            'Operating Systems versions': 'os_versions',
            'Operating Systems': 'os',
            'Social sources': 'social_networks',
            'Languages': 'languages',
            'Country': 'country'
        }

        def shorten_name(name, length=60):
            # BUG FIX: the condition previously compared against the literal
            # 60 instead of the 'length' parameter, so custom lengths were
            # truncated but never triggered the '..' suffix correctly.
            return (name[:length] + '..') if len(name) > length else name

        def fill_out_url(url):
            # Turn a site-relative URL into an absolute one.
            import urlparse
            return urlparse.urljoin(g.site_url, url)

        c.social_referrer_totals, c.social_referrers = [], []
        q = model.Session.query(GA_ReferralStat)
        q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
        q = q.order_by('ga_referrer.count::int desc')
        for entry in q.all():
            c.social_referrers.append((shorten_name(entry.url), fill_out_url(entry.url),
                                       entry.source,entry.count))

        q = model.Session.query(GA_ReferralStat.url,
                                func.sum(GA_ReferralStat.count).label('count'))
        q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
        q = q.order_by('count desc').group_by(GA_ReferralStat.url)
        for entry in q.all():
            c.social_referrer_totals.append((shorten_name(entry[0]), fill_out_url(entry[0]),'',
                                             entry[1]))

        for k, v in keys.iteritems():
            q = model.Session.query(GA_Stat).\
                filter(GA_Stat.stat_name==k).\
                order_by(GA_Stat.period_name)
            # Buffer the tabular data
            if c.month:
                entries = []
                q = q.filter(GA_Stat.period_name==c.month).\
                          order_by('ga_stat.value::int desc')
            d = collections.defaultdict(int)
            for e in q.all():
                d[e.key] += int(e.value)
            entries = []
            for key, val in d.iteritems():
                entries.append((key,val,))
            entries = sorted(entries, key=operator.itemgetter(1), reverse=True)

            # Run a query on all months to gather graph data
            graph_query = model.Session.query(GA_Stat).\
                filter(GA_Stat.stat_name==k).\
                order_by(GA_Stat.period_name)
            graph_dict = {}
            for stat in graph_query:
                graph_dict[ stat.key ] = graph_dict.get(stat.key,{
                    'name':stat.key,
                    'raw': {}
                    })
                graph_dict[ stat.key ]['raw'][stat.period_name] = float(stat.value)
            # Table rows first (already sorted), then any remaining series.
            stats_in_table = [x[0] for x in entries]
            stats_not_in_table = set(graph_dict.keys()) - set(stats_in_table)
            stats = stats_in_table + sorted(list(stats_not_in_table))
            graph = [graph_dict[x] for x in stats]
            setattr(c, v+'_graph', json.dumps( _to_rickshaw(graph,percentageMode=True) ))

            # Get the total for each set of values and then set the value as
            # a percentage of the total
            if k == 'Social sources':
                total = sum([x for n,x,graph in c.global_totals if n == 'Total visits'])
            else:
                total = sum([num for _,num in entries])
            setattr(c, v, [(k,_percent(v,total)) for k,v in entries ])

        return render('ga_report/site/index.html')
class GaDatasetReport(BaseController):
    """
    Displays the pageview and visit count for datasets
    with options to filter by publisher and time period.
    """
    def publisher_csv(self, month):
        '''
        Returns a CSV of each publisher with the total number of dataset
        views & visits.

        :param month: a period name such as '2012-12', or 'all'
        '''
        c.month = month if not month == 'all' else ''
        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = str('attachment; filename=publishers_%s.csv' % (month,))
        writer = csv.writer(response)
        writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"])

        # limit=None fetches every publisher, not just the top 20.
        top_publishers = _get_top_publishers(limit=None)

        for publisher,view,visit in top_publishers:
            writer.writerow([publisher.title.encode('utf-8'),
                             publisher.name.encode('utf-8'),
                             view,
                             visit,
                             month])

    def dataset_csv(self, id='all', month='all'):
        '''
        Returns a CSV with the number of views & visits for each dataset.

        :param id: A Publisher ID or None if you want for all
        :param month: The time period, or 'all'
        '''
        c.month = month if not month == 'all' else ''
        if id != 'all':
            c.publisher = model.Group.get(id)
            if not c.publisher:
                abort(404, 'A publisher with that name could not be found')

        # NOTE(review): when id == 'all', c.publisher is never assigned here,
        # and c.publisher_name is never assigned in this method at all —
        # presumably relying on Pylons' attribute-safe 'c' returning an empty
        # value; confirm the generated filename is as intended.
        packages = self._get_packages(publisher=c.publisher, month=c.month)
        response.headers['Content-Type'] = "text/csv; charset=utf-8"
        response.headers['Content-Disposition'] = \
            str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,))
        writer = csv.writer(response)
        writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Resource downloads", "Period Name"])

        for package,view,visit,downloads in packages:
            writer.writerow([package.title.encode('utf-8'),
                             package.name.encode('utf-8'),
                             view,
                             visit,
                             downloads,
                             month])

    def publishers(self):
        '''A list of publishers and the number of views/visits for each'''

        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Url)

        # Work out which month to show, based on query params of the first item
        c.month = request.params.get('month', '')
        c.month_desc = 'all months'
        if c.month:
            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])

        c.top_publishers = _get_top_publishers()
        graph_data = _get_top_publishers_graph()
        c.top_publishers_graph = json.dumps( _to_rickshaw(graph_data) )

        x = render('ga_report/publisher/index.html')

        return x

    def _get_packages(self, publisher=None, month='', count=-1):
        '''Returns the datasets in order of views

        :param publisher: optional Group to restrict datasets to
        :param month: period name, or '' meaning the 'All' roll-up row
        :param count: maximum number of rows, or -1 for no limit
        :returns: list of (package, pageviews, visits, downloads) tuples,
                  where downloads is 'No data' for months before
                  DOWNLOADS_AVAILABLE_FROM
        '''
        have_download_data = True
        month = month or 'All'
        if month != 'All':
            # String comparison works because periods are 'YYYY-MM'.
            have_download_data = month >= DOWNLOADS_AVAILABLE_FROM

        q = model.Session.query(GA_Url,model.Package)\
            .filter(model.Package.name==GA_Url.package_id)\
            .filter(GA_Url.url.like('/dataset/%'))
        if publisher:
            q = q.filter(GA_Url.department_id==publisher.name)
        q = q.filter(GA_Url.period_name==month)
        q = q.order_by('ga_url.pageviews::int desc')
        top_packages = []
        if count == -1:
            entries = q.all()
        else:
            entries = q.limit(count)

        for entry,package in entries:
            if package:
                # Downloads ....
                if have_download_data:
                    dls = model.Session.query(GA_Stat).\
                        filter(GA_Stat.stat_name=='Downloads').\
                        filter(GA_Stat.key==package.name)
                    if month != 'All':  # Fetch everything unless the month is specific
                        dls = dls.filter(GA_Stat.period_name==month)
                    downloads = 0
                    for x in dls:
                        downloads += int(x.value)
                else:
                    downloads = 'No data'
                top_packages.append((package, entry.pageviews, entry.visits, downloads))
            else:
                log.warning('Could not find package associated package')

        return top_packages

    def read(self):
        '''
        Lists the most popular datasets across all publishers
        '''
        return self.read_publisher(None)

    def read_publisher(self, id):
        '''
        Lists the most popular datasets for a publisher (or across all publishers)

        :param id: publisher name, 'all', or None; the 'publisher' query
                   parameter, if present, overrides it
        '''
        count = 20  # NOTE(review): unused — the literal 20 is passed below

        c.publishers = _get_publishers()

        id = request.params.get('publisher', id)
        if id and id != 'all':
            c.publisher = model.Group.get(id)
            if not c.publisher:
                abort(404, 'A publisher with that name could not be found')
            c.publisher_name = c.publisher.name
        c.top_packages = [] # package, dataset_views in c.top_packages

        # Get the month details by fetching distinct values and determining the
        # month names from the values.
        c.months, c.day = _month_details(GA_Url)

        # Work out which month to show, based on query params of the first item
        c.month = request.params.get('month', '')
        if not c.month:
            c.month_desc = 'all months'
        else:
            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])

        month = c.month or 'All'
        c.publisher_page_views = 0
        q = model.Session.query(GA_Url).\
            filter(GA_Url.url=='/publisher/%s' % c.publisher_name)
        entry = q.filter(GA_Url.period_name==c.month).first()
        c.publisher_page_views = entry.pageviews if entry else 0

        c.top_packages = self._get_packages(publisher=c.publisher, count=20, month=c.month)

        # Graph query
        top_packages_all_time = self._get_packages(publisher=c.publisher, count=20, month='All')
        top_package_names = [ x[0].name for x in top_packages_all_time ]
        graph_query = model.Session.query(GA_Url,model.Package)\
            .filter(model.Package.name==GA_Url.package_id)\
            .filter(GA_Url.url.like('/dataset/%'))\
            .filter(GA_Url.package_id.in_(top_package_names))
        all_series = {}
        for entry,package in graph_query:
            if not package: continue
            # Skip the roll-up row; the graph wants per-month points only.
            if entry.period_name=='All': continue
            all_series[package.name] = all_series.get(package.name,{
                'name':package.title,
                'raw': {}
                })
            all_series[package.name]['raw'][entry.period_name] = int(entry.pageviews)
        graph = [ all_series[series_name] for series_name in top_package_names ]
        c.graph_data = json.dumps( _to_rickshaw(graph) )

        return render('ga_report/publisher/read.html')
def _to_rickshaw(data, percentageMode=False):
    '''
    Convert a list of series dicts ({'name': ..., 'raw': {period: value}})
    into the point format expected by the Rickshaw graphing library,
    filling in a zero for any month a series has no data for.

    NOTE: mutates the input series dicts in place (adds a 'data' list).

    :param data: list of series dicts
    :param percentageMode: when True, values are rescaled to percentages of
        each month's total, and series that never exceed THRESHOLD percent
        are rolled into a single 'Other' series
    :returns: list of series dicts with 'data' = [{'x': epoch, 'y': value}]
    '''
    if data==[]:
        return data
    # x-axis is every month in c.months. Note that data might not exist
    # for entire history, eg. for recently-added datasets
    x_axis = [x[0] for x in c.months]
    x_axis.reverse() # Ascending order
    x_axis = x_axis[:-1] # Remove latest month
    totals = {}
    for series in data:
        series['data'] = []
        for x_string in x_axis:
            x = _get_unix_epoch( x_string )
            y = series['raw'].get(x_string,0)
            series['data'].append({'x':x,'y':y})
            totals[x] = totals.get(x,0)+y
    if not percentageMode:
        return data
    # Turn all data into percentages
    # Roll insignificant series into a catch-all
    THRESHOLD = 1
    raw_data = data
    data = []
    for series in raw_data:
        for point in series['data']:
            # NOTE(review): raises ZeroDivisionError if every series has
            # y == 0 for some month — presumably that cannot happen with
            # real analytics data; confirm.
            percentage = (100*float(point['y'])) / totals[point['x']]
            # A series is kept the first time any of its points crosses
            # the threshold.
            if not (series in data) and percentage>THRESHOLD:
                data.append(series)
            point['y'] = percentage
    others = [ x for x in raw_data if not (x in data) ]
    if len(others):
        # Sum all below-threshold series into one 'Other' series.
        data_other = []
        for i in range(len(x_axis)):
            x = _get_unix_epoch(x_axis[i])
            y = 0
            for series in others:
                y += series['data'][i]['y']
            data_other.append({'x':x,'y':y})
        data.append({
            'name':'Other',
            'data': data_other
            })
    return data
def _get_top_publishers(limit=20):
    '''
    Return the publishers with the most dataset views for the currently
    selected month (c.month, or the 'All' roll-up), as a list of
    (Group, views, visits) tuples ordered by views descending.

    The number to show can be varied with 'limit'; pass None for no limit.
    '''
    period = c.month or 'All'
    connection = model.Session.connection()
    sql = """
        select department_id, sum(pageviews::int) views, sum(visits::int) visits
        from ga_url
        where department_id <> ''
          and package_id <> ''
          and url like '/dataset/%%'
          and period_name=%s
        group by department_id order by views desc
        """
    if limit:
        sql = sql + " limit %s;" % (limit)

    # Local name chosen to avoid shadowing the Pylons 'g' globals object.
    results = []
    for row in connection.execute(sql, period):
        publisher_group = model.Group.get(row[0])
        if publisher_group:
            results.append((publisher_group, row[1], row[2]))
    return results
def _get_top_publishers_graph(limit=20):
    '''
    Return graph series data for the top publishers by all-time dataset
    views: a list of {'name': group title, 'raw': {period: views}} dicts,
    in descending order of total views.

    (The number to show can be varied with 'limit'.)
    '''
    connection = model.Session.connection()
    q = """
        select department_id, sum(pageviews::int) views
        from ga_url
        where department_id <> ''
          and package_id <> ''
          and url like '/dataset/%%'
          and period_name='All'
        group by department_id order by views desc
        """
    if limit:
        q = q + " limit %s;" % (limit)

    res = connection.execute(q)
    department_ids = [ row[0] for row in res ]

    # Query for a history graph of these department ids
    q = model.Session.query(
            GA_Url.department_id,
            GA_Url.period_name,
            func.sum(cast(GA_Url.pageviews,sqlalchemy.types.INT)))\
        .filter( GA_Url.department_id.in_(department_ids) )\
        .filter( GA_Url.url.like('/dataset/%') )\
        .filter( GA_Url.package_id!='' )\
        .group_by( GA_Url.department_id, GA_Url.period_name )
    graph_dict = {}
    for dept_id, period_name, views in q:
        if dept_id not in graph_dict:
            group = model.Group.get(dept_id)
            if group is None:
                # BUG FIX: previously Group.get(...).title raised
                # AttributeError for a department whose Group record no
                # longer exists; skip it instead, consistent with
                # _get_top_publishers.
                continue
            graph_dict[dept_id] = {
                'name': group.title,
                'raw': {}
                }
        graph_dict[dept_id]['raw'][period_name] = views
    # Preserve the views-descending order; drop departments that were skipped.
    return [ graph_dict[dept_id] for dept_id in department_ids
             if dept_id in graph_dict ]
def _get_publishers(limit=None):
    '''
    Returns a list of all publishers. Each item is a tuple:
      (name, title)

    :param limit: optionally restrict to the first `limit` publishers
                  (ordered by name); default returns all of them.
    '''
    # Group type 'organization' for consistency with the hierarchy helpers
    # (get_children queries type='organization').
    query = model.Session.query(model.Group).\
              filter(model.Group.type=='organization').\
              filter(model.Group.state=='active').\
              order_by(model.Group.name)
    if limit:
        query = query.limit(limit)
    publishers = []
    for pub in query:
        publishers.append((pub.name, pub.title))
    return publishers
def _percent(num, total): | def _percent(num, total): |
p = 100 * float(num)/float(total) | p = 100 * float(num)/float(total) |
return "%.2f%%" % round(p, 2) | return "%.2f%%" % round(p, 2) |
import re | import re |
import uuid | import uuid |
from sqlalchemy import Table, Column, MetaData, ForeignKey | from sqlalchemy import Table, Column, MetaData, ForeignKey |
from sqlalchemy import types | from sqlalchemy import types |
from sqlalchemy.sql import select | from sqlalchemy.sql import select |
from sqlalchemy.orm import mapper, relation | from sqlalchemy.orm import mapper, relation |
from sqlalchemy import func | from sqlalchemy import func |
import ckan.model as model | import ckan.model as model |
from ckan.lib.base import * | from ckan.lib.base import * |
log = __import__('logging').getLogger(__name__) | log = __import__('logging').getLogger(__name__) |
def make_uuid():
    '''Return a fresh random UUID as a unicode string; used as the
    default primary key value for the tables below.'''
    fresh = uuid.uuid4()
    return unicode(fresh)
# Shared MetaData registry on which all ga_report tables below are defined;
# init_tables() creates them against CKAN's engine.
metadata = MetaData()
class GA_Url(object):
    '''Row object mapped onto the ga_url table (see url_table below).'''
    def __init__(self, **kwargs):
        for field, value in kwargs.items():
            setattr(self, field, value)
# ga_url: page view / visit counts per URL and reporting period.
# NOTE(review): pageviews and visits are stored as text, not integers --
# aggregating code casts them (e.g. pageviews::int) at query time.
url_table = Table('ga_url', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  # Reporting period, e.g. '2012-12', or 'All' for the rollup row.
                  Column('period_name', types.UnicodeText),
                  Column('period_complete_day', types.Integer),
                  Column('pageviews', types.UnicodeText),
                  Column('visits', types.UnicodeText),
                  Column('url', types.UnicodeText),
                  Column('department_id', types.UnicodeText),
                  Column('package_id', types.UnicodeText),
                  )
mapper(GA_Url, url_table)
class GA_Stat(object):
    '''Row object mapped onto the ga_stat table (see stat_table below).'''
    def __init__(self, **kwargs):
        for field, value in kwargs.items():
            setattr(self, field, value)
# ga_stat: generic site-wide statistics -- one row per
# (period_name, stat_name, key), with a free-form text value.
stat_table = Table('ga_stat', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  # NOTE(review): text here but Integer on ga_url; the
                  # inconsistency looks accidental -- confirm before changing.
                  Column('period_complete_day', types.UnicodeText),
                  Column('stat_name', types.UnicodeText),
                  Column('key', types.UnicodeText),
                  Column('value', types.UnicodeText), )
mapper(GA_Stat, stat_table)
class GA_Publisher(object):
    '''Row object mapped onto the ga_publisher table (see pub_table below).'''
    def __init__(self, **kwargs):
        for field, value in kwargs.items():
            setattr(self, field, value)
# ga_publisher: aggregated views/visits per publisher per period, including
# the rollup over each publisher's sub-publisher tree.
pub_table = Table('ga_publisher', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  Column('publisher_name', types.UnicodeText),
                  # Stored as text, like ga_url's pageviews/visits.
                  Column('views', types.UnicodeText),
                  Column('visits', types.UnicodeText),
                  # True for publishers with no parent group.
                  Column('toplevel', types.Boolean, default=False),
                  Column('subpublishercount', types.Integer, default=0),
                  Column('parent', types.UnicodeText),
                  )
mapper(GA_Publisher, pub_table)
class GA_ReferralStat(object):
    '''Row object mapped onto the ga_referrer table (see referrer_table below).'''
    def __init__(self, **kwargs):
        for field, value in kwargs.items():
            setattr(self, field, value)
# ga_referrer: referral hit counts per period, source and target url.
referrer_table = Table('ga_referrer', metadata,
                      Column('id', types.UnicodeText, primary_key=True,
                             default=make_uuid),
                      Column('period_name', types.UnicodeText),
                      Column('source', types.UnicodeText),
                      Column('url', types.UnicodeText),
                      Column('count', types.Integer),
                      )
mapper(GA_ReferralStat, referrer_table)
def init_tables():
    '''Create all ga_report tables (registered on `metadata`) in CKAN's
    database if they do not already exist.'''
    metadata.create_all(model.meta.engine)
cached_tables = {}   # table name -> reflected sqlalchemy Table object


def get_table(name):
    '''Return the named table, reflecting the database schema on first use
    and serving the table from the module-level cache afterwards.'''
    if name in cached_tables:
        return cached_tables[name]
    meta = MetaData()
    meta.reflect(bind=model.meta.engine)
    cached_tables[name] = meta.tables[name]
    return cached_tables[name]
def _normalize_url(url): | def _normalize_url(url): |
'''Strip off the hostname etc. Do this before storing it. | '''Strip off the hostname etc. Do this before storing it. |
>>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices') | >>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices') |
'/dataset/weekly_fuel_prices' | '/dataset/weekly_fuel_prices' |
''' | ''' |
return '/' + '/'.join(url.split('/')[3:]) | return '/' + '/'.join(url.split('/')[3:]) |
def _get_package_and_publisher(url): | def _get_package_and_publisher(url): |
# e.g. /dataset/fuel_prices | # e.g. /dataset/fuel_prices |
# e.g. /dataset/fuel_prices/resource/e63380d4 | # e.g. /dataset/fuel_prices/resource/e63380d4 |
dataset_match = re.match('/dataset/([^/]+)(/.*)?', url) | dataset_match = re.match('/dataset/([^/]+)(/.*)?', url) |
if dataset_match: | if dataset_match: |
dataset_ref = dataset_match.groups()[0] | dataset_ref = dataset_match.groups()[0] |
dataset = model.Package.get(dataset_ref) | dataset = model.Package.get(dataset_ref) |
if dataset: | if dataset: |
publisher_groups = dataset.get_groups('publisher') | publisher_groups = dataset.get_groups('organization') |
if publisher_groups: | if publisher_groups: |
return dataset_ref,publisher_groups[0].name | return dataset_ref,publisher_groups[0].name |
return dataset_ref, None | return dataset_ref, None |
else: | else: |
publisher_match = re.match('/publisher/([^/]+)(/.*)?', url) | publisher_match = re.match('/publisher/([^/]+)(/.*)?', url) |
if publisher_match: | if publisher_match: |
return None, publisher_match.groups()[0] | return None, publisher_match.groups()[0] |
return None, None | return None, None |
def update_sitewide_stats(period_name, stat_name, data, period_complete_day):
    '''Upsert one GA_Stat row per key/value pair in `data`.

    Rows are matched on (period_name, key, stat_name); existing rows are
    updated in place, missing ones created.  Commits after each upsert.
    '''
    for key, value in data.iteritems():
        stat = model.Session.query(GA_Stat).\
                 filter(GA_Stat.period_name==period_name).\
                 filter(GA_Stat.key==key).\
                 filter(GA_Stat.stat_name==stat_name).first()
        if stat is not None:
            stat.period_name = period_name
            stat.key = key
            stat.value = value
            stat.period_complete_day = period_complete_day
            model.Session.add(stat)
        else:
            # No row yet for this stat/key in the period -- create one.
            model.Session.add(GA_Stat(id=make_uuid(),
                                      period_name=period_name,
                                      period_complete_day=period_complete_day,
                                      key=key,
                                      value=value,
                                      stat_name=stat_name))
        model.Session.commit()
def pre_update_url_stats(period_name):
    '''Clear out GA_Url rows before a reload: delete the rows for
    `period_name` and every 'All' rollup row (the rollups are rebuilt
    once the new month's data has been stored).'''
    month_q = model.Session.query(GA_Url).\
        filter(GA_Url.period_name==period_name)
    log.debug("Deleting %d '%s' records" % (month_q.count(), period_name))
    month_q.delete()

    rollup_q = model.Session.query(GA_Url).\
        filter(GA_Url.period_name == 'All')
    log.debug("Deleting %d 'All' records..." % rollup_q.count())
    rollup_q.delete()

    model.Session.flush()
    model.Session.commit()
    model.repo.commit_and_remove()
    log.debug('...done')
def post_update_url_stats():
    """ Check the distinct url field in ga_url and make sure
        it has an All record.  If not then create one.

        After running this then every URL should have an All
        record regardless of whether the URL has an entry for
        the month being currently processed.
    """
    log.debug('Post-processing "All" records...')
    # Per-period rows for urls that have no 'All' rollup row yet.
    query = """select url, pageviews::int, visits::int
               from ga_url
               where url not in (select url from ga_url where period_name ='All')"""
    connection = model.Session.connection()
    res = connection.execute(query)

    views, visits = {}, {}
    # url, views, visits
    for row in res:
        # Accumulate totals across every period for each url.
        views[row[0]] = views.get(row[0], 0) + row[1]
        visits[row[0]] = visits.get(row[0], 0) + row[2]

    progress_total = len(views.keys())
    progress_count = 0
    for key in views.keys():
        progress_count += 1
        if progress_count % 100 == 0:
            log.debug('.. %d/%d done so far', progress_count, progress_total)

        package, publisher = _get_package_and_publisher(key)

        # Create the missing 'All' rollup row for this url.
        values = {'id': make_uuid(),
                  'period_name': "All",
                  'period_complete_day': 0,
                  'url': key,
                  'pageviews': views[key],
                  'visits': visits[key],
                  'department_id': publisher,
                  'package_id': package
                  }
        model.Session.add(GA_Url(**values))
        model.Session.commit()
    log.debug('..done')
def update_url_stats(period_name, period_complete_day, url_data):
    '''
    Given a list of urls and number of hits for each during a given period,
    stores them in GA_Url under the period and recalculates the totals for
    the 'All' period.

    `url_data` is an iterable of (url, views, visits) tuples.
    '''
    progress_total = len(url_data)
    progress_count = 0
    for url, views, visits in url_data:
        progress_count += 1
        if progress_count % 100 == 0:
            log.debug('.. %d/%d done so far', progress_count, progress_total)

        package, publisher = _get_package_and_publisher(url)

        item = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.url==url).first()
        if item:
            # Row for this url+period already exists: accumulate the counts.
            # NOTE(review): pageviews/visits are UnicodeText columns, so this
            # addition assumes the stored and incoming values have compatible
            # types -- confirm what the GA download code passes in.
            item.pageviews = item.pageviews + views
            item.visits = item.visits + visits
            # Back-fill ids that an earlier run could not resolve.
            if not item.package_id:
                item.package_id = package
            if not item.department_id:
                item.department_id = publisher
            model.Session.add(item)
        else:
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'period_complete_day': period_complete_day,
                      'url': url,
                      'pageviews': views,
                      'visits': visits,
                      'department_id': publisher,
                      'package_id': package
                      }
            model.Session.add(GA_Url(**values))
        model.Session.commit()

        if package:
            # Rebuild the 'All' rollup for this url: sum any existing 'All'
            # rows plus every per-period row, then add a fresh 'All' row.
            # NOTE(review): the old 'All' rows are not deleted here, so this
            # relies on pre_update_url_stats() having cleared them first;
            # otherwise totals would double-count -- confirm callers.
            old_pageviews, old_visits = 0, 0
            old = model.Session.query(GA_Url).\
                filter(GA_Url.period_name=='All').\
                filter(GA_Url.url==url).all()
            old_pageviews = sum([int(o.pageviews) for o in old])
            old_visits = sum([int(o.visits) for o in old])

            entries = model.Session.query(GA_Url).\
                filter(GA_Url.period_name!='All').\
                filter(GA_Url.url==url).all()
            values = {'id': make_uuid(),
                      'period_name': 'All',
                      'period_complete_day': 0,
                      'url': url,
                      'pageviews': sum([int(e.pageviews) for e in entries]) + int(old_pageviews),
                      'visits': sum([int(e.visits or 0) for e in entries]) + int(old_visits),
                      'department_id': publisher,
                      'package_id': package
                      }
            model.Session.add(GA_Url(**values))
            model.Session.commit()
def update_social(period_name, data):
    '''Store social/referral stats for the period.

    `data` maps url -> list of (source, count) entries.  Existing rows for
    the period are deleted first, then re-created/accumulated.
    '''
    # Clean up first.
    model.Session.query(GA_ReferralStat).\
        filter(GA_ReferralStat.period_name==period_name).delete()
    # FIX: the inner loop variable used to be named `data`, shadowing the
    # dict being iterated; use a distinct name for clarity/safety.
    for url, entries in data.iteritems():
        for entry in entries:
            source = entry[0]
            count = entry[1]

            item = model.Session.query(GA_ReferralStat).\
                filter(GA_ReferralStat.period_name==period_name).\
                filter(GA_ReferralStat.source==source).\
                filter(GA_ReferralStat.url==url).first()
            if item:
                item.count = item.count + count
                model.Session.add(item)
            else:
                # create the row
                values = {'id': make_uuid(),
                          'period_name': period_name,
                          'source': source,
                          'url': url,
                          'count': count,
                         }
                model.Session.add(GA_ReferralStat(**values))
        model.Session.commit()
def update_publisher_stats(period_name):
    """
    Updates the publisher stats from the data retrieved for /dataset/*
    and /publisher/*. Will run against each dataset and generates the
    totals for the entire tree beneath each publisher.
    """
    toplevel = get_top_level()
    # Group type 'organization' for consistency with get_children()/
    # get_top_level(), which walk the same hierarchy.
    publishers = model.Session.query(model.Group).\
        filter(model.Group.type=='organization').\
        filter(model.Group.state=='active').all()
    for publisher in publishers:
        views, visits, subpub = update_publisher(period_name, publisher, publisher.name)
        parent, parents = '', publisher.get_groups('organization')
        if parents:
            parent = parents[0].name

        item = model.Session.query(GA_Publisher).\
            filter(GA_Publisher.period_name==period_name).\
            filter(GA_Publisher.publisher_name==publisher.name).first()
        if item:
            # Update the existing row for this period in place.
            item.views = views
            item.visits = visits
            item.publisher_name = publisher.name
            item.toplevel = publisher in toplevel
            item.subpublishercount = subpub
            item.parent = parent
            model.Session.add(item)
        else:
            # create the row
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'publisher_name': publisher.name,
                      'views': views,
                      'visits': visits,
                      'toplevel': publisher in toplevel,
                      'subpublishercount': subpub,
                      'parent': parent
                      }
            model.Session.add(GA_Publisher(**values))
        model.Session.commit()
def update_publisher(period_name, pub, part=''):
    '''Sum GA_Url views/visits for `pub` and every publisher beneath it.

    Returns (views, visits, sub_publisher_count).
    '''
    total_views = 0
    total_visits = 0
    node_count = 0
    for node in go_down_tree(pub):
        node_count += 1
        records = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.department_id==node.name).all()
        for record in records:
            total_views += int(record.pageviews)
            total_visits += int(record.visits)
    # go_down_tree yields `pub` itself, so subtract one for the sub-count.
    return total_views, total_visits, node_count - 1
def get_top_level():
    '''Returns the top level publishers (active groups with no parent).'''
    from sqlalchemy import and_
    # FIX: the join condition previously chained clauses with Python's `and`,
    # which does not build a SQL AND -- it evaluates clause truthiness and at
    # best keeps only the last clause.  Use sqlalchemy.and_ instead.
    return model.Session.query(model.Group).\
           outerjoin(model.Member,
                     and_(model.Member.table_id == model.Group.id,
                          model.Member.table_name == 'group',
                          model.Member.state == 'active')).\
           filter(model.Member.id==None).\
           filter(model.Group.type=='organization').\
           order_by(model.Group.name).all()
def get_children(publisher):
    '''Finds child publishers for the given publisher (Group object).
    (Not recursive, i.e. returns one level only.)'''
    return publisher.get_children_groups(type='organization')
def go_down_tree(publisher):
    '''Walk the publisher hierarchy depth-first, yielding the supplied
    publisher first and then every descendant beneath it.'''
    yield publisher
    for child in get_children(publisher):
        for descendant in go_down_tree(child):
            yield descendant
def delete(period_name):
    '''
    Deletes table data for the specified period, or specify 'all'
    for all periods.
    '''
    for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat):
        q = model.Session.query(object_type)
        # FIX: the docstring promises 'all', but the code only recognised the
        # literal 'All'; accept any casing (passing 'All' still works).
        if period_name.lower() != 'all':
            q = q.filter_by(period_name=period_name)
        q.delete()
    model.repo.commit_and_remove()
def get_score_for_dataset(dataset_name):
    '''
    Returns a "current popularity" score for a dataset,
    based on how many views it has had recently.

    Considers last month and the current month; the older period's
    contribution is halved.  Returns an int (views-per-day * 100).
    '''
    import datetime
    now = datetime.datetime.now()
    last_month = now - datetime.timedelta(days=30)
    period_names = ['%s-%02d' % (last_month.year, last_month.month),
                    '%s-%02d' % (now.year, now.month),
                    ]

    score = 0
    for period_name in period_names:
        # NOTE(review): under Python 2 the first pass divides the int 0;
        # after the first float is added, later divisions are float -- the
        # discounting therefore relies on Python 2 division semantics.
        score /= 2 # previous periods are discounted by 50%
        entry = model.Session.query(GA_Url)\
                .filter(GA_Url.period_name==period_name)\
                .filter(GA_Url.package_id==dataset_name).first()
        # score
        if entry:
            views = float(entry.pageviews)
            if entry.period_complete_day:
                views_per_day = views / entry.period_complete_day
            else:
                views_per_day = views / 15 # guess
            score += views_per_day

    score = int(score * 100)
    log.debug('Popularity %s: %s', score, dataset_name)
    return score
import logging | import logging |
import operator | import operator |
import ckan.lib.base as base | import ckan.lib.base as base |
import ckan.model as model | import ckan.model as model |
from ckan.logic import get_action | from ckan.logic import get_action |
from ckanext.ga_report.ga_model import GA_Url, GA_Publisher | from ckanext.ga_report.ga_model import GA_Url, GA_Publisher |
from ckanext.ga_report.controller import _get_publishers | from ckanext.ga_report.controller import _get_publishers |
_log = logging.getLogger(__name__) | _log = logging.getLogger(__name__) |
def popular_datasets(count=10):
    '''Render a snippet listing up to `count` popular datasets drawn from a
    randomly chosen active publisher.'''
    import random

    publisher = None
    # FIX: `datasets` was read by the while-condition before it was ever
    # assigned, raising NameError on the first iteration.
    datasets = None
    publishers = _get_publishers(30)
    total = len(publishers)
    while not publisher or not datasets:
        rand = random.randrange(0, total)
        publisher = publishers[rand][0]
        # NOTE(review): the controller's _get_publishers returns (name, title)
        # tuples, so `publisher` here looks like a string while `.state` below
        # expects a Group object -- confirm which helper is actually imported.
        if not publisher.state == 'active':
            publisher = None
            continue
        datasets = _datasets_for_publisher(publisher, 10)[:count]
    ctx = {
        'datasets': datasets,
        'publisher': publisher
    }
    return base.render_snippet('ga_report/ga_popular_datasets.html', **ctx)
def single_popular_dataset(top=20):
    '''Returns a random dataset from the most popular ones.

    :param top: the number of top datasets to select from

    Returns a package dict (via package_show), or None if no active
    package exists at all.
    '''
    import random

    top_datasets = model.Session.query(GA_Url).\
                   filter(GA_Url.url.like('/dataset/%')).\
                   order_by('ga_url.pageviews::int desc')
    num_top_datasets = top_datasets.count()

    dataset = None
    if num_top_datasets:
        count = 0
        while not dataset:
            # Pick a random row among the top `top` urls and try to resolve
            # it to an active package.
            rand = random.randrange(0, min(top, num_top_datasets))
            ga_url = top_datasets[rand]
            dataset = model.Package.get(ga_url.url[len('/dataset/'):])
            if dataset and not dataset.state == 'active':
                dataset = None
            # When testing, it is possible that top datasets are not available
            # so only go round this loop a few times before falling back on
            # a random dataset.
            count += 1
            if count > 10:
                break
    if not dataset:
        # fallback
        dataset = model.Session.query(model.Package)\
                  .filter_by(state='active').first()
        if not dataset:
            return None
    dataset_dict = get_action('package_show')({'model': model,
                                               'session': model.Session,
                                               'validate': False},
                                              {'id':dataset.id})
    return dataset_dict
def single_popular_dataset_html(top=20):
    '''Render a snippet for one randomly chosen popular dataset.

    :param top: the number of top datasets to select from
    '''
    dataset_dict = single_popular_dataset(top)
    # FIX: this previously read the undefined names `package` and
    # `publisher_dict`; use the dict returned above and the publisher
    # extracted from it.
    groups = dataset_dict.get('groups', [])
    publishers = [ g for g in groups if g.get('type') == 'organization' ]
    publisher = publishers[0] if publishers else {'name':'', 'title': ''}
    context = {
        'dataset': dataset_dict,
        'publisher': publisher
    }
    return base.render_snippet('ga_report/ga_popular_single.html', **context)
def most_popular_datasets(publisher, count=20, preview_image=None):
    '''Render the popular-datasets snippet for the given publisher.'''
    if not publisher:
        _log.error("No valid publisher passed to 'most_popular_datasets'")
        return ""

    results = _datasets_for_publisher(publisher, count)
    ctx = dict(dataset_count=len(results),
               datasets=results,
               publisher=publisher,
               preview_image=preview_image)
    return base.render_snippet('ga_report/publisher/popular.html', **ctx)
def _datasets_for_publisher(publisher, count):
    '''Return up to `count` of a publisher's datasets, ranked by pageviews.

    Aggregates GA_Url rows (one per reporting period) into per-package
    totals and returns a list of (Package, views, visits) tuples sorted by
    views, highest first. Rows whose package is missing or not 'active'
    are skipped with a warning.
    '''
    datasets = {}
    entries = model.Session.query(GA_Url).\
        filter(GA_Url.department_id==publisher.name).\
        filter(GA_Url.url.like('/dataset/%')).\
        order_by('ga_url.pageviews::int desc').all()
    for entry in entries:
        # Once `count` distinct packages are collected, the original guard
        # skipped every remaining row anyway -- break instead of scanning
        # the rest of the result set for nothing.
        if len(datasets) >= count:
            break
        p = model.Package.get(entry.url[len('/dataset/'):])
        if not p:
            _log.warning("Could not find Package for {url}".format(url=entry.url))
            continue
        if p.state != 'active':
            _log.warning("Package {0} is not active, it is {1}".format(p.name, p.state))
            continue
        if p not in datasets:
            datasets[p] = {'views': 0, 'visits': 0}
        datasets[p]['views'] += int(entry.pageviews)
        datasets[p]['visits'] += int(entry.visits)
    results = []
    for k, v in datasets.iteritems():
        results.append((k, v['views'], v['visits']))
    return sorted(results, key=operator.itemgetter(1), reverse=True)
def month_option_title(month_iso, months, day):
    '''Build the display label for a month in the period drop-down.

    month_iso -- ISO period key, e.g. '2013-01'.
    months -- list of (iso_code, display_name) pairs, most recent first.
    day -- latest day of data available in the most recent month.

    The most recent month (index 0) is annotated with `day`; other months
    get their display name alone. An unknown `month_iso` is logged and
    returned unchanged.
    '''
    known_isos = [iso for (iso, _name) in months]
    try:
        position = known_isos.index(month_iso)
    except ValueError:
        _log.error('Month "%s" not found in list of months.' % month_iso)
        return month_iso
    title = months[position][1]
    if position == 0:
        title = title + (' (up to %s)' % day)
    return title