# Updated with more logging

import os
import logging
import datetime
import collections

from pylons import config

from ga_model import _normalize_url
import ga_model

#from ga_client import GA

log = logging.getLogger('ckanext.ga-report')

# Period names are calendar months, e.g. '2013-01'.
FORMAT_MONTH = '%Y-%m'
# Privacy thresholds: entries with fewer views/visits are dropped from stats.
MIN_VIEWS = 50
MIN_VISITS = 20
   
class DownloadAnalytics(object):
    '''Downloads analytics info from Google Analytics for a set of time
    periods and stores the aggregated results via ga_model.
    '''

    def __init__(self, service=None, profile_id=None, delete_first=False,
                 skip_url_stats=False):
        # 'ga-report.period' - only 'monthly' is implemented (see latest()).
        self.period = config['ga-report.period']
        self.service = service          # authenticated GA API service object
        self.profile_id = profile_id    # GA profile (view) id to query
        self.delete_first = delete_first      # wipe stored stats for a period first
        self.skip_url_stats = skip_url_stats  # skip per-URL dataset/publisher stats

    def specific_month(self, date):
        '''Download and store analytics for the whole calendar month
        containing `date` (a datetime).'''
        import calendar

        first_of_this_month = datetime.datetime(date.year, date.month, 1)
        _, last_day_of_month = calendar.monthrange(int(date.year),
                                                   int(date.month))
        last_of_this_month = datetime.datetime(date.year, date.month,
                                               last_day_of_month)
        periods = ((date.strftime(FORMAT_MONTH),
                    last_day_of_month,
                    first_of_this_month, last_of_this_month),)
        self.download_and_store(periods)

    def latest(self):
        '''Download and store analytics for the current (incomplete) month.'''
        if self.period == 'monthly':
            # from first of this month to today
            now = datetime.datetime.now()
            first_of_this_month = datetime.datetime(now.year, now.month, 1)
            periods = ((now.strftime(FORMAT_MONTH),
                        now.day,
                        first_of_this_month, now),)
        else:
            raise NotImplementedError
        self.download_and_store(periods)

    def for_date(self, for_date):
        '''Download and store analytics for every month from `for_date`
        up to and including the current month.'''
        # BUG FIX: previously asserted on the undefined name `since_date`,
        # raising NameError whenever this method ran with assertions on.
        assert isinstance(for_date, datetime.datetime)
        periods = []  # (period_name, period_complete_day, start_date, end_date)
        if self.period == 'monthly':
            year = for_date.year
            month = for_date.month
            now = datetime.datetime.now()
            first_of_this_month = datetime.datetime(now.year, now.month, 1)
            while True:
                first_of_the_month = datetime.datetime(year, month, 1)
                if first_of_the_month == first_of_this_month:
                    # The current month is incomplete - record how many days
                    # of it the data covers.
                    periods.append((now.strftime(FORMAT_MONTH),
                                    now.day,
                                    first_of_this_month, now))
                    break
                elif first_of_the_month < first_of_this_month:
                    # A complete past month. Adding 40 days always lands in
                    # the following month; its 1st minus one day is this
                    # month's last day.
                    in_the_next_month = first_of_the_month + datetime.timedelta(40)
                    last_of_the_month = datetime.datetime(in_the_next_month.year,
                                                          in_the_next_month.month, 1)\
                                        - datetime.timedelta(1)
                    # BUG FIX: the period must be named after the month it
                    # covers; it previously used now.strftime, which labelled
                    # every past month with the current month's name.
                    periods.append((first_of_the_month.strftime(FORMAT_MONTH), 0,
                                    first_of_the_month, last_of_the_month))
                else:
                    # first_of_the_month has got to the future somehow
                    break
                month += 1
                if month > 12:
                    year += 1
                    month = 1
        else:
            raise NotImplementedError
        self.download_and_store(periods)

    @staticmethod
    def get_full_period_name(period_name, period_complete_day):
        '''Human-readable period label, noting partial coverage if any.'''
        if period_complete_day:
            return period_name + ' (up to %ith)' % period_complete_day
        else:
            return period_name

    def download_and_store(self, periods):
        '''Fetch and store all configured stats for each period.

        `periods` is a sequence of
        (period_name, period_complete_day, start_date, end_date) tuples.
        '''
        for period_name, period_complete_day, start_date, end_date in periods:
            log.info('Period "%s" (%s - %s)',
                     self.get_full_period_name(period_name, period_complete_day),
                     start_date.strftime('%Y-%m-%d'),
                     end_date.strftime('%Y-%m-%d'))

            if self.delete_first:
                log.info('Deleting existing Analytics for this period "%s"',
                         period_name)
                ga_model.delete(period_name)

            if not self.skip_url_stats:
                # Clean out old url data before storing the new
                ga_model.pre_update_url_stats(period_name)

                accountName = config.get('googleanalytics.account')

                log.info('Downloading analytics for dataset views')
                data = self.download(start_date, end_date,
                                     '~/%s/dataset/[a-z0-9-_]+' % accountName)

                log.info('Storing dataset views (%i rows)',
                         len(data.get('url', [])))
                self.store(period_name, period_complete_day, data)

                log.info('Downloading analytics for publisher views')
                data = self.download(start_date, end_date,
                                     '~/%s/publisher/[a-z0-9-_]+' % accountName)

                log.info('Storing publisher views (%i rows)',
                         len(data.get('url', [])))
                self.store(period_name, period_complete_day, data)

                log.info('Aggregating datasets by publisher')
                ga_model.update_publisher_stats(period_name)  # about 30 seconds.

            log.info('Downloading and storing analytics for site-wide stats')
            self.sitewide_stats(period_name, period_complete_day)

            log.info('Downloading and storing analytics for social networks')
            self.update_social_info(period_name, start_date, end_date)

    def update_social_info(self, period_name, start_date, end_date):
        '''Store entrances per (landing page, social network) for the period.'''
        start_date = start_date.strftime('%Y-%m-%d')
        end_date = end_date.strftime('%Y-%m-%d')
        query = 'ga:hasSocialSourceReferral=~Yes$'
        metrics = 'ga:entrances'
        sort = '-ga:entrances'

        # Supported query params at
        # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            filters=query,
            start_date=start_date,
            metrics=metrics,
            sort=sort,
            dimensions="ga:landingPagePath,ga:socialNetwork",
            max_results=10000,
            end_date=end_date).execute()
        data = collections.defaultdict(list)
        rows = results.get('rows', [])
        for row in rows:
            # GA returns the path with the domain but no scheme; prefix it
            # so _normalize_url strips the hostname consistently.
            url = _normalize_url('http:/' + row[0])
            data[url].append((row[1], int(row[2]),))
        ga_model.update_social(period_name, data)

    def download(self, start_date, end_date, path=None):
        '''Get data from GA for a given time period'''
        start_date = start_date.strftime('%Y-%m-%d')
        end_date = end_date.strftime('%Y-%m-%d')
        query = 'ga:pagePath=%s$' % path
        metrics = 'ga:pageviews, ga:visits'
        sort = '-ga:pageviews'

        # Supported query params at
        # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            filters=query,
            start_date=start_date,
            metrics=metrics,
            sort=sort,
            dimensions="ga:pagePath",
            max_results=10000,
            end_date=end_date).execute()

        packages = []
        # ROBUSTNESS: 'rows' is absent when there are no results; previously
        # that raised TypeError iterating None.
        for entry in results.get('rows') or []:
            (loc, pageviews, visits) = entry
            url = _normalize_url('http:/' + loc)  # strips off domain e.g. www.data.gov.uk or data.gov.uk

            if not url.startswith('/dataset/') and not url.startswith('/publisher/'):
                # filter out strays like:
                # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open
                # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate
                continue
            packages.append((url, pageviews, visits,))  # Temporary hack
        return dict(url=packages)

    def store(self, period_name, period_complete_day, data):
        '''Persist per-URL stats (if any) for the period.'''
        if 'url' in data:
            ga_model.update_url_stats(period_name, period_complete_day,
                                      data['url'])

    def sitewide_stats(self, period_name, period_complete_day):
        '''Run every _xxx_stats downloader over the period's full month.'''
        import calendar
        year, month = period_name.split('-')
        _, last_day_of_month = calendar.monthrange(int(year), int(month))

        start_date = '%s-01' % period_name
        end_date = '%s-%s' % (period_name, last_day_of_month)
        funcs = ['_totals_stats', '_social_stats', '_os_stats',
                 '_locale_stats', '_browser_stats', '_mobile_stats']
        for f in funcs:
            log.info('Downloading analytics for %s' % f.split('_')[1])
            getattr(self, f)(start_date, end_date, period_name,
                             period_complete_day)

    # NOTE(review): defined without `self` or @staticmethod and not called
    # anywhere in this file - confirm intended usage before relying on it.
    def _get_results(result_data, f):
        data = {}
        for result in result_data:
            key = f(result)
            data[key] = data.get(key, 0) + result[1]
        return data

    def _totals_stats(self, start_date, end_date, period_name,
                      period_complete_day):
        """ Fetches distinct totals, total pageviews etc """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        ga_model.update_sitewide_stats(period_name, "Totals",
                                       {'Total page views': result_data[0][0]},
                                       period_complete_day)

        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits',
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        data = {
            'Pages per visit': result_data[0][0],
            'Average time on site': result_data[0][1],
            'New visits': result_data[0][2],
            'Total visits': result_data[0][3],
        }
        ga_model.update_sitewide_stats(period_name, "Totals", data,
                                       period_complete_day)

        # Bounces from / or another configurable page.
        path = '/%s%s' % (config.get('googleanalytics.account'),
                          config.get('ga-report.bounce_url', '/'))
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            filters='ga:pagePath==%s' % (path,),
            start_date=start_date,
            metrics='ga:bounces,ga:pageviews',
            dimensions='ga:pagePath',
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        if not result_data or len(result_data) != 1:
            log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
                      path, result_data)
            return
        bounces, total = [float(x) for x in result_data[0][1:]]
        pct = 100 * bounces / total
        log.info('%d bounces from %d total == %s', bounces, total, pct)
        ga_model.update_sitewide_stats(period_name, "Totals",
                                       {'Bounce rate (home page)': pct},
                                       period_complete_day)

    def _locale_stats(self, start_date, end_date, period_name,
                      period_complete_day):
        """ Fetches stats about language and country """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:language,ga:country",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        # Aggregate pageviews by language (column 0).
        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Languages", data,
                                       period_complete_day)

        # Aggregate pageviews by country (column 1).
        data = {}
        for result in result_data:
            data[result[1]] = data.get(result[1], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Country", data,
                                       period_complete_day)

    def _social_stats(self, start_date, end_date, period_name,
                      period_complete_day):
        """ Finds out which social sites people are referred from """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:socialNetwork,ga:referralPath",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        data = {}
        for result in result_data:
            if not result[0] == '(not set)':
                data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, 3)
        ga_model.update_sitewide_stats(period_name, "Social sources", data,
                                       period_complete_day)

    def _os_stats(self, start_date, end_date, period_name,
                  period_complete_day):
        """ Operating system stats """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:operatingSystem,ga:operatingSystemVersion",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Operating Systems", data,
                                       period_complete_day)

        # OS + version pairs, keeping only entries above the privacy threshold.
        data = {}
        for result in result_data:
            if int(result[2]) >= MIN_VIEWS:
                key = "%s %s" % (result[0], result[1])
                data[key] = result[2]
        ga_model.update_sitewide_stats(period_name,
                                       "Operating Systems versions", data,
                                       period_complete_day)

    def _browser_stats(self, start_date, end_date, period_name,
                       period_complete_day):
        """ Information about browsers and browser versions """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:browser,ga:browserVersion",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        # e.g. [u'Firefox', u'19.0', u'20']

        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Browsers", data,
                                       period_complete_day)

        data = {}
        for result in result_data:
            key = "%s %s" % (result[0],
                             self._filter_browser_version(result[0], result[1]))
            data[key] = data.get(key, 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Browser versions", data,
                                       period_complete_day)

    @classmethod
    def _filter_browser_version(cls, browser, version_str):
        '''
        Simplifies a browser version string if it is detailed.
        i.e. groups together Firefox 3.5.1 and 3.5.2 to be just 3.
        This is helpful when viewing stats and good to protect privacy.
        '''
        ver = version_str
        parts = ver.split('.')
        if len(parts) > 1:
            # Keep only the major version component. (The original code had
            # an if/else here whose branches were identical.)
            ver = parts[0]
            # Special case complex version nums
            if browser in ['Safari', 'Android Browser']:
                ver = parts[0]
                if len(ver) > 2:
                    # e.g. '536' -> '53X' so long build numbers don't
                    # fragment the stats.
                    num_hidden_digits = len(ver) - 2
                    ver = ver[0] + ver[1] + 'X' * num_hidden_digits
        return ver

    def _mobile_stats(self, start_date, end_date, period_name,
                      period_complete_day):
        """ Info about mobile devices """

        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
            max_results=10000,
            end_date=end_date).execute()

        result_data = results.get('rows')
        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Mobile brands", data,
                                       period_complete_day)

        data = {}
        for result in result_data:
            data[result[1]] = data.get(result[1], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Mobile devices", data,
                                       period_complete_day)

    @classmethod
    def _filter_out_long_tail(cls, data, threshold=10):
        '''
        Given data which is a frequency distribution, filter out
        results which are below a threshold count. This is good to protect
        privacy.
        '''
        # BUG FIX: iterate over a snapshot - deleting from the dict while
        # iterating its live items() view raises RuntimeError on Python 3.
        for key, value in list(data.items()):
            if value < threshold:
                del data[key]
   
import re
import uuid

from sqlalchemy import Table, Column, MetaData, ForeignKey
from sqlalchemy import types
from sqlalchemy.sql import select
from sqlalchemy.orm import mapper, relation
from sqlalchemy import func

import ckan.model as model
from ckan.lib.base import *

log = __import__('logging').getLogger(__name__)
   
def make_uuid():
    '''Return a fresh random UUID as a unicode string.

    Used as the `default` callable for the primary-key columns below.
    '''
    return unicode(uuid.uuid4())


# Shared schema registry for all ga-report tables defined in this module.
metadata = MetaData()
   
class GA_Url(object):
    '''Per-URL analytics (pageviews/visits) for one reporting period.'''

    def __init__(self, **kwargs):
        # Accept any column value as a keyword argument.
        for k, v in kwargs.items():
            setattr(self, k, v)


url_table = Table('ga_url', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  Column('period_complete_day', types.Integer),
                  # NOTE(review): counts stored as text, not integers -
                  # presumably to match the GA API's string values; confirm.
                  Column('pageviews', types.UnicodeText),
                  Column('visits', types.UnicodeText),
                  Column('url', types.UnicodeText),
                  Column('department_id', types.UnicodeText),
                  Column('package_id', types.UnicodeText),
                  )
mapper(GA_Url, url_table)
   
   
class GA_Stat(object):
    '''A single named site-wide statistic (key/value) for one period.'''

    def __init__(self, **kwargs):
        # Accept any column value as a keyword argument.
        for k, v in kwargs.items():
            setattr(self, k, v)


stat_table = Table('ga_stat', metadata,
                   Column('id', types.UnicodeText, primary_key=True,
                          default=make_uuid),
                   Column('period_name', types.UnicodeText),
                   # NOTE(review): UnicodeText here but Integer in ga_url -
                   # inconsistent; preserved as-is.
                   Column('period_complete_day', types.UnicodeText),
                   Column('stat_name', types.UnicodeText),
                   Column('key', types.UnicodeText),
                   Column('value', types.UnicodeText),
                   )
mapper(GA_Stat, stat_table)
   
   
class GA_Publisher(object):
    '''Aggregated analytics for a publisher (group) in one period.'''

    def __init__(self, **kwargs):
        # Accept any column value as a keyword argument.
        for k, v in kwargs.items():
            setattr(self, k, v)


pub_table = Table('ga_publisher', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  Column('publisher_name', types.UnicodeText),
                  Column('views', types.UnicodeText),
                  Column('visits', types.UnicodeText),
                  Column('toplevel', types.Boolean, default=False),
                  Column('subpublishercount', types.Integer, default=0),
                  Column('parent', types.UnicodeText),
                  )
mapper(GA_Publisher, pub_table)
   
   
class GA_ReferralStat(object):
    '''Referral counts per (source, url) for one period.'''

    def __init__(self, **kwargs):
        # Accept any column value as a keyword argument.
        for k, v in kwargs.items():
            setattr(self, k, v)


referrer_table = Table('ga_referrer', metadata,
                       Column('id', types.UnicodeText, primary_key=True,
                              default=make_uuid),
                       Column('period_name', types.UnicodeText),
                       Column('source', types.UnicodeText),
                       Column('url', types.UnicodeText),
                       Column('count', types.Integer),
                       )
mapper(GA_ReferralStat, referrer_table)
   
   
   
def init_tables():
    '''Create all the ga_report tables in the CKAN database.

    create_all is a no-op for tables that already exist.
    '''
    metadata.create_all(model.meta.engine)
   
   
# Module-level cache of reflected Table objects, keyed by table name.
cached_tables = {}


def get_table(name):
    '''Return the SQLAlchemy Table called `name`.

    The table is reflected from the live database on first use and cached
    in `cached_tables` for all subsequent calls.
    '''
    table = cached_tables.get(name)
    if table is None:
        reflected = MetaData()
        reflected.reflect(bind=model.meta.engine)
        table = reflected.tables[name]
        cached_tables[name] = table
    return table
   
   
def _normalize_url(url): def _normalize_url(url):
'''Strip off the hostname etc. Do this before storing it. '''Strip off the hostname etc. Do this before storing it.
   
>>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices') >>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
'/dataset/weekly_fuel_prices' '/dataset/weekly_fuel_prices'
''' '''
return '/' + '/'.join(url.split('/')[3:]) return '/' + '/'.join(url.split('/')[3:])
   
   
def _get_package_and_publisher(url): def _get_package_and_publisher(url):
# e.g. /dataset/fuel_prices # e.g. /dataset/fuel_prices
# e.g. /dataset/fuel_prices/resource/e63380d4 # e.g. /dataset/fuel_prices/resource/e63380d4
dataset_match = re.match('/dataset/([^/]+)(/.*)?', url) dataset_match = re.match('/dataset/([^/]+)(/.*)?', url)
if dataset_match: if dataset_match:
dataset_ref = dataset_match.groups()[0] dataset_ref = dataset_match.groups()[0]
dataset = model.Package.get(dataset_ref) dataset = model.Package.get(dataset_ref)
if dataset: if dataset:
publisher_groups = dataset.get_groups('publisher') publisher_groups = dataset.get_groups('publisher')
if publisher_groups: if publisher_groups:
return dataset_ref,publisher_groups[0].name return dataset_ref,publisher_groups[0].name
return dataset_ref, None return dataset_ref, None
else: else:
publisher_match = re.match('/publisher/([^/]+)(/.*)?', url) publisher_match = re.match('/publisher/([^/]+)(/.*)?', url)
if publisher_match: if publisher_match:
return None, publisher_match.groups()[0] return None, publisher_match.groups()[0]
return None, None return None, None
   
def update_sitewide_stats(period_name, stat_name, data, period_complete_day):
    '''Upsert one GA_Stat row per key in `data` for the given period and
    stat name, then commit the session.'''
    for key, value in data.iteritems():
        existing = model.Session.query(GA_Stat)\
            .filter(GA_Stat.period_name == period_name)\
            .filter(GA_Stat.key == key)\
            .filter(GA_Stat.stat_name == stat_name)\
            .first()
        if existing:
            # Refresh the existing row in place.
            existing.period_name = period_name
            existing.key = key
            existing.value = value
            existing.period_complete_day = period_complete_day
            model.Session.add(existing)
        else:
            # No row yet for this (period, key, stat) - create one.
            model.Session.add(GA_Stat(id=make_uuid(),
                                      period_name=period_name,
                                      period_complete_day=period_complete_day,
                                      key=key,
                                      value=value,
                                      stat_name=stat_name))
    model.Session.commit()
   
   
def pre_update_url_stats(period_name):
    '''Clear GA_Url rows before a re-fetch of `period_name`.

    Removes both the rows for the named period (about to be re-downloaded)
    and the aggregated 'All' rows (recalculated by update_url_stats).
    '''
    log.debug("Deleting '%s' records" % period_name)
    period_rows = model.Session.query(GA_Url)\
        .filter(GA_Url.period_name == period_name)
    period_rows.delete()

    count = model.Session.query(GA_Url)\
        .filter(GA_Url.period_name == 'All').count()
    log.debug("Deleting %d 'All' records" % count)
    all_rows = model.Session.query(GA_Url)\
        .filter(GA_Url.period_name == 'All')
    all_rows.delete()
    model.repo.commit_and_remove()
   
   
def update_url_stats(period_name, period_complete_day, url_data):
    '''
    Given a list of urls and number of hits for each during a given period,
    stores them in GA_Url under the period and recalculates the totals for
    the 'All' period.

    `url_data` is an iterable of (url, views, visits) triples.
    '''
    for url, views, visits in url_data:
        package, publisher = _get_package_and_publisher(url)

        # Upsert the row for this URL in the specific period.
        item = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.url==url).first()
        if item:
            # Accumulate onto the existing period row (the period may be
            # downloaded in several passes as days complete).
            item.pageviews = item.pageviews + views
            item.visits = item.visits + visits
            # Back-fill package/publisher links if missing.
            if not item.package_id:
                item.package_id = package
            if not item.department_id:
                item.department_id = publisher
            model.Session.add(item)
        else:
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'period_complete_day': period_complete_day,
                      'url': url,
                      'pageviews': views,
                      'visits': visits,
                      'department_id': publisher,
                      'package_id': package
                      }
            model.Session.add(GA_Url(**values))
        model.Session.commit()

        if package:
            # Recalculate the 'All' aggregate for this URL: any existing
            # 'All' rows plus the sum of every per-period row.
            old_pageviews, old_visits = 0, 0
            old = model.Session.query(GA_Url).\
                filter(GA_Url.period_name=='All').\
                filter(GA_Url.url==url).all()
            old_pageviews = sum([int(o.pageviews) for o in old])
            old_visits = sum([int(o.visits) for o in old])

            entries = model.Session.query(GA_Url).\
                filter(GA_Url.period_name!='All').\
                filter(GA_Url.url==url).all()
            # NOTE(review): a fresh 'All' row is inserted here without
            # removing the old one(s) whose totals were just folded in;
            # pre_update_url_stats appears responsible for clearing 'All'
            # rows beforehand - confirm, otherwise totals double-count.
            values = {'id': make_uuid(),
                      'period_name': 'All',
                      'period_complete_day': 0,
                      'url': url,
                      'pageviews': sum([int(e.pageviews) for e in entries]) + int(old_pageviews),
                      'visits': sum([int(e.visits or 0) for e in entries]) + int(old_visits),
                      'department_id': publisher,
                      'package_id': package
                      }

            model.Session.add(GA_Url(**values))
            model.Session.commit()
   
   
   
   
def update_social(period_name, data):
    '''Upsert GA_ReferralStat rows for the given period.

    `data` maps a url to a list of (source, count) entries. Existing rows
    for the period are deleted first, then each entry is added (summed
    onto any row created earlier in this call for the same key).
    '''
    # Clean up first.
    model.Session.query(GA_ReferralStat).\
        filter(GA_ReferralStat.period_name==period_name).delete()

    # BUG FIX: the loop variable was previously also called `data`,
    # shadowing the parameter being iterated; renamed to `entries`.
    for url, entries in data.iteritems():
        for entry in entries:
            source = entry[0]
            count = entry[1]

            item = model.Session.query(GA_ReferralStat).\
                filter(GA_ReferralStat.period_name==period_name).\
                filter(GA_ReferralStat.source==source).\
                filter(GA_ReferralStat.url==url).first()
            if item:
                item.count = item.count + count
                model.Session.add(item)
            else:
                # create the row
                values = {'id': make_uuid(),
                          'period_name': period_name,
                          'source': source,
                          'url': url,
                          'count': count,
                          }
                model.Session.add(GA_ReferralStat(**values))
    model.Session.commit()
   
def update_publisher_stats(period_name):
    """
    Updates the publisher stats from the data retrieved for /dataset/*
    and /publisher/*. Will run against each dataset and generates the
    totals for the entire tree beneath each publisher.
    """
    toplevel = get_top_level()
    publishers = model.Session.query(model.Group).\
        filter(model.Group.type=='publisher').\
        filter(model.Group.state=='active').all()
    for publisher in publishers:
        # Totals for this publisher include every sub-publisher below it.
        views, visits, subpub = update_publisher(period_name, publisher, publisher.name)
        parent, parents = '', publisher.get_groups('publisher')
        if parents:
            parent = parents[0].name
        item = model.Session.query(GA_Publisher).\
            filter(GA_Publisher.period_name==period_name).\
            filter(GA_Publisher.publisher_name==publisher.name).first()
        if item:
            # Refresh the existing row for this period in place.
            item.views = views
            item.visits = visits
            item.publisher_name = publisher.name
            item.toplevel = publisher in toplevel
            item.subpublishercount = subpub
            item.parent = parent
            model.Session.add(item)
        else:
            # create the row
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'publisher_name': publisher.name,
                      'views': views,
                      'visits': visits,
                      'toplevel': publisher in toplevel,
                      'subpublishercount': subpub,
                      'parent': parent
                      }
            model.Session.add(GA_Publisher(**values))
    model.Session.commit()
   
   
def update_publisher(period_name, pub, part=''):
    '''Sum pageviews/visits for `pub` and everything beneath it.

    Returns (views, visits, subpublisher_count) for the given period,
    where subpublisher_count excludes `pub` itself. The `part` argument
    is accepted for interface compatibility but is not used.
    '''
    total_views = 0
    total_visits = 0
    node_count = 0
    for node in go_down_tree(pub):
        node_count += 1
        rows = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.department_id==node.name).all()
        for row in rows:
            # Stored as text in GA_Url - cast before summing.
            total_views += int(row.pageviews)
            total_visits += int(row.visits)

    # go_down_tree yielded `pub` itself, so subtract one.
    return total_views, total_visits, (node_count - 1)
   
   
def get_top_level():
    '''Returns the top level publishers.

    A publisher is top level when it has no active membership in a parent
    group (the outer join finds no matching Member row).
    '''
    # NOTE(review): the `and` below is Python's boolean `and`, not SQL AND,
    # so at most one of the three comparisons reaches the JOIN clause;
    # sqlalchemy.and_() looks like what was intended. Confirm before
    # changing - the current query may be working by accident.
    return model.Session.query(model.Group).\
           outerjoin(model.Member, model.Member.table_id == model.Group.id and \
                     model.Member.table_name == 'group' and \
                     model.Member.state == 'active').\
           filter(model.Member.id==None).\
           filter(model.Group.type=='publisher').\
           order_by(model.Group.name).all()
   
def get_children(publisher):
    '''Finds child publishers for the given publisher (object). (Not recursive)'''
    # HIERARCHY_CTE is a recursive SQL statement provided by CKAN's group
    # model; imported lazily here, presumably to avoid import-time coupling
    # - confirm before moving to module level.
    from ckan.model.group import HIERARCHY_CTE
    return model.Session.query(model.Group).\
           from_statement(HIERARCHY_CTE).params(id=publisher.id, type='publisher').\
           all()
   
def go_down_tree(publisher):
    '''Provided with a publisher object, it walks down the hierarchy and
    yields each publisher, including the one you supply.'''
    # Iterative pre-order depth-first traversal - equivalent to recursing
    # into each child in turn.
    remaining = [publisher]
    while remaining:
        current = remaining.pop()
        yield current
        # Reverse so the first child is popped (and yielded) first.
        remaining.extend(reversed(get_children(current)))
   
def delete(period_name):
    '''
    Deletes table data for the specified period, or specify 'all'
    for all periods.
    '''
    for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat):
        q = model.Session.query(object_type)
        # BUG FIX: the docstring advertises 'all' but the code only
        # recognised 'All'; accept either case so both keep working.
        if period_name.lower() != 'all':
            q = q.filter_by(period_name=period_name)
        q.delete()
    model.repo.commit_and_remove()
   
def get_score_for_dataset(dataset_name):
    '''
    Returns a "current popularity" score for a dataset,
    based on how many views it has had recently.

    Considers last month's and this month's periods, with the older
    period's contribution halved. Returns an int (views-per-day * 100).
    '''
    import datetime
    now = datetime.datetime.now()
    last_month = now - datetime.timedelta(days=30)
    # Period names as stored in GA_Url, e.g. '2012-11'.
    period_names = ['%s-%02d' % (last_month.year, last_month.month),
                    '%s-%02d' % (now.year, now.month),
                    ]

    score = 0
    for period_name in period_names:
        score /= 2  # previous periods are discounted by 50%
        entry = model.Session.query(GA_Url)\
            .filter(GA_Url.period_name==period_name)\
            .filter(GA_Url.package_id==dataset_name).first()
        # Add this period's views-per-day to the score.
        if entry:
            views = float(entry.pageviews)
            if entry.period_complete_day:
                views_per_day = views / entry.period_complete_day
            else:
                views_per_day = views / 15  # guess
            score += views_per_day

    score = int(score * 100)
    log.debug('Popularity %s: %s', score, dataset_name)
    return score
   
import logging import logging
import operator import operator
   
import ckan.lib.base as base import ckan.lib.base as base
import ckan.model as model import ckan.model as model
from ckan.logic import get_action from ckan.logic import get_action
   
from ckanext.ga_report.ga_model import GA_Url, GA_Publisher from ckanext.ga_report.ga_model import GA_Url, GA_Publisher
from ckanext.ga_report.controller import _get_publishers from ckanext.ga_report.controller import _get_publishers
_log = logging.getLogger(__name__) _log = logging.getLogger(__name__)
   
def popular_datasets(count=10):
    '''Render a snippet listing up to `count` popular datasets from a
    randomly chosen active publisher.'''
    import random

    publisher = None
    # ROBUSTNESS: previously `datasets` was only saved from NameError by
    # short-circuit evaluation of the `while` condition; initialise it
    # explicitly so the loop test is safe regardless of evaluation order.
    datasets = None
    publishers = _get_publishers(30)
    total = len(publishers)
    # NOTE(review): if `total` is 0, randrange raises ValueError - confirm
    # callers guarantee at least one publisher.
    while not publisher or not datasets:
        rand = random.randrange(0, total)
        publisher = publishers[rand][0]
        if not publisher.state == 'active':
            publisher = None
            continue
        datasets = _datasets_for_publisher(publisher, 10)[:count]

    ctx = {
        'datasets': datasets,
        'publisher': publisher
    }
    return base.render_snippet('ga_report/ga_popular_datasets.html', **ctx)
   
def single_popular_dataset(top=20):
    '''Returns a random dataset from the most popular ones.

    :param top: the number of top datasets to select from

    Returns a package dict (via package_show) or None if no active
    dataset can be found at all.
    '''
    import random

    # All dataset URLs, most-viewed first (pageviews are stored as text,
    # hence the ::int cast in the ordering expression).
    top_datasets = model.Session.query(GA_Url).\
        filter(GA_Url.url.like('/dataset/%')).\
        order_by('ga_url.pageviews::int desc')
    num_top_datasets = top_datasets.count()

    dataset = None
    if num_top_datasets:
        count = 0
        while not dataset:
            rand = random.randrange(0, min(top, num_top_datasets))
            ga_url = top_datasets[rand]
            dataset = model.Package.get(ga_url.url[len('/dataset/'):])
            if dataset and not dataset.state == 'active':
                dataset = None
            # When testing, it is possible that top datasets are not available
            # so only go round this loop a few times before falling back on
            # a random dataset.
            count += 1
            if count > 10:
                break
    if not dataset:
        # fallback: any active package at all
        dataset = model.Session.query(model.Package)\
                  .filter_by(state='active').first()
        if not dataset:
            return None
    dataset_dict = get_action('package_show')({'model': model,
                                               'session': model.Session,
                                               'validate': False},
                                              {'id':dataset.id})
    return dataset_dict
   
def single_popular_dataset_html(top=20):
    '''Render the 'popular single dataset' snippet for a random dataset
    drawn from the `top` most popular ones.'''
    dataset_dict = single_popular_dataset(top)
    # BUG FIX: this read `package.get(...)` but no name `package` exists;
    # the dict returned above is the package dict.
    groups = dataset_dict.get('groups', [])
    publishers = [ g for g in groups if g.get('type') == 'publisher' ]
    publisher = publishers[0] if publishers else {'name':'', 'title': ''}
    context = {
        'dataset': dataset_dict,
        # BUG FIX: `publisher_dict` was undefined; use the publisher group
        # dict selected above.
        'publisher': publisher
    }
    return base.render_snippet('ga_report/ga_popular_single.html', **context)
   
   
def most_popular_datasets(publisher, count=20):
    '''Render the 'popular datasets' snippet for one publisher, covering
    up to `count` of its most viewed datasets.'''
    if not publisher:
        _log.error("No valid publisher passed to 'most_popular_datasets'")
        return ""

    datasets = _datasets_for_publisher(publisher, count)

    template_vars = {
        'dataset_count': len(datasets),
        'datasets': datasets,
        'publisher': publisher
    }
    return base.render_snippet('ga_report/publisher/popular.html', **template_vars)
   
def _datasets_for_publisher(publisher, count):
    '''Return up to `count` (package, views, visits) tuples for the
    publisher's datasets, most viewed first.'''
    stats = {}
    rows = model.Session.query(GA_Url).\
        filter(GA_Url.department_id==publisher.name).\
        filter(GA_Url.url.like('/dataset/%')).\
        order_by('ga_url.pageviews::int desc').all()
    for row in rows:
        if len(stats) >= count:
            # Enough distinct packages collected; nothing further would
            # be recorded, so stop scanning.
            break
        pkg = model.Package.get(row.url[len('/dataset/'):])
        if pkg not in stats:
            stats[pkg] = {'views': 0, 'visits': 0}
        stats[pkg]['views'] += int(row.pageviews)
        stats[pkg]['visits'] += int(row.visits)

    results = [(pkg, totals['views'], totals['visits'])
               for pkg, totals in stats.iteritems()]
    return sorted(results, key=operator.itemgetter(1), reverse=True)
   
<html xmlns:py="http://genshi.edgewall.org/"
      xmlns:i18n="http://genshi.edgewall.org/i18n"
      xmlns:xi="http://www.w3.org/2001/XInclude"
      py:strip="">

  <!--! Usage-by-publisher report page: month selector plus a table of
        dataset views per publisher, with a CSV download in the sidebar.
        Genshi strips comments beginning with '!' from the output. -->

  <xi:include href="../ga_util.html" />

  <py:def function="page_title">Usage by Publisher</py:def>

  <py:match path="primarysidebar">
    <li class="widget-container boxed widget_text">
      <h4>Download</h4>
      <p><center>
      <a class="btn button btn-primary" href="${h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='publisher_csv',month=c.month or 'all')}">Download as CSV</a></center>
      </p>
    </li>
    <xi:include href="../notes.html" />
  </py:match>


  <div py:match="content">

    <h1>Site Usage</h1>

    ${usage_nav('Publishers')}

    <!--! Re-submits the page with the chosen month as a query parameter. -->
    <form class="form-inline" action="${h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='publishers')}" method="get">
      <div class="controls">

        ${month_selector(c.month, c.months, c.day)}

        <input class="btn button btn-primary" type='submit' value="Update"/>
      </div>
    </form>

    <table class="table table-condensed table-bordered table-striped">
      <tr>
        <th>Publisher</th>
        <th>Dataset Views</th>
      </tr>
      <py:for each="publisher, views, visits in c.top_publishers">
        <tr>
          <td>
            <!--! Carry the selected month through to the per-publisher page. -->
            ${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name) + (("?month=" + c.month) if c.month else ''))}
          </td>
          <td>${views}</td>
        </tr>
      </py:for>
    </table>


  </div>

  <xi:include href="../../layout.html" />

  <py:def function="optional_footer">
    <script type='text/javascript'>
      $('.nav-tabs li a').click(function (e) {
        e.preventDefault();
        $(this).tab('show');
      })
    </script>
  </py:def>

</html>