Merge commit 'd0db210'

import logging
import datetime
import os

from pylons import config

from ckan.lib.cli import CkanCommand
# No other CKAN imports allowed until _load_config is run,
# or logging is disabled
   
   
class InitDB(CkanCommand):
    """Initialise the extension's database tables
    """
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = 0
    min_args = 0

    def command(self):
        self._load_config()

        import ckan.model as model
        model.Session.remove()
        model.Session.configure(bind=model.meta.engine)
        log = logging.getLogger('ckanext.ga_report')

        import ga_model
        ga_model.init_tables()
        log.info("DB tables are set up")
   
   
class GetAuthToken(CkanCommand):
    """ Gets the Google auth token

    Usage: paster getauthtoken <credentials_file>

    Where <credentials_file> is the file name containing the details
    for the service (obtained from https://code.google.com/apis/console).
    By default this is set to credentials.json
    """
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = 0
    min_args = 0

    def command(self):
        """
        In this case we don't want a valid service, but rather just to
        force the user through the auth flow. We allow this to complete to
        act as a form of verification instead of just getting the token and
        assuming it is correct.
        """
        from ga_auth import init_service
        init_service('token.dat',
                     self.args[0] if self.args
                                  else 'credentials.json')
   
class FixTimePeriods(CkanCommand):
    """
    Fixes the 'All' records for GA_Urls

    It is possible that older URLs that haven't recently been visited
    do not have 'All' records. This command will traverse those
    records and generate valid 'All' records for them.
    """
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = 0
    min_args = 0

    def __init__(self, name):
        super(FixTimePeriods, self).__init__(name)

    def command(self):
        import ckan.model as model
        from ga_model import post_update_url_stats
        self._load_config()
        model.Session.remove()
        model.Session.configure(bind=model.meta.engine)

        log = logging.getLogger('ckanext.ga_report')

        log.info("Updating 'All' records for old URLs")
        post_update_url_stats()
        log.info("Processing complete")
   
   
   
class LoadAnalytics(CkanCommand):
    """Get data from Google Analytics API and save it
    in the ga_model

    Usage: paster loadanalytics <time-period>

    Where <time-period> is:
        all     - data for all time
        latest  - (default) just the 'latest' data
        YYYY-MM - just data for the specific month
    """
    summary = __doc__.split('\n')[0]
    usage = __doc__
    max_args = 1
    min_args = 0

    def __init__(self, name):
        super(LoadAnalytics, self).__init__(name)
        self.parser.add_option('-d', '--delete-first',
                               action='store_true',
                               default=False,
                               dest='delete_first',
                               help='Delete data for the period first')
        self.parser.add_option('-s', '--skip_url_stats',
                               action='store_true',
                               default=False,
                               dest='skip_url_stats',
                               help='Skip the download of URL data - just do site-wide stats')

    def command(self):
        self._load_config()

        from download_analytics import DownloadAnalytics
        from ga_auth import (init_service, get_profile_id)

        ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', ''))
        if not ga_token_filepath:
            print 'ERROR: In the CKAN config you need to specify the filepath of the ' \
                  'Google Analytics token file under key: googleanalytics.token.filepath'
            return

        try:
            svc = init_service(ga_token_filepath, None)
        except TypeError:
            print ('Have you correctly run the getauthtoken task and '
                   'specified the correct token file in the CKAN config under '
                   '"googleanalytics.token.filepath"?')
            return

        downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc),
                                       delete_first=self.options.delete_first,
                                       skip_url_stats=self.options.skip_url_stats)

        time_period = self.args[0] if self.args else 'latest'
        if time_period == 'all':
            downloader.all_()
        elif time_period == 'latest':
            downloader.latest()
        else:
            # The month to use
            for_date = datetime.datetime.strptime(time_period, '%Y-%m')
            downloader.specific_month(for_date)
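
# Illustrative command-line usage (the config filename and credentials path are
# assumptions, not taken from this file; the command names come from the
# docstrings above):
#
#   paster getauthtoken credentials.json --config=development.ini
#   paster loadanalytics latest --config=development.ini
#   paster loadanalytics 2013-02 --delete-first --config=development.ini
#   paster loadanalytics all --skip_url_stats --config=development.ini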
   
import os
import logging
import datetime
import collections

from pylons import config
from ga_model import _normalize_url
import ga_model

#from ga_client import GA

log = logging.getLogger('ckanext.ga-report')

FORMAT_MONTH = '%Y-%m'
MIN_VIEWS = 50
MIN_VISITS = 20
MIN_DOWNLOADS = 10
   
class DownloadAnalytics(object):
    '''Downloads and stores analytics info'''

    def __init__(self, service=None, profile_id=None, delete_first=False,
                 skip_url_stats=False):
        self.period = config['ga-report.period']
        self.service = service
        self.profile_id = profile_id
        self.delete_first = delete_first
        self.skip_url_stats = skip_url_stats

    def specific_month(self, date):
        import calendar

        first_of_this_month = datetime.datetime(date.year, date.month, 1)
        _, last_day_of_month = calendar.monthrange(int(date.year), int(date.month))
        last_of_this_month = datetime.datetime(date.year, date.month, last_day_of_month)
        # if this is the latest month, note that it is only up until today
        now = datetime.datetime.now()
        if now.year == date.year and now.month == date.month:
            last_day_of_month = now.day
            last_of_this_month = now
        periods = ((date.strftime(FORMAT_MONTH),
                    last_day_of_month,
                    first_of_this_month, last_of_this_month),)
        self.download_and_store(periods)
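
    # For illustration (example values, not output captured from a real run):
    # specific_month(datetime.datetime(2013, 2, 1)) builds the single period
    # ('2013-02', 28, datetime(2013, 2, 1), datetime(2013, 2, 28)), or ends at
    # 'now' instead if February 2013 happens to be the current month.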
   
   
    def latest(self):
        if self.period == 'monthly':
            # from first of this month to today
            now = datetime.datetime.now()
            first_of_this_month = datetime.datetime(now.year, now.month, 1)
            periods = ((now.strftime(FORMAT_MONTH),
                        now.day,
                        first_of_this_month, now),)
        else:
            raise NotImplementedError
        self.download_and_store(periods)
   
   
    def for_date(self, for_date):
        assert isinstance(for_date, datetime.datetime)
        periods = []  # (period_name, period_complete_day, start_date, end_date)
        if self.period == 'monthly':
            first_of_the_months_until_now = []
            year = for_date.year
            month = for_date.month
            now = datetime.datetime.now()
            first_of_this_month = datetime.datetime(now.year, now.month, 1)
            while True:
                first_of_the_month = datetime.datetime(year, month, 1)
                if first_of_the_month == first_of_this_month:
                    periods.append((now.strftime(FORMAT_MONTH),
                                    now.day,
                                    first_of_this_month, now))
                    break
                elif first_of_the_month < first_of_this_month:
                    in_the_next_month = first_of_the_month + datetime.timedelta(40)
                    last_of_the_month = datetime.datetime(in_the_next_month.year,
                                                          in_the_next_month.month, 1)\
                                        - datetime.timedelta(1)
                    periods.append((now.strftime(FORMAT_MONTH), 0,
                                    first_of_the_month, last_of_the_month))
                else:
                    # first_of_the_month has got to the future somehow
                    break
                month += 1
                if month > 12:
                    year += 1
                    month = 1
        else:
            raise NotImplementedError
        self.download_and_store(periods)
   
    @staticmethod
    def get_full_period_name(period_name, period_complete_day):
        if period_complete_day:
            return period_name + ' (up to %ith)' % period_complete_day
        else:
            return period_name
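
    # e.g. (illustrative values):
    #   get_full_period_name('2013-02', 14) -> '2013-02 (up to 14th)'
    #   get_full_period_name('2013-01', 0)  -> '2013-01'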
   
   
    def download_and_store(self, periods):
        for period_name, period_complete_day, start_date, end_date in periods:
            log.info('Period "%s" (%s - %s)',
                     self.get_full_period_name(period_name, period_complete_day),
                     start_date.strftime('%Y-%m-%d'),
                     end_date.strftime('%Y-%m-%d'))

            if self.delete_first:
                log.info('Deleting existing Analytics for this period "%s"',
                         period_name)
                ga_model.delete(period_name)

            if not self.skip_url_stats:
                # Clean out old url data before storing the new
                ga_model.pre_update_url_stats(period_name)

                accountName = config.get('googleanalytics.account')

                log.info('Downloading analytics for dataset views')
                data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)

                log.info('Storing dataset views (%i rows)', len(data.get('url')))
                self.store(period_name, period_complete_day, data)

                log.info('Downloading analytics for publisher views')
                data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)

                log.info('Storing publisher views (%i rows)', len(data.get('url')))
                self.store(period_name, period_complete_day, data)

                # Make sure the All records are correct.
                ga_model.post_update_url_stats()

                log.info('Associating datasets with their publisher')
                ga_model.update_publisher_stats(period_name)  # about 30 seconds.

            log.info('Downloading and storing analytics for site-wide stats')
            self.sitewide_stats(period_name, period_complete_day)

            log.info('Downloading and storing analytics for social networks')
            self.update_social_info(period_name, start_date, end_date)
   
   
    def update_social_info(self, period_name, start_date, end_date):
        start_date = start_date.strftime('%Y-%m-%d')
        end_date = end_date.strftime('%Y-%m-%d')
        query = 'ga:hasSocialSourceReferral=~Yes$'
        metrics = 'ga:entrances'
        sort = '-ga:entrances'

        # Supported query params at
        # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            filters=query,
            start_date=start_date,
            metrics=metrics,
            sort=sort,
            dimensions="ga:landingPagePath,ga:socialNetwork",
            max_results=10000,
            end_date=end_date).execute()
        data = collections.defaultdict(list)
        rows = results.get('rows', [])
        for row in rows:
            url = _normalize_url('http:/' + row[0])
            data[url].append((row[1], int(row[2]),))
        ga_model.update_social(period_name, data)
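
    # The dict passed to ga_model.update_social maps a normalized URL to a list
    # of (social network, entrance count) pairs, e.g. (made-up figures):
    #   {'/dataset/weekly_fuel_prices': [('Twitter', 17), ('Facebook', 4)]}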
   
   
    def download(self, start_date, end_date, path=None):
        '''Get data from GA for a given time period'''
        start_date = start_date.strftime('%Y-%m-%d')
        end_date = end_date.strftime('%Y-%m-%d')
        query = 'ga:pagePath=%s$' % path
        metrics = 'ga:pageviews, ga:visits'
        sort = '-ga:pageviews'

        # Supported query params at
        # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            filters=query,
            start_date=start_date,
            metrics=metrics,
            sort=sort,
            dimensions="ga:pagePath",
            max_results=10000,
            end_date=end_date).execute()

        packages = []
        log.info("There are %d results" % results['totalResults'])
        for entry in results.get('rows'):
            (loc, pageviews, visits) = entry
            url = _normalize_url('http:/' + loc)  # strips off domain e.g. www.data.gov.uk or data.gov.uk

            if not url.startswith('/dataset/') and not url.startswith('/publisher/'):
                # filter out strays like:
                # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open
                # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate
                continue
            packages.append((url, pageviews, visits,))  # Temporary hack
        return dict(url=packages)
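
    # Return shape (illustrative; GA returns the metric values as strings):
    #   {'url': [('/dataset/weekly_fuel_prices', u'123', u'45'), ...]}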
   
    def store(self, period_name, period_complete_day, data):
        if 'url' in data:
            ga_model.update_url_stats(period_name, period_complete_day, data['url'])
   
    def sitewide_stats(self, period_name, period_complete_day):
        import calendar
        year, month = period_name.split('-')
        _, last_day_of_month = calendar.monthrange(int(year), int(month))

        start_date = '%s-01' % period_name
        end_date = '%s-%s' % (period_name, last_day_of_month)
        funcs = ['_totals_stats', '_social_stats', '_os_stats',
                 '_locale_stats', '_browser_stats', '_mobile_stats', '_download_stats']
        for f in funcs:
            log.info('Downloading analytics for %s' % f.split('_')[1])
            getattr(self, f)(start_date, end_date, period_name, period_complete_day)
   
    @staticmethod
    def _get_results(result_data, f):
        data = {}
        for result in result_data:
            key = f(result)
            data[key] = data.get(key, 0) + result[1]
        return data
   
    def _totals_stats(self, start_date, end_date, period_name, period_complete_day):
        """ Fetches distinct totals, total pageviews etc """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        ga_model.update_sitewide_stats(period_name, "Totals",
                                       {'Total page views': result_data[0][0]},
                                       period_complete_day)

        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits',
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        data = {
            'Pages per visit': result_data[0][0],
            'Average time on site': result_data[0][1],
            'New visits': result_data[0][2],
            'Total visits': result_data[0][3],
        }
        ga_model.update_sitewide_stats(period_name, "Totals", data, period_complete_day)

        # Bounces from / or another configurable page.
        path = '/%s%s' % (config.get('googleanalytics.account'),
                          config.get('ga-report.bounce_url', '/'))
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            filters='ga:pagePath==%s' % (path,),
            start_date=start_date,
            metrics='ga:visitBounceRate',
            dimensions='ga:pagePath',
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        if not result_data or len(result_data) != 1:
            log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
                      path, result_data)
            return
        results = result_data[0]
        bounces = float(results[1])
        # visitBounceRate is already a %
        log.info('Google reports visitBounceRate as %s', bounces)
        ga_model.update_sitewide_stats(period_name, "Totals",
                                       {'Bounce rate (home page)': float(bounces)},
                                       period_complete_day)
   
   
    def _locale_stats(self, start_date, end_date, period_name, period_complete_day):
        """ Fetches stats about language and country """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:language,ga:country",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Languages", data, period_complete_day)

        data = {}
        for result in result_data:
            data[result[1]] = data.get(result[1], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Country", data, period_complete_day)
   
   
    def _download_stats(self, start_date, end_date, period_name, period_complete_day):
        """ Fetches stats about data downloads """
        import ckan.model as model

        data = {}

        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            filters='ga:eventAction==download',
            metrics='ga:totalEvents',
            sort='-ga:totalEvents',
            dimensions="ga:eventLabel",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        if not result_data:
            # We may not have data for this time period, so we need to bail
            # early.
            log.info("There is no download data for this time period")
            return

        def process_result_data(result_data, cached=False):
            progress_total = len(result_data)
            progress_count = 0
            resources_not_matched = []
            for result in result_data:
                progress_count += 1
                if progress_count % 100 == 0:
                    log.debug('.. %d/%d done so far', progress_count, progress_total)

                url = result[0].strip()

                # Get package id associated with the resource that has this URL.
                q = model.Session.query(model.Resource)
                if cached:
                    r = q.filter(model.Resource.cache_url.like("%s%%" % url)).first()
                else:
                    r = q.filter(model.Resource.url.like("%s%%" % url)).first()

                package_name = r.resource_group.package.name if r else ""
                if package_name:
                    data[package_name] = data.get(package_name, 0) + int(result[1])
                else:
                    resources_not_matched.append(url)
                    continue
            if resources_not_matched:
                log.debug('Could not match %i of %i resource URLs to datasets. e.g. %r',
                          len(resources_not_matched), progress_total, resources_not_matched[:3])

        log.info('Associating downloads of resource URLs with their respective datasets')
        process_result_data(results.get('rows'))

        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            filters='ga:eventAction==download-cache',
            metrics='ga:totalEvents',
            sort='-ga:totalEvents',
            dimensions="ga:eventLabel",
            max_results=10000,
            end_date=end_date).execute()
        log.info('Associating downloads of cache resource URLs with their respective datasets')
        process_result_data(results.get('rows'), cached=False)

        self._filter_out_long_tail(data, MIN_DOWNLOADS)
        ga_model.update_sitewide_stats(period_name, "Downloads", data, period_complete_day)
   
    def _social_stats(self, start_date, end_date, period_name, period_complete_day):
        """ Finds out which social sites people are referred from """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:socialNetwork,ga:referralPath",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        data = {}
        for result in result_data:
            if not result[0] == '(not set)':
                data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, 3)
        ga_model.update_sitewide_stats(period_name, "Social sources", data, period_complete_day)
   
   
    def _os_stats(self, start_date, end_date, period_name, period_complete_day):
        """ Operating system stats """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:operatingSystem,ga:operatingSystemVersion",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Operating Systems", data, period_complete_day)

        data = {}
        for result in result_data:
            if int(result[2]) >= MIN_VIEWS:
                key = "%s %s" % (result[0], result[1])
                data[key] = result[2]
        ga_model.update_sitewide_stats(period_name, "Operating Systems versions", data, period_complete_day)
   
   
    def _browser_stats(self, start_date, end_date, period_name, period_complete_day):
        """ Information about browsers and browser versions """
        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:browser,ga:browserVersion",
            max_results=10000,
            end_date=end_date).execute()
        result_data = results.get('rows')
        # e.g. [u'Firefox', u'19.0', u'20']

        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Browsers", data, period_complete_day)

        data = {}
        for result in result_data:
            key = "%s %s" % (result[0], self._filter_browser_version(result[0], result[1]))
            data[key] = data.get(key, 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Browser versions", data, period_complete_day)
   
    @classmethod
    def _filter_browser_version(cls, browser, version_str):
        '''
        Simplifies a browser version string if it is detailed.
        i.e. groups together Firefox 3.5.1 and 3.5.2 to be just 3.
        This is helpful when viewing stats and good to protect privacy.
        '''
        ver = version_str
        parts = ver.split('.')
        if len(parts) > 1:
            if parts[1][0] == '0':
                ver = parts[0]
            else:
                ver = "%s" % (parts[0])
        # Special case complex version nums
        if browser in ['Safari', 'Android Browser']:
            ver = parts[0]
            if len(ver) > 2:
                num_hidden_digits = len(ver) - 2
                ver = ver[0] + ver[1] + 'X' * num_hidden_digits
        return ver
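
    # Worked examples (hypothetical inputs, traced through the logic above):
    #   _filter_browser_version('Firefox', '19.0.2')  -> '19'
    #   _filter_browser_version('Safari', '534.30')   -> '53X'  (long build masked)
    #   _filter_browser_version('Chrome', '24')       -> '24'   (single part left alone)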
   
    def _mobile_stats(self, start_date, end_date, period_name, period_complete_day):
        """ Info about mobile devices """

        results = self.service.data().ga().get(
            ids='ga:' + self.profile_id,
            start_date=start_date,
            metrics='ga:pageviews',
            sort='-ga:pageviews',
            dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
            max_results=10000,
            end_date=end_date).execute()

        result_data = results.get('rows')
        data = {}
        for result in result_data:
            data[result[0]] = data.get(result[0], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Mobile brands", data, period_complete_day)

        data = {}
        for result in result_data:
            data[result[1]] = data.get(result[1], 0) + int(result[2])
        self._filter_out_long_tail(data, MIN_VIEWS)
        ga_model.update_sitewide_stats(period_name, "Mobile devices", data, period_complete_day)
   
    @classmethod
    def _filter_out_long_tail(cls, data, threshold=10):
        '''
        Given data which is a frequency distribution, filter out
        results which are below a threshold count. This is good to protect
        privacy.
        '''
        for key, value in data.items():
            if value < threshold:
                del data[key]
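
    # e.g. (illustrative): given data = {'en': 120, 'cy': 3} and MIN_VIEWS = 50,
    # _filter_out_long_tail(data, MIN_VIEWS) removes 'cy' in place, leaving
    # {'en': 120}.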
   
import re
import uuid

from sqlalchemy import Table, Column, MetaData, ForeignKey
from sqlalchemy import types
from sqlalchemy.sql import select
from sqlalchemy.orm import mapper, relation
from sqlalchemy import func

import ckan.model as model
from ckan.lib.base import *

log = __import__('logging').getLogger(__name__)

def make_uuid():
    return unicode(uuid.uuid4())

metadata = MetaData()
   
class GA_Url(object):

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

url_table = Table('ga_url', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  Column('period_complete_day', types.Integer),
                  Column('pageviews', types.UnicodeText),
                  Column('visits', types.UnicodeText),
                  Column('url', types.UnicodeText),
                  Column('department_id', types.UnicodeText),
                  Column('package_id', types.UnicodeText),
                  )
mapper(GA_Url, url_table)
   
   
class GA_Stat(object):

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

stat_table = Table('ga_stat', metadata,
                   Column('id', types.UnicodeText, primary_key=True,
                          default=make_uuid),
                   Column('period_name', types.UnicodeText),
                   Column('period_complete_day', types.UnicodeText),
                   Column('stat_name', types.UnicodeText),
                   Column('key', types.UnicodeText),
                   Column('value', types.UnicodeText), )
mapper(GA_Stat, stat_table)
   
   
class GA_Publisher(object):

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

pub_table = Table('ga_publisher', metadata,
                  Column('id', types.UnicodeText, primary_key=True,
                         default=make_uuid),
                  Column('period_name', types.UnicodeText),
                  Column('publisher_name', types.UnicodeText),
                  Column('views', types.UnicodeText),
                  Column('visits', types.UnicodeText),
                  Column('toplevel', types.Boolean, default=False),
                  Column('subpublishercount', types.Integer, default=0),
                  Column('parent', types.UnicodeText),
                  )
mapper(GA_Publisher, pub_table)
   
   
class GA_ReferralStat(object):

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

referrer_table = Table('ga_referrer', metadata,
                       Column('id', types.UnicodeText, primary_key=True,
                              default=make_uuid),
                       Column('period_name', types.UnicodeText),
                       Column('source', types.UnicodeText),
                       Column('url', types.UnicodeText),
                       Column('count', types.Integer),
                       )
mapper(GA_ReferralStat, referrer_table)
   
   
   
def init_tables():
    metadata.create_all(model.meta.engine)


cached_tables = {}


def get_table(name):
    if name not in cached_tables:
        meta = MetaData()
        meta.reflect(bind=model.meta.engine)
        table = meta.tables[name]
        cached_tables[name] = table
    return cached_tables[name]
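
# Illustrative use: get_table('ga_url') reflects the database once and then
# returns the cached SQLAlchemy Table object for the ga_url table on
# subsequent calls.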
   
   
def _normalize_url(url):
    '''Strip off the hostname etc. Do this before storing it.

    >>> _normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
    '/dataset/weekly_fuel_prices'
    '''
    return '/' + '/'.join(url.split('/')[3:])
   
   
def _get_package_and_publisher(url):
    # e.g. /dataset/fuel_prices
    # e.g. /dataset/fuel_prices/resource/e63380d4
    dataset_match = re.match('/dataset/([^/]+)(/.*)?', url)
    if dataset_match:
        dataset_ref = dataset_match.groups()[0]
        dataset = model.Package.get(dataset_ref)
        if dataset:
            publisher_groups = dataset.get_groups('publisher')
            if publisher_groups:
                return dataset_ref, publisher_groups[0].name
        return dataset_ref, None
    else:
        publisher_match = re.match('/publisher/([^/]+)(/.*)?', url)
        if publisher_match:
            return None, publisher_match.groups()[0]
    return None, None
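
# Illustrative returns (the dataset and publisher names are made up; the
# dataset branch also consults the database for the package's publisher group):
#   _get_package_and_publisher('/dataset/fuel_prices/resource/e63380d4')
#       -> ('fuel_prices', <publisher name or None>)
#   _get_package_and_publisher('/publisher/environment-agency')
#       -> (None, 'environment-agency')
#   _get_package_and_publisher('/about') -> (None, None)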
   
def update_sitewide_stats(period_name, stat_name, data, period_complete_day):
    for k, v in data.iteritems():
        item = model.Session.query(GA_Stat).\
            filter(GA_Stat.period_name==period_name).\
            filter(GA_Stat.key==k).\
            filter(GA_Stat.stat_name==stat_name).first()
        if item:
            item.period_name = period_name
            item.key = k
            item.value = v
            item.period_complete_day = period_complete_day
            model.Session.add(item)
        else:
            # create the row
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'period_complete_day': period_complete_day,
                      'key': k,
                      'value': v,
                      'stat_name': stat_name
                      }
            model.Session.add(GA_Stat(**values))
        model.Session.commit()
   
   
def pre_update_url_stats(period_name):
    q = model.Session.query(GA_Url).\
        filter(GA_Url.period_name==period_name)
    log.debug("Deleting %d '%s' records" % (q.count(), period_name))
    q.delete()

    q = model.Session.query(GA_Url).\
        filter(GA_Url.period_name == 'All')
    log.debug("Deleting %d 'All' records..." % q.count())
    q.delete()

    model.Session.flush()
    model.Session.commit()
    model.repo.commit_and_remove()
    log.debug('...done')
   
def post_update_url_stats():
    """ Check the distinct url field in ga_url and make sure
        it has an All record. If not then create one.

        After running this then every URL should have an All
        record regardless of whether the URL has an entry for
        the month being currently processed.
    """
    log.debug('Post-processing "All" records...')
    query = """select url, pageviews::int, visits::int
               from ga_url
               where url not in (select url from ga_url where period_name ='All')"""
    connection = model.Session.connection()
    res = connection.execute(query)

    views, visits = {}, {}
    # url, views, visits
    for row in res:
        views[row[0]] = views.get(row[0], 0) + row[1]
        visits[row[0]] = visits.get(row[0], 0) + row[2]

    progress_total = len(views.keys())
    progress_count = 0
    for key in views.keys():
        progress_count += 1
        if progress_count % 100 == 0:
            log.debug('.. %d/%d done so far', progress_count, progress_total)

        package, publisher = _get_package_and_publisher(key)

        values = {'id': make_uuid(),
                  'period_name': "All",
                  'period_complete_day': 0,
                  'url': key,
                  'pageviews': views[key],
                  'visits': visits[key],
                  'department_id': publisher,
                  'package_id': package
                  }
        model.Session.add(GA_Url(**values))
    model.Session.commit()
    log.debug('..done')
   
   
def update_url_stats(period_name, period_complete_day, url_data):
    '''
    Given a list of urls and number of hits for each during a given period,
    stores them in GA_Url under the period and recalculates the totals for
    the 'All' period.
    '''
    progress_total = len(url_data)
    progress_count = 0
    for url, views, visits in url_data:
        progress_count += 1
        if progress_count % 100 == 0:
            log.debug('.. %d/%d done so far', progress_count, progress_total)

        package, publisher = _get_package_and_publisher(url)

        item = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.url==url).first()
        if item:
            item.pageviews = item.pageviews + views
            item.visits = item.visits + visits
            if not item.package_id:
                item.package_id = package
            if not item.department_id:
                item.department_id = publisher
            model.Session.add(item)
        else:
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'period_complete_day': period_complete_day,
                      'url': url,
                      'pageviews': views,
                      'visits': visits,
                      'department_id': publisher,
                      'package_id': package
                      }
            model.Session.add(GA_Url(**values))
        model.Session.commit()

        if package:
            old_pageviews, old_visits = 0, 0
            old = model.Session.query(GA_Url).\
                filter(GA_Url.period_name=='All').\
                filter(GA_Url.url==url).all()
            old_pageviews = sum([int(o.pageviews) for o in old])
            old_visits = sum([int(o.visits) for o in old])

            entries = model.Session.query(GA_Url).\
                filter(GA_Url.period_name!='All').\
                filter(GA_Url.url==url).all()
            values = {'id': make_uuid(),
                      'period_name': 'All',
                      'period_complete_day': 0,
                      'url': url,
                      'pageviews': sum([int(e.pageviews) for e in entries]) + int(old_pageviews),
                      'visits': sum([int(e.visits or 0) for e in entries]) + int(old_visits),
                      'department_id': publisher,
                      'package_id': package
                      }

            model.Session.add(GA_Url(**values))
            model.Session.commit()
   
   
   
   
def update_social(period_name, data):
    # Clean up first.
    model.Session.query(GA_ReferralStat).\
        filter(GA_ReferralStat.period_name==period_name).delete()

    for url, data in data.iteritems():
        for entry in data:
            source = entry[0]
            count = entry[1]

            item = model.Session.query(GA_ReferralStat).\
                filter(GA_ReferralStat.period_name==period_name).\
                filter(GA_ReferralStat.source==source).\
                filter(GA_ReferralStat.url==url).first()
            if item:
                item.count = item.count + count
                model.Session.add(item)
            else:
                # create the row
                values = {'id': make_uuid(),
                          'period_name': period_name,
                          'source': source,
                          'url': url,
                          'count': count,
                          }
                model.Session.add(GA_ReferralStat(**values))
            model.Session.commit()
   
def update_publisher_stats(period_name):
    """
    Updates the publisher stats from the data retrieved for /dataset/*
    and /publisher/*. Will run against each dataset and generates the
    totals for the entire tree beneath each publisher.
    """
    toplevel = get_top_level()
    publishers = model.Session.query(model.Group).\
        filter(model.Group.type=='publisher').\
        filter(model.Group.state=='active').all()
    for publisher in publishers:
        views, visits, subpub = update_publisher(period_name, publisher, publisher.name)
        parent, parents = '', publisher.get_groups('publisher')
        if parents:
            parent = parents[0].name
        item = model.Session.query(GA_Publisher).\
            filter(GA_Publisher.period_name==period_name).\
            filter(GA_Publisher.publisher_name==publisher.name).first()
        if item:
            item.views = views
            item.visits = visits
            item.publisher_name = publisher.name
            item.toplevel = publisher in toplevel
            item.subpublishercount = subpub
            item.parent = parent
            model.Session.add(item)
        else:
            # create the row
            values = {'id': make_uuid(),
                      'period_name': period_name,
                      'publisher_name': publisher.name,
                      'views': views,
                      'visits': visits,
                      'toplevel': publisher in toplevel,
                      'subpublishercount': subpub,
                      'parent': parent
                      }
            model.Session.add(GA_Publisher(**values))
        model.Session.commit()
   
   
def update_publisher(period_name, pub, part=''):
    views, visits, subpub = 0, 0, 0
    for publisher in go_down_tree(pub):
        subpub = subpub + 1
        items = model.Session.query(GA_Url).\
            filter(GA_Url.period_name==period_name).\
            filter(GA_Url.department_id==publisher.name).all()
        for item in items:
            views = views + int(item.pageviews)
            visits = visits + int(item.visits)

    return views, visits, (subpub-1)
   
   
def get_top_level():
    '''Returns the top level publishers.'''
    return model.Session.query(model.Group).\
        outerjoin(model.Member, model.Member.table_id == model.Group.id and \
                  model.Member.table_name == 'group' and \
                  model.Member.state == 'active').\
        filter(model.Member.id==None).\
        filter(model.Group.type=='publisher').\
        order_by(model.Group.name).all()
   
def get_children(publisher):
    '''Finds child publishers for the given publisher (object). (Not recursive)'''
    from ckan.model.group import HIERARCHY_CTE
    return model.Session.query(model.Group).\
        from_statement(HIERARCHY_CTE).params(id=publisher.id, type='publisher').\
        all()
   
def go_down_tree(publisher):
    '''Provided with a publisher object, it walks down the hierarchy and yields
    each publisher, including the one you supply.'''
    yield publisher
    for child in get_children(publisher):
        for grandchild in go_down_tree(child):
            yield grandchild
   
def delete(period_name):
    '''
    Deletes table data for the specified period, or specify 'All'
    for all periods.
    '''
    for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat):
        q = model.Session.query(object_type)
        if period_name != 'All':
            q = q.filter_by(period_name=period_name)
        q.delete()
    model.repo.commit_and_remove()
   
def get_score_for_dataset(dataset_name):
    '''
    Returns a "current popularity" score for a dataset,
    based on how many views it has had recently.
    '''
    import datetime
    now = datetime.datetime.now()
    last_month = now - datetime.timedelta(days=30)
    period_names = ['%s-%02d' % (last_month.year, last_month.month),
                    '%s-%02d' % (now.year, now.month),
                    ]

    score = 0
    for period_name in period_names:
        score /= 2  # previous periods are discounted by 50%
        entry = model.Session.query(GA_Url)\
            .filter(GA_Url.period_name==period_name)\
            .filter(GA_Url.package_id==dataset_name).first()
        # score
        if entry:
            views = float(entry.pageviews)
            if entry.period_complete_day:
                views_per_day = views / entry.period_complete_day
            else:
                views_per_day = views / 15  # guess
            score += views_per_day

    score = int(score * 100)
    log.debug('Popularity %s: %s', score, dataset_name)
    return score
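
# Worked example with made-up figures: a dataset with no views last month and
# 300 views over the first 15 complete days of the current month scores
# int((300 / 15.0) * 100) = 2000; the halving step only discounts figures
# carried over from the earlier period.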