From: Ross Jones Date: Thu, 06 Dec 2012 15:28:08 +0000 Subject: Updated with more logging X-Git-Url: https://maxious.lambdacomplex.org/git/?p=ckanext-ga-report.git&a=commitdiff&h=6f23983b5c75dc00838b934644ee314296e10ddb --- Updated with more logging --- --- a/README.rst +++ b/README.rst @@ -31,14 +31,12 @@ 2. Ensure you development.ini (or similar) contains the info about your Google Analytics account and configuration:: googleanalytics.id = UA-1010101-1 - googleanalytics.account = Account name (i.e. data.gov.uk, see top level item at https://www.google.com/analytics) + googleanalytics.account = Account name (e.g. data.gov.uk, see top level item at https://www.google.com/analytics) + googleanalytics.token.filepath = ~/pyenv/token.dat ga-report.period = monthly - ga-report.bounce_url = /data + ga-report.bounce_url = / - The ga-report.bounce_url specifies the path to use when calculating bounces. For DGU this is /data - but you may want to set this to /. - - Note that your credentials will be readable by system administrators on your server. Rather than use sensitive account details, it is suggested you give access to the GA account to a new Google account that you create just for this purpose. + The ga-report.bounce_url specifies a particular path to record the bounce rate for. Typically it is / (the home page). 3. Set up this extension's database tables using a paster command. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file):: @@ -47,6 +45,12 @@ 4. Enable the extension in your CKAN config file by adding it to ``ckan.plugins``:: ckan.plugins = ga-report + +Troubleshooting +---------------- + +* ``(ProgrammingError) relation "ga_url" does not exist`` + This means that the ``paster initdb`` step has not been run successfully. Refer to the installation instructions for this extension. 
Authorization @@ -79,13 +83,17 @@ $ paster getauthtoken --config=../ckan/development.ini +Now ensure you reference the correct path to your token.dat in your CKAN config file (e.g. development.ini):: + + googleanalytics.token.filepath = ~/pyenv/token.dat + Tutorial -------- -Download some GA data and store it in CKAN's db. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file) and specifying the name of your auth file (token.dat by default) from the previous step:: +Download some GA data and store it in CKAN's database. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file):: - $ paster loadanalytics token.dat latest --config=../ckan/development.ini + $ paster loadanalytics latest --config=../ckan/development.ini The value after the token file is how much data you want to retrieve, this can be --- a/ckanext/ga_report/command.py +++ b/ckanext/ga_report/command.py @@ -1,5 +1,8 @@ import logging import datetime +import os + +from pylons import config from ckan.lib.cli import CkanCommand # No other CKAN imports allowed until _load_config is run, @@ -58,20 +61,30 @@ """Get data from Google Analytics API and save it in the ga_model - Usage: paster loadanalytics - Where is the name of the auth token file from - the getauthtoken step. 
- - And where is: + Where is: all - data for all time latest - (default) just the 'latest' data YYYY-MM - just data for the specific month """ summary = __doc__.split('\n')[0] usage = __doc__ - max_args = 2 - min_args = 1 + max_args = 1 + min_args = 0 + + def __init__(self, name): + super(LoadAnalytics, self).__init__(name) + self.parser.add_option('-d', '--delete-first', + action='store_true', + default=False, + dest='delete_first', + help='Delete data for the period first') + self.parser.add_option('-s', '--skip_url_stats', + action='store_true', + default=False, + dest='skip_url_stats', + help='Skip the download of URL data - just do site-wide stats') def command(self): self._load_config() @@ -79,17 +92,25 @@ from download_analytics import DownloadAnalytics from ga_auth import (init_service, get_profile_id) + ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', '')) + if not ga_token_filepath: + print 'ERROR: In the CKAN config you need to specify the filepath of the ' \ + 'Google Analytics token file under key: googleanalytics.token.filepath' + return + try: - svc = init_service(self.args[0], None) + svc = init_service(ga_token_filepath, None) except TypeError: print ('Have you correctly run the getauthtoken task and ' - 'specified the correct file here') + 'specified the correct token file in the CKAN config under ' + '"googleanalytics.token.filepath"?') return - downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc)) + downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc), + delete_first=self.options.delete_first, + skip_url_stats=self.options.skip_url_stats) - time_period = self.args[1] if self.args and len(self.args) > 1 \ - else 'latest' + time_period = self.args[0] if self.args else 'latest' if time_period == 'all': downloader.all_() elif time_period == 'latest': --- a/ckanext/ga_report/controller.py +++ b/ckanext/ga_report/controller.py @@ -9,7 +9,7 @@ import sqlalchemy from sqlalchemy import func, 
cast, Integer import ckan.model as model -from ga_model import GA_Url, GA_Stat, GA_ReferralStat +from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher log = logging.getLogger('ckanext.ga-report') @@ -22,11 +22,29 @@ def _month_details(cls): + ''' + Returns a list of all the periods for which we have data, unfortunately + knows too much about the type of the cls being passed as GA_Url has a + more complex query + + This may need extending if we add a period_name to the stats + ''' months = [] - vals = model.Session.query(cls.period_name).distinct().all() + day = None + + vals = model.Session.query(cls.period_name,cls.period_complete_day)\ + .filter(cls.period_name!='All').distinct(cls.period_name)\ + .order_by("period_name desc").all() + if vals and vals[0][1]: + day = int(vals[0][1]) + ordinal = 'th' if 11 <= day <= 13 \ + else {1:'st',2:'nd',3:'rd'}.get(day % 10, 'th') + day = "{day}{ordinal}".format(day=day, ordinal=ordinal) + for m in vals: months.append( (m[0], _get_month_name(m[0]))) - return sorted(months, key=operator.itemgetter(0), reverse=True) + + return months, day class GaReport(BaseController): @@ -55,7 +73,7 @@ # Get the month details by fetching distinct values and determining the # month names from the values. 
- c.months = _month_details(GA_Stat) + c.months, c.day = _month_details(GA_Stat) # Work out which month to show, based on query params of the first item c.month_desc = 'all months' @@ -70,13 +88,13 @@ entries = q.order_by('ga_stat.key').all() def clean_key(key, val): - if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounces']: + if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']: val = "%.2f" % round(float(val), 2) if key == 'Average time on site': mins, secs = divmod(float(val), 60) hours, mins = divmod(mins, 60) val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val) - if key in ['New visits','Bounces']: + if key in ['New visits','Bounce rate (home page)']: val = "%s%%" % val if key in ['Total page views', 'Total visits']: val = int(val) @@ -135,29 +153,7 @@ c.social_referrer_totals.append((shorten_name(entry[0]), fill_out_url(entry[0]),'', entry[1])) - - browser_version_re = re.compile("(.*)\((.*)\)") for k, v in keys.iteritems(): - - def clean_field(key): - if k != 'Browser versions': - return key - m = browser_version_re.match(key) - browser = m.groups()[0].strip() - ver = m.groups()[1] - parts = ver.split('.') - if len(parts) > 1: - if parts[1][0] == '0': - ver = parts[0] - else: - ver = "%s.%s" % (parts[0],parts[1]) - if browser in ['Safari','Android Browser']: # Special case complex version nums - ver = parts[0] - if len(ver) > 2: - ver = "%s%sX" % (ver[0], ver[1]) - - return "%s (%s)" % (browser, ver,) - q = model.Session.query(GA_Stat).\ filter(GA_Stat.stat_name==k) if c.month: @@ -167,7 +163,7 @@ d = collections.defaultdict(int) for e in q.all(): - d[clean_field(e.key)] += int(e.value) + d[e.key] += int(e.value) entries = [] for key, val in d.iteritems(): entries.append((key,val,)) @@ -184,56 +180,64 @@ return render('ga_report/site/index.html') -class GaPublisherReport(BaseController): +class GaDatasetReport(BaseController): """ - Displays the pageview and visit count for specific 
publishers based on - the datasets associated with the publisher. + Displays the pageview and visit count for datasets + with options to filter by publisher and time period. """ - def csv(self, month): - - c.month = month if not month =='all' else '' + def publisher_csv(self, month): + ''' + Returns a CSV of each publisher with the total number of dataset + views & visits. + ''' + c.month = month if not month == 'all' else '' response.headers['Content-Type'] = "text/csv; charset=utf-8" response.headers['Content-Disposition'] = str('attachment; filename=publishers_%s.csv' % (month,)) writer = csv.writer(response) - writer.writerow(["Publisher", "Views", "Visits", "Period Name"]) - - for publisher,view,visit in _get_publishers(None): + writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"]) + + for publisher,view,visit in _get_top_publishers(None): writer.writerow([publisher.title.encode('utf-8'), + publisher.name.encode('utf-8'), view, visit, month]) - - - def publisher_csv(self, id, month): - - c.month = month if not month =='all' else '' - c.publisher = model.Group.get(id) - if not c.publisher: - abort(404, 'A publisher with that name could not be found') + def dataset_csv(self, id='all', month='all'): + ''' + Returns a CSV with the number of views & visits for each dataset. 
+ + :param id: A Publisher ID or None if you want for all + :param month: The time period, or 'all' + ''' + c.month = month if not month == 'all' else '' + if id != 'all': + c.publisher = model.Group.get(id) + if not c.publisher: + abort(404, 'A publisher with that name could not be found') packages = self._get_packages(c.publisher) response.headers['Content-Type'] = "text/csv; charset=utf-8" response.headers['Content-Disposition'] = \ - str('attachment; filename=%s_%s.csv' % (c.publisher.name, month,)) + str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,)) writer = csv.writer(response) - writer.writerow(["Publisher", "Views", "Visits", "Period Name"]) + writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Period Name"]) for package,view,visit in packages: writer.writerow([package.title.encode('utf-8'), + package.name.encode('utf-8'), view, visit, month]) - - - def index(self): + def publishers(self): + '''A list of publishers and the number of views/visits for each''' # Get the month details by fetching distinct values and determining the # month names from the values. 
- c.months = _month_details(GA_Url) + c.months, c.day = _month_details(GA_Url) # Work out which month to show, based on query params of the first item c.month = request.params.get('month', '') @@ -241,57 +245,57 @@ if c.month: c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month]) - c.top_publishers = _get_publishers() - + c.top_publishers = _get_top_publishers() return render('ga_report/publisher/index.html') - - def _get_packages(self, publisher, count=-1): + def _get_packages(self, publisher=None, count=-1): + '''Returns the datasets in order of views''' if count == -1: count = sys.maxint + month = c.month or 'All' + + q = model.Session.query(GA_Url,model.Package)\ + .filter(model.Package.name==GA_Url.package_id)\ + .filter(GA_Url.url.like('/dataset/%')) + if publisher: + q = q.filter(GA_Url.department_id==publisher.name) + q = q.filter(GA_Url.period_name==month) + q = q.order_by('ga_url.pageviews::int desc') top_packages = [] - q = model.Session.query(GA_Url).\ - filter(GA_Url.department_id==publisher.name).\ - filter(GA_Url.url.like('/dataset/%')) - if c.month: - q = q.filter(GA_Url.period_name==c.month) - q = q.order_by('ga_url.pageviews::int desc') - - if c.month: - for entry in q[:count]: - p = model.Package.get(entry.url[len('/dataset/'):]) - top_packages.append((p,entry.pageviews,entry.visitors)) - else: - ds = {} - for entry in q.all(): - if len(ds) >= count: - break - p = model.Package.get(entry.url[len('/dataset/'):]) - if not p in ds: - ds[p] = {'views':0, 'visits': 0} - ds[p]['views'] = ds[p]['views'] + int(entry.pageviews) - ds[p]['visits'] = ds[p]['visits'] + int(entry.visitors) - - results = [] - for k, v in ds.iteritems(): - results.append((k,v['views'],v['visits'])) - - top_packages = sorted(results, key=operator.itemgetter(1), reverse=True) + for entry,package in q.limit(count): + if package: + top_packages.append((package, entry.pageviews, entry.visits)) + else: + log.warning('Could not find package associated package') + return 
top_packages - - def read(self, id): + def read(self): + ''' + Lists the most popular datasets across all publishers + ''' + return self.read_publisher(None) + + def read_publisher(self, id): + ''' + Lists the most popular datasets for a publisher (or across all publishers) + ''' count = 20 - c.publisher = model.Group.get(id) - if not c.publisher: - abort(404, 'A publisher with that name could not be found') + c.publishers = _get_publishers() + + id = request.params.get('publisher', id) + if id and id != 'all': + c.publisher = model.Group.get(id) + if not c.publisher: + abort(404, 'A publisher with that name could not be found') + c.publisher_name = c.publisher.name c.top_packages = [] # package, dataset_views in c.top_packages # Get the month details by fetching distinct values and determining the # month names from the values. - c.months = _month_details(GA_Url) + c.months, c.day = _month_details(GA_Url) # Work out which month to show, based on query params of the first item c.month = request.params.get('month', '') @@ -300,43 +304,38 @@ else: c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month]) + month = c.month or 'All' c.publisher_page_views = 0 q = model.Session.query(GA_Url).\ - filter(GA_Url.url=='/publisher/%s' % c.publisher.name) - if c.month: - entry = q.filter(GA_Url.period_name==c.month).first() - c.publisher_page_views = entry.pageviews if entry else 0 - else: - for e in q.all(): - c.publisher_page_views = c.publisher_page_views + int(e.pageviews) + filter(GA_Url.url=='/publisher/%s' % c.publisher_name) + entry = q.filter(GA_Url.period_name==c.month).first() + c.publisher_page_views = entry.pageviews if entry else 0 c.top_packages = self._get_packages(c.publisher, 20) return render('ga_report/publisher/read.html') -def _get_publishers(limit=20): +def _get_top_publishers(limit=20): + ''' + Returns a list of the top 20 publishers by dataset visits. 
+ (The number to show can be varied with 'limit') + ''' + month = c.month or 'All' connection = model.Session.connection() q = """ - select department_id, sum(pageviews::int) views, sum(visitors::int) visits + select department_id, sum(pageviews::int) views, sum(visits::int) visits from ga_url - where department_id <> ''""" - if c.month: - q = q + """ - and period_name=%s - """ - q = q + """ - group by department_id order by views desc + where department_id <> '' + and package_id <> '' + and url like '/dataset/%%' + and period_name=%s + group by department_id order by views desc """ if limit: q = q + " limit %s;" % (limit) - # Add this back (before and period_name =%s) if you want to ignore publisher - # homepage views - # and not url like '/publisher/%%' - top_publishers = [] - res = connection.execute(q, c.month) - + res = connection.execute(q, month) for row in res: g = model.Group.get(row[0]) if g: @@ -344,6 +343,19 @@ return top_publishers +def _get_publishers(): + ''' + Returns a list of all publishers. 
Each item is a tuple: + (name, title) + ''' + publishers = [] + for pub in model.Session.query(model.Group).\ + filter(model.Group.type=='publisher').\ + filter(model.Group.state=='active').\ + order_by(model.Group.name): + publishers.append((pub.name, pub.title)) + return publishers + def _percent(num, total): p = 100 * float(num)/float(total) return "%.2f%%" % round(p, 2) --- a/ckanext/ga_report/download_analytics.py +++ b/ckanext/ga_report/download_analytics.py @@ -3,7 +3,7 @@ import datetime import collections from pylons import config - +from ga_model import _normalize_url import ga_model #from ga_client import GA @@ -11,15 +11,19 @@ log = logging.getLogger('ckanext.ga-report') FORMAT_MONTH = '%Y-%m' +MIN_VIEWS = 50 +MIN_VISITS = 20 class DownloadAnalytics(object): '''Downloads and stores analytics info''' - def __init__(self, service=None, profile_id=None): + def __init__(self, service=None, profile_id=None, delete_first=False, + skip_url_stats=False): self.period = config['ga-report.period'] self.service = service self.profile_id = profile_id - + self.delete_first = delete_first + self.skip_url_stats = skip_url_stats def specific_month(self, date): import calendar @@ -90,25 +94,43 @@ def download_and_store(self, periods): for period_name, period_complete_day, start_date, end_date in periods: - log.info('Downloading Analytics for period "%s" (%s - %s)', + log.info('Period "%s" (%s - %s)', self.get_full_period_name(period_name, period_complete_day), - start_date.strftime('%Y %m %d'), - end_date.strftime('%Y %m %d')) - - data = self.download(start_date, end_date, '~/dataset/[a-z0-9-_]+') - log.info('Storing Dataset Analytics for period "%s"', - self.get_full_period_name(period_name, period_complete_day)) - self.store(period_name, period_complete_day, data, ) - - data = self.download(start_date, end_date, '~/publisher/[a-z0-9-_]+') - log.info('Storing Publisher Analytics for period "%s"', - self.get_full_period_name(period_name, period_complete_day)) - 
self.store(period_name, period_complete_day, data,) - - ga_model.update_publisher_stats(period_name) # about 30 seconds. - self.sitewide_stats( period_name ) - + start_date.strftime('%Y-%m-%d'), + end_date.strftime('%Y-%m-%d')) + + if self.delete_first: + log.info('Deleting existing Analytics for this period "%s"', + period_name) + ga_model.delete(period_name) + + if not self.skip_url_stats: + # Clean out old url data before storing the new + ga_model.pre_update_url_stats(period_name) + + accountName = config.get('googleanalytics.account') + + log.info('Downloading analytics for dataset views') + data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName) + + log.info('Storing dataset views (%i rows)', len(data.get('url'))) + self.store(period_name, period_complete_day, data, ) + + log.info('Downloading analytics for publisher views') + data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName) + + log.info('Storing publisher views (%i rows)', len(data.get('url'))) + self.store(period_name, period_complete_day, data,) + + log.info('Aggregating datasets by publisher') + ga_model.update_publisher_stats(period_name) # about 30 seconds. 
+ + log.info('Downloading and storing analytics for site-wide stats') + self.sitewide_stats( period_name, period_complete_day ) + + log.info('Downloading and storing analytics for social networks') self.update_social_info(period_name, start_date, end_date) + def update_social_info(self, period_name, start_date, end_date): start_date = start_date.strftime('%Y-%m-%d') @@ -131,18 +153,18 @@ data = collections.defaultdict(list) rows = results.get('rows',[]) for row in rows: - from ga_model import _normalize_url - data[_normalize_url(row[0])].append( (row[1], int(row[2]),) ) + url = _normalize_url('http:/' + row[0]) + data[url].append( (row[1], int(row[2]),) ) ga_model.update_social(period_name, data) - def download(self, start_date, end_date, path='~/dataset/[a-z0-9-_]+'): + def download(self, start_date, end_date, path=None): '''Get data from GA for a given time period''' start_date = start_date.strftime('%Y-%m-%d') end_date = end_date.strftime('%Y-%m-%d') query = 'ga:pagePath=%s$' % path - metrics = 'ga:uniquePageviews, ga:visitors' - sort = '-ga:uniquePageviews' + metrics = 'ga:pageviews, ga:visits' + sort = '-ga:pageviews' # Supported query params at # https://developers.google.com/analytics/devguides/reporting/core/v3/reference @@ -159,27 +181,32 @@ packages = [] for entry in results.get('rows'): (loc,pageviews,visits) = entry - packages.append( ('http:/' + loc, pageviews, visits,) ) # Temporary hack + url = _normalize_url('http:/' + loc) # strips off domain e.g. 
www.data.gov.uk or data.gov.uk + + if not url.startswith('/dataset/') and not url.startswith('/publisher/'): + # filter out strays like: + # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open + # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate + continue + packages.append( (url, pageviews, visits,) ) # Temporary hack return dict(url=packages) def store(self, period_name, period_complete_day, data): if 'url' in data: ga_model.update_url_stats(period_name, period_complete_day, data['url']) - def sitewide_stats(self, period_name): + def sitewide_stats(self, period_name, period_complete_day): import calendar year, month = period_name.split('-') _, last_day_of_month = calendar.monthrange(int(year), int(month)) start_date = '%s-01' % period_name end_date = '%s-%s' % (period_name, last_day_of_month) - print 'Sitewide_stats for %s (%s -> %s)' % (period_name, start_date, end_date) - funcs = ['_totals_stats', '_social_stats', '_os_stats', '_locale_stats', '_browser_stats', '_mobile_stats'] for f in funcs: - print ' + Fetching %s stats' % f.split('_')[1] - getattr(self, f)(start_date, end_date, period_name) + log.info('Downloading analytics for %s' % f.split('_')[1]) + getattr(self, f)(start_date, end_date, period_name, period_complete_day) def _get_results(result_data, f): data = {} @@ -188,22 +215,23 @@ data[key] = data.get(key,0) + result[1] return data - def _totals_stats(self, start_date, end_date, period_name): + def _totals_stats(self, start_date, end_date, period_name, period_complete_day): """ Fetches distinct totals, total pageviews etc """ results = self.service.data().ga().get( ids='ga:' + self.profile_id, start_date=start_date, - metrics='ga:uniquePageviews', - sort='-ga:uniquePageviews', - max_results=10000, - end_date=end_date).execute() - result_data = results.get('rows') - ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]}) - - results = 
self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visitors', + metrics='ga:pageviews', + sort='-ga:pageviews', + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]}, + period_complete_day) + + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits', max_results=10000, end_date=end_date).execute() result_data = results.get('rows') @@ -213,36 +241,39 @@ 'New visits': result_data[0][2], 'Total visits': result_data[0][3], } - ga_model.update_sitewide_stats(period_name, "Totals", data) - - # Bounces from /data. This url is specified in configuration because - # for DGU we don't want /. - path = config.get('ga-report.bounce_url','/') - print path - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - filters='ga:pagePath=~%s$' % (path,), - start_date=start_date, - metrics='ga:bounces,ga:uniquePageviews', + ga_model.update_sitewide_stats(period_name, "Totals", data, period_complete_day) + + # Bounces from / or another configurable page. 
+ path = '/%s%s' % (config.get('googleanalytics.account'), + config.get('ga-report.bounce_url', '/')) + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + filters='ga:pagePath==%s' % (path,), + start_date=start_date, + metrics='ga:bounces,ga:pageviews', dimensions='ga:pagePath', max_results=10000, end_date=end_date).execute() result_data = results.get('rows') - for results in result_data: - if results[0] == path: - bounce, total = [float(x) for x in results[1:]] - pct = 100 * bounce/total - print "%d bounces from %d total == %s" % (bounce, total, pct) - ga_model.update_sitewide_stats(period_name, "Totals", {'Bounces': pct}) - - - def _locale_stats(self, start_date, end_date, period_name): + if not result_data or len(result_data) != 1: + log.error('Could not pinpoint the bounces for path: %s. Got results: %r', + path, result_data) + return + results = result_data[0] + bounces, total = [float(x) for x in result_data[0][1:]] + pct = 100 * bounces/total + log.info('%d bounces from %d total == %s', bounces, total, pct) + ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': pct}, + period_complete_day) + + + def _locale_stats(self, start_date, end_date, period_name, period_complete_day): """ Fetches stats about language and country """ results = self.service.data().ga().get( ids='ga:' + self.profile_id, start_date=start_date, - metrics='ga:uniquePageviews', - sort='-ga:uniquePageviews', + metrics='ga:pageviews', + sort='-ga:pageviews', dimensions="ga:language,ga:country", max_results=10000, end_date=end_date).execute() @@ -250,42 +281,42 @@ data = {} for result in result_data: data[result[0]] = data.get(result[0], 0) + int(result[2]) - ga_model.update_sitewide_stats(period_name, "Languages", data) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Languages", data, period_complete_day) data = {} for result in result_data: data[result[1]] = data.get(result[1], 0) + int(result[2]) 
- ga_model.update_sitewide_stats(period_name, "Country", data) - - - def _social_stats(self, start_date, end_date, period_name): + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Country", data, period_complete_day) + + + def _social_stats(self, start_date, end_date, period_name, period_complete_day): """ Finds out which social sites people are referred from """ results = self.service.data().ga().get( ids='ga:' + self.profile_id, start_date=start_date, - metrics='ga:uniquePageviews', - sort='-ga:uniquePageviews', + metrics='ga:pageviews', + sort='-ga:pageviews', dimensions="ga:socialNetwork,ga:referralPath", max_results=10000, end_date=end_date).execute() result_data = results.get('rows') - twitter_links = [] data = {} for result in result_data: if not result[0] == '(not set)': data[result[0]] = data.get(result[0], 0) + int(result[2]) - if result[0] == 'Twitter': - twitter_links.append(result[1]) - ga_model.update_sitewide_stats(period_name, "Social sources", data) - - - def _os_stats(self, start_date, end_date, period_name): + self._filter_out_long_tail(data, 3) + ga_model.update_sitewide_stats(period_name, "Social sources", data, period_complete_day) + + + def _os_stats(self, start_date, end_date, period_name, period_complete_day): """ Operating system stats """ results = self.service.data().ga().get( ids='ga:' + self.profile_id, start_date=start_date, - metrics='ga:uniquePageviews', - sort='-ga:uniquePageviews', + metrics='ga:pageviews', + sort='-ga:pageviews', dimensions="ga:operatingSystem,ga:operatingSystemVersion", max_results=10000, end_date=end_date).execute() @@ -293,46 +324,73 @@ data = {} for result in result_data: data[result[0]] = data.get(result[0], 0) + int(result[2]) - ga_model.update_sitewide_stats(period_name, "Operating Systems", data) - - data = {} - for result in result_data: - key = "%s (%s)" % (result[0],result[1]) - data[key] = result[2] - ga_model.update_sitewide_stats(period_name, "Operating 
Systems versions", data) - - - def _browser_stats(self, start_date, end_date, period_name): + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Operating Systems", data, period_complete_day) + + data = {} + for result in result_data: + if int(result[2]) >= MIN_VIEWS: + key = "%s %s" % (result[0],result[1]) + data[key] = result[2] + ga_model.update_sitewide_stats(period_name, "Operating Systems versions", data, period_complete_day) + + + def _browser_stats(self, start_date, end_date, period_name, period_complete_day): """ Information about browsers and browser versions """ results = self.service.data().ga().get( ids='ga:' + self.profile_id, start_date=start_date, - metrics='ga:uniquePageviews', - sort='-ga:uniquePageviews', + metrics='ga:pageviews', + sort='-ga:pageviews', dimensions="ga:browser,ga:browserVersion", max_results=10000, end_date=end_date).execute() result_data = results.get('rows') + # e.g. [u'Firefox', u'19.0', u'20'] + data = {} for result in result_data: data[result[0]] = data.get(result[0], 0) + int(result[2]) - ga_model.update_sitewide_stats(period_name, "Browsers", data) - - data = {} - for result in result_data: - key = "%s (%s)" % (result[0], result[1]) - data[key] = result[2] - ga_model.update_sitewide_stats(period_name, "Browser versions", data) - - - def _mobile_stats(self, start_date, end_date, period_name): + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Browsers", data, period_complete_day) + + data = {} + for result in result_data: + key = "%s %s" % (result[0], self._filter_browser_version(result[0], result[1])) + data[key] = data.get(key, 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Browser versions", data, period_complete_day) + + @classmethod + def _filter_browser_version(cls, browser, version_str): + ''' + Simplifies a browser version string if it is detailed. + i.e. 
groups together Firefox 3.5.1 and 3.5.2 to be just 3. + This is helpful when viewing stats and good to protect privacy. + ''' + ver = version_str + parts = ver.split('.') + if len(parts) > 1: + if parts[1][0] == '0': + ver = parts[0] + else: + ver = "%s" % (parts[0]) + # Special case complex version nums + if browser in ['Safari', 'Android Browser']: + ver = parts[0] + if len(ver) > 2: + num_hidden_digits = len(ver) - 2 + ver = ver[0] + ver[1] + 'X' * num_hidden_digits + return ver + + def _mobile_stats(self, start_date, end_date, period_name, period_complete_day): """ Info about mobile devices """ results = self.service.data().ga().get( ids='ga:' + self.profile_id, start_date=start_date, - metrics='ga:uniquePageviews', - sort='-ga:uniquePageviews', + metrics='ga:pageviews', + sort='-ga:pageviews', dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo", max_results=10000, end_date=end_date).execute() @@ -341,10 +399,23 @@ data = {} for result in result_data: data[result[0]] = data.get(result[0], 0) + int(result[2]) - ga_model.update_sitewide_stats(period_name, "Mobile brands", data) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Mobile brands", data, period_complete_day) data = {} for result in result_data: data[result[1]] = data.get(result[1], 0) + int(result[2]) - ga_model.update_sitewide_stats(period_name, "Mobile devices", data) - + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Mobile devices", data, period_complete_day) + + @classmethod + def _filter_out_long_tail(cls, data, threshold=10): + ''' + Given data which is a frequency distribution, filter out + results which are below a threshold count. This is good to protect + privacy. 
+ ''' + for key, value in data.items(): + if value < threshold: + del data[key] + --- a/ckanext/ga_report/ga_auth.py +++ b/ckanext/ga_report/ga_auth.py @@ -53,7 +53,11 @@ return None accountName = config.get('googleanalytics.account') + if not accountName: + raise Exception('googleanalytics.account needs to be configured') webPropertyId = config.get('googleanalytics.id') + if not webPropertyId: + raise Exception('googleanalytics.id needs to be configured') for acc in accounts.get('items'): if acc.get('name') == accountName: accountId = acc.get('id') --- a/ckanext/ga_report/ga_model.py +++ b/ckanext/ga_report/ga_model.py @@ -1,21 +1,21 @@ import re import uuid -from sqlalchemy import Table, Column, MetaData +from sqlalchemy import Table, Column, MetaData, ForeignKey from sqlalchemy import types from sqlalchemy.sql import select -from sqlalchemy.orm import mapper +from sqlalchemy.orm import mapper, relation from sqlalchemy import func import ckan.model as model from ckan.lib.base import * +log = __import__('logging').getLogger(__name__) + def make_uuid(): return unicode(uuid.uuid4()) metadata = MetaData() - - class GA_Url(object): @@ -29,9 +29,10 @@ Column('period_name', types.UnicodeText), Column('period_complete_day', types.Integer), Column('pageviews', types.UnicodeText), - Column('visitors', types.UnicodeText), + Column('visits', types.UnicodeText), Column('url', types.UnicodeText), Column('department_id', types.UnicodeText), + Column('package_id', types.UnicodeText), ) mapper(GA_Url, url_table) @@ -46,6 +47,7 @@ Column('id', types.UnicodeText, primary_key=True, default=make_uuid), Column('period_name', types.UnicodeText), + Column('period_complete_day', types.UnicodeText), Column('stat_name', types.UnicodeText), Column('key', types.UnicodeText), Column('value', types.UnicodeText), ) @@ -64,7 +66,7 @@ Column('period_name', types.UnicodeText), Column('publisher_name', types.UnicodeText), Column('views', types.UnicodeText), - Column('visitors', types.UnicodeText), 
+ Column('visits', types.UnicodeText), Column('toplevel', types.Boolean, default=False), Column('subpublishercount', types.Integer, default=0), Column('parent', types.UnicodeText), @@ -112,12 +114,10 @@ >>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices') '/dataset/weekly_fuel_prices' ''' - # Deliberately leaving a / - url = url.replace('http:/','') - return '/' + '/'.join(url.split('/')[2:]) - - -def _get_department_id_of_url(url): + return '/' + '/'.join(url.split('/')[3:]) + + +def _get_package_and_publisher(url): # e.g. /dataset/fuel_prices # e.g. /dataset/fuel_prices/resource/e63380d4 dataset_match = re.match('/dataset/([^/]+)(/.*)?', url) @@ -127,14 +127,15 @@ if dataset: publisher_groups = dataset.get_groups('publisher') if publisher_groups: - return publisher_groups[0].name + return dataset_ref,publisher_groups[0].name + return dataset_ref, None else: publisher_match = re.match('/publisher/([^/]+)(/.*)?', url) if publisher_match: - return publisher_match.groups()[0] - - -def update_sitewide_stats(period_name, stat_name, data): + return None, publisher_match.groups()[0] + return None, None + +def update_sitewide_stats(period_name, stat_name, data, period_complete_day): for k,v in data.iteritems(): item = model.Session.query(GA_Stat).\ filter(GA_Stat.period_name==period_name).\ @@ -144,11 +145,13 @@ item.period_name = period_name item.key = k item.value = v + item.period_complete_day = period_complete_day model.Session.add(item) else: # create the row values = {'id': make_uuid(), 'period_name': period_name, + 'period_complete_day': period_complete_day, 'key': k, 'value': v, 'stat_name': stat_name @@ -157,34 +160,78 @@ model.Session.commit() +def pre_update_url_stats(period_name): + log.debug("Deleting '%s' records" % period_name) + model.Session.query(GA_Url).\ + filter(GA_Url.period_name==period_name).delete() + + count = model.Session.query(GA_Url).\ + filter(GA_Url.period_name == 'All').count() + log.debug("Deleting %d 'All' records" % count) 
+ model.Session.query(GA_Url).\ + filter(GA_Url.period_name == 'All').delete() + model.repo.commit_and_remove() + def update_url_stats(period_name, period_complete_day, url_data): - for url, views, visitors in url_data: - url = _normalize_url(url) - department_id = _get_department_id_of_url(url) - - # see if the row for this url & month is in the table already + ''' + Given a list of urls and number of hits for each during a given period, + stores them in GA_Url under the period and recalculates the totals for + the 'All' period. + ''' + for url, views, visits in url_data: + package, publisher = _get_package_and_publisher(url) + + item = model.Session.query(GA_Url).\ filter(GA_Url.period_name==period_name).\ filter(GA_Url.url==url).first() if item: - item.period_name = period_name - item.pageviews = views - item.visitors = visitors - item.department_id = department_id + item.pageviews = item.pageviews + views + item.visits = item.visits + visits + if not item.package_id: + item.package_id = package + if not item.department_id: + item.department_id = publisher model.Session.add(item) else: - # create the row values = {'id': make_uuid(), 'period_name': period_name, 'period_complete_day': period_complete_day, 'url': url, 'pageviews': views, - 'visitors': visitors, - 'department_id': department_id + 'visits': visits, + 'department_id': publisher, + 'package_id': package } model.Session.add(GA_Url(**values)) model.Session.commit() + + if package: + old_pageviews, old_visits = 0, 0 + old = model.Session.query(GA_Url).\ + filter(GA_Url.period_name=='All').\ + filter(GA_Url.url==url).all() + old_pageviews = sum([int(o.pageviews) for o in old]) + old_visits = sum([int(o.visits) for o in old]) + + entries = model.Session.query(GA_Url).\ + filter(GA_Url.period_name!='All').\ + filter(GA_Url.url==url).all() + values = {'id': make_uuid(), + 'period_name': 'All', + 'period_complete_day': 0, + 'url': url, + 'pageviews': sum([int(e.pageviews) for e in entries]) + 
int(old_pageviews), + 'visits': sum([int(e.visits or 0) for e in entries]) + int(old_visits), + 'department_id': publisher, + 'package_id': package + } + + model.Session.add(GA_Url(**values)) + model.Session.commit() + + def update_social(period_name, data): @@ -226,7 +273,7 @@ filter(model.Group.type=='publisher').\ filter(model.Group.state=='active').all() for publisher in publishers: - views, visitors, subpub = update_publisher(period_name, publisher, publisher.name) + views, visits, subpub = update_publisher(period_name, publisher, publisher.name) parent, parents = '', publisher.get_groups('publisher') if parents: parent = parents[0].name @@ -235,7 +282,7 @@ filter(GA_Publisher.publisher_name==publisher.name).first() if item: item.views = views - item.visitors = visitors + item.visits = visits item.publisher_name = publisher.name item.toplevel = publisher in toplevel item.subpublishercount = subpub @@ -247,7 +294,7 @@ 'period_name': period_name, 'publisher_name': publisher.name, 'views': views, - 'visitors': visitors, + 'visits': visits, 'toplevel': publisher in toplevel, 'subpublishercount': subpub, 'parent': parent @@ -257,7 +304,7 @@ def update_publisher(period_name, pub, part=''): - views,visitors,subpub = 0, 0, 0 + views,visits,subpub = 0, 0, 0 for publisher in go_down_tree(pub): subpub = subpub + 1 items = model.Session.query(GA_Url).\ @@ -265,9 +312,9 @@ filter(GA_Url.department_id==publisher.name).all() for item in items: views = views + int(item.pageviews) - visitors = visitors + int(item.visitors) - - return views, visitors, (subpub-1) + visits = visits + int(item.visits) + + return views, visits, (subpub-1) def get_top_level(): @@ -295,3 +342,46 @@ for grandchild in go_down_tree(child): yield grandchild +def delete(period_name): + ''' + Deletes table data for the specified period, or specify 'all' + for all periods. 
+ ''' + for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat): + q = model.Session.query(object_type) + if period_name != 'All': + q = q.filter_by(period_name=period_name) + q.delete() + model.repo.commit_and_remove() + +def get_score_for_dataset(dataset_name): + ''' + Returns a "current popularity" score for a dataset, + based on how many views it has had recently. + ''' + import datetime + now = datetime.datetime.now() + last_month = now - datetime.timedelta(days=30) + period_names = ['%s-%02d' % (last_month.year, last_month.month), + '%s-%02d' % (now.year, now.month), + ] + + score = 0 + for period_name in period_names: + score /= 2 # previous periods are discounted by 50% + entry = model.Session.query(GA_Url)\ + .filter(GA_Url.period_name==period_name)\ + .filter(GA_Url.package_id==dataset_name).first() + # score + if entry: + views = float(entry.pageviews) + if entry.period_complete_day: + views_per_day = views / entry.period_complete_day + else: + views_per_day = views / 15 # guess + score += views_per_day + + score = int(score * 100) + log.debug('Popularity %s: %s', score, dataset_name) + return score + --- a/ckanext/ga_report/helpers.py +++ b/ckanext/ga_report/helpers.py @@ -1,7 +1,9 @@ import logging import operator + import ckan.lib.base as base import ckan.model as model +from ckan.logic import get_action from ckanext.ga_report.ga_model import GA_Url, GA_Publisher from ckanext.ga_report.controller import _get_publishers @@ -28,28 +30,54 @@ return base.render_snippet('ga_report/ga_popular_datasets.html', **ctx) def single_popular_dataset(top=20): + '''Returns a random dataset from the most popular ones. 
+ + :param top: the number of top datasets to select from + ''' import random - datasets = {} - rand = random.randrange(0, top) - entry = model.Session.query(GA_Url).\ - filter(GA_Url.url.like('/dataset/%')).\ - order_by('ga_url.pageviews::int desc')[rand] - + top_datasets = model.Session.query(GA_Url).\ + filter(GA_Url.url.like('/dataset/%')).\ + order_by('ga_url.pageviews::int desc') + num_top_datasets = top_datasets.count() dataset = None - while not dataset: - dataset = model.Package.get(entry.url[len('/dataset/'):]) - if dataset and not dataset.state == 'active': - dataset = None - else: - publisher = model.Group.get(entry.department_id) + if num_top_datasets: + count = 0 + while not dataset: + rand = random.randrange(0, min(top, num_top_datasets)) + ga_url = top_datasets[rand] + dataset = model.Package.get(ga_url.url[len('/dataset/'):]) + if dataset and not dataset.state == 'active': + dataset = None + # When testing, it is possible that top datasets are not available + # so only go round this loop a few times before falling back on + # a random dataset. 
+ count += 1
+ if count > 10:
+ break
+ if not dataset:
+ # fallback
+ dataset = model.Session.query(model.Package)\
+ .filter_by(state='active').first()
+ if not dataset:
+ return None
+ dataset_dict = get_action('package_show')({'model': model,
+ 'session': model.Session,
+ 'validate': False},
+ {'id':dataset.id})
+ return dataset_dict
- ctx = {
- 'dataset': dataset,
- 'publisher': publisher
- }
- return base.render_snippet('ga_report/ga_popular_single.html', **ctx)
+def single_popular_dataset_html(top=20):
+ dataset_dict = single_popular_dataset(top)
+ groups = dataset_dict.get('groups', [])
+ publishers = [ g for g in groups if g.get('type') == 'publisher' ]
+ publisher = publishers[0] if publishers else {'name':'', 'title': ''}
+ context = {
+ 'dataset': dataset_dict,
+ 'publisher': publisher
+ }
+ return base.render_snippet('ga_report/ga_popular_single.html', **context)
def most_popular_datasets(publisher, count=20):
@@ -61,7 +89,7 @@
results = _datasets_for_publisher(publisher, count)
ctx = {
- 'dataset_count': len(datasets),
+ 'dataset_count': len(results),
'datasets': results,
'publisher': publisher
@@ -81,7 +109,7 @@
if not p in datasets:
datasets[p] = {'views':0, 'visits': 0}
datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews)
- datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visitors)
+ datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits)
results = []
for k, v in datasets.iteritems():
--- a/ckanext/ga_report/plugin.py
+++ b/ckanext/ga_report/plugin.py
@@ -31,26 +31,7 @@
}
def after_map(self, map):
- map.connect(
- '/data/site-usage/publisher',
- controller='ckanext.ga_report.controller:GaPublisherReport',
- action='index'
- )
- map.connect(
- '/data/site-usage/publisher_{month}.csv',
- controller='ckanext.ga_report.controller:GaPublisherReport',
- action='csv'
- )
- map.connect(
- '/data/site-usage/publisher/{id}_{month}.csv',
- controller='ckanext.ga_report.controller:GaPublisherReport',
- action='publisher_csv'
- 
) - map.connect( - '/data/site-usage/publisher/{id}', - controller='ckanext.ga_report.controller:GaPublisherReport', - action='read' - ) + # GaReport map.connect( '/data/site-usage', controller='ckanext.ga_report.controller:GaReport', @@ -61,6 +42,33 @@ controller='ckanext.ga_report.controller:GaReport', action='csv' ) + + # GaDatasetReport + map.connect( + '/data/site-usage/publisher', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='publishers' + ) + map.connect( + '/data/site-usage/publishers_{month}.csv', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='publisher_csv' + ) + map.connect( + '/data/site-usage/dataset/datasets_{id}_{month}.csv', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='dataset_csv' + ) + map.connect( + '/data/site-usage/dataset', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='read' + ) + map.connect( + '/data/site-usage/dataset/{id}', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='read_publisher' + ) return map --- a/ckanext/ga_report/templates/ga_report/ga_popular_datasets.html +++ b/ckanext/ga_report/templates/ga_report/ga_popular_datasets.html @@ -5,7 +5,7 @@