From: David Read Date: Fri, 09 Nov 2012 16:15:35 +0000 Subject: Adjust popularity score to take account of number of days in the month. X-Git-Url: http://maxious.lambdacomplex.org/git/?p=ckanext-ga-report.git&a=commitdiff&h=20b6eca0a538a77122ce85cf588045784fa9b67e --- Adjust popularity score to take account of number of days in the month. --- --- a/README.rst +++ b/README.rst @@ -26,16 +26,16 @@ 1. Activate you CKAN python environment and install this extension's software:: $ pyenv/bin/activate - $ pip install -e git+https://github.com/okfn/ckanext-ga-report.git#egg=ckanext-ga-report + $ pip install -e git+https://github.com/datagovuk/ckanext-ga-report.git#egg=ckanext-ga-report 2. Ensure you development.ini (or similar) contains the info about your Google Analytics account and configuration:: googleanalytics.id = UA-1010101-1 - googleanalytics.username = googleaccount@gmail.com - googleanalytics.password = googlepassword + googleanalytics.account = Account name (e.g. data.gov.uk, see top level item at https://www.google.com/analytics) ga-report.period = monthly + ga-report.bounce_url = / - Note that your password will be readable by system administrators on your server. Rather than use sensitive account details, it is suggested you give access to the GA account to a new Google account that you create just for this purpose. + The ga-report.bounce_url specifies a particular path to record the bounce rate for. Typically it is / (the home page). 3. Set up this extension's database tables using a paster command. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file):: @@ -44,6 +44,12 @@ 4. Enable the extension in your CKAN config file by adding it to ``ckan.plugins``:: ckan.plugins = ga-report + +Problem shooting +---------------- + +* ``(ProgrammingError) relation "ga_url" does not exist`` + This means that the ``paster initdb`` step has not been run successfully. Refer to the installation instructions for this extension. Authorization @@ -80,7 +86,7 @@ Tutorial -------- -Download some GA data and store it in CKAN's db. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file) and specifying the name of your auth file (token.dat by default) from the previous step:: +Download some GA data and store it in CKAN's database. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file) and specifying the name of your auth file (token.dat by default) from the previous step:: $ paster loadanalytics token.dat latest --config=../ckan/development.ini --- a/ckanext/ga_report/command.py +++ b/ckanext/ga_report/command.py @@ -1,7 +1,10 @@ import logging +import datetime from ckan.lib.cli import CkanCommand -# No other CKAN imports allowed until _load_config is run, or logging is disabled +# No other CKAN imports allowed until _load_config is run, +# or logging is disabled + class InitDB(CkanCommand): """Initialise the extension's database tables @@ -46,9 +49,10 @@ assuming it is correct. 
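        The credentials file defaults to credentials.json and the resulting
        OAuth token is written to token.dat.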
""" from ga_auth import init_service - initialize_service('token.dat', - self.args[0] if self.args - else 'credentials.json') + init_service('token.dat', + self.args[0] if self.args + else 'credentials.json') + class LoadAnalytics(CkanCommand): """Get data from Google Analytics API and save it @@ -62,35 +66,51 @@ And where is: all - data for all time latest - (default) just the 'latest' data - YYYY-MM-DD - just data for all time periods going - back to (and including) this date + YYYY-MM - just data for the specific month """ summary = __doc__.split('\n')[0] usage = __doc__ max_args = 2 min_args = 1 + def __init__(self, name): + super(LoadAnalytics, self).__init__(name) + self.parser.add_option('-d', '--delete-first', + action='store_true', + default=False, + dest='delete_first', + help='Delete data for the period first') + self.parser.add_option('-s', '--skip_url_stats', + action='store_true', + default=False, + dest='skip_url_stats', + help='Skip the download of URL data - just do site-wide stats') + def command(self): self._load_config() - from ga_auth import init_service + from download_analytics import DownloadAnalytics + from ga_auth import (init_service, get_profile_id) + try: svc = init_service(self.args[0], None) except TypeError: - print 'Have you correctly run the getauthtoken task and specified the correct file here' + print ('Have you correctly run the getauthtoken task and ' + 'specified the correct token file?') return - from download_analytics import DownloadAnalytics - from ga_auth import get_profile_id - downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc)) + downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc), + delete_first=self.options.delete_first, + skip_url_stats=self.options.skip_url_stats) - time_period = self.args[1] if self.args and len(self.args) > 1 else 'latest' + time_period = self.args[1] if self.args and len(self.args) > 1 \ + else 'latest' if time_period == 'all': downloader.all_() elif time_period == 'latest': downloader.latest() else: - since_date = datetime.datetime.strptime(time_period, '%Y-%m-%d') - downloader.since_date(since_date) + # The month to use + for_date = datetime.datetime.strptime(time_period, '%Y-%m') + downloader.specific_month(for_date) - --- a/ckanext/ga_report/controller.py +++ b/ckanext/ga_report/controller.py @@ -1,10 +1,345 @@ +import re +import csv +import sys import logging -from ckan.lib.base import BaseController, c, render -import report_model +import operator +import collections +from ckan.lib.base import (BaseController, c, g, render, request, response, abort) + +import sqlalchemy +from sqlalchemy import func, cast, Integer +import ckan.model as model +from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher log = logging.getLogger('ckanext.ga-report') + +def _get_month_name(strdate): + import calendar + from time import strptime + d = strptime(strdate, '%Y-%m') + return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year) + + +def _month_details(cls): + '''Returns a list of all the month names''' + months = [] + vals = model.Session.query(cls.period_name).filter(cls.period_name!='All').distinct().all() + for m in vals: + months.append( (m[0], _get_month_name(m[0]))) + return sorted(months, key=operator.itemgetter(0), reverse=True) + + class GaReport(BaseController): + + def csv(self, month): + import csv + + q = model.Session.query(GA_Stat) + if month != 'all': + q = q.filter(GA_Stat.period_name==month) + entries = q.order_by('GA_Stat.period_name, GA_Stat.stat_name, 
GA_Stat.key').all() + + response.headers['Content-Type'] = "text/csv; charset=utf-8" + response.headers['Content-Disposition'] = str('attachment; filename=stats_%s.csv' % (month,)) + + writer = csv.writer(response) + writer.writerow(["Period", "Statistic", "Key", "Value"]) + + for entry in entries: + writer.writerow([entry.period_name.encode('utf-8'), + entry.stat_name.encode('utf-8'), + entry.key.encode('utf-8'), + entry.value.encode('utf-8')]) + def index(self): - return render('index.html') - + + # Get the month details by fetching distinct values and determining the + # month names from the values. + c.months = _month_details(GA_Stat) + + # Work out which month to show, based on query params of the first item + c.month_desc = 'all months' + c.month = request.params.get('month', '') + if c.month: + c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month]) + + q = model.Session.query(GA_Stat).\ + filter(GA_Stat.stat_name=='Totals') + if c.month: + q = q.filter(GA_Stat.period_name==c.month) + entries = q.order_by('ga_stat.key').all() + + def clean_key(key, val): + if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']: + val = "%.2f" % round(float(val), 2) + if key == 'Average time on site': + mins, secs = divmod(float(val), 60) + hours, mins = divmod(mins, 60) + val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val) + if key in ['New visits','Bounce rate (home page)']: + val = "%s%%" % val + if key in ['Total page views', 'Total visits']: + val = int(val) + + return key, val + + c.global_totals = [] + if c.month: + for e in entries: + key, val = clean_key(e.key, e.value) + c.global_totals.append((key, val)) + else: + d = collections.defaultdict(list) + for e in entries: + d[e.key].append(float(e.value)) + for k, v in d.iteritems(): + if k in ['Total page views', 'Total visits']: + v = sum(v) + else: + v = float(sum(v))/len(v) + key, val = clean_key(k,v) + + c.global_totals.append((key, val)) + c.global_totals = sorted(c.global_totals, key=operator.itemgetter(0)) + + keys = { + 'Browser versions': 'browser_versions', + 'Browsers': 'browsers', + 'Operating Systems versions': 'os_versions', + 'Operating Systems': 'os', + 'Social sources': 'social_networks', + 'Languages': 'languages', + 'Country': 'country' + } + + def shorten_name(name, length=60): + return (name[:length] + '..') if len(name) > 60 else name + + def fill_out_url(url): + import urlparse + return urlparse.urljoin(g.site_url, url) + + c.social_referrer_totals, c.social_referrers = [], [] + q = model.Session.query(GA_ReferralStat) + q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q + q = q.order_by('ga_referrer.count::int desc') + for entry in q.all(): + c.social_referrers.append((shorten_name(entry.url), fill_out_url(entry.url), + entry.source,entry.count)) + + q = model.Session.query(GA_ReferralStat.url, + func.sum(GA_ReferralStat.count).label('count')) + q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q + q = q.order_by('count desc').group_by(GA_ReferralStat.url) + for entry in q.all(): + c.social_referrer_totals.append((shorten_name(entry[0]), fill_out_url(entry[0]),'', + entry[1])) + + for k, v in keys.iteritems(): + q = model.Session.query(GA_Stat).\ + filter(GA_Stat.stat_name==k) + if c.month: + entries = [] + q = q.filter(GA_Stat.period_name==c.month).\ + order_by('ga_stat.value::int desc') + + d = collections.defaultdict(int) + for e in q.all(): + d[e.key] += int(e.value) + entries = [] + for key, val in d.iteritems(): + 
entries.append((key,val,)) + entries = sorted(entries, key=operator.itemgetter(1), reverse=True) + + # Get the total for each set of values and then set the value as + # a percentage of the total + if k == 'Social sources': + total = sum([x for n,x in c.global_totals if n == 'Total visits']) + else: + total = sum([num for _,num in entries]) + setattr(c, v, [(k,_percent(v,total)) for k,v in entries ]) + + return render('ga_report/site/index.html') + + +class GaDatasetReport(BaseController): + """ + Displays the pageview and visit count for datasets + with options to filter by publisher and time period. + """ + def publisher_csv(self, month): + ''' + Returns a CSV of each publisher with the total number of dataset + views & visits. + ''' + c.month = month if not month == 'all' else '' + response.headers['Content-Type'] = "text/csv; charset=utf-8" + response.headers['Content-Disposition'] = str('attachment; filename=publishers_%s.csv' % (month,)) + + writer = csv.writer(response) + writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"]) + + for publisher,view,visit in _get_top_publishers(None): + writer.writerow([publisher.title.encode('utf-8'), + publisher.name.encode('utf-8'), + view, + visit, + month]) + + def dataset_csv(self, id='all', month='all'): + ''' + Returns a CSV with the number of views & visits for each dataset. + + :param id: A Publisher ID or None if you want for all + :param month: The time period, or 'all' + ''' + c.month = month if not month == 'all' else '' + if id != 'all': + c.publisher = model.Group.get(id) + if not c.publisher: + abort(404, 'A publisher with that name could not be found') + + packages = self._get_packages(c.publisher) + response.headers['Content-Type'] = "text/csv; charset=utf-8" + response.headers['Content-Disposition'] = \ + str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,)) + + writer = csv.writer(response) + writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Period Name"]) + + for package,view,visit in packages: + writer.writerow([package.title.encode('utf-8'), + package.name.encode('utf-8'), + view, + visit, + month]) + + def publishers(self): + '''A list of publishers and the number of views/visits for each''' + + # Get the month details by fetching distinct values and determining the + # month names from the values. 
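+        # _month_details returns (period value, display name) pairs,
+        # sorted with the most recent month first.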
+ c.months = _month_details(GA_Url) + + # Work out which month to show, based on query params of the first item + c.month = request.params.get('month', '') + c.month_desc = 'all months' + if c.month: + c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month]) + + c.top_publishers = _get_top_publishers() + return render('ga_report/publisher/index.html') + + def _get_packages(self, publisher=None, count=-1): + '''Returns the datasets in order of views''' + if count == -1: + count = sys.maxint + + month = c.month or 'All' + + q = model.Session.query(GA_Url,model.Package)\ + .filter(model.Package.name==GA_Url.package_id)\ + .filter(GA_Url.url.like('/dataset/%')) + if publisher: + q = q.filter(GA_Url.department_id==publisher.name) + q = q.filter(GA_Url.period_name==month) + q = q.order_by('ga_url.pageviews::int desc') + top_packages = [] + for entry,package in q.limit(count): + if package: + top_packages.append((package, entry.pageviews, entry.visits)) + else: + log.warning('Could not find package associated package') + + return top_packages + + def read(self): + ''' + Lists the most popular datasets across all publishers + ''' + return self.read_publisher(None) + + def read_publisher(self, id): + ''' + Lists the most popular datasets for a publisher (or across all publishers) + ''' + count = 20 + + c.publishers = _get_publishers() + + id = request.params.get('publisher', id) + if id and id != 'all': + c.publisher = model.Group.get(id) + if not c.publisher: + abort(404, 'A publisher with that name could not be found') + c.publisher_name = c.publisher.name + c.top_packages = [] # package, dataset_views in c.top_packages + + # Get the month details by fetching distinct values and determining the + # month names from the values. + c.months = _month_details(GA_Url) + + # Work out which month to show, based on query params of the first item + c.month = request.params.get('month', '') + if not c.month: + c.month_desc = 'all months' + else: + c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month]) + + month = c.month or 'All' + c.publisher_page_views = 0 + q = model.Session.query(GA_Url).\ + filter(GA_Url.url=='/publisher/%s' % c.publisher_name) + entry = q.filter(GA_Url.period_name==c.month).first() + c.publisher_page_views = entry.pageviews if entry else 0 + + c.top_packages = self._get_packages(c.publisher, 20) + + return render('ga_report/publisher/read.html') + +def _get_top_publishers(limit=20): + ''' + Returns a list of the top 20 publishers by dataset visits. + (The number to show can be varied with 'limit') + ''' + month = c.month or 'All' + connection = model.Session.connection() + q = """ + select department_id, sum(pageviews::int) views, sum(visits::int) visits + from ga_url + where department_id <> '' + and package_id <> '' + and url like '/dataset/%%' + and period_name=%s + group by department_id order by views desc + """ + if limit: + q = q + " limit %s;" % (limit) + + top_publishers = [] + res = connection.execute(q, month) + for row in res: + g = model.Group.get(row[0]) + if g: + top_publishers.append((g, row[1], row[2])) + return top_publishers + + +def _get_publishers(): + ''' + Returns a list of all publishers. 
Each item is a tuple: + (name, title) + ''' + publishers = [] + for pub in model.Session.query(model.Group).\ + filter(model.Group.type=='publisher').\ + filter(model.Group.state=='active').\ + order_by(model.Group.name): + publishers.append((pub.name, pub.title)) + return publishers + +def _percent(num, total): + p = 100 * float(num)/float(total) + return "%.2f%%" % round(p, 2) + --- a/ckanext/ga_report/download_analytics.py +++ b/ckanext/ga_report/download_analytics.py @@ -1,8 +1,9 @@ +import os import logging import datetime - +import collections from pylons import config - +from ga_model import _normalize_url import ga_model #from ga_client import GA @@ -10,18 +11,31 @@ log = logging.getLogger('ckanext.ga-report') FORMAT_MONTH = '%Y-%m' +MIN_VIEWS = 50 +MIN_VISITS = 20 class DownloadAnalytics(object): '''Downloads and stores analytics info''' - def __init__(self, service=None, profile_id=None): + def __init__(self, service=None, profile_id=None, delete_first=False, + skip_url_stats=False): self.period = config['ga-report.period'] self.service = service self.profile_id = profile_id - - - def all_(self): - self.since_date(datetime.datetime(2010, 1, 1)) + self.delete_first = delete_first + self.skip_url_stats = skip_url_stats + + def specific_month(self, date): + import calendar + + first_of_this_month = datetime.datetime(date.year, date.month, 1) + _, last_day_of_month = calendar.monthrange(int(date.year), int(date.month)) + last_of_this_month = datetime.datetime(date.year, date.month, last_day_of_month) + periods = ((date.strftime(FORMAT_MONTH), + last_day_of_month, + first_of_this_month, last_of_this_month),) + self.download_and_store(periods) + def latest(self): if self.period == 'monthly': @@ -36,13 +50,13 @@ self.download_and_store(periods) - def since_date(self, since_date): + def for_date(self, for_date): assert isinstance(since_date, datetime.datetime) periods = [] # (period_name, period_complete_day, start_date, end_date) if self.period == 'monthly': first_of_the_months_until_now = [] - year = since_date.year - month = since_date.month + year = for_date.year + month = for_date.month now = datetime.datetime.now() first_of_this_month = datetime.datetime(now.year, now.month, 1) while True: @@ -80,27 +94,76 @@ def download_and_store(self, periods): for period_name, period_complete_day, start_date, end_date in periods: - log.info('Downloading Analytics for period "%s" (%s - %s)', + log.info('Period "%s" (%s - %s)', self.get_full_period_name(period_name, period_complete_day), - start_date.strftime('%Y %m %d'), - end_date.strftime('%Y %m %d')) - data = self.download(start_date, end_date) - log.info('Storing Analytics for period "%s"', - self.get_full_period_name(period_name, period_complete_day)) - self.store(period_name, period_complete_day, data) - - - def download(self, start_date, end_date): + start_date.strftime('%Y-%m-%d'), + end_date.strftime('%Y-%m-%d')) + + if self.delete_first: + log.info('Deleting existing Analytics for this period "%s"', + period_name) + ga_model.delete(period_name) + + if not self.skip_url_stats: + # Clean out old url data before storing the new + ga_model.pre_update_url_stats(period_name) + + accountName = config.get('googleanalytics.account') + + log.info('Downloading analytics for dataset views') + data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName) + + log.info('Storing dataset views (%i rows)', len(data.get('url'))) + self.store(period_name, period_complete_day, data, ) + + log.info('Downloading analytics for publisher 
views') + data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName) + + log.info('Storing publisher views (%i rows)', len(data.get('url'))) + self.store(period_name, period_complete_day, data,) + + log.info('Aggregating datasets by publisher') + ga_model.update_publisher_stats(period_name) # about 30 seconds. + + log.info('Downloading and storing analytics for site-wide stats') + self.sitewide_stats( period_name ) + + log.info('Downloading and storing analytics for social networks') + self.update_social_info(period_name, start_date, end_date) + + + def update_social_info(self, period_name, start_date, end_date): + start_date = start_date.strftime('%Y-%m-%d') + end_date = end_date.strftime('%Y-%m-%d') + query = 'ga:hasSocialSourceReferral=~Yes$' + metrics = 'ga:entrances' + sort = '-ga:entrances' + + # Supported query params at + # https://developers.google.com/analytics/devguides/reporting/core/v3/reference + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + filters=query, + start_date=start_date, + metrics=metrics, + sort=sort, + dimensions="ga:landingPagePath,ga:socialNetwork", + max_results=10000, + end_date=end_date).execute() + data = collections.defaultdict(list) + rows = results.get('rows',[]) + for row in rows: + data[_normalize_url(row[0])].append( (row[1], int(row[2]),) ) + ga_model.update_social(period_name, data) + + + def download(self, start_date, end_date, path=None): '''Get data from GA for a given time period''' start_date = start_date.strftime('%Y-%m-%d') end_date = end_date.strftime('%Y-%m-%d') - # url - #query = 'ga:pagePath=~^%s,ga:pagePath=~^%s' % \ - # (PACKAGE_URL, self.resource_url_tag) - query = 'ga:pagePath=~^/dataset/' - #query = 'ga:pagePath=~^/User/' - metrics = 'ga:uniquePageviews' - sort = '-ga:uniquePageviews' + query = 'ga:pagePath=%s$' % path + metrics = 'ga:pageviews, ga:visits' + sort = '-ga:pageviews' # Supported query params at # https://developers.google.com/analytics/devguides/reporting/core/v3/reference @@ -110,35 +173,246 @@ start_date=start_date, metrics=metrics, sort=sort, - end_date=end_date).execute() - self.print_results(results) - -# for entry in GA.ga_query(query_filter=query, -# from_date=start_date, -# metrics=metrics, -# sort=sort, -# to_date=end_date): -# print entry, type(entry) -# import pdb; pdb.set_trace() -# for dim in entry.dimension: -# if dim.name == "ga:pagePath": -# package = dim.value -# count = entry.get_metric( -# 'ga:uniquePageviews').value or 0 -# packages[package] = int(count) - return [] - - def print_results(self, results): - import pprint - pprint.pprint(results) - if results: - print 'Profile: %s' % results.get('profileInfo').get('profileName') - print 'Total results: %s' % results.get('totalResults') - print 'Total Visits: %s' % results.get('rows', [[-1]])[0][0] - else: - print 'No results found' + dimensions="ga:pagePath", + max_results=10000, + end_date=end_date).execute() + + packages = [] + for entry in results.get('rows'): + (loc,pageviews,visits) = entry + url = _normalize_url('http:/' + loc) # strips off domain e.g. 
www.data.gov.uk or data.gov.uk + + if not url.startswith('/dataset/') and not url.startswith('/publisher/'): + # filter out strays like: + # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open + # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate + continue + packages.append( (url, pageviews, visits,) ) # Temporary hack + return dict(url=packages) def store(self, period_name, period_complete_day, data): if 'url' in data: ga_model.update_url_stats(period_name, period_complete_day, data['url']) + def sitewide_stats(self, period_name): + import calendar + year, month = period_name.split('-') + _, last_day_of_month = calendar.monthrange(int(year), int(month)) + + start_date = '%s-01' % period_name + end_date = '%s-%s' % (period_name, last_day_of_month) + funcs = ['_totals_stats', '_social_stats', '_os_stats', + '_locale_stats', '_browser_stats', '_mobile_stats'] + for f in funcs: + log.info('Downloading analytics for %s' % f.split('_')[1]) + getattr(self, f)(start_date, end_date, period_name) + + def _get_results(result_data, f): + data = {} + for result in result_data: + key = f(result) + data[key] = data.get(key,0) + result[1] + return data + + def _totals_stats(self, start_date, end_date, period_name): + """ Fetches distinct totals, total pageviews etc """ + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviews', + sort='-ga:pageviews', + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]}) + + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits', + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + data = { + 'Pages per visit': result_data[0][0], + 'Average time on site': result_data[0][1], + 'New visits': result_data[0][2], + 'Total visits': result_data[0][3], + } + ga_model.update_sitewide_stats(period_name, "Totals", data) + + # Bounces from / or another configurable page. + path = '/%s%s' % (config.get('googleanalytics.account'), + config.get('ga-report.bounce_url', '/')) + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + filters='ga:pagePath==%s' % (path,), + start_date=start_date, + metrics='ga:bounces,ga:pageviews', + dimensions='ga:pagePath', + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + if not result_data or len(result_data) != 1: + log.error('Could not pinpoint the bounces for path: %s. 
Got results: %r', + path, result_data) + return + results = result_data[0] + bounces, total = [float(x) for x in result_data[0][1:]] + pct = 100 * bounces/total + log.info('%d bounces from %d total == %s', bounces, total, pct) + ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': pct}) + + + def _locale_stats(self, start_date, end_date, period_name): + """ Fetches stats about language and country """ + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:language,ga:country", + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + data = {} + for result in result_data: + data[result[0]] = data.get(result[0], 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Languages", data) + + data = {} + for result in result_data: + data[result[1]] = data.get(result[1], 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Country", data) + + + def _social_stats(self, start_date, end_date, period_name): + """ Finds out which social sites people are referred from """ + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:socialNetwork,ga:referralPath", + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + data = {} + for result in result_data: + if not result[0] == '(not set)': + data[result[0]] = data.get(result[0], 0) + int(result[2]) + self._filter_out_long_tail(data, 3) + ga_model.update_sitewide_stats(period_name, "Social sources", data) + + + def _os_stats(self, start_date, end_date, period_name): + """ Operating system stats """ + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:operatingSystem,ga:operatingSystemVersion", + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + data = {} + for result in result_data: + data[result[0]] = data.get(result[0], 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Operating Systems", data) + + data = {} + for result in result_data: + if int(result[2]) >= MIN_VIEWS: + key = "%s %s" % (result[0],result[1]) + data[key] = result[2] + ga_model.update_sitewide_stats(period_name, "Operating Systems versions", data) + + + def _browser_stats(self, start_date, end_date, period_name): + """ Information about browsers and browser versions """ + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:browser,ga:browserVersion", + max_results=10000, + end_date=end_date).execute() + result_data = results.get('rows') + # e.g. 
[u'Firefox', u'19.0', u'20'] + + data = {} + for result in result_data: + data[result[0]] = data.get(result[0], 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Browsers", data) + + data = {} + for result in result_data: + key = "%s %s" % (result[0], self._filter_browser_version(result[0], result[1])) + data[key] = data.get(key, 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Browser versions", data) + + @classmethod + def _filter_browser_version(cls, browser, version_str): + ''' + Simplifies a browser version string if it is detailed. + i.e. groups together Firefox 3.5.1 and 3.5.2 to be just 3. + This is helpful when viewing stats and good to protect privacy. + ''' + ver = version_str + parts = ver.split('.') + if len(parts) > 1: + if parts[1][0] == '0': + ver = parts[0] + else: + ver = "%s" % (parts[0]) + # Special case complex version nums + if browser in ['Safari', 'Android Browser']: + ver = parts[0] + if len(ver) > 2: + num_hidden_digits = len(ver) - 2 + ver = ver[0] + ver[1] + 'X' * num_hidden_digits + return ver + + def _mobile_stats(self, start_date, end_date, period_name): + """ Info about mobile devices """ + + results = self.service.data().ga().get( + ids='ga:' + self.profile_id, + start_date=start_date, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo", + max_results=10000, + end_date=end_date).execute() + + result_data = results.get('rows') + data = {} + for result in result_data: + data[result[0]] = data.get(result[0], 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Mobile brands", data) + + data = {} + for result in result_data: + data[result[1]] = data.get(result[1], 0) + int(result[2]) + self._filter_out_long_tail(data, MIN_VIEWS) + ga_model.update_sitewide_stats(period_name, "Mobile devices", data) + + @classmethod + def _filter_out_long_tail(cls, data, threshold=10): + ''' + Given data which is a frequency distribution, filter out + results which are below a threshold count. This is good to protect + privacy. + ''' + for key, value in data.items(): + if value < threshold: + del data[key] + --- a/ckanext/ga_report/ga_auth.py +++ b/ckanext/ga_report/ga_auth.py @@ -1,3 +1,4 @@ +import os import httplib2 from apiclient.discovery import build from oauth2client.client import flow_from_clientsecrets @@ -7,12 +8,12 @@ from pylons import config -def _prepare_credentials( token_filename, credentials_filename ): +def _prepare_credentials(token_filename, credentials_filename): """ Either returns the user's oauth credentials or uses the credentials file to generate a token (by forcing the user to login in the browser) """ - storage = Storage( token_filename ) + storage = Storage(token_filename) credentials = storage.get() if credentials is None or credentials.invalid: @@ -23,7 +24,8 @@ return credentials -def init_service( token_file, credentials_file ): + +def init_service(token_file, credentials_file): """ Given a file containing the user's oauth token (and another with credentials in case we need to generate the token) will return a @@ -40,15 +42,28 @@ def get_profile_id(service): """ Get the profile ID for this user and the service specified by the - 'googleanalytics.id' configuration option. + 'googleanalytics.id' configuration option. 
This function iterates + over all of the accounts available to the user who invoked the + service to find one where the account name matches (in case the + user has several). """ accounts = service.management().accounts().list().execute() if not accounts.get('items'): return None - accountId = accounts.get('items')[0].get('id') + accountName = config.get('googleanalytics.account') + if not accountName: + raise Exception('googleanalytics.account needs to be configured') webPropertyId = config.get('googleanalytics.id') + if not webPropertyId: + raise Exception('googleanalytics.id needs to be configured') + for acc in accounts.get('items'): + if acc.get('name') == accountName: + accountId = acc.get('id') + + webproperties = service.management().webproperties().list(accountId=accountId).execute() + profiles = service.management().profiles().list( accountId=accountId, webPropertyId=webPropertyId).execute() @@ -56,3 +71,4 @@ return profiles.get('items')[0].get('id') return None + --- a/ckanext/ga_report/ga_model.py +++ b/ckanext/ga_report/ga_model.py @@ -1,32 +1,102 @@ import re import uuid -from sqlalchemy import Table, Column, MetaData +from sqlalchemy import Table, Column, MetaData, ForeignKey from sqlalchemy import types -from sqlalchemy.sql import select, text +from sqlalchemy.sql import select +from sqlalchemy.orm import mapper, relation from sqlalchemy import func import ckan.model as model -from ckan.model.types import JsonType from ckan.lib.base import * + +log = __import__('logging').getLogger(__name__) def make_uuid(): return unicode(uuid.uuid4()) +metadata = MetaData() + +class GA_Url(object): + + def __init__(self, **kwargs): + for k,v in kwargs.items(): + setattr(self, k, v) + +url_table = Table('ga_url', metadata, + Column('id', types.UnicodeText, primary_key=True, + default=make_uuid), + Column('period_name', types.UnicodeText), + Column('period_complete_day', types.Integer), + Column('pageviews', types.UnicodeText), + Column('visits', types.UnicodeText), + Column('url', types.UnicodeText), + Column('department_id', types.UnicodeText), + Column('package_id', types.UnicodeText), + ) +mapper(GA_Url, url_table) + + +class GA_Stat(object): + + def __init__(self, **kwargs): + for k,v in kwargs.items(): + setattr(self, k, v) + +stat_table = Table('ga_stat', metadata, + Column('id', types.UnicodeText, primary_key=True, + default=make_uuid), + Column('period_name', types.UnicodeText), + Column('stat_name', types.UnicodeText), + Column('key', types.UnicodeText), + Column('value', types.UnicodeText), ) +mapper(GA_Stat, stat_table) + + +class GA_Publisher(object): + + def __init__(self, **kwargs): + for k,v in kwargs.items(): + setattr(self, k, v) + +pub_table = Table('ga_publisher', metadata, + Column('id', types.UnicodeText, primary_key=True, + default=make_uuid), + Column('period_name', types.UnicodeText), + Column('publisher_name', types.UnicodeText), + Column('views', types.UnicodeText), + Column('visits', types.UnicodeText), + Column('toplevel', types.Boolean, default=False), + Column('subpublishercount', types.Integer, default=0), + Column('parent', types.UnicodeText), +) +mapper(GA_Publisher, pub_table) + + +class GA_ReferralStat(object): + + def __init__(self, **kwargs): + for k,v in kwargs.items(): + setattr(self, k, v) + +referrer_table = Table('ga_referrer', metadata, + Column('id', types.UnicodeText, primary_key=True, + default=make_uuid), + Column('period_name', types.UnicodeText), + Column('source', types.UnicodeText), + Column('url', types.UnicodeText), + Column('count', 
types.Integer), + ) +mapper(GA_ReferralStat, referrer_table) + + + def init_tables(): - metadata = MetaData() - package_stats = Table('ga_url', metadata, - Column('id', types.UnicodeText, primary_key=True, default=make_uuid), - Column('period_name', types.UnicodeText), - Column('period_complete_day', types.Integer), - Column('visits', types.Integer), - Column('group_id', types.String(60)), - Column('next_page', JsonType), - ) metadata.create_all(model.meta.engine) cached_tables = {} + def get_table(name): if name not in cached_tables: @@ -43,10 +113,10 @@ >>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices') '/dataset/weekly_fuel_prices' ''' - url = re.sub('https?://(www\.)?data.gov.uk', '', url) - return url - -def _get_department_id_of_url(url): + return '/' + '/'.join(url.split('/')[3:]) + + +def _get_package_and_publisher(url): # e.g. /dataset/fuel_prices # e.g. /dataset/fuel_prices/resource/e63380d4 dataset_match = re.match('/dataset/([^/]+)(/.*)?', url) @@ -56,36 +126,253 @@ if dataset: publisher_groups = dataset.get_groups('publisher') if publisher_groups: - return publisher_groups[0].id - -def update_url_stats(period_name, period_complete_day, url_data): - table = get_table('ga_url') - connection = model.Session.connection() - for url, views, next_page in url_data: - url = _normalize_url(url) - department_id = _get_department_id_of_url(url) - # see if the row for this url & month is in the table already - s = select([func.count(id_col)], - table.c.period_name == period_name, - table.c.url == url) - count = connection.execute(s).fetchone() - if count and count[0]: - # update the row - connection.execute(table.update()\ - .where(table.c.period_name == period_name, - table.c.url == url)\ - .values(period_complete_day=period_complete_day, - views=views, - department_id=department_id, - next_page=next_page)) + return dataset_ref,publisher_groups[0].name + return dataset_ref, None + else: + publisher_match = re.match('/publisher/([^/]+)(/.*)?', url) + if publisher_match: + return None, publisher_match.groups()[0] + return None, None + +def update_sitewide_stats(period_name, stat_name, data): + for k,v in data.iteritems(): + item = model.Session.query(GA_Stat).\ + filter(GA_Stat.period_name==period_name).\ + filter(GA_Stat.key==k).\ + filter(GA_Stat.stat_name==stat_name).first() + if item: + item.period_name = period_name + item.key = k + item.value = v + model.Session.add(item) else: # create the row - values = {'period_name': period_name, + values = {'id': make_uuid(), + 'period_name': period_name, + 'key': k, + 'value': v, + 'stat_name': stat_name + } + model.Session.add(GA_Stat(**values)) + model.Session.commit() + + +def pre_update_url_stats(period_name): + model.Session.query(GA_Url).\ + filter(GA_Url.period_name==period_name).delete() + model.Session.query(GA_Url).\ + filter(GA_Url.period_name=='All').delete() + + +def update_url_stats(period_name, period_complete_day, url_data): + ''' + Given a list of urls and number of hits for each during a given period, + stores them in GA_Url under the period and recalculates the totals for + the 'All' period. 
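+
+    Each item in url_data is expected to be a (url, pageviews, visits)
+    tuple, as produced by DownloadAnalytics.download().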
+ ''' + for url, views, visits in url_data: + package, publisher = _get_package_and_publisher(url) + + + item = model.Session.query(GA_Url).\ + filter(GA_Url.period_name==period_name).\ + filter(GA_Url.url==url).first() + if item: + item.pageviews = item.pageviews + views + item.visits = item.visits + visits + if not item.package_id: + item.package_id = package + if not item.department_id: + item.department_id = publisher + model.Session.add(item) + else: + values = {'id': make_uuid(), + 'period_name': period_name, 'period_complete_day': period_complete_day, 'url': url, - 'views': views, - 'department_id': department_id, - 'next_page': next_page} - connection.execute(stats.insert()\ - .values(**values)) - + 'pageviews': views, + 'visits': visits, + 'department_id': publisher, + 'package_id': package + } + model.Session.add(GA_Url(**values)) + model.Session.commit() + + if package: + old_pageviews, old_visits = 0, 0 + old = model.Session.query(GA_Url).\ + filter(GA_Url.period_name=='All').\ + filter(GA_Url.url==url).all() + old_pageviews = sum([int(o.pageviews) for o in old]) + old_visits = sum([int(o.visits) for o in old]) + + entries = model.Session.query(GA_Url).\ + filter(GA_Url.period_name!='All').\ + filter(GA_Url.url==url).all() + values = {'id': make_uuid(), + 'period_name': 'All', + 'period_complete_day': 0, + 'url': url, + 'pageviews': sum([int(e.pageviews) for e in entries]) + old_pageviews, + 'visits': sum([int(e.visits or 0) for e in entries]) + old_visits, + 'department_id': publisher, + 'package_id': package + } + + model.Session.add(GA_Url(**values)) + model.Session.commit() + + + + +def update_social(period_name, data): + # Clean up first. + model.Session.query(GA_ReferralStat).\ + filter(GA_ReferralStat.period_name==period_name).delete() + + for url,data in data.iteritems(): + for entry in data: + source = entry[0] + count = entry[1] + + item = model.Session.query(GA_ReferralStat).\ + filter(GA_ReferralStat.period_name==period_name).\ + filter(GA_ReferralStat.source==source).\ + filter(GA_ReferralStat.url==url).first() + if item: + item.count = item.count + count + model.Session.add(item) + else: + # create the row + values = {'id': make_uuid(), + 'period_name': period_name, + 'source': source, + 'url': url, + 'count': count, + } + model.Session.add(GA_ReferralStat(**values)) + model.Session.commit() + +def update_publisher_stats(period_name): + """ + Updates the publisher stats from the data retrieved for /dataset/* + and /publisher/*. Will run against each dataset and generates the + totals for the entire tree beneath each publisher. 
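+    For each publisher a GA_Publisher row is written with the aggregated
+    views and visits, its parent, its sub-publisher count and whether it
+    is a top-level publisher.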
+ """ + toplevel = get_top_level() + publishers = model.Session.query(model.Group).\ + filter(model.Group.type=='publisher').\ + filter(model.Group.state=='active').all() + for publisher in publishers: + views, visits, subpub = update_publisher(period_name, publisher, publisher.name) + parent, parents = '', publisher.get_groups('publisher') + if parents: + parent = parents[0].name + item = model.Session.query(GA_Publisher).\ + filter(GA_Publisher.period_name==period_name).\ + filter(GA_Publisher.publisher_name==publisher.name).first() + if item: + item.views = views + item.visits = visits + item.publisher_name = publisher.name + item.toplevel = publisher in toplevel + item.subpublishercount = subpub + item.parent = parent + model.Session.add(item) + else: + # create the row + values = {'id': make_uuid(), + 'period_name': period_name, + 'publisher_name': publisher.name, + 'views': views, + 'visits': visits, + 'toplevel': publisher in toplevel, + 'subpublishercount': subpub, + 'parent': parent + } + model.Session.add(GA_Publisher(**values)) + model.Session.commit() + + +def update_publisher(period_name, pub, part=''): + views,visits,subpub = 0, 0, 0 + for publisher in go_down_tree(pub): + subpub = subpub + 1 + items = model.Session.query(GA_Url).\ + filter(GA_Url.period_name==period_name).\ + filter(GA_Url.department_id==publisher.name).all() + for item in items: + views = views + int(item.pageviews) + visits = visits + int(item.visits) + + return views, visits, (subpub-1) + + +def get_top_level(): + '''Returns the top level publishers.''' + return model.Session.query(model.Group).\ + outerjoin(model.Member, model.Member.table_id == model.Group.id and \ + model.Member.table_name == 'group' and \ + model.Member.state == 'active').\ + filter(model.Member.id==None).\ + filter(model.Group.type=='publisher').\ + order_by(model.Group.name).all() + +def get_children(publisher): + '''Finds child publishers for the given publisher (object). (Not recursive)''' + from ckan.model.group import HIERARCHY_CTE + return model.Session.query(model.Group).\ + from_statement(HIERARCHY_CTE).params(id=publisher.id, type='publisher').\ + all() + +def go_down_tree(publisher): + '''Provided with a publisher object, it walks down the hierarchy and yields each publisher, + including the one you supply.''' + yield publisher + for child in get_children(publisher): + for grandchild in go_down_tree(child): + yield grandchild + +def delete(period_name): + ''' + Deletes table data for the specified period, or specify 'all' + for all periods. + ''' + for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat): + q = model.Session.query(object_type) + if period_name != 'all': + q = q.filter_by(period_name=period_name) + q.delete() + model.Session.commit() + +def get_score_for_dataset(dataset_name): + ''' + Returns a "current popularity" score for a dataset, + based on how many views it has had recently. 
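+
+    The current and previous monthly periods are considered, with the
+    earlier period discounted by 50%. Views are normalised to views per
+    day using period_complete_day (assumed to be 15 days if it was not
+    recorded), so a month with only a few days of data is not penalised.
+    The result is scaled by 100 and returned as an integer.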
+ ''' + import datetime + now = datetime.datetime.now() + last_month = now - datetime.timedelta(days=30) + period_names = ['%s-%02d' % (last_month.year, last_month.month), + '%s-%02d' % (now.year, now.month), + ] + + score = 0 + for period_name in period_names: + score /= 2 # previous periods are discounted by 50% + entry = model.Session.query(GA_Url)\ + .filter(GA_Url.period_name==period_name)\ + .filter(GA_Url.package_id==dataset_name).first() + # score + if entry: + views = float(entry.pageviews) + if entry.period_complete_day: + views_per_day = views / entry.period_complete_day + else: + views_per_day = views / 15 # guess + score += views_per_day + + score = int(score * 100) + log.debug('Popularity %s: %s', score, dataset_name) + return score + --- /dev/null +++ b/ckanext/ga_report/helpers.py @@ -1,1 +1,116 @@ +import logging +import operator +import ckan.lib.base as base +import ckan.model as model +from ckan.logic import get_action + +from ckanext.ga_report.ga_model import GA_Url, GA_Publisher +from ckanext.ga_report.controller import _get_publishers +_log = logging.getLogger(__name__) + +def popular_datasets(count=10): + import random + + publisher = None + publishers = _get_publishers(30) + total = len(publishers) + while not publisher or not datasets: + rand = random.randrange(0, total) + publisher = publishers[rand][0] + if not publisher.state == 'active': + publisher = None + continue + datasets = _datasets_for_publisher(publisher, 10)[:count] + + ctx = { + 'datasets': datasets, + 'publisher': publisher + } + return base.render_snippet('ga_report/ga_popular_datasets.html', **ctx) + +def single_popular_dataset(top=20): + '''Returns a random dataset from the most popular ones. + + :param top: the number of top datasets to select from + ''' + import random + + top_datasets = model.Session.query(GA_Url).\ + filter(GA_Url.url.like('/dataset/%')).\ + order_by('ga_url.pageviews::int desc') + num_top_datasets = top_datasets.count() + + dataset = None + if num_top_datasets: + count = 0 + while not dataset: + rand = random.randrange(0, min(top, num_top_datasets)) + ga_url = top_datasets[rand] + dataset = model.Package.get(ga_url.url[len('/dataset/'):]) + if dataset and not dataset.state == 'active': + dataset = None + count += 1 + if count > 10: + break + if not dataset: + # fallback + dataset = model.Session.query(model.Package)\ + .filter_by(state='active').first() + if not dataset: + return None + dataset_dict = get_action('package_show')({'model': model, + 'session': model.Session, + 'validate': False}, + {'id':dataset.id}) + return dataset_dict + +def single_popular_dataset_html(top=20): + dataset_dict = single_popular_dataset(top) + groups = package.get('groups', []) + publishers = [ g for g in groups if g.get('type') == 'publisher' ] + publisher = publishers[0] if publishers else {'name':'', 'title': ''} + context = { + 'dataset': dataset_dict, + 'publisher': publisher_dict + } + return base.render_snippet('ga_report/ga_popular_single.html', **context) + + +def most_popular_datasets(publisher, count=20): + + if not publisher: + _log.error("No valid publisher passed to 'most_popular_datasets'") + return "" + + results = _datasets_for_publisher(publisher, count) + + ctx = { + 'dataset_count': len(results), + 'datasets': results, + + 'publisher': publisher + } + + return base.render_snippet('ga_report/publisher/popular.html', **ctx) + +def _datasets_for_publisher(publisher, count): + datasets = {} + entries = model.Session.query(GA_Url).\ + 
filter(GA_Url.department_id==publisher.name).\ + filter(GA_Url.url.like('/dataset/%')).\ + order_by('ga_url.pageviews::int desc').all() + for entry in entries: + if len(datasets) < count: + p = model.Package.get(entry.url[len('/dataset/'):]) + if not p in datasets: + datasets[p] = {'views':0, 'visits': 0} + datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews) + datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits) + + results = [] + for k, v in datasets.iteritems(): + results.append((k,v['views'],v['visits'])) + + return sorted(results, key=operator.itemgetter(1), reverse=True) + --- a/ckanext/ga_report/plugin.py +++ b/ckanext/ga_report/plugin.py @@ -1,25 +1,73 @@ import logging import ckan.lib.helpers as h +import ckan.plugins as p from ckan.plugins import implements, toolkit -import gasnippet -import commands -import dbutil + +from ckanext.ga_report.helpers import (most_popular_datasets, + popular_datasets, + single_popular_dataset) log = logging.getLogger('ckanext.ga-report') -class GoogleAnalyticsPlugin(p.SingletonPlugin): +class GAReportPlugin(p.SingletonPlugin): implements(p.IConfigurer, inherit=True) implements(p.IRoutes, inherit=True) + implements(p.ITemplateHelpers, inherit=True) def update_config(self, config): toolkit.add_template_directory(config, 'templates') toolkit.add_public_directory(config, 'public') + def get_helpers(self): + """ + A dictionary of extra helpers that will be available to provide + ga report info to templates. + """ + return { + 'ga_report_installed': lambda: True, + 'popular_datasets': popular_datasets, + 'most_popular_datasets': most_popular_datasets, + 'single_popular_dataset': single_popular_dataset + } + def after_map(self, map): + # GaReport map.connect( - '/data/analytics/index', - controller='ckanext.ga-report.controller:GaReport', + '/data/site-usage', + controller='ckanext.ga_report.controller:GaReport', action='index' + ) + map.connect( + '/data/site-usage/data_{month}.csv', + controller='ckanext.ga_report.controller:GaReport', + action='csv' + ) + + # GaDatasetReport + map.connect( + '/data/site-usage/publisher', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='publishers' + ) + map.connect( + '/data/site-usage/publishers_{month}.csv', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='publisher_csv' + ) + map.connect( + '/data/site-usage/dataset/datasets_{id}_{month}.csv', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='dataset_csv' + ) + map.connect( + '/data/site-usage/dataset', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='read' + ) + map.connect( + '/data/site-usage/dataset/{id}', + controller='ckanext.ga_report.controller:GaDatasetReport', + action='read_publisher' ) return map --- a/ckanext/ga_report/report_model.py +++ /dev/null --- /dev/null +++ b/ckanext/ga_report/templates/ga_report/ga_popular_datasets.html @@ -1,1 +1,27 @@ + + + + + + + + --- /dev/null +++ b/ckanext/ga_report/templates/ga_report/ga_popular_single.html @@ -1,1 +1,31 @@ + + +
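
For reference, a rough standalone sketch of the month-length handling behind
this commit. The names ``days_in_month``, ``popularity_score`` and the
in-memory ``period_records`` mapping are hypothetical stand-ins for the
``GA_Url`` rows queried by ``get_score_for_dataset`` in ga_model.py; only the
50% discounting and the views-per-day arithmetic mirror the patched code::

    import calendar
    import datetime

    def days_in_month(year, month):
        # calendar.monthrange gives (weekday of the 1st, number of days)
        return calendar.monthrange(year, month)[1]

    def popularity_score(period_records, now=None):
        '''period_records maps 'YYYY-MM' to (pageviews, period_complete_day);
        period_complete_day may be None if it was never recorded.'''
        now = now or datetime.datetime.now()
        last_month = now - datetime.timedelta(days=30)
        period_names = ['%s-%02d' % (last_month.year, last_month.month),
                        '%s-%02d' % (now.year, now.month)]
        score = 0.0
        for period_name in period_names:
            score /= 2  # earlier periods are discounted by 50%
            views, complete_days = period_records.get(period_name, (0, None))
            # Normalise to views per day so a partly-complete month is
            # comparable; fall back to an assumed 15 days if unknown.
            score += float(views) / (complete_days or 15)
        return int(score * 100)

    if __name__ == '__main__':
        print days_in_month(2012, 11)                                   # 30
        print popularity_score({'2012-10': (310, 31), '2012-11': (90, 9)},
                               now=datetime.datetime(2012, 11, 9))      # 1500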