From: Alex Sadleir Date: Fri, 28 Feb 2014 05:34:19 +0000 Subject: Fixes to make this work on not-data.gov.uk X-Git-Url: https://maxious.lambdacomplex.org/git/?p=ckanext-ga-report.git&a=commitdiff&h=e56e17d15791ab06ab5fc62d10fedcf302cf1141 --- Fixes to make this work on not-data.gov.uk --- --- a/ckanext/ga_report/command.py +++ b/ckanext/ga_report/command.py @@ -52,9 +52,7 @@ assuming it is correct. """ from ga_auth import init_service - init_service('token.dat', - self.args[0] if self.args - else 'credentials.json') + init_service('token.dat', 'credentials.json') class FixTimePeriods(CkanCommand): """ @@ -115,6 +113,7 @@ default=False, dest='skip_url_stats', help='Skip the download of URL data - just do site-wide stats') + self.token = "" def command(self): self._load_config() @@ -129,14 +128,14 @@ return try: - svc = init_service(ga_token_filepath, None) + self.token, svc = init_service(ga_token_filepath, None) except TypeError: print ('Have you correctly run the getauthtoken task and ' 'specified the correct token file in the CKAN config under ' '"googleanalytics.token.filepath"?') return - downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc), + downloader = DownloadAnalytics(svc, self.token, profile_id=get_profile_id(svc), delete_first=self.options.delete_first, skip_url_stats=self.options.skip_url_stats) --- a/ckanext/ga_report/controller.py +++ b/ckanext/ga_report/controller.py @@ -211,7 +211,7 @@ graph_dict = {} for stat in graph_query: graph_dict[ stat.key ] = graph_dict.get(stat.key,{ - 'name':stat.key, + 'name':stat.key, 'raw': {} }) graph_dict[ stat.key ]['raw'][stat.period_name] = float(stat.value) @@ -304,7 +304,9 @@ graph_data = _get_top_publishers_graph() c.top_publishers_graph = json.dumps( _to_rickshaw(graph_data) ) - return render('ga_report/publisher/index.html') + x = render('ga_report/publisher/index.html') + + return x def _get_packages(self, publisher=None, month='', count=-1): '''Returns the datasets in order of views''' @@ -412,7 +414,7 @@ def _to_rickshaw(data, percentageMode=False): if data==[]: return data - # x-axis is every month in c.months. Note that data might not exist + # x-axis is every month in c.months. Note that data might not exist # for entire history, eg. 
for recently-added datasets x_axis = [x[0] for x in c.months] x_axis.reverse() # Ascending order @@ -444,10 +446,10 @@ for i in range(len(x_axis)): x = _get_unix_epoch(x_axis[i]) y = 0 - for series in others: + for series in others: y += series['data'][i]['y'] data_other.append({'x':x,'y':y}) - data.append({ + data.append({ 'name':'Other', 'data': data_other }) @@ -505,8 +507,8 @@ # Query for a history graph of these department ids q = model.Session.query( - GA_Url.department_id, - GA_Url.period_name, + GA_Url.department_id, + GA_Url.period_name, func.sum(cast(GA_Url.pageviews,sqlalchemy.types.INT)))\ .filter( GA_Url.department_id.in_(department_ids) )\ .filter( GA_Url.url.like('/dataset/%') )\ @@ -529,7 +531,7 @@ ''' publishers = [] for pub in model.Session.query(model.Group).\ - filter(model.Group.type=='publisher').\ + filter(model.Group.type=='organization').\ filter(model.Group.state=='active').\ order_by(model.Group.name): publishers.append((pub.name, pub.title)) --- a/ckanext/ga_report/download_analytics.py +++ b/ckanext/ga_report/download_analytics.py @@ -3,6 +3,8 @@ import datetime import httplib import collections +import requests +import json from pylons import config from ga_model import _normalize_url import ga_model @@ -19,13 +21,14 @@ class DownloadAnalytics(object): '''Downloads and stores analytics info''' - def __init__(self, service=None, profile_id=None, delete_first=False, + def __init__(self, service=None, token=None, profile_id=None, delete_first=False, skip_url_stats=False): self.period = config['ga-report.period'] self.service = service self.profile_id = profile_id self.delete_first = delete_first self.skip_url_stats = skip_url_stats + self.token = token def specific_month(self, date): import calendar @@ -118,13 +121,13 @@ accountName = config.get('googleanalytics.account') log.info('Downloading analytics for dataset views') - data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName) + data = self.download(start_date, end_date, '~^/dataset/[a-z0-9-_]+') log.info('Storing dataset views (%i rows)', len(data.get('url'))) self.store(period_name, period_complete_day, data, ) log.info('Downloading analytics for publisher views') - data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName) + data = self.download(start_date, end_date, '~^/organization/[a-z0-9-_]+') log.info('Storing publisher views (%i rows)', len(data.get('url'))) self.store(period_name, period_complete_day, data,) @@ -150,21 +153,31 @@ metrics = 'ga:entrances' sort = '-ga:entrances' - # Supported query params at - # https://developers.google.com/analytics/devguides/reporting/core/v3/reference - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - filters=query, - start_date=start_date, - metrics=metrics, - sort=sort, - dimensions="ga:landingPagePath,ga:socialNetwork", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. 
+ headers = {'authorization': 'Bearer ' + self.token} + + args = dict(ids='ga:' + self.profile_id, + filters=query, + metrics=metrics, + sort=sort, + dimensions="ga:landingPagePath,ga:socialNetwork", + max_results=10000) + + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + + data = collections.defaultdict(list) rows = results.get('rows',[]) for row in rows: - url = _normalize_url('http:/' + row[0]) + url = row[0] data[url].append( (row[1], int(row[2]),) ) ga_model.update_social(period_name, data) @@ -179,28 +192,34 @@ # Supported query params at # https://developers.google.com/analytics/devguides/reporting/core/v3/reference - try: - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - filters=query, - start_date=start_date, - metrics=metrics, - sort=sort, - dimensions="ga:pagePath", - max_results=10000, - end_date=end_date).execute() - except httplib.BadStatusLine: - log.error(u"Failed to download data=> ids: ga:{0}, filters: {1}, start_date: {2}, end_date: {3}, metrics: {4}, sort: {5}, dimensions: ga:pagePath".format( - self.profile_id, query, start_date, end_date, metrics, sort )) + # https://ga-dev-tools.appspot.com/explorer/ + try: + args = {} + args["sort"] = "-ga:pageviews" + args["max-results"] = 100000 + args["dimensions"] = "ga:pagePath" + args["start-date"] = start_date + args["end-date"] = end_date + args["metrics"] = metrics + args["ids"] = "ga:" + self.profile_id + args["filters"] = query + args["alt"] = "json" + print args + results = self._get_json(args) + + except Exception, e: + log.exception(e) return dict(url=[]) packages = [] log.info("There are %d results" % results['totalResults']) - for entry in results.get('rows'): + if results['totalResults'] > 0: + for entry in results.get('rows'): (loc,pageviews,visits) = entry - url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk - - if not url.startswith('/dataset/') and not url.startswith('/publisher/'): + #url = _normalize_url('http:/' + loc) # strips off domain e.g. 
www.data.gov.uk or data.gov.uk + url = loc + #print url + if not url.startswith('/dataset/') and not url.startswith('/organization/'): # filter out strays like: # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate @@ -232,25 +251,78 @@ data[key] = data.get(key,0) + result[1] return data + def _get_json(self, params, prev_fail=False): + ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', '')) + if not ga_token_filepath: + print 'ERROR: In the CKAN config you need to specify the filepath of the ' \ + 'Google Analytics token file under key: googleanalytics.token.filepath' + return + + log.info("Trying to refresh our OAuth token") + try: + from ga_auth import init_service + self.token, svc = init_service(ga_token_filepath, None) + log.info("OAuth token refreshed") + except Exception, auth_exception: + log.error("Oauth refresh failed") + log.exception(auth_exception) + return + + try: + headers = {'authorization': 'Bearer ' + self.token} + r = requests.get("https://www.googleapis.com/analytics/v3/data/ga", params=params, headers=headers) + if r.status_code != 200: + log.info("STATUS: %s" % (r.status_code,)) + log.info("CONTENT: %s" % (r.content,)) + raise Exception("Request with params: %s failed" % params) + + return json.loads(r.content) + except Exception, e: + log.exception(e) + + return dict(url=[]) + def _totals_stats(self, start_date, end_date, period_name, period_complete_day): """ Fetches distinct totals, total pageviews etc """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - max_results=10000, - end_date=end_date).execute() + try: + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["metrics"] = "ga:pageviews" + args["sort"] = "-ga:pageviews" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]}, period_complete_day) - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits', - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["metrics"] = "ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = { 'Pages per visit': result_data[0][0], @@ -261,16 +333,29 @@ ga_model.update_sitewide_stats(period_name, "Totals", data, period_complete_day) # Bounces from / or another configurable page. 
- path = '/%s%s' % (config.get('googleanalytics.account'), - config.get('ga-report.bounce_url', '/')) - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - filters='ga:pagePath==%s' % (path,), - start_date=start_date, - metrics='ga:visitBounceRate', - dimensions='ga:pagePath', - max_results=10000, - end_date=end_date).execute() + path = '/' #% (config.get('googleanalytics.account'), config.get('ga-report.bounce_url', '/')) + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["filters"] = 'ga:pagePath==%s' % (path,) + args["dimensions"] = 'ga:pagePath' + args["metrics"] = "ga:visitBounceRate" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') if not result_data or len(result_data) != 1: log.error('Could not pinpoint the bounces for path: %s. Got results: %r', @@ -286,14 +371,28 @@ def _locale_stats(self, start_date, end_date, period_name, period_complete_day): """ Fetches stats about language and country """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:language,ga:country", - max_results=10000, - end_date=end_date).execute() + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["dimensions"] = "ga:language,ga:country" + args["metrics"] = "ga:pageviews" + args["sort"] = "-ga:pageviews" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} for result in result_data: @@ -314,15 +413,27 @@ data = {} - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - filters='ga:eventAction==download', - metrics='ga:totalEvents', - sort='-ga:totalEvents', - dimensions="ga:eventLabel", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. 
+ headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["filters"] = 'ga:eventAction==download' + args["dimensions"] = "ga:eventLabel" + args["metrics"] = "ga:totalEvents" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') if not result_data: # We may not have data for this time period, so we need to bail @@ -361,15 +472,25 @@ log.info('Associating downloads of resource URLs with their respective datasets') process_result_data(results.get('rows')) - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - filters='ga:eventAction==download-cache', - metrics='ga:totalEvents', - sort='-ga:totalEvents', - dimensions="ga:eventLabel", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + filters='ga:eventAction==download-cache', + metrics='ga:totalEvents', + sort='-ga:totalEvents', + dimensions="ga:eventLabel", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + log.info('Associating downloads of cache resource URLs with their respective datasets') process_result_data(results.get('rows'), cached=False) @@ -378,14 +499,25 @@ def _social_stats(self, start_date, end_date, period_name, period_complete_day): """ Finds out which social sites people are referred from """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:socialNetwork,ga:referralPath", - max_results=10000, - end_date=end_date).execute() + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:socialNetwork,ga:referralPath", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} for result in result_data: @@ -397,14 +529,24 @@ def _os_stats(self, start_date, end_date, period_name, period_complete_day): """ Operating system stats """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:operatingSystem,ga:operatingSystemVersion", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. 
+ headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:operatingSystem,ga:operatingSystemVersion", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} for result in result_data: @@ -422,14 +564,27 @@ def _browser_stats(self, start_date, end_date, period_name, period_complete_day): """ Information about browsers and browser versions """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:browser,ga:browserVersion", - max_results=10000, - end_date=end_date).execute() + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:browser,ga:browserVersion", + max_results=10000) + + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + + result_data = results.get('rows') # e.g. [u'Firefox', u'19.0', u'20'] @@ -471,14 +626,24 @@ def _mobile_stats(self, start_date, end_date, period_name, period_complete_day): """ Info about mobile devices """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. 
+ headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} --- a/ckanext/ga_report/ga_auth.py +++ b/ckanext/ga_report/ga_auth.py @@ -36,7 +36,7 @@ credentials = _prepare_credentials(token_file, credentials_file) http = credentials.authorize(http) # authorize the http object - return build('analytics', 'v3', http=http) + return credentials.access_token, build('analytics', 'v3', http=http) def get_profile_id(service): --- a/ckanext/ga_report/ga_model.py +++ b/ckanext/ga_report/ga_model.py @@ -114,7 +114,7 @@ >>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices') '/dataset/weekly_fuel_prices' ''' - return '/' + '/'.join(url.split('/')[3:]) + return url #'/' + '/'.join(url.split('/')[3:]) def _get_package_and_publisher(url): @@ -125,12 +125,12 @@ dataset_ref = dataset_match.groups()[0] dataset = model.Package.get(dataset_ref) if dataset: - publisher_groups = dataset.get_groups('publisher') + publisher_groups = dataset.get_groups('organization') if publisher_groups: return dataset_ref,publisher_groups[0].name return dataset_ref, None else: - publisher_match = re.match('/publisher/([^/]+)(/.*)?', url) + publisher_match = re.match('/organization/([^/]+)(/.*)?', url) if publisher_match: return None, publisher_match.groups()[0] return None, None @@ -323,11 +323,11 @@ """ toplevel = get_top_level() publishers = model.Session.query(model.Group).\ - filter(model.Group.type=='publisher').\ + filter(model.Group.type=='organization').\ filter(model.Group.state=='active').all() for publisher in publishers: views, visits, subpub = update_publisher(period_name, publisher, publisher.name) - parent, parents = '', publisher.get_groups('publisher') + parent, parents = '', publisher.get_parent_groups(type='organization') if parents: parent = parents[0].name item = model.Session.query(GA_Publisher).\ @@ -377,7 +377,7 @@ model.Member.table_name == 'group' and \ model.Member.state == 'active').\ filter(model.Member.id==None).\ - filter(model.Group.type=='publisher').\ + filter(model.Group.type=='organization').\ order_by(model.Group.name).all() def get_children(publisher): --- a/ckanext/ga_report/helpers.py +++ b/ckanext/ga_report/helpers.py @@ -71,7 +71,7 @@ def single_popular_dataset_html(top=20): dataset_dict = single_popular_dataset(top) groups = package.get('groups', []) - publishers = [ g for g in groups if g.get('type') == 'publisher' ] + publishers = [ g for g in groups if g.get('type') == 'organization' ] publisher = publishers[0] if publishers else {'name':'', 'title': ''} context = { 'dataset': dataset_dict, @@ -118,7 +118,7 @@ if not p in datasets: datasets[p] = {'views':0, 'visits': 0} - + datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews) datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits) --- a/ckanext/ga_report/plugin.py +++ b/ckanext/ga_report/plugin.py @@ -35,49 +35,49 @@ def after_map(self, map): # GaReport map.connect( - '/data/site-usage', + '/site-usage', controller='ckanext.ga_report.controller:GaReport', action='index' ) map.connect( - '/data/site-usage/data_{month}.csv', + '/site-usage_{month}.csv', controller='ckanext.ga_report.controller:GaReport', action='csv' ) 
map.connect( - '/data/site-usage/downloads', + '/site-usage/downloads', controller='ckanext.ga_report.controller:GaReport', action='downloads' ) map.connect( - '/data/site-usage/downloads_{month}.csv', + '/site-usage/downloads_{month}.csv', controller='ckanext.ga_report.controller:GaReport', action='csv_downloads' ) # GaDatasetReport map.connect( - '/data/site-usage/publisher', + '/site-usage/publisher', controller='ckanext.ga_report.controller:GaDatasetReport', action='publishers' ) map.connect( - '/data/site-usage/publishers_{month}.csv', + '/site-usage/publishers_{month}.csv', controller='ckanext.ga_report.controller:GaDatasetReport', action='publisher_csv' ) map.connect( - '/data/site-usage/dataset/datasets_{id}_{month}.csv', + '/site-usagesetsets_{id}_{month}.csv', controller='ckanext.ga_report.controller:GaDatasetReport', action='dataset_csv' ) map.connect( - '/data/site-usage/dataset', + '/site-usageset', controller='ckanext.ga_report.controller:GaDatasetReport', action='read' ) map.connect( - '/data/site-usage/dataset/{id}', + '/site-usageset/{id}', controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher' ) --- a/ckanext/ga_report/public/css/ga_report.css +++ b/ckanext/ga_report/public/css/ga_report.css @@ -66,4 +66,7 @@ margin-top: 4px; float: left; } +.tab-content { + padding-top: 12px; +} --- a/ckanext/ga_report/public/scripts/ckanext_ga_reports.js +++ b/ckanext/ga_report/public/scripts/ckanext_ga_reports.js @@ -97,7 +97,7 @@ * Show the correct rickshaw graph in the sidebar. * Not to be called before all graphs load. */ - $('a[data-toggle="hashtab"]').on( + $('a[data-toggle="tab"]').on( 'shown', function(e) { var href = $(e.target).attr('href'); @@ -115,7 +115,7 @@ } ); /* The first tab might already have been shown */ - $('li.active > a[data-toggle="hashtab"]').trigger('shown'); + $('li.active > a[data-toggle="tab"]').trigger('shown'); }; CKAN.GA_Reports.bind_month_selector = function() { @@ -126,7 +126,6 @@ window.location = url; }; var selectors = $('select[name="month"]'); - assert(selectors.length>0); selectors.bind('change', handler); }; --- a/ckanext/ga_report/templates/ga_report/ga_util.html +++ b/ckanext/ga_report/templates/ga_report/ga_util.html @@ -57,43 +57,19 @@ - -
-    Site-wide | Publishers | Datasets
-    Download
-      Download as CSV
-    Graph Legend
-    Notes
-      "Views" is the number of times a page was loaded in users' browsers.
-      "Downloads" is the number of times a user has clicked to download either an original or cached resource for a particular dataset. Download information is only available from 2nd December 2012; 'No data' is shown for records before that date.
-      These usage statistics are confined to users with javascript enabled, which excludes web crawlers and API calls.
-      The results are not shown when the number of views/visits is tiny. Where these relate to site pages, results are available in full in the CSV download. Where these relate to users' web browser information, results are not disclosed, for privacy reasons.
+    Notes
+      "Views" is the number of times a page was loaded in users' browsers.
+      "Downloads" is the number of times a user has clicked to download either an original or cached resource for a particular dataset. Download information is only available from 2nd December 2012; 'No data' is shown for records before that date.
+      These usage statistics are confined to users with javascript enabled, which excludes web crawlers and API calls.
+      The results are not shown when the number of views/visits is tiny. Where these relate to site pages, results are available in full in the CSV download. Where these relate to users' web browser information, results are not disclosed, for privacy reasons.
--- a/ckanext/ga_report/templates/ga_report/publisher/index.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/index.html
@@ -6,11 +6,6 @@
   Usage by Publisher
-  ${ga_sidebar(download_link=h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='publisher_csv',month=c.month or 'all'))}
@@ -23,38 +18,49 @@
-  Site Usage ${usage_nav('Publishers')}
+    Site Analytics
+    Publishers
+      Download as CSV
+    Site Usage
-      ${rickshaw_graph(c.top_publishers_graph,'publishers')}
+      Publishers
+      ${rickshaw_graph(c.top_publishers_graph,'publishers')}
-    Statistics for
-      ${month_selector(c.month, c.months, c.day)}
+    Statistics for
+      ${month_selector(c.month, c.months, c.day)}
-      Publisher | Dataset Views
-      ${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name) + (("?month=" + c.month) if c.month else ''))} | ${views}
+      Publisher | Dataset Views
+      ${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name) + (("?month=" + c.month) if c.month else ''))} | ${views}
@@ -64,7 +70,7 @@
   });
--- a/ckanext/ga_report/templates/ga_report/publisher/read.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/read.html
@@ -19,64 +19,83 @@
-  ${ga_sidebar(download_link=h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='dataset_csv',id=c.publisher_name or 'all',month=c.month or 'all'))}
+    Site Analytics
+    Publishers
+    ${c.publisher.title}
+    Usage By Dataset
-    Site Usage ${usage_nav('Datasets')}
-    ${c.publisher.title}
+      Download as CSV
+    Site Usage
+    ${c.publisher.title}
+    All datasets
-      ${rickshaw_graph(c.graph_data,'dataset-downloads',debug=True)}
+      ${rickshaw_graph(c.graph_data,'dataset-downloads',debug=True)}
+    Statistics for ${h.month_option_title(c.month,c.months,c.day)}:
+    Statistics for all months
       ${month_selector(c.month, c.months, c.day)}
-    Statistics for ${h.month_option_title(c.month,c.months,c.day)}:
-    Statistics for all months:
-    No page views in this period.
+    No page views in this period.
-      Dataset | Views | Downloads
-      ${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))} | ${views} | ${downloads}
+      Dataset | Views | Downloads
+      ${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))} | ${views} | ${downloads}
+    ${ga_footer()}
--- a/ckanext/ga_report/templates/ga_report/site/downloads.html
+++ b/ckanext/ga_report/templates/ga_report/site/downloads.html
@@ -7,9 +7,9 @@
   Downloads
-  ${ga_sidebar(download_link=h.url_for(controller='ckanext.ga_report.controller:GaReport',action='csv_downloads',month=c.month or 'all'))}
   Downloads ${usage_nav('Downloads')}
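The controller.py, ga_model.py and helpers.py hunks above all swap CKAN's legacy 'publisher' group type for 'organization', and the templates now link publishers through that type. A minimal sketch of the resulting publisher lookup, assuming a working CKAN environment where ckan.model is importable (the helper name is illustrative, not part of the extension):

import ckan.model as model

def get_active_publishers():
    # Return (name, title) pairs for active organization-type groups,
    # mirroring the query now used in controller.py's publisher listing.
    query = model.Session.query(model.Group)\
        .filter(model.Group.type == 'organization')\
        .filter(model.Group.state == 'active')\
        .order_by(model.Group.name)
    return [(grp.name, grp.title) for grp in query]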
--- a/ckanext/ga_report/templates/ga_report/site/index.html
+++ b/ckanext/ga_report/templates/ga_report/site/index.html
@@ -8,157 +8,178 @@
   Site usage
-  ${ga_sidebar(download_link=h.url_for(controller='ckanext.ga_report.controller:GaReport',action='csv',month=c.month or 'all'))}
+    Site Analytics
+    Site-wide
-    Site Usage ${usage_nav('Site-wide')}
@@ -168,11 +189,11 @@
     CKAN.GA_Reports.bind_sparklines();
     CKAN.GA_Reports.bind_sidebar();
     CKAN.GA_Reports.bind_month_selector();
-  });
+  });
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/site/layout.html
@@ -1,1 +1,8 @@
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/site/layout_base.html
@@ -1,1 +1,357 @@
+  ${page_title()} - ${g.site_title}
+  ${optional_feed()}
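The core change in download_analytics.py is to stop calling the Core Reporting API through the google-api-python-client service object and instead issue the HTTP requests directly, authorised with the OAuth access token that init_service() now returns alongside the service. A minimal, self-contained sketch of that request pattern, assuming a valid access token and profile ID (the fetch_ga_rows helper and the example filter are illustrative, not part of the extension):

import json
import requests

GA_ENDPOINT = 'https://www.googleapis.com/analytics/v3/data/ga'

def fetch_ga_rows(access_token, profile_id, start_date, end_date,
                  metrics='ga:pageviews', **extra_params):
    # Query the Core Reporting API v3 directly and return the 'rows' list.
    params = {
        'ids': 'ga:' + profile_id,
        'start-date': start_date,
        'end-date': end_date,
        'metrics': metrics,
        'max-results': 10000,
        'alt': 'json',
    }
    params.update(extra_params)
    headers = {'Authorization': 'Bearer ' + access_token}
    response = requests.get(GA_ENDPOINT, params=params, headers=headers)
    if response.status_code != 200:
        raise RuntimeError('GA request failed (%s): %s'
                           % (response.status_code, response.content))
    return json.loads(response.content).get('rows', [])

# e.g. dataset page views, mirroring the '~^/dataset/...' filter used above:
# rows = fetch_ga_rows(token, '12345678', '2014-01-01', '2014-01-31',
#                      dimensions='ga:pagePath',
#                      filters='ga:pagePath=~^/dataset/[a-z0-9-_]+',
#                      sort='-ga:pageviews')

In the patch itself this logic lives in _get_json(), which refreshes the token via init_service() before each call and falls back to an empty result when the request fails.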