From: David Read
Date: Tue, 22 Jan 2013 17:05:58 +0000
Subject: Improved debug logging.
X-Git-Url: http://maxious.lambdacomplex.org/git/?p=ckanext-ga-report.git&a=commitdiff&h=d0db210d9cbb2a8b8fe54affc9d9388dbc8a32b6
---
Improved debug logging.
---
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,7 @@
*.py[co]
*.py~
.gitignore
+ckan.log
# Packages
*.egg
--- a/ckanext/ga_report/command.py
+++ b/ckanext/ga_report/command.py
@@ -23,7 +23,7 @@
import ckan.model as model
model.Session.remove()
model.Session.configure(bind=model.meta.engine)
- log = logging.getLogger('ckanext.ga-report')
+ log = logging.getLogger('ckanext.ga_report')
import ga_model
ga_model.init_tables()
@@ -55,6 +55,36 @@
init_service('token.dat',
self.args[0] if self.args
else 'credentials.json')
+
+class FixTimePeriods(CkanCommand):
+ """
+ Fixes the 'All' records for GA_Urls
+
+ Older URLs that have not been visited recently may be missing their
+ 'All' records. This command traverses those records and generates
+ valid 'All' records for them.
+ """
+ summary = __doc__.strip().split('\n')[0]
+ usage = __doc__
+ max_args = 0
+ min_args = 0
+
+ def __init__(self, name):
+ super(FixTimePeriods, self).__init__(name)
+
+ def command(self):
+ import ckan.model as model
+ from ga_model import post_update_url_stats
+ self._load_config()
+ model.Session.remove()
+ model.Session.configure(bind=model.meta.engine)
+
+ log = logging.getLogger('ckanext.ga_report')
+
+ log.info("Updating 'All' records for old URLs")
+ post_update_url_stats()
+ log.info("Processing complete")
+
class LoadAnalytics(CkanCommand):
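
The new command is a thin wrapper around a model helper added later in this patch. A minimal sketch of the same back-fill run by hand, assuming a configured CKAN environment (for example a paster shell against the site's config file):

    import ckan.model as model
    from ckanext.ga_report.ga_model import post_update_url_stats

    # Rebind the session exactly as the command does, then back-fill any
    # ga_url rows that are missing their 'All' period record.
    model.Session.remove()
    model.Session.configure(bind=model.meta.engine)
    post_update_url_stats()

Once installed, the same thing is available as the fixtimeperiods paster command registered in setup.py at the end of this patch.
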
--- a/ckanext/ga_report/controller.py
+++ b/ckanext/ga_report/controller.py
@@ -13,6 +13,7 @@
log = logging.getLogger('ckanext.ga-report')
+DOWNLOADS_AVAILABLE_FROM = '2012-12'
def _get_month_name(strdate):
import calendar
@@ -21,7 +22,7 @@
return '%s %s' % (calendar.month_name[d.tm_mon], d.tm_year)
-def _month_details(cls):
+def _month_details(cls, stat_key=None):
'''
Returns a list of all the periods for which we have data, unfortunately
knows too much about the type of the cls being passed as GA_Url has a
@@ -32,9 +33,13 @@
months = []
day = None
- vals = model.Session.query(cls.period_name,cls.period_complete_day)\
- .filter(cls.period_name!='All').distinct(cls.period_name)\
- .order_by("period_name desc").all()
+ q = model.Session.query(cls.period_name,cls.period_complete_day)\
+ .filter(cls.period_name!='All').distinct(cls.period_name)
+ if stat_key:
+ q = q.filter(cls.stat_name==stat_key)
+
+ vals = q.order_by("period_name desc").all()
+
if vals and vals[0][1]:
day = int(vals[0][1])
ordinal = 'th' if 11 <= day <= 13 \
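
The new stat_key argument lets callers limit the month list to periods that actually carry a given statistic. A hypothetical call for the downloads report added elsewhere in this patch:

    # Only offer months for which GA_Stat holds 'Downloads' rows, so the
    # downloads page never lists a period with no data behind it.
    month_details = _month_details(GA_Stat, stat_key='Downloads')
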
@@ -52,7 +57,7 @@
def csv(self, month):
import csv
- q = model.Session.query(GA_Stat)
+ q = model.Session.query(GA_Stat).filter(GA_Stat.stat_name!='Downloads')
if month != 'all':
q = q.filter(GA_Stat.period_name==month)
entries = q.order_by('GA_Stat.period_name, GA_Stat.stat_name, GA_Stat.key').all()
@@ -68,6 +73,7 @@
entry.stat_name.encode('utf-8'),
entry.key.encode('utf-8'),
entry.value.encode('utf-8')])
+
def index(self):
@@ -114,7 +120,7 @@
if k in ['Total page views', 'Total visits']:
v = sum(v)
else:
- v = float(sum(v))/len(v)
+ v = float(sum(v))/float(len(v))
key, val = clean_key(k,v)
c.global_totals.append((key, val))
@@ -223,13 +229,14 @@
str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,))
writer = csv.writer(response)
- writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Period Name"])
-
- for package,view,visit in packages:
+ writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Resource downloads", "Period Name"])
+
+ for package,view,visit,downloads in packages:
writer.writerow([package.title.encode('utf-8'),
package.name.encode('utf-8'),
view,
visit,
+ downloads,
month])
def publishers(self):
@@ -250,10 +257,10 @@
def _get_packages(self, publisher=None, count=-1):
'''Returns the datasets in order of views'''
- if count == -1:
- count = sys.maxint
-
+ have_download_data = True
month = c.month or 'All'
+ if month != 'All':
+ have_download_data = month >= DOWNLOADS_AVAILABLE_FROM
q = model.Session.query(GA_Url,model.Package)\
.filter(model.Package.name==GA_Url.package_id)\
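
The have_download_data check relies on zero-padded 'YYYY-MM' period names comparing correctly as plain strings, so no date parsing is needed:

    # Lexicographic order of 'YYYY-MM' strings matches chronological order.
    DOWNLOADS_AVAILABLE_FROM = '2012-12'
    assert '2013-01' >= DOWNLOADS_AVAILABLE_FROM          # download data exists
    assert not ('2012-11' >= DOWNLOADS_AVAILABLE_FROM)    # predates tracking
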
@@ -263,9 +270,25 @@
q = q.filter(GA_Url.period_name==month)
q = q.order_by('ga_url.pageviews::int desc')
top_packages = []
- for entry,package in q.limit(count):
+ if count == -1:
+ entries = q.all()
+ else:
+ entries = q.limit(count)
+
+ for entry,package in entries:
if package:
- top_packages.append((package, entry.pageviews, entry.visits))
+ # Downloads ....
+ if have_download_data:
+ dls = model.Session.query(GA_Stat).\
+ filter(GA_Stat.stat_name=='Downloads').\
+ filter(GA_Stat.key==package.name)
+ if month != 'All': # Only filter by period when a specific month is requested
+ dls = dls.filter(GA_Stat.period_name==month)
+
+ downloads = sum(int(d.value) for d in dls.all())
+ else:
+ downloads = 'No data'
+ top_packages.append((package, entry.pageviews, entry.visits, downloads))
else:
log.warning('Could not find the package associated with this URL')
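
As written, this issues one GA_Stat query per package in the result set. A sketch of an alternative, not what this patch does, that fetches every dataset's download total in a single grouped query (assuming GA_Stat.value is stored as text on a PostgreSQL backend):

    from sqlalchemy import Integer, cast, func

    dl_q = model.Session.query(GA_Stat.key,
                               func.sum(cast(GA_Stat.value, Integer)))\
               .filter(GA_Stat.stat_name == 'Downloads')
    if month != 'All':
        dl_q = dl_q.filter(GA_Stat.period_name == month)
    dl_totals = dict(dl_q.group_by(GA_Stat.key).all())
    # downloads for a given package is then dl_totals.get(package.name, 0)
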
--- a/ckanext/ga_report/download_analytics.py
+++ b/ckanext/ga_report/download_analytics.py
@@ -13,6 +13,7 @@
FORMAT_MONTH = '%Y-%m'
MIN_VIEWS = 50
MIN_VISITS = 20
+MIN_DOWNLOADS = 10
class DownloadAnalytics(object):
'''Downloads and stores analytics info'''
@@ -31,6 +32,11 @@
first_of_this_month = datetime.datetime(date.year, date.month, 1)
_, last_day_of_month = calendar.monthrange(int(date.year), int(date.month))
last_of_this_month = datetime.datetime(date.year, date.month, last_day_of_month)
+ # if this is the latest month, note that it is only up until today
+ now = datetime.datetime.now()
+ if now.year == date.year and now.month == date.month:
+ last_day_of_month = now.day
+ last_of_this_month = now
periods = ((date.strftime(FORMAT_MONTH),
last_day_of_month,
first_of_this_month, last_of_this_month),)
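
This clamps the latest month's period to today, so the recorded number of complete days (presumably what becomes period_complete_day downstream) matches what the data actually covers. For example, a run on 22 January 2013:

    import calendar
    import datetime

    date = datetime.datetime(2013, 1, 1)
    _, last_day_of_month = calendar.monthrange(date.year, date.month)  # 31
    now = datetime.datetime(2013, 1, 22)
    if now.year == date.year and now.month == date.month:
        last_day_of_month = now.day   # 22: the month is only partly complete
        last_of_this_month = now
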
@@ -122,8 +128,12 @@
log.info('Storing publisher views (%i rows)', len(data.get('url')))
self.store(period_name, period_complete_day, data,)
- log.info('Aggregating datasets by publisher')
+ # Make sure the All records are correct.
+ ga_model.post_update_url_stats()
+
+ log.info('Associating datasets with their publisher')
ga_model.update_publisher_stats(period_name) # about 30 seconds.
+
log.info('Downloading and storing analytics for site-wide stats')
self.sitewide_stats( period_name, period_complete_day )
@@ -179,6 +189,7 @@
end_date=end_date).execute()
packages = []
+ log.info("There are %d results" % results['totalResults'])
for entry in results.get('rows'):
(loc,pageviews,visits) = entry
url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk
@@ -203,7 +214,7 @@
start_date = '%s-01' % period_name
end_date = '%s-%s' % (period_name, last_day_of_month)
funcs = ['_totals_stats', '_social_stats', '_os_stats',
- '_locale_stats', '_browser_stats', '_mobile_stats']
+ '_locale_stats', '_browser_stats', '_mobile_stats', '_download_stats']
for f in funcs:
log.info('Downloading analytics for %s' % f.split('_')[1])
getattr(self, f)(start_date, end_date, period_name, period_complete_day)
@@ -250,7 +261,7 @@
ids='ga:' + self.profile_id,
filters='ga:pagePath==%s' % (path,),
start_date=start_date,
- metrics='ga:bounces,ga:pageviews',
+ metrics='ga:visitBounceRate',
dimensions='ga:pagePath',
max_results=10000,
end_date=end_date).execute()
@@ -260,10 +271,10 @@
path, result_data)
return
results = result_data[0]
- bounces, total = [float(x) for x in result_data[0][1:]]
- pct = 100 * bounces/total
- log.info('%d bounces from %d total == %s', bounces, total, pct)
- ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': pct},
+ bounces = float(results[1])
+ # visitBounceRate is already a %
+ log.info('Google reports visitBounceRate as %s', bounces)
+ ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': float(bounces)},
period_complete_day)
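
Switching to ga:visitBounceRate drops the local arithmetic: Google already returns the figure as a percentage of visits, whereas the removed code divided bounces by pageviews, which is not how bounce rate is defined. Roughly, with invented numbers:

    bounces, pageviews, visits = 12.0, 48.0, 30.0
    old_pct = 100 * bounces / pageviews   # 25.0, what the removed code stored
    new_pct = 100 * bounces / visits      # 40.0, what ga:visitBounceRate reports
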
@@ -290,6 +301,74 @@
self._filter_out_long_tail(data, MIN_VIEWS)
ga_model.update_sitewide_stats(period_name, "Country", data, period_complete_day)
+
+ def _download_stats(self, start_date, end_date, period_name, period_complete_day):
+ """ Fetches stats about data downloads """
+ import ckan.model as model
+
+ data = {}
+
+ results = self.service.data().ga().get(
+ ids='ga:' + self.profile_id,
+ start_date=start_date,
+ filters='ga:eventAction==download',
+ metrics='ga:totalEvents',
+ sort='-ga:totalEvents',
+ dimensions="ga:eventLabel",
+ max_results=10000,
+ end_date=end_date).execute()
+ result_data = results.get('rows')
+ if not result_data:
+ # We may not have data for this time period, so we need to bail
+ # early.
+ log.info("There is no download data for this time period")
+ return
+
+ def process_result_data(result_data, cached=False):
+ progress_total = len(result_data)
+ progress_count = 0
+ resources_not_matched = []
+ for result in result_data:
+ progress_count += 1
+ if progress_count % 100 == 0:
+ log.debug('.. %d/%d done so far', progress_count, progress_total)
+
+ url = result[0].strip()
+
+ # Get package id associated with the resource that has this URL.
+ q = model.Session.query(model.Resource)
+ if cached:
+ r = q.filter(model.Resource.cache_url.like("%s%%" % url)).first()
+ else:
+ r = q.filter(model.Resource.url.like("%s%%" % url)).first()
+
+ package_name = r.resource_group.package.name if r else ""
+ if package_name:
+ data[package_name] = data.get(package_name, 0) + int(result[1])
+ else:
+ resources_not_matched.append(url)
+ continue
+ if resources_not_matched:
+ log.debug('Could not match %i of %i resource URLs to datasets. e.g. %r',
+ len(resources_not_matched), progress_total, resources_not_matched[:3])
+
+ log.info('Associating downloads of resource URLs with their respective datasets')
+ process_result_data(results.get('rows'))
+
+ results = self.service.data().ga().get(
+ ids='ga:' + self.profile_id,
+ start_date=start_date,
+ filters='ga:eventAction==download-cache',
+ metrics='ga:totalEvents',
+ sort='-ga:totalEvents',
+ dimensions="ga:eventLabel",
+ max_results=10000,
+ end_date=end_date).execute()
+ log.info('Associating downloads of cache resource URLs with their respective datasets')
+ process_result_data(results.get('rows') or [], cached=True)
+
+ self._filter_out_long_tail(data, MIN_DOWNLOADS)
+ ga_model.update_sitewide_stats(period_name, "Downloads", data, period_complete_day)
def _social_stats(self, start_date, end_date, period_name, period_complete_day):
""" Finds out which social sites people are referred from """
--- a/ckanext/ga_report/ga_model.py
+++ b/ckanext/ga_report/ga_model.py
@@ -161,10 +161,64 @@
def pre_update_url_stats(period_name):
- model.Session.query(GA_Url).\
- filter(GA_Url.period_name==period_name).delete()
- model.Session.query(GA_Url).\
- filter(GA_Url.period_name=='All').delete()
+ q = model.Session.query(GA_Url).\
+ filter(GA_Url.period_name==period_name)
+ log.debug("Deleting %d '%s' records" % (q.count(), period_name))
+ q.delete()
+
+ q = model.Session.query(GA_Url).\
+ filter(GA_Url.period_name == 'All')
+ log.debug("Deleting %d 'All' records..." % q.count())
+ q.delete()
+
+ model.Session.flush()
+ model.Session.commit()
+ model.repo.commit_and_remove()
+ log.debug('...done')
+
+def post_update_url_stats():
+ """ Check each distinct url in ga_url and make sure it has an
+ 'All' record, creating one where it is missing.
+
+ After running this, every URL should have an 'All' record
+ regardless of whether it has an entry for the month currently
+ being processed.
+ """
+ log.debug('Post-processing "All" records...')
+ query = """select url, pageviews::int, visits::int
+ from ga_url
+ where url not in (select url from ga_url where period_name ='All')"""
+ connection = model.Session.connection()
+ res = connection.execute(query)
+
+ views, visits = {}, {}
+ # url, views, visits
+ for row in res:
+ views[row[0]] = views.get(row[0], 0) + row[1]
+ visits[row[0]] = visits.get(row[0], 0) + row[2]
+
+ progress_total = len(views.keys())
+ progress_count = 0
+ for key in views.keys():
+ progress_count += 1
+ if progress_count % 100 == 0:
+ log.debug('.. %d/%d done so far', progress_count, progress_total)
+
+ package, publisher = _get_package_and_publisher(key)
+
+ values = {'id': make_uuid(),
+ 'period_name': "All",
+ 'period_complete_day': 0,
+ 'url': key,
+ 'pageviews': views[key],
+ 'visits': visits[key],
+ 'department_id': publisher,
+ 'package_id': package
+ }
+ model.Session.add(GA_Url(**values))
+ model.Session.commit()
+ log.debug('..done')
def update_url_stats(period_name, period_complete_day, url_data):
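
A quick way to confirm the back-fill worked is to re-run the query it is built on and check that nothing is left; a small ad-hoc snippet, assuming the same session is available:

    # Distinct URLs still lacking an 'All' record; should be 0 after
    # post_update_url_stats() (or the fixtimeperiods command) has run.
    remaining = model.Session.execute(
        "select count(distinct url) from ga_url "
        "where url not in (select url from ga_url where period_name = 'All')"
    ).scalar()
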
@@ -173,9 +227,14 @@
stores them in GA_Url under the period and recalculates the totals for
the 'All' period.
'''
+ progress_total = len(url_data)
+ progress_count = 0
for url, views, visits in url_data:
+ progress_count += 1
+ if progress_count % 100 == 0:
+ log.debug('.. %d/%d done so far', progress_count, progress_total)
+
package, publisher = _get_package_and_publisher(url)
-
item = model.Session.query(GA_Url).\
filter(GA_Url.period_name==period_name).\
@@ -216,8 +275,8 @@
'period_name': 'All',
'period_complete_day': 0,
'url': url,
- 'pageviews': sum([int(e.pageviews) for e in entries]) + old_pageviews,
- 'visits': sum([int(e.visits or 0) for e in entries]) + old_visits,
+ 'pageviews': sum([int(e.pageviews) for e in entries]) + int(old_pageviews),
+ 'visits': sum([int(e.visits or 0) for e in entries]) + int(old_visits),
'department_id': publisher,
'package_id': package
}
@@ -343,10 +402,10 @@
'''
for object_type in (GA_Url, GA_Stat, GA_Publisher, GA_ReferralStat):
q = model.Session.query(object_type)
- if period_name != 'all':
+ if period_name != 'All':
q = q.filter_by(period_name=period_name)
q.delete()
- model.Session.commit()
+ model.repo.commit_and_remove()
def get_score_for_dataset(dataset_name):
'''
--- a/ckanext/ga_report/plugin.py
+++ b/ckanext/ga_report/plugin.py
@@ -42,6 +42,16 @@
controller='ckanext.ga_report.controller:GaReport',
action='csv'
)
+ map.connect(
+ '/data/site-usage/downloads',
+ controller='ckanext.ga_report.controller:GaReport',
+ action='downloads'
+ )
+ map.connect(
+ '/data/site-usage/downloads_{month}.csv',
+ controller='ckanext.ga_report.controller:GaReport',
+ action='csv_downloads'
+ )
# GaDatasetReport
map.connect(
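
These routes give the new report its own page plus a per-month CSV export. Assuming the controller actions follow the existing csv(month) signature, the served URLs look like:

    # /data/site-usage/downloads              -> GaReport.downloads
    # /data/site-usage/downloads_2013-01.csv  -> GaReport.csv_downloads(month='2013-01')
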
--- a/ckanext/ga_report/templates/ga_report/notes.html
+++ b/ckanext/ga_report/templates/ga_report/notes.html
@@ -7,7 +7,7 @@
Notes
- "Views" is the number of times a page was loaded in users' browsers.
- - "Visits" is the number of unique user visits to a page, counted once for each visitor for each of their browsing sessions.
+ - "Downloads" is the number of times a user has clicked to download either an original or cached resource for a particular dataset since December 2012
- These usage statistics are confined to users with javascript enabled, which excludes web crawlers and API calls.
- The results are not shown when the number of views/visits is tiny. Where these relate to site pages, results are available in full in the CSV download. Where these relate to users' web browser information, results are not disclosed, for privacy reasons.
--- a/ckanext/ga_report/templates/ga_report/publisher/read.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/read.html
@@ -44,15 +44,15 @@
Dataset |
Views |
+ Downloads |
${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))} |
${views} |
+ ${downloads} |
--- /dev/null
+++ b/ckanext/ga_report/templates/ga_report/site/downloads.html
@@ -1,1 +1,59 @@
+<!-- New downloads report page; template markup not recovered, visible content only: -->
+Downloads
+Download
+Download as CSV
+Downloads
+${usage_nav('Downloads')}
+${downloads_table(c.downloads)}
+No data
+There is no download data available for this month
--- a/setup.py
+++ b/setup.py
@@ -33,6 +33,7 @@
loadanalytics = ckanext.ga_report.command:LoadAnalytics
initdb = ckanext.ga_report.command:InitDB
getauthtoken = ckanext.ga_report.command:GetAuthToken
+ fixtimeperiods = ckanext.ga_report.command:FixTimePeriods
""",
)