Show top datasets across all publishers, with a drop-down to filter by publisher. Browser version numbers are simplified on download, so the simplified version appears in the CSV too - for privacy. single_popular_dataset now copes when there is not much data, and can return the figures so DGU can reskin it in its own repo. Notes about usage stats centralised to notes.html.

--- a/README.rst
+++ b/README.rst
@@ -31,10 +31,8 @@
 2. Ensure your development.ini (or similar) contains the info about your Google Analytics account and configuration::
 
       googleanalytics.id = UA-1010101-1
-      googleanalytics.account = Account name (i.e. data.gov.uk, see top level item at https://www.google.com/analytics)
+      googleanalytics.account = Account name (e.g. data.gov.uk, see top level item at https://www.google.com/analytics)
       ga-report.period = monthly
-
-   Note that your credentials will be readable by system administrators on your server. Rather than use sensitive account details, it is suggested you give access to the GA account to a new Google account that you create just for this purpose.
 
 3. Set up this extension's database tables using a paster command. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file)::
 
@@ -43,6 +41,12 @@
 4. Enable the extension in your CKAN config file by adding it to ``ckan.plugins``::
 
     ckan.plugins = ga-report
+
+Troubleshooting
+---------------
+
+* ``(ProgrammingError) relation "ga_url" does not exist``
+  This means that the ``paster initdb`` step has not been run successfully. Refer to the installation instructions for this extension.
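+  For example, from ``src/ckanext-ga-report`` (adjusting ``--config`` to point to your site config file)::
+
+      $ paster initdb --config=../ckan/development.ini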
 
 
 Authorization
@@ -79,7 +83,7 @@
 Tutorial
 --------
 
-Download some GA data and store it in CKAN's db. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file) and specifying the name of your auth file (token.dat by default) from the previous step::
+Download some GA data and store it in CKAN's database, specifying the name of your auth file (token.dat by default) from the previous step. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, and alter the ``--config`` option to point to your site config file)::
 
     $ paster loadanalytics token.dat latest --config=../ckan/development.ini
 

--- a/ckanext/ga_report/command.py
+++ b/ckanext/ga_report/command.py
@@ -73,6 +73,14 @@
     max_args = 2
     min_args = 1
 
+    def __init__(self, name):
+        super(LoadAnalytics, self).__init__(name)
+        self.parser.add_option('-d', '--delete-first',
+                               action='store_true',
+                               default=False,
+                               dest='delete_first',
+                               help='Delete data for the period first')
+
     def command(self):
         self._load_config()
 
@@ -83,10 +91,11 @@
             svc = init_service(self.args[0], None)
         except TypeError:
             print ('Have you correctly run the getauthtoken task and '
-                   'specified the correct file here')
+                   'specified the correct token file?')
             return
 
-        downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc))
+        downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc),
+                                       delete_first=self.options.delete_first)
 
         time_period = self.args[1] if self.args and len(self.args) > 1 \
             else 'latest'

--- a/ckanext/ga_report/controller.py
+++ b/ckanext/ga_report/controller.py
@@ -1,11 +1,15 @@
+import re
+import csv
+import sys
 import logging
 import operator
-from ckan.lib.base import BaseController, c, render, request, response, abort
+import collections
+from ckan.lib.base import (BaseController, c, g, render, request, response, abort)
 
 import sqlalchemy
 from sqlalchemy import func, cast, Integer
 import ckan.model as model
-from ga_model import GA_Url, GA_Stat
+from ga_model import GA_Url, GA_Stat, GA_ReferralStat
 
 log = logging.getLogger('ckanext.ga-report')
 
@@ -30,11 +34,13 @@
     def csv(self, month):
         import csv
 
-        entries = model.Session.query(GA_Stat).\
-            filter(GA_Stat.period_name==month).\
-            order_by('GA_Stat.stat_name, GA_Stat.key').all()
+        q = model.Session.query(GA_Stat)
+        if month != 'all':
+            q = q.filter(GA_Stat.period_name==month)
+        entries = q.order_by('GA_Stat.period_name, GA_Stat.stat_name, GA_Stat.key').all()
 
         response.headers['Content-Type'] = "text/csv; charset=utf-8"
+        response.headers['Content-Disposition'] = str('attachment; filename=stats_%s.csv' % (month,))
 
         writer = csv.writer(response)
         writer.writerow(["Period", "Statistic", "Key", "Value"])
@@ -52,87 +58,268 @@
         c.months = _month_details(GA_Stat)
 
         # Work out which month to show, based on query params of the first item
-        c.month = request.params.get('month', c.months[0][0] if c.months else '')
-        c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
-
-        entries = model.Session.query(GA_Stat).\
-            filter(GA_Stat.stat_name=='Totals').\
-            filter(GA_Stat.period_name==c.month).\
-            order_by('ga_stat.key').all()
-
-        c.global_totals = []
-        for e in entries:
-            val = e.value
-            if e.key in ['Average time on site', 'Pages per visit', 'Percent new visits']:
-                val =  "%.2f" % round(float(e.value), 2)
-                if e.key == 'Average time on site':
+        c.month_desc = 'all months'
+        c.month = request.params.get('month', '')
+        if c.month:
+            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
+
+        q = model.Session.query(GA_Stat).\
+            filter(GA_Stat.stat_name=='Totals')
+        if c.month:
+            q = q.filter(GA_Stat.period_name==c.month)
+        entries = q.order_by('ga_stat.key').all()
+
+        def clean_key(key, val):
+            if key in ['Average time on site', 'Pages per visit', 'New visits']:
+                val =  "%.2f" % round(float(val), 2)
+                if key == 'Average time on site':
                     mins, secs = divmod(float(val), 60)
                     hours, mins = divmod(mins, 60)
                     val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
-                e.key = '%s *' % e.key
-            c.global_totals.append((e.key, val))
-
+                if key == 'New visits':
+                    val = "%s%%" % val
+            if key in ['Bounces', 'Total page views', 'Total visits']:
+                val = int(val)
+
+            return key, val
+
+        c.global_totals = []
+        if c.month:
+            for e in entries:
+                key, val = clean_key(e.key, e.value)
+                c.global_totals.append((key, val))
+        else:
+            d = collections.defaultdict(list)
+            for e in entries:
+                d[e.key].append(float(e.value))
+            for k, v in d.iteritems():
+                if k in ['Bounces', 'Total page views', 'Total visits']:
+                    v = sum(v)
+                else:
+                    v = float(sum(v))/len(v)
+                key, val = clean_key(k,v)
+                c.global_totals.append((key, val))
+            c.global_totals = sorted(c.global_totals, key=operator.itemgetter(0))
 
         keys = {
-            'Browser versions': 'browsers',
-            'Operating Systems versions': 'os',
+            'Browser versions': 'browser_versions',
+            'Browsers': 'browsers',
+            'Operating Systems versions': 'os_versions',
+            'Operating Systems': 'os',
             'Social sources': 'social_networks',
             'Languages': 'languages',
             'Country': 'country'
         }
 
+        def shorten_name(name, length=60):
+            return (name[:length] + '..') if len(name) > length else name
+
+        def fill_out_url(url):
+            import urlparse
+            return urlparse.urljoin(g.site_url, url)
+
+        c.social_referrer_totals, c.social_referrers = [], []
+        q = model.Session.query(GA_ReferralStat)
+        q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
+        q = q.order_by('ga_referrer.count::int desc')
+        for entry in q.all():
+            c.social_referrers.append((shorten_name(entry.url), fill_out_url(entry.url),
+                                       entry.source,entry.count))
+
+        q = model.Session.query(GA_ReferralStat.url,
+                                func.sum(GA_ReferralStat.count).label('count'))
+        q = q.filter(GA_ReferralStat.period_name==c.month) if c.month else q
+        q = q.order_by('count desc').group_by(GA_ReferralStat.url)
+        for entry in q.all():
+            c.social_referrer_totals.append((shorten_name(entry[0]), fill_out_url(entry[0]),'',
+                                            entry[1]))
+
+
+        browser_version_re = re.compile("(.*)\((.*)\)")
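+        # Previously stored 'Browser versions' keys look like "Firefox (19.0)";
+        # the regex splits the browser name from the version number so it can
+        # be simplified for display.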
         for k, v in keys.iteritems():
-            entries = model.Session.query(GA_Stat).\
-                filter(GA_Stat.stat_name==k).\
-                filter(GA_Stat.period_name==c.month).\
-                order_by('ga_stat.value::int desc').all()
-            setattr(c, v, [(s.key, s.value) for s in entries ])
-
+
+            def clean_field(key):
+                if k != 'Browser versions':
+                    return key
+                m = browser_version_re.match(key)
+                browser = m.groups()[0].strip()
+                ver = m.groups()[1]
+                parts = ver.split('.')
+                if len(parts) > 1:
+                    if parts[1][0] == '0':
+                        ver = parts[0]
+                    else:
+                        ver = "%s.%s" % (parts[0],parts[1])
+                if browser in ['Safari','Android Browser']:  # Special case complex version nums
+                    ver = parts[0]
+                    if len(ver) > 2:
+                        ver = "%s%sX" % (ver[0], ver[1])
+
+                return "%s (%s)" % (browser, ver,)
+
+            q = model.Session.query(GA_Stat).\
+                filter(GA_Stat.stat_name==k)
+            if c.month:
+                entries = []
+                q = q.filter(GA_Stat.period_name==c.month).\
+                          order_by('ga_stat.value::int desc')
+
+            d = collections.defaultdict(int)
+            for e in q.all():
+                d[e.key] += int(e.value)
+            entries = []
+            for key, val in d.iteritems():
+                entries.append((key,val,))
+            entries = sorted(entries, key=operator.itemgetter(1), reverse=True)
+
+            def percent(num, total):
+                p = 100 * float(num)/float(total)
+                return "%.2f%%" % round(p, 2)
+
+            # Get the total for each set of values and then set the value as
+            # a percentage of the total
+            if k == 'Social sources':
+                total = sum([x for n,x in c.global_totals if n == 'Total visits'])
+            else:
+                total = sum([num for _,num in entries])
+            setattr(c, v, [(k,percent(v,total)) for k,v in entries ])
 
         return render('ga_report/site/index.html')
 
 
-class GaPublisherReport(BaseController):
+class GaDatasetReport(BaseController):
     """
-    Displays the pageview and visit count for specific publishers based on
-    the datasets associated with the publisher.
+    Displays the pageview and visit count for datasets
+    with options to filter by publisher and time period.
     """
-
-    def index(self):
+    def publisher_csv(self, month):
+        '''
+        Returns a CSV of each publisher with the total number of dataset
+        views & visits.
+        '''
+        c.month = month if not month == 'all' else ''
+        response.headers['Content-Type'] = "text/csv; charset=utf-8"
+        response.headers['Content-Disposition'] = str('attachment; filename=publishers_%s.csv' % (month,))
+
+        writer = csv.writer(response)
+        writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"])
+
+        for publisher,view,visit in _get_top_publishers(None):
+            writer.writerow([publisher.title.encode('utf-8'),
+                             publisher.name.encode('utf-8'),
+                             view,
+                             visit,
+                             month])
+
+    def dataset_csv(self, id='all', month='all'):
+        '''
+        Returns a CSV with the number of views & visits for each dataset.
+
+        :param id: A Publisher ID or None if you want for all
+        :param month: The time period, or 'all'
+        '''
+        c.month = month if not month == 'all' else ''
+        if id != 'all':
+            c.publisher = model.Group.get(id)
+            if not c.publisher:
+                abort(404, 'A publisher with that name could not be found')
+
+        packages = self._get_packages(c.publisher)
+        response.headers['Content-Type'] = "text/csv; charset=utf-8"
+        response.headers['Content-Disposition'] = \
+            str('attachment; filename=datasets_%s_%s.csv' % (id, month,))
+
+        writer = csv.writer(response)
+        writer.writerow(["Dataset Title", "Dataset Name", "Views", "Visits", "Period Name"])
+
+        for package,view,visit in packages:
+            writer.writerow([package.title.encode('utf-8'),
+                             package.name.encode('utf-8'),
+                             view,
+                             visit,
+                             month])
+
+    def publishers(self):
+        '''A list of publishers and the number of views/visits for each'''
 
         # Get the month details by fetching distinct values and determining the
         # month names from the values.
         c.months = _month_details(GA_Url)
 
         # Work out which month to show, based on query params of the first item
-        c.month = request.params.get('month', c.months[0][0] if c.months else '')
-        c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
-
-        connection = model.Session.connection()
-        q = """
-            select department_id, sum(pageviews::int) views, sum(visitors::int) visits
-            from ga_url
-            where department_id <> ''
-                and period_name=%s
-            group by department_id order by views desc limit 20;
-        """
-        # Add this back (before and period_name =%s) if you want to ignore publisher
-        # homepage views
-        # and not url like '/publisher/%%'
-
-        c.top_publishers = []
-        res = connection.execute(q, c.month)
-        for row in res:
-            c.top_publishers.append((model.Group.get(row[0]), row[1], row[2]))
+        c.month = request.params.get('month', '')
+        c.month_desc = 'all months'
+        if c.month:
+            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
+
+        c.top_publishers = _get_top_publishers()
 
         return render('ga_report/publisher/index.html')
 
-
-    def read(self, id):
-
-        c.publisher = model.Group.get(id)
-        if not c.publisher:
-            abort(404, 'A publisher with that name could not be found')
+    def _get_packages(self, publisher=None, count=-1):
+        '''Returns the datasets in order of visits'''
+        if count == -1:
+            count = sys.maxint
+
+        q = model.Session.query(GA_Url)\
+            .filter(GA_Url.url.like('/dataset/%'))
+        if publisher:
+            q = q.filter(GA_Url.department_id==publisher.name)
+        if c.month:
+            q = q.filter(GA_Url.period_name==c.month)
+        q = q.order_by('ga_url.visitors::int desc')
+
+        if c.month:
+            top_packages = []
+            for entry in q.limit(count):
+                package_name = entry.url[len('/dataset/'):]
+                p = model.Package.get(package_name)
+                if p:
+                    top_packages.append((p, entry.pageviews, entry.visitors))
+                else:
+                    log.warning('Could not find package "%s"', package_name)
+        else:
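+            # No month filter: sum each dataset's views/visits across all periods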
+            ds = {}
+            for entry in q:
+                if len(ds) >= count:
+                    break
+                package_name = entry.url[len('/dataset/'):]
+                p = model.Package.get(package_name)
+                if p:
+                    if not p in ds:
+                        ds[p] = {'views': 0, 'visits': 0}
+                    ds[p]['views'] = ds[p]['views'] + int(entry.pageviews)
+                    ds[p]['visits'] = ds[p]['visits'] + int(entry.visitors)
+                else:
+                    log.warning('Could not find package "%s"', package_name)
+
+            results = []
+            for k, v in ds.iteritems():
+                results.append((k,v['views'],v['visits']))
+
+            top_packages = sorted(results, key=operator.itemgetter(1), reverse=True)
+        return top_packages
+
+    def read(self):
+        '''
+        Lists the most popular datasets across all publishers
+        '''
+        return self.read_publisher(None)
+
+    def read_publisher(self, id):
+        '''
+        Lists the most popular datasets for a publisher (or across all publishers)
+        '''
+        count = 20
+
+        c.publishers = _get_publishers()
+
+        id = request.params.get('publisher', id)
+        if id and id != 'all':
+            c.publisher = model.Group.get(id)
+            if not c.publisher:
+                abort(404, 'A publisher with that name could not be found')
+            c.publisher_name = c.publisher.name
         c.top_packages = [] # package, dataset_views in c.top_packages
 
         # Get the month details by fetching distinct values and determining the
@@ -140,22 +327,69 @@
         c.months = _month_details(GA_Url)
 
         # Work out which month to show, based on query params of the first item
-        c.month = request.params.get('month', c.months[0][0] if c.months else '')
-        c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
-
-        entry = model.Session.query(GA_Url).\
-            filter(GA_Url.url=='/publisher/%s' % c.publisher.name).\
-            filter(GA_Url.period_name==c.month).first()
-        c.publisher_page_views = entry.pageviews if entry else 0
-
-        entries = model.Session.query(GA_Url).\
-            filter(GA_Url.department_id==c.publisher.name).\
-            filter(GA_Url.period_name==c.month).\
-            order_by('ga_url.pageviews::int desc')[:20]
-        for entry in entries:
-            if entry.url.startswith('/dataset/'):
-                p = model.Package.get(entry.url[len('/dataset/'):])
-                c.top_packages.append((p,entry.pageviews,entry.visitors))
+        c.month = request.params.get('month', '')
+        if not c.month:
+            c.month_desc = 'all months'
+        else:
+            c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
+
+        c.publisher_page_views = 0
+        q = model.Session.query(GA_Url).\
+            filter(GA_Url.url=='/publisher/%s' % c.publisher_name)
+        if c.month:
+            entry = q.filter(GA_Url.period_name==c.month).first()
+            c.publisher_page_views = entry.pageviews if entry else 0
+        else:
+            for e in q.all():
+                c.publisher_page_views = c.publisher_page_views  + int(e.pageviews)
+
+        c.top_packages = self._get_packages(c.publisher, 20)
 
         return render('ga_report/publisher/read.html')
 
+def _get_top_publishers(limit=20):
+    '''
+    Returns a list of the top 20 publishers by dataset visits.
+    (The number to show can be varied with 'limit')
+    '''
+    connection = model.Session.connection()
+    q = """
+        select department_id, sum(pageviews::int) views, sum(visitors::int) visits
+        from ga_url
+        where department_id <> ''"""
+    if c.month:
+        q = q + """
+                and period_name=%s
+        """
+    q = q + """
+            group by department_id order by visits desc
+        """
+    if limit:
+        q = q + " limit %s;" % (limit)
+
+    # Add this back (before and period_name =%s) if you want to ignore publisher
+    # homepage views
+    # and not url like '/publisher/%%'
+
+    top_publishers = []
+    res = connection.execute(q, c.month) if c.month else connection.execute(q)
+
+    for row in res:
+        group = model.Group.get(row[0])
+        if group:
+            top_publishers.append((group, row[1], row[2]))
+    return top_publishers
+
+def _get_publishers():
+    '''
+    Returns a list of all publishers. Each item is a tuple:
+      (name, title)
+    '''
+    publishers = []
+    for pub in model.Session.query(model.Group).\
+               filter(model.Group.type=='publisher').\
+               filter(model.Group.state=='active').\
+               order_by(model.Group.name):
+        publishers.append((pub.name, pub.title))
+    return publishers
+

--- a/ckanext/ga_report/download_analytics.py
+++ b/ckanext/ga_report/download_analytics.py
@@ -1,7 +1,7 @@
 import os
 import logging
 import datetime
-
+import collections
 from pylons import config
 
 import ga_model
@@ -11,15 +11,17 @@
 log = logging.getLogger('ckanext.ga-report')
 
 FORMAT_MONTH = '%Y-%m'
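+# Entries with counts below these thresholds are dropped from the sitewide
+# stats (see _filter_out_long_tail), partly to protect privacy.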
+MIN_VIEWS = 50
+MIN_VISITS = 20
 
 class DownloadAnalytics(object):
     '''Downloads and stores analytics info'''
 
-    def __init__(self, service=None, profile_id=None):
+    def __init__(self, service=None, profile_id=None, delete_first=False):
         self.period = config['ga-report.period']
         self.service = service
         self.profile_id = profile_id
-
+        self.delete_first = delete_first
 
     def specific_month(self, date):
         import calendar
@@ -90,11 +92,14 @@
 
     def download_and_store(self, periods):
         for period_name, period_complete_day, start_date, end_date in periods:
+            if self.delete_first:
+                log.info('Deleting existing Analytics for period "%s"',
+                         period_name)
+                ga_model.delete(period_name)
             log.info('Downloading Analytics for period "%s" (%s - %s)',
                      self.get_full_period_name(period_name, period_complete_day),
                      start_date.strftime('%Y %m %d'),
                      end_date.strftime('%Y %m %d'))
-
             data = self.download(start_date, end_date, '~/dataset/[a-z0-9-_]+')
             log.info('Storing Dataset Analytics for period "%s"',
                      self.get_full_period_name(period_name, period_complete_day))
@@ -107,6 +112,33 @@
 
             ga_model.update_publisher_stats(period_name) # about 30 seconds.
             self.sitewide_stats( period_name )
+
+            self.update_social_info(period_name, start_date, end_date)
+
+    def update_social_info(self, period_name, start_date, end_date):
+        start_date = start_date.strftime('%Y-%m-%d')
+        end_date = end_date.strftime('%Y-%m-%d')
+        query = 'ga:hasSocialSourceReferral=~Yes$'
+        metrics = 'ga:entrances'
+        sort = '-ga:entrances'
+
+        # Supported query params at
+        # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
+        results = self.service.data().ga().get(
+                                 ids='ga:' + self.profile_id,
+                                 filters=query,
+                                 start_date=start_date,
+                                 metrics=metrics,
+                                 sort=sort,
+                                 dimensions="ga:landingPagePath,ga:socialNetwork",
+                                 max_results=10000,
+                                 end_date=end_date).execute()
+        data = collections.defaultdict(list)
+        rows = results.get('rows',[])
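+        # Each row is [landing page path, social network, entrances]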
+        for row in rows:
+            data[ga_model._normalize_url(row[0])].append((row[1], int(row[2]),))
+        ga_model.update_social(period_name, data)
 
 
     def download(self, start_date, end_date, path='~/dataset/[a-z0-9-_]+'):
@@ -176,12 +208,12 @@
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
-        ga_model.update_sitewide_stats(period_name, "Totals", {'Total pageviews': result_data[0][0]})
-
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviewsPerVisit,ga:bounces,ga:avgTimeOnSite,ga:percentNewVisits',
+        ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]})
+
+        results = self.service.data().ga().get(
+                                 ids='ga:' + self.profile_id,
+                                 start_date=start_date,
+                                 metrics='ga:pageviewsPerVisit,ga:bounces,ga:avgTimeOnSite,ga:percentNewVisits,ga:visitors',
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
@@ -189,7 +221,8 @@
             'Pages per visit': result_data[0][0],
             'Bounces': result_data[0][1],
             'Average time on site': result_data[0][2],
-            'Percent new visits': result_data[0][3],
+            'New visits': result_data[0][3],
+            'Total visits': result_data[0][4],
         }
         ga_model.update_sitewide_stats(period_name, "Totals", data)
 
@@ -208,11 +241,13 @@
         data = {}
         for result in result_data:
             data[result[0]] = data.get(result[0], 0) + int(result[2])
+        self._filter_out_long_tail(data, MIN_VIEWS)
         ga_model.update_sitewide_stats(period_name, "Languages", data)
 
         data = {}
         for result in result_data:
             data[result[1]] = data.get(result[1], 0) + int(result[2])
+        self._filter_out_long_tail(data, MIN_VIEWS)
         ga_model.update_sitewide_stats(period_name, "Country", data)
 
 
@@ -227,13 +262,11 @@
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
-        twitter_links = []
         data = {}
         for result in result_data:
             if not result[0] == '(not set)':
                 data[result[0]] = data.get(result[0], 0) + int(result[2])
-                if result[0] == 'Twitter':
-                    twitter_links.append(result[1])
+        self._filter_out_long_tail(data, 3)
         ga_model.update_sitewide_stats(period_name, "Social sources", data)
 
 
@@ -251,12 +284,14 @@
         data = {}
         for result in result_data:
             data[result[0]] = data.get(result[0], 0) + int(result[2])
+        self._filter_out_long_tail(data, MIN_VIEWS)
         ga_model.update_sitewide_stats(period_name, "Operating Systems", data)
 
         data = {}
         for result in result_data:
-            key = "%s (%s)" % (result[0],result[1])
-            data[key] = result[2]
+            if int(result[2]) >= MIN_VIEWS:
+                key = "%s %s" % (result[0],result[1])
+                data[key] = result[2]
         ga_model.update_sitewide_stats(period_name, "Operating Systems versions", data)
 
 
@@ -271,17 +306,42 @@
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
+        # e.g. [u'Firefox', u'19.0', u'20']
+
         data = {}
         for result in result_data:
             data[result[0]] = data.get(result[0], 0) + int(result[2])
+        self._filter_out_long_tail(data, MIN_VIEWS)
         ga_model.update_sitewide_stats(period_name, "Browsers", data)
 
         data = {}
         for result in result_data:
-            key = "%s (%s)" % (result[0], result[1])
-            data[key] = result[2]
+            key = "%s %s" % (result[0], self._filter_browser_version(result[0], result[1]))
+            data[key] = data.get(key, 0) + int(result[2])
+        self._filter_out_long_tail(data, MIN_VIEWS)
         ga_model.update_sitewide_stats(period_name, "Browser versions", data)
 
+    @classmethod
+    def _filter_browser_version(cls, browser, version_str):
+        '''
+        Simplifies a detailed browser version string,
+        e.g. grouping Firefox 3.5.1 and 3.5.2 together as just 3.
+        This makes the stats easier to read and helps protect privacy.
+        '''
+        ver = version_str
+        parts = ver.split('.')
+        if len(parts) > 1:
+            ver = parts[0]
+        # Special case complex version nums
+        if browser in ['Safari', 'Android Browser']:
+            ver = parts[0]
+            if len(ver) > 2:
+                num_hidden_digits = len(ver) - 2
+                ver = ver[0] + ver[1] + 'X' * num_hidden_digits
+        return ver
 
     def _mobile_stats(self, start_date, end_date, period_name):
         """ Info about mobile devices """
@@ -299,10 +359,23 @@
         data = {}
         for result in result_data:
             data[result[0]] = data.get(result[0], 0) + int(result[2])
+        self._filter_out_long_tail(data, MIN_VIEWS)
         ga_model.update_sitewide_stats(period_name, "Mobile brands", data)
 
         data = {}
         for result in result_data:
             data[result[1]] = data.get(result[1], 0) + int(result[2])
+        self._filter_out_long_tail(data, MIN_VIEWS)
         ga_model.update_sitewide_stats(period_name, "Mobile devices", data)
 
+    @classmethod
+    def _filter_out_long_tail(cls, data, threshold=10):
+        '''
+        Given data which is a frequency distribution, filter out
+        results which are below a threshold count. This is good to protect
+        privacy.
+        '''
+        for key, value in data.items():
+            if value < threshold:
+                del data[key]
+

--- a/ckanext/ga_report/ga_auth.py
+++ b/ckanext/ga_report/ga_auth.py
@@ -53,7 +53,11 @@
         return None
 
     accountName = config.get('googleanalytics.account')
+    if not accountName:
+        raise Exception('googleanalytics.account needs to be configured')
     webPropertyId = config.get('googleanalytics.id')
+    if not webPropertyId:
+        raise Exception('googleanalytics.id needs to be configured')
     for acc in accounts.get('items'):
         if acc.get('name') == accountName:
             accountId = acc.get('id')

--- a/ckanext/ga_report/ga_model.py
+++ b/ckanext/ga_report/ga_model.py
@@ -13,6 +13,8 @@
 def make_uuid():
     return unicode(uuid.uuid4())
 
+metadata = MetaData()
+
 
 
 class GA_Url(object):
@@ -21,20 +23,6 @@
         for k,v in kwargs.items():
             setattr(self, k, v)
 
-class GA_Stat(object):
-
-    def __init__(self, **kwargs):
-        for k,v in kwargs.items():
-            setattr(self, k, v)
-
-class GA_Publisher(object):
-
-    def __init__(self, **kwargs):
-        for k,v in kwargs.items():
-            setattr(self, k, v)
-
-
-metadata = MetaData()
 url_table = Table('ga_url', metadata,
                       Column('id', types.UnicodeText, primary_key=True,
                              default=make_uuid),
@@ -47,6 +35,13 @@
                 )
 mapper(GA_Url, url_table)
 
+
+class GA_Stat(object):
+
+    def __init__(self, **kwargs):
+        for k,v in kwargs.items():
+            setattr(self, k, v)
+