Fix issues around bounce-rate tracking and report presentation

--- a/README.rst
+++ b/README.rst
@@ -33,10 +33,9 @@
       googleanalytics.id = UA-1010101-1
       googleanalytics.account = Account name (e.g. data.gov.uk, see top level item at https://www.google.com/analytics)
       ga-report.period = monthly
-      ga-report.bounce_url = /data
+      ga-report.bounce_url = /
 
-   The ga-report.bounce_url specifies the path to use when calculating bounces. For DGU this is /data
-   but you may want to set this to /.
+   The ga-report.bounce_url option specifies the single path for which the bounce rate is recorded. Typically this is / (the home page).
 
 3. Set up this extension's database tables using a paster command. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file)::
 

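The bounce_url setting is combined with the account name further down in
download_analytics.py; a minimal sketch of the resulting filter path (the
account value here is only illustrative, not taken from this change):

    # e.g. googleanalytics.account = data.gov.uk and ga-report.bounce_url = /
    path = '/%s%s' % ('data.gov.uk', '/')   # -> '/data.gov.uk/'
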
--- a/ckanext/ga_report/command.py
+++ b/ckanext/ga_report/command.py
@@ -80,6 +80,11 @@
                                default=False,
                                dest='delete_first',
                                help='Delete data for the period first')
+        self.parser.add_option('-s', '--skip_url_stats',
+                               action='store_true',
+                               default=False,
+                               dest='skip_url_stats',
+                               help='Skip the download of URL data - just do site-wide stats')
 
     def command(self):
         self._load_config()
@@ -95,7 +100,8 @@
             return
 
         downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc),
-                                       delete_first=self.options.delete_first)
+                                       delete_first=self.options.delete_first,
+                                       skip_url_stats=self.options.skip_url_stats)
 
         time_period = self.args[1] if self.args and len(self.args) > 1 \
             else 'latest'

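For reference, a hedged usage example of the new option (the paster command
name "loadanalytics" and the config path are assumptions, not shown in this
diff; "latest" follows the existing default time-period argument):

    paster loadanalytics latest --skip_url_stats --config=../ckan/development.ini
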
--- a/ckanext/ga_report/controller.py
+++ b/ckanext/ga_report/controller.py
@@ -9,7 +9,7 @@
 import sqlalchemy
 from sqlalchemy import func, cast, Integer
 import ckan.model as model
-from ga_model import GA_Url, GA_Stat, GA_ReferralStat
+from ga_model import GA_Url, GA_Stat, GA_ReferralStat, GA_Publisher
 
 log = logging.getLogger('ckanext.ga-report')
 
@@ -22,6 +22,7 @@
 
 
 def _month_details(cls):
+    '''Returns a list of all the month names'''
     months = []
     vals = model.Session.query(cls.period_name).filter(cls.period_name!='All').distinct().all()
     for m in vals:
@@ -70,13 +71,13 @@
         entries = q.order_by('ga_stat.key').all()
 
         def clean_key(key, val):
-            if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounces']:
+            if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']:
                 val =  "%.2f" % round(float(val), 2)
                 if key == 'Average time on site':
                     mins, secs = divmod(float(val), 60)
                     hours, mins = divmod(mins, 60)
                     val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
-                if key in ['New visits','Bounces']:
+                if key in ['New visits','Bounce rate (home page)']:
                     val = "%s%%" % val
             if key in ['Total page views', 'Total visits']:
                 val = int(val)
@@ -228,7 +229,6 @@
             c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
 
         c.top_publishers = _get_top_publishers()
-
         return render('ga_report/publisher/index.html')
 
     def _get_packages(self, publisher=None, count=-1):
@@ -244,12 +244,11 @@
         if publisher:
             q = q.filter(GA_Url.department_id==publisher.name)
         q = q.filter(GA_Url.period_name==month)
-        q = q.order_by('ga_url.visitors::int desc')
+        q = q.order_by('ga_url.visits::int desc')
         top_packages = []
-
         for entry,package in q.limit(count):
             if package:
-                top_packages.append((package, entry.pageviews, entry.visitors))
+                top_packages.append((package, entry.pageviews, entry.visits))
             else:
                 log.warning('Could not find package associated package')
 
@@ -288,7 +287,7 @@
         else:
             c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
 
-        month = c.mnth or 'All'
+        month = c.month or 'All'
         c.publisher_page_views = 0
         q = model.Session.query(GA_Url).\
             filter(GA_Url.url=='/publisher/%s' % c.publisher_name)
@@ -304,33 +303,28 @@
     Returns a list of the top 20 publishers by dataset visits.
     (The number to show can be varied with 'limit')
     '''
+    month = c.month or 'All'
     connection = model.Session.connection()
     q = """
-        select department_id, sum(pageviews::int) views, sum(visitors::int) visits
+        select department_id, sum(pageviews::int) views, sum(visits::int) visits
         from ga_url
-        where department_id <> ''"""
-    if c.month:
-        q = q + """
-                and period_name=%s
-        """
-    q = q + """
-            group by department_id order by visits desc
+        where department_id <> ''
+          and package_id <> ''
+          and url like '/dataset/%%'
+          and period_name=%s
+        group by department_id order by views desc
         """
     if limit:
         q = q + " limit %s;" % (limit)
 
-    # Add this back (before and period_name =%s) if you want to ignore publisher
-    # homepage views
-    # and not url like '/publisher/%%'
-
     top_publishers = []
-    res = connection.execute(q, c.month)
-
+    res = connection.execute(q, month)
     for row in res:
         g = model.Group.get(row[0])
         if g:
             top_publishers.append((g, row[1], row[2]))
     return top_publishers
+
 
 def _get_publishers():
     '''

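A note on the rewritten _get_top_publishers query: because the SQL string is
now run through connection.execute() with a bound parameter, the literal % in
the like pattern has to be doubled (%%), and the month falls back to the
aggregate 'All' period. A minimal sketch of the execution, using only names
from the lines above:

    month = c.month or 'All'             # 'All' when no specific month is selected
    res = connection.execute(q, month)   # month fills the period_name=%s placeholder
    top_publishers = []
    for department_id, views, visits in res:
        g = model.Group.get(department_id)   # department_id is the publisher group name
        if g:                                # skip rows whose group no longer exists
            top_publishers.append((g, views, visits))
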
--- a/ckanext/ga_report/download_analytics.py
+++ b/ckanext/ga_report/download_analytics.py
@@ -3,7 +3,7 @@
 import datetime
 import collections
 from pylons import config
-
+from ga_model import _normalize_url
 import ga_model
 
 #from ga_client import GA
@@ -17,11 +17,13 @@
 class DownloadAnalytics(object):
     '''Downloads and stores analytics info'''
 
-    def __init__(self, service=None, profile_id=None, delete_first=False):
+    def __init__(self, service=None, profile_id=None, delete_first=False,
+                 skip_url_stats=False):
         self.period = config['ga-report.period']
         self.service = service
         self.profile_id = profile_id
         self.delete_first = delete_first
+        self.skip_url_stats = skip_url_stats
 
     def specific_month(self, date):
         import calendar
@@ -92,29 +94,43 @@
 
     def download_and_store(self, periods):
         for period_name, period_complete_day, start_date, end_date in periods:
+            log.info('Period "%s" (%s - %s)',
+                     self.get_full_period_name(period_name, period_complete_day),
+                     start_date.strftime('%Y-%m-%d'),
+                     end_date.strftime('%Y-%m-%d'))
+
             if self.delete_first:
-                log.info('Deleting existing Analytics for period "%s"',
+                log.info('Deleting existing Analytics for this period "%s"',
                          period_name)
                 ga_model.delete(period_name)
-            log.info('Downloading Analytics for period "%s" (%s - %s)',
-                     self.get_full_period_name(period_name, period_complete_day),
-                     start_date.strftime('%Y %m %d'),
-                     end_date.strftime('%Y %m %d'))
-
-            data = self.download(start_date, end_date, '~/dataset/[a-z0-9-_]+')
-            log.info('Storing Dataset Analytics for period "%s"',
-                     self.get_full_period_name(period_name, period_complete_day))
-            self.store(period_name, period_complete_day, data, )
-
-            data = self.download(start_date, end_date, '~/publisher/[a-z0-9-_]+')
-            log.info('Storing Publisher Analytics for period "%s"',
-                     self.get_full_period_name(period_name, period_complete_day))
-            self.store(period_name, period_complete_day, data,)
-
-            ga_model.update_publisher_stats(period_name) # about 30 seconds.
+
+            if not self.skip_url_stats:
+                # Clean out old url data before storing the new
+                ga_model.pre_update_url_stats(period_name)
+
+                accountName = config.get('googleanalytics.account')
+
+                log.info('Downloading analytics for dataset views')
+                data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)
+
+                log.info('Storing dataset views (%i rows)', len(data.get('url')))
+                self.store(period_name, period_complete_day, data, )
+
+                log.info('Downloading analytics for publisher views')
+                data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)
+
+                log.info('Storing publisher views (%i rows)', len(data.get('url')))
+                self.store(period_name, period_complete_day, data,)
+
+                log.info('Aggregating datasets by publisher')
+                ga_model.update_publisher_stats(period_name) # about 30 seconds.
+
+            log.info('Downloading and storing analytics for site-wide stats')
             self.sitewide_stats( period_name )
 
+            log.info('Downloading and storing analytics for social networks')
             self.update_social_info(period_name, start_date, end_date)
+
 
     def update_social_info(self, period_name, start_date, end_date):
         start_date = start_date.strftime('%Y-%m-%d')
@@ -137,18 +153,17 @@
         data = collections.defaultdict(list)
         rows = results.get('rows',[])
         for row in rows:
-            from ga_model import _normalize_url
             data[_normalize_url(row[0])].append( (row[1], int(row[2]),) )
         ga_model.update_social(period_name, data)
 
 
-    def download(self, start_date, end_date, path='~/dataset/[a-z0-9-_]+'):
+    def download(self, start_date, end_date, path=None):
         '''Get data from GA for a given time period'''
         start_date = start_date.strftime('%Y-%m-%d')
         end_date = end_date.strftime('%Y-%m-%d')
         query = 'ga:pagePath=%s$' % path
-        metrics = 'ga:uniquePageviews, ga:visits'
-        sort = '-ga:uniquePageviews'
+        metrics = 'ga:pageviews, ga:visits'
+        sort = '-ga:pageviews'
 
         # Supported query params at
         # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
@@ -165,7 +180,14 @@
         packages = []
         for entry in results.get('rows'):
             (loc,pageviews,visits) = entry
-            packages.append( ('http:/' + loc, pageviews, visits,) ) # Temporary hack
+            url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk
+
+            if not url.startswith('/dataset/') and not url.startswith('/publisher/'):
+                # filter out strays like:
+                # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open
+                # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate
+                continue
+            packages.append( (url, pageviews, visits,) ) # Temporary hack
         return dict(url=packages)
 
     def store(self, period_name, period_complete_day, data):
@@ -179,12 +201,10 @@
 
         start_date = '%s-01' % period_name
         end_date = '%s-%s' % (period_name, last_day_of_month)
-        print 'Sitewide_stats for %s (%s -> %s)' % (period_name, start_date, end_date)
-
         funcs = ['_totals_stats', '_social_stats', '_os_stats',
                  '_locale_stats', '_browser_stats', '_mobile_stats']
         for f in funcs:
-            print ' + Fetching %s stats' % f.split('_')[1]
+            log.info('Downloading analytics for %s' % f.split('_')[1])
             getattr(self, f)(start_date, end_date, period_name)
 
     def _get_results(result_data, f):
@@ -199,8 +219,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
@@ -221,25 +241,27 @@
         }
         ga_model.update_sitewide_stats(period_name, "Totals", data)
 
-        # Bounces from /data. This url is specified in configuration because
-        # for DGU we don't want /.
-        path = config.get('ga-report.bounce_url','/')
-        print path
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 filters='ga:pagePath=~%s$' % (path,),
-                                 start_date=start_date,
-                                 metrics='ga:bounces,ga:uniquePageviews',
+        # Bounces from / or another configurable page.
+        path = '/%s%s' % (config.get('googleanalytics.account'),
+                          config.get('ga-report.bounce_url', '/'))
+        results = self.service.data().ga().get(
+                                 ids='ga:' + self.profile_id,
+                                 filters='ga:pagePath==%s' % (path,),
+                                 start_date=start_date,
+                                 metrics='ga:bounces,ga:pageviews',
                                  dimensions='ga:pagePath',
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
-        for results in result_data:
-            if results[0] == path:
-                bounce, total = [float(x) for x in results[1:]]
-                pct = 100 * bounce/total
-                print "%d bounces from %d total == %s" % (bounce, total, pct)
-                ga_model.update_sitewide_stats(period_name, "Totals", {'Bounces': pct})
+        if not result_data or len(result_data) != 1:
+            log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
+                      path, result_data)
+            return
+        results = result_data[0]
+        bounces, total = [float(x) for x in results[1:]]
+        pct = 100 * bounces/total
+        log.info('%d bounces from %d total == %s', bounces, total, pct)
+        ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': pct})
 
 
     def _locale_stats(self, start_date, end_date, period_name):
@@ -247,8 +269,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:language,ga:country",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -271,8 +293,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:socialNetwork,ga:referralPath",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -290,8 +312,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:operatingSystem,ga:operatingSystemVersion",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -315,8 +337,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:browser,ga:browserVersion",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -364,8 +386,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
                                  max_results=10000,
                                  end_date=end_date).execute()

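To make the new bounce handling concrete, a minimal sketch of the calculation
(the numbers are illustrative; it assumes the exact-match pagePath filter
returns the single row that the error check above expects):

    # each result row comes back as [pagePath, bounces, pageviews]
    result_data = [['/data.gov.uk/', '700', '2000']]
    bounces, total = [float(x) for x in result_data[0][1:]]
    pct = 100 * bounces / total            # -> 35.0
    # stored for the period as the 'Bounce rate (home page)' sitewide stat
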
--- a/ckanext/ga_report/ga_model.py
+++ b/ckanext/ga_report/ga_model.py
@@ -27,7 +27,7 @@
                       Column('period_name', types.UnicodeText),
                       Column('period_complete_day', types.Integer),
                       Column('pageviews', types.UnicodeText),
-                      Column('visitors', types.UnicodeText),
+                      Column('visits', types.UnicodeText),
                       Column('url', types.UnicodeText),
                       Column('department_id', types.UnicodeText),
                       Column('package_id', types.UnicodeText),
@@ -63,7 +63,7 @@
                   Column('period_name', types.UnicodeText),
                   Column('publisher_name', types.UnicodeText),
                   Column('views', types.UnicodeText),
-                  Column('visitors', types.UnicodeText),
+                  Column('visits', types.UnicodeText),
                   Column('toplevel', types.Boolean, default=False),
                   Column('subpublishercount', types.Integer, default=0),
                   Column('parent', types.UnicodeText),
@@ -111,12 +111,10 @@
     >>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
     '/dataset/weekly_fuel_prices'
     '''
-    # Deliberately leaving a /
-    url = url.replace('http:/','')
-    return '/' + '/'.join(url.split('/')[2:])
-
-
-def _get_department_id_of_url(url):
+    return '/' + '/'.join(url.split('/')[3:])
+
+
+def _get_package_and_publisher(url):
     # e.g. /dataset/fuel_prices
     # e.g. /dataset/fuel_prices/resource/e63380d4
     dataset_match = re.match('/dataset/([^/]+)(/.*)?', url)
@@ -126,12 +124,13 @@
         if dataset:
             publisher_groups = dataset.get_groups('publisher')
             if publisher_groups:
-                return publisher_groups[0].name
+                return dataset_ref,publisher_groups[0].name
+        return dataset_ref, None
     else:
         publisher_match = re.match('/publisher/([^/]+)(/.*)?', url)
         if publisher_match:
-            return publisher_match.groups()[0]
-
+            return None, publisher_match.groups()[0]
+    return None, None
 
 def update_sitewide_stats(period_name, stat_name, data):
     for k,v in data.iteritems():
@@ -156,61 +155,72 @@
         model.Session.commit()
 
 
+def pre_update_url_stats(period_name):
+    model.Session.query(GA_Url).\
+            filter(GA_Url.period_name==period_name).delete()
+    model.Session.query(GA_Url).\
+            filter(GA_Url.period_name=='All').delete()
+
 
 def update_url_stats(period_name, period_complete_day, url_data):
-    for url, views, visitors in url_data:
-        url = _normalize_url(url)
-        department_id = _get_department_id_of_url(url)
-
-        package = None
-        if url.startswith('/dataset/'):
-            package = url[len('/dataset/'):]
-
-        # see if the row for this url & month is in the table already
+    '''
+    Given a list of urls and number of hits for each during a given period,
+    stores them in GA_Url under the period and recalculates the totals for
+    the 'All' period.
+    '''
+    for url, views, visits in url_data:
+        package, publisher = _get_package_and_publisher(url)
+
+
         item = model.Session.query(GA_Url).\
             filter(GA_Url.period_name==period_name).\
             filter(GA_Url.url==url).first()
         if item:
-            item.period_name = period_name
-            item.pageviews = views
-            item.visitors = visitors
-            item.department_id = department_id
-            item.package_id = package
+            item.pageviews = item.pageviews + views
+            item.visits = item.visits + visits
+            if not item.package_id:
+                item.package_id = package
+            if not item.department_id:
+                item.department_id = publisher
             model.Session.add(item)
         else:
-            # create the row
             values = {'id': make_uuid(),
                       'period_name': period_name,
                       'period_complete_day': period_complete_day,
                       'url': url,
                       'pageviews': views,
-                      'visitors': visitors,
-                      'department_id': department_id,
+                      'visits': visits,
+                      'department_id': publisher,
                       'package_id': package
                      }
             model.Session.add(GA_Url(**values))
-
-        # We now need to recaculate the ALL time_period from the data we have
-        # Delete the old 'All'
-        old = model.Session.query(GA_Url).\
-            filter(GA_Url.period_name == "All").\
-            filter(GA_Url.url==url).delete()
-
-        items = model.Session.query(GA_Url).\
-            filter(GA_Url.period_name != "All").\
-            filter(GA_Url.url==url).all()
-        values = {'id': make_uuid(),
-                  'period_name': "All",
-                  'period_complete_day': "0",
-                  'url': url,
-                  'pageviews': sum([int(x.pageviews) for x in items]),
-                  'visitors': sum([int(x.visitors) for x in items]),
-                  'department_id': department_id,
-                  'package_id': package
-                 }
-        model.Session.add(GA_Url(**values))
-
         model.Session.commit()
+
+        if package:
+            old_pageviews, old_visits = 0, 0
+            old = model.Session.query(GA_Url).\
+                filter(GA_Url.period_name=='All').\
+                filter(GA_Url.url==url).all()
+            old_pageviews = sum([int(o.pageviews) for o in old])
+            old_visits = sum([int(o.visits) for o in old])
+
+            entries = model.Session.query(GA_Url).\
+                filter(GA_Url.period_name!='All').\
+                filter(GA_Url.url==url).all()
+            values = {'id': make_uuid(),
+                      'period_name': 'All',
+                      'period_complete_day': 0,
+                      'url': url,
+                      'pageviews': sum([int(e.pageviews) for e in entries]) + old_pageviews,
+                      'visits': sum([int(e.visits) for e in entries]) + old_visits,
+                      'department_id': publisher,
+                      'package_id': package
+                     }
+
+            model.Session.add(GA_Url(**values))
+            model.Session.commit()
+
+
 
 
 def update_social(period_name, data):
@@ -252,7 +262,7 @@
         filter(model.Group.type=='publisher').\
         filter(model.Group.state=='active').all()
     for publisher in publishers:
-        views, visitors, subpub = update_publisher(period_name, publisher, publisher.name)
+        views, visits, subpub = update_publisher(period_name, publisher, publisher.name)
         parent, parents = '', publisher.get_groups('publisher')
         if parents:
             parent = parents[0].name
@@ -261,7 +271,7 @@
             filter(GA_Publisher.publisher_name==publisher.name).first()
         if item:
             item.views = views
-            item.visitors = visitors
+            item.visits = visits
             item.publisher_name = publisher.name
             item.toplevel = publisher in toplevel
             item.subpublishercount = subpub
@@ -273,7 +283,7 @@
                      'period_name': period_name,
                      'publisher_name': publisher.name,
                      'views': views,
-                     'visitors': visitors,
+                     'visits': visits,
                      'toplevel': publisher in toplevel,
                      'subpublishercount': subpub,
                      'parent': parent
@@ -283,7 +293,7 @@
 
 
 def update_publisher(period_name, pub, part=''):
-    views,visitors,subpub = 0, 0, 0
+    views,visits,subpub = 0, 0, 0
     for publisher in go_down_tree(pub):
         subpub = subpub + 1
         items = model.Session.query(GA_Url).\
@@ -291,9 +301,9 @@
                 filter(GA_Url.department_id==publisher.name).all()
         for item in items:
             views = views + int(item.pageviews)
-            visitors = visitors + int(item.visitors)
-
-    return views, visitors, (subpub-1)
+            visits = visits + int(item.visits)
+
+    return views, visits, (subpub-1)
 
 
 def get_top_level():

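Worked examples of the reworked URL helpers (a minimal sketch; the publisher
name returned for the dataset URL is illustrative and assumes the dataset's
publisher group exists in the CKAN database):

    # _normalize_url now drops the scheme, the empty segment and the first path
    # component (the domain/account prefix), keeping the path from /dataset or /publisher on:
    _normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
    # -> '/dataset/weekly_fuel_prices'

    # _get_package_and_publisher returns a (package, publisher) pair:
    _get_package_and_publisher('/dataset/weekly_fuel_prices')
    # -> ('weekly_fuel_prices', 'department-of-energy')   # publisher is illustrative
    _get_package_and_publisher('/publisher/environment-agency')
    # -> (None, 'environment-agency')
    _get_package_and_publisher('/about')
    # -> (None, None)
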
--- a/ckanext/ga_report/helpers.py
+++ b/ckanext/ga_report/helpers.py
@@ -1,7 +1,9 @@
 import logging
 import operator
+
 import ckan.lib.base as base
 import ckan.model as model
+from ckan.logic import get_action
 
 from ckanext.ga_report.ga_model import GA_Url, GA_Publisher
 from ckanext.ga_report.controller import _get_publishers
@@ -39,25 +41,39 @@
                    order_by('ga_url.pageviews::int desc')
     num_top_datasets = top_datasets.count()
 
+    dataset = None
     if num_top_datasets:
-        dataset = None
+        count = 0
         while not dataset:
             rand = random.randrange(0, min(top, num_top_datasets))
             ga_url = top_datasets[rand]
             dataset = model.Package.get(ga_url.url[len('/dataset/'):])
             if dataset and not dataset.state == 'active':
                 dataset = None
-    else:
+            count += 1
+            if count > 10:
+                # give up after several failed picks so the loop cannot run forever
+                break
+    if not dataset:
+        # fallback
         dataset = model.Session.query(model.Package)\
                   .filter_by(state='active').first()
-    publisher = dataset.get_groups('publisher')[0]
-    return {
-        'dataset': dataset,
-        'publisher': publisher
-    }
+        if not dataset:
+            return None
+    dataset_dict = get_action('package_show')({'model': model,
+                                               'session': model.Session,
+                                               'validate': False},
+                                              {'id':dataset.id})
+    return dataset_dict
 
 def single_popular_dataset_html(top=20):
-    context = single_popular_dataset(top)
+    dataset_dict = single_popular_dataset(top)
+    if not dataset_dict:
+        return ''  # no dataset to feature - render nothing
+    groups = dataset_dict.get('groups', [])
+    publishers = [ g for g in groups if g.get('type') == 'publisher' ]
+    publisher = publishers[0] if publishers else {'name':'', 'title': ''}
+    context = {
+        'dataset': dataset_dict,
+        'publisher': publisher
+        }
     return base.render_snippet('ga_report/ga_popular_single.html', **context)
 
 
@@ -90,7 +106,7 @@
             if not p in datasets:
                 datasets[p] = {'views':0, 'visits': 0}
             datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews)
-            datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visitors)
+            datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits)
 
     results = []
     for k, v in datasets.iteritems():

--- a/ckanext/ga_report/templates/ga_report/ga_popular_single.html
+++ b/ckanext/ga_report/templates/ga_report/ga_popular_single.html
@@ -8,14 +8,14 @@
     <h2>Featured dataset</h2>
 
     <div class="dataset-summary boxed">
-          <a class="dataset-header" href="${h.url_for(controller='package', action='read', id=dataset.name)}">
-            <h3>${dataset.title}</h3>
+          <a class="dataset-header" href="${h.url_for(controller='package', action='read', id=dataset['name'])}">
+            <h3>${dataset['title']}</h3>
           </a>
           <h4>
             <strong>Publisher</strong> :
-            <a href="/publisher/${publisher.name}">${publisher.title}</a>
+            <a href="/publisher/${publisher['name']}">${publisher['title']}</a>
           </h4>
-          <div>${h.truncate(dataset.notes, length=200, whole_word=True)}</div>
+          <div>${h.truncate(dataset['notes_rendered'], length=200, whole_word=True)}</div>
     </div>
     <div>
       <a href="${h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport',action='read')}" class="btn">Other popular datasets</a>

--- a/ckanext/ga_report/templates/ga_report/notes.html
+++ b/ckanext/ga_report/templates/ga_report/notes.html
@@ -6,11 +6,10 @@
     <li class="widget-container boxed widget_text">
       <h4>Notes</h4>
       <ul>
-          <li>'Views' is the number of sessions during which that page was viewed one or more times ('Unique Pageviews').</li>
-<!--          <li>'Visits' is the number of individual sessions initiated by all the visitors to your site, counted once for each visitor for each session.</li>-->
-          <li>'Visitors' is the number of unique users visiting the site (whether once or more times).</li>
+          <li>"Views" is the number of sessions during which the page was viewed one or more times (technically known as "unique pageviews").</li>
+          <li>"Visits" is the number of visits to a page, counted once for each visitor in each session.</li>
           <li>These usage statistics are confined to users with javascript enabled, which excludes web crawlers and API calls.</li>
-          <li>The results for only small numbers of views/visits are not shown. Where these relate to site pages, then they are available in full in the CSV download. Where these relate to users' web browser information, they are not disclosed, for privacy reasons.</li>
+          <li>Results are not shown where the number of views/visits is very small. Where these relate to site pages, the full figures are available in the CSV download. Where they relate to users' web browser information, they are not disclosed, for privacy reasons.</li>
       </ul>
     </li>
 </html>

--- a/ckanext/ga_report/templates/ga_report/publisher/index.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/index.html
@@ -41,15 +41,15 @@
        <table class="table table-condensed table-bordered table-striped">
 	 <tr>
 	   <th>Publisher</th>
+<!--	   <th>Dataset Visits</th>-->
 	   <th>Dataset Views</th>
-	   <th>Dataset Visits</th>
 	 </tr>
         <py:for each="publisher, views, visits in c.top_publishers">
 	  <tr>
 	    <td>${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name))}
 	    </td>
+<!--	    <td>${visits}</td> -->
 	    <td>${views}</td>
-	    <td>${visits}</td>
 	  </tr>