Adjust popularity score to take account of the number of days in the month.

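The new score normalises views by the number of complete days recorded for each month, so short or partly-collected months are not penalised, and the previous month is discounted by half. A rough worked illustration of the rule added in ``ga_model.get_score_for_dataset`` below (the figures are invented; only the arithmetic comes from the code)::

    # Invented figures: (views, complete days recorded) for last month,
    # then for the current month so far.
    monthly = [
        (300, 31),  # last month: 300 views over 31 complete days
        (80, 10),   # current month: 80 views over 10 complete days so far
    ]

    score = 0.0
    for views, complete_days in monthly:
        score /= 2                                   # discount earlier periods by 50%
        score += views / float(complete_days or 15)  # 15 is the code's fallback guess
    score = int(score * 100)                         # -> 1283
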
--- a/README.rst
+++ b/README.rst
@@ -33,10 +33,9 @@
       googleanalytics.id = UA-1010101-1
       googleanalytics.account = Account name (e.g. data.gov.uk, see top level item at https://www.google.com/analytics)
       ga-report.period = monthly
-      ga-report.bounce_url = /data
+      ga-report.bounce_url = /
 
-   The ga-report.bounce_url specifies the path to use when calculating bounces. For DGU this is /data
-   but you may want to set this to /.
+   The ga-report.bounce_url specifies the path for which the bounce rate is recorded. Typically this is / (the home page).
 
 3. Set up this extension's database tables using a paster command. (Ensure your CKAN pyenv is still activated, run the command from ``src/ckanext-ga-report``, alter the ``--config`` option to point to your site config file)::
 

--- a/ckanext/ga_report/command.py
+++ b/ckanext/ga_report/command.py
@@ -80,6 +80,11 @@
                                default=False,
                                dest='delete_first',
                                help='Delete data for the period first')
+        self.parser.add_option('-s', '--skip_url_stats',
+                               action='store_true',
+                               default=False,
+                               dest='skip_url_stats',
+                               help='Skip the download of URL data - just do site-wide stats')
 
     def command(self):
         self._load_config()
@@ -95,7 +100,8 @@
             return
 
         downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc),
-                                       delete_first=self.options.delete_first)
+                                       delete_first=self.options.delete_first,
+                                       skip_url_stats=self.options.skip_url_stats)
 
         time_period = self.args[1] if self.args and len(self.args) > 1 \
             else 'latest'

--- a/ckanext/ga_report/controller.py
+++ b/ckanext/ga_report/controller.py
@@ -71,13 +71,13 @@
         entries = q.order_by('ga_stat.key').all()
 
         def clean_key(key, val):
-            if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounces']:
+            if key in ['Average time on site', 'Pages per visit', 'New visits', 'Bounce rate (home page)']:
                 val =  "%.2f" % round(float(val), 2)
                 if key == 'Average time on site':
                     mins, secs = divmod(float(val), 60)
                     hours, mins = divmod(mins, 60)
                     val = '%02d:%02d:%02d (%s seconds) ' % (hours, mins, secs, val)
-                if key in ['New visits','Bounces']:
+                if key in ['New visits','Bounce rate (home page)']:
                     val = "%s%%" % val
             if key in ['Total page views', 'Total visits']:
                 val = int(val)
@@ -232,7 +232,7 @@
         return render('ga_report/publisher/index.html')
 
     def _get_packages(self, publisher=None, count=-1):
-        '''Returns the datasets in order of visits'''
+        '''Returns the datasets in order of views'''
         if count == -1:
             count = sys.maxint
 
@@ -244,11 +244,11 @@
         if publisher:
             q = q.filter(GA_Url.department_id==publisher.name)
         q = q.filter(GA_Url.period_name==month)
-        q = q.order_by('ga_url.visitors::int desc')
+        q = q.order_by('ga_url.pageviews::int desc')
         top_packages = []
         for entry,package in q.limit(count):
             if package:
-                top_packages.append((package, entry.pageviews, entry.visitors))
+                top_packages.append((package, entry.pageviews, entry.visits))
             else:
                 log.warning('Could not find package associated package')
 
@@ -306,11 +306,13 @@
     month = c.month or 'All'
     connection = model.Session.connection()
     q = """
-        select department_id, sum(pageviews::int) views, sum(visitors::int) visits
+        select department_id, sum(pageviews::int) views, sum(visits::int) visits
         from ga_url
         where department_id <> ''
+          and package_id <> ''
+          and url like '/dataset/%%'
           and period_name=%s
-        group by department_id order by visits desc
+        group by department_id order by views desc
         """
     if limit:
         q = q + " limit %s;" % (limit)
@@ -327,7 +329,7 @@
 def _get_publishers():
     '''
     Returns a list of all publishers. Each item is a tuple:
-      (names, title)
+      (name, title)
     '''
     publishers = []
     for pub in model.Session.query(model.Group).\

--- a/ckanext/ga_report/download_analytics.py
+++ b/ckanext/ga_report/download_analytics.py
@@ -17,11 +17,13 @@
 class DownloadAnalytics(object):
     '''Downloads and stores analytics info'''
 
-    def __init__(self, service=None, profile_id=None, delete_first=False):
+    def __init__(self, service=None, profile_id=None, delete_first=False,
+                 skip_url_stats=False):
         self.period = config['ga-report.period']
         self.service = service
         self.profile_id = profile_id
         self.delete_first = delete_first
+        self.skip_url_stats = skip_url_stats
 
     def specific_month(self, date):
         import calendar
@@ -92,33 +94,41 @@
 
     def download_and_store(self, periods):
         for period_name, period_complete_day, start_date, end_date in periods:
+            log.info('Period "%s" (%s - %s)',
+                     self.get_full_period_name(period_name, period_complete_day),
+                     start_date.strftime('%Y-%m-%d'),
+                     end_date.strftime('%Y-%m-%d'))
+
             if self.delete_first:
-                log.info('Deleting existing Analytics for period "%s"',
+                log.info('Deleting existing Analytics for this period "%s"',
                          period_name)
                 ga_model.delete(period_name)
-            log.info('Downloading Analytics for period "%s" (%s - %s)',
-                     self.get_full_period_name(period_name, period_complete_day),
-                     start_date.strftime('%Y %m %d'),
-                     end_date.strftime('%Y %m %d'))
-
-            # Clean up the entries before we run this
-            ga_model.pre_update_url_stats(period_name)
-
-            accountName = config.get('googleanalytics.account')
-
-            data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)
-            log.info('Storing Dataset Analytics for period "%s"',
-                     self.get_full_period_name(period_name, period_complete_day))
-            self.store(period_name, period_complete_day, data, )
-
-            data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)
-            log.info('Storing Publisher Analytics for period "%s"',
-                     self.get_full_period_name(period_name, period_complete_day))
-            self.store(period_name, period_complete_day, data,)
-
-            ga_model.update_publisher_stats(period_name) # about 30 seconds.
+
+            if not self.skip_url_stats:
+                # Clean out old url data before storing the new
+                ga_model.pre_update_url_stats(period_name)
+
+                accountName = config.get('googleanalytics.account')
+
+                log.info('Downloading analytics for dataset views')
+                data = self.download(start_date, end_date, '~/%s/dataset/[a-z0-9-_]+' % accountName)
+
+                log.info('Storing dataset views (%i rows)', len(data.get('url')))
+                self.store(period_name, period_complete_day, data, )
+
+                log.info('Downloading analytics for publisher views')
+                data = self.download(start_date, end_date, '~/%s/publisher/[a-z0-9-_]+' % accountName)
+
+                log.info('Storing publisher views (%i rows)', len(data.get('url')))
+                self.store(period_name, period_complete_day, data,)
+
+                log.info('Aggregating datasets by publisher')
+                ga_model.update_publisher_stats(period_name) # about 30 seconds.
+
+            log.info('Downloading and storing analytics for site-wide stats')
             self.sitewide_stats( period_name )
 
+            log.info('Downloading and storing analytics for social networks')
             self.update_social_info(period_name, start_date, end_date)
 
 
@@ -152,8 +162,8 @@
         start_date = start_date.strftime('%Y-%m-%d')
         end_date = end_date.strftime('%Y-%m-%d')
         query = 'ga:pagePath=%s$' % path
-        metrics = 'ga:uniquePageviews, ga:visits'
-        sort = '-ga:uniquePageviews'
+        metrics = 'ga:pageviews, ga:visits'
+        sort = '-ga:pageviews'
 
         # Supported query params at
         # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
@@ -170,8 +180,12 @@
         packages = []
         for entry in results.get('rows'):
             (loc,pageviews,visits) = entry
-            url = _normalize_url('http:/' + loc)
+            url = _normalize_url('http:/' + loc) # strips off domain e.g. www.data.gov.uk or data.gov.uk
+
             if not url.startswith('/dataset/') and not url.startswith('/publisher/'):
+                # filter out strays like:
+                # /data/user/login?came_from=http://data.gov.uk/dataset/os-code-point-open
+                # /403.html?page=/about&from=http://data.gov.uk/publisher/planning-inspectorate
                 continue
             packages.append( (url, pageviews, visits,) ) # Temporary hack
         return dict(url=packages)
@@ -187,12 +201,10 @@
 
         start_date = '%s-01' % period_name
         end_date = '%s-%s' % (period_name, last_day_of_month)
-        print 'Sitewide_stats for %s (%s -> %s)' % (period_name, start_date, end_date)
-
         funcs = ['_totals_stats', '_social_stats', '_os_stats',
                  '_locale_stats', '_browser_stats', '_mobile_stats']
         for f in funcs:
-            print ' + Fetching %s stats' % f.split('_')[1]
+            log.info('Downloading analytics for %s' % f.split('_')[1])
             getattr(self, f)(start_date, end_date, period_name)
 
     def _get_results(result_data, f):
@@ -207,8 +219,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
@@ -229,25 +241,27 @@
         }
         ga_model.update_sitewide_stats(period_name, "Totals", data)
 
-        # Bounces from /data. This url is specified in configuration because
-        # for DGU we don't want /.
-        path = config.get('ga-report.bounce_url','/')
-        print path
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 filters='ga:pagePath=~%s$' % (path,),
-                                 start_date=start_date,
-                                 metrics='ga:bounces,ga:uniquePageviews',
+        # Bounces from / or another configurable page.
+        path = '/%s%s' % (config.get('googleanalytics.account'),
+                          config.get('ga-report.bounce_url', '/'))
+        results = self.service.data().ga().get(
+                                 ids='ga:' + self.profile_id,
+                                 filters='ga:pagePath==%s' % (path,),
+                                 start_date=start_date,
+                                 metrics='ga:bounces,ga:pageviews',
                                  dimensions='ga:pagePath',
                                  max_results=10000,
                                  end_date=end_date).execute()
         result_data = results.get('rows')
-        for results in result_data:
-            if results[0] == path:
-                bounce, total = [float(x) for x in results[1:]]
-                pct = 100 * bounce/total
-                print "%d bounces from %d total == %s" % (bounce, total, pct)
-                ga_model.update_sitewide_stats(period_name, "Totals", {'Bounces': pct})
+        if not result_data or len(result_data) != 1:
+            log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
+                      path, result_data)
+            return
+        results = result_data[0]
+        bounces, total = [float(x) for x in result_data[0][1:]]
+        pct = 100 * bounces/total
+        log.info('%d bounces from %d total == %s', bounces, total, pct)
+        ga_model.update_sitewide_stats(period_name, "Totals", {'Bounce rate (home page)': pct})
 
 
     def _locale_stats(self, start_date, end_date, period_name):
@@ -255,8 +269,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:language,ga:country",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -279,8 +293,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:socialNetwork,ga:referralPath",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -298,8 +312,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:operatingSystem,ga:operatingSystemVersion",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -323,8 +337,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:browser,ga:browserVersion",
                                  max_results=10000,
                                  end_date=end_date).execute()
@@ -372,8 +386,8 @@
         results = self.service.data().ga().get(
                                  ids='ga:' + self.profile_id,
                                  start_date=start_date,
-                                 metrics='ga:uniquePageviews',
-                                 sort='-ga:uniquePageviews',
+                                 metrics='ga:pageviews',
+                                 sort='-ga:pageviews',
                                  dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
                                  max_results=10000,
                                  end_date=end_date).execute()

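The bounce-rate hunk above now builds an exact-match page path from two config options rather than using a regex suffix match. A minimal sketch of that construction, using invented config values (the real ones come from the site's ini file)::

    # Invented example values for the two options combined in _totals_stats().
    config = {
        'googleanalytics.account': 'data.gov.uk',
        'ga-report.bounce_url': '/',
    }

    # The account name is prefixed because, as the dataset/publisher download
    # queries above assume, this GA profile records page paths with the
    # hostname segment included.
    path = '/%s%s' % (config.get('googleanalytics.account'),
                      config.get('ga-report.bounce_url', '/'))
    assert path == '/data.gov.uk/'

    # Exact match (==) replaces the old suffix regex (=~%s$), so only the
    # configured page contributes to the reported bounce rate.
    filters = 'ga:pagePath==%s' % path
    assert filters == 'ga:pagePath==/data.gov.uk/'
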
--- a/ckanext/ga_report/ga_model.py
+++ b/ckanext/ga_report/ga_model.py
@@ -9,6 +9,8 @@
 
 import ckan.model as model
 from ckan.lib.base import *
+
+log = __import__('logging').getLogger(__name__)
 
 def make_uuid():
     return unicode(uuid.uuid4())
@@ -27,7 +29,7 @@
                       Column('period_name', types.UnicodeText),
                       Column('period_complete_day', types.Integer),
                       Column('pageviews', types.UnicodeText),
-                      Column('visitors', types.UnicodeText),
+                      Column('visits', types.UnicodeText),
                       Column('url', types.UnicodeText),
                       Column('department_id', types.UnicodeText),
                       Column('package_id', types.UnicodeText),
@@ -63,7 +65,7 @@
                   Column('period_name', types.UnicodeText),
                   Column('publisher_name', types.UnicodeText),
                   Column('views', types.UnicodeText),
-                  Column('visitors', types.UnicodeText),
+                  Column('visits', types.UnicodeText),
                   Column('toplevel', types.Boolean, default=False),
                   Column('subpublishercount', types.Integer, default=0),
                   Column('parent', types.UnicodeText),
@@ -111,12 +113,10 @@
     >>> normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices')
     '/dataset/weekly_fuel_prices'
     '''
-    # Deliberately leaving a /
-    url = url.replace('http:/','')
-    return '/' + '/'.join(url.split('/')[2:])
-
-
-def _get_department_id_of_url(url):
+    return '/' + '/'.join(url.split('/')[3:])
+
+
+def _get_package_and_publisher(url):
     # e.g. /dataset/fuel_prices
     # e.g. /dataset/fuel_prices/resource/e63380d4
     dataset_match = re.match('/dataset/([^/]+)(/.*)?', url)
@@ -126,12 +126,13 @@
         if dataset:
             publisher_groups = dataset.get_groups('publisher')
             if publisher_groups:
-                return publisher_groups[0].name
+                return dataset_ref,publisher_groups[0].name
+        return dataset_ref, None
     else:
         publisher_match = re.match('/publisher/([^/]+)(/.*)?', url)
         if publisher_match:
-            return publisher_match.groups()[0]
-
+            return None, publisher_match.groups()[0]
+    return None, None
 
 def update_sitewide_stats(period_name, stat_name, data):
     for k,v in data.iteritems():
@@ -156,25 +157,6 @@
         model.Session.commit()
 
 
-def update_url_stat_totals(period_name):
-
-    """
-        items = model.Session.query(GA_Url).\
-            filter(GA_Url.period_name != "All").\
-            filter(GA_Url.url==url).all()
-        values = {'id': make_uuid(),
-                  'period_name': "All",
-                  'period_complete_day': "0",
-                  'url': url,
-                  'pageviews': sum([int(x.pageviews) for x in items]),
-                  'visitors': sum([int(x.visitors) for x in items]),
-                  'department_id': department_id,
-                  'package_id': package
-                 }
-        model.Session.add(GA_Url(**values))
-        model.Session.commit()
-    """
-
 def pre_update_url_stats(period_name):
     model.Session.query(GA_Url).\
             filter(GA_Url.period_name==period_name).delete()
@@ -183,27 +165,47 @@
 
 
 def update_url_stats(period_name, period_complete_day, url_data):
-
-    for url, views, visitors in url_data:
-        department_id = _get_department_id_of_url(url)
-
-        package = None
-        if url.startswith('/dataset/'):
-            package = url[len('/dataset/'):]
-
-        values = {'id': make_uuid(),
-                  'period_name': period_name,
-                  'period_complete_day': period_complete_day,
-                  'url': url,
-                  'pageviews': views,
-                  'visitors': visitors,
-                  'department_id': department_id,
-                  'package_id': package
-                 }
-        model.Session.add(GA_Url(**values))
+    '''
+    Given a list of urls and number of hits for each during a given period,
+    stores them in GA_Url under the period and recalculates the totals for
+    the 'All' period.
+    '''
+    for url, views, visits in url_data:
+        package, publisher = _get_package_and_publisher(url)
+
+
+        item = model.Session.query(GA_Url).\
+            filter(GA_Url.period_name==period_name).\
+            filter(GA_Url.url==url).first()
+        if item:
+            item.pageviews = item.pageviews + views
+            item.visits = item.visits + visits
+            if not item.package_id:
+                item.package_id = package
+            if not item.department_id:
+                item.department_id = publisher
+            model.Session.add(item)
+        else:
+            values = {'id': make_uuid(),
+                      'period_name': period_name,
+                      'period_complete_day': period_complete_day,
+                      'url': url,
+                      'pageviews': views,
+                      'visits': visits,
+                      'department_id': publisher,
+                      'package_id': package
+                     }
+            model.Session.add(GA_Url(**values))
         model.Session.commit()
 
         if package:
+            old_pageviews, old_visits = 0, 0
+            old = model.Session.query(GA_Url).\
+                filter(GA_Url.period_name=='All').\
+                filter(GA_Url.url==url).all()
+            old_pageviews = sum([int(o.pageviews) for o in old])
+            old_visits = sum([int(o.visits) for o in old])
+
             entries = model.Session.query(GA_Url).\
                 filter(GA_Url.period_name!='All').\
                 filter(GA_Url.url==url).all()
@@ -211,14 +213,14 @@
                       'period_name': 'All',
                       'period_complete_day': 0,
                       'url': url,
-                      'pageviews': sum([int(e.pageviews) for e in entries]),
-                      'visitors': sum([int(e.visitors) for e in entries]),
-                      'department_id': department_id,
+                      'pageviews': sum([int(e.pageviews) for e in entries]) + old_pageviews,
+                      'visits': sum([int(e.visits or 0) for e in entries]) + old_visits,
+                      'department_id': publisher,
                       'package_id': package
                      }
+
             model.Session.add(GA_Url(**values))
             model.Session.commit()
-
 
 
 
@@ -262,7 +264,7 @@
         filter(model.Group.type=='publisher').\
         filter(model.Group.state=='active').all()
     for publisher in publishers:
-        views, visitors, subpub = update_publisher(period_name, publisher, publisher.name)
+        views, visits, subpub = update_publisher(period_name, publisher, publisher.name)
         parent, parents = '', publisher.get_groups('publisher')
         if parents:
             parent = parents[0].name
@@ -271,7 +273,7 @@
             filter(GA_Publisher.publisher_name==publisher.name).first()
         if item:
             item.views = views
-            item.visitors = visitors
+            item.visits = visits
             item.publisher_name = publisher.name
             item.toplevel = publisher in toplevel
             item.subpublishercount = subpub
@@ -283,7 +285,7 @@
                      'period_name': period_name,
                      'publisher_name': publisher.name,
                      'views': views,
-                     'visitors': visitors,
+                     'visits': visits,
                      'toplevel': publisher in toplevel,
                      'subpublishercount': subpub,
                      'parent': parent
@@ -293,7 +295,7 @@
 
 
 def update_publisher(period_name, pub, part=''):
-    views,visitors,subpub = 0, 0, 0
+    views,visits,subpub = 0, 0, 0
     for publisher in go_down_tree(pub):
         subpub = subpub + 1
         items = model.Session.query(GA_Url).\
@@ -301,9 +303,9 @@
                 filter(GA_Url.department_id==publisher.name).all()
         for item in items:
             views = views + int(item.pageviews)
-            visitors = visitors + int(item.visitors)
-
-    return views, visitors, (subpub-1)
+            visits = visits + int(item.visits)
+
+    return views, visits, (subpub-1)
 
 
 def get_top_level():
@@ -343,3 +345,34 @@
         q.delete()
     model.Session.commit()
 
+def get_score_for_dataset(dataset_name):
+    '''
+    Returns a "current popularity" score for a dataset,
+    based on how many views it has had recently.
+    '''
+    import datetime
+    now = datetime.datetime.now()
+    last_month = now - datetime.timedelta(days=30)
+    period_names = ['%s-%02d' % (last_month.year, last_month.month),
+                    '%s-%02d' % (now.year, now.month),
+                    ]
+
+    score = 0
+    for period_name in period_names:
+        score /= 2 # previous periods are discounted by 50%
+        entry = model.Session.query(GA_Url)\
+                .filter(GA_Url.period_name==period_name)\
+                .filter(GA_Url.package_id==dataset_name).first()
+        # score
+        if entry:
+            views = float(entry.pageviews)
+            if entry.period_complete_day:
+                views_per_day = views / entry.period_complete_day
+            else:
+                views_per_day = views / 15 # guess
+            score += views_per_day
+
+    score = int(score * 100)
+    log.debug('Popularity %s: %s', score, dataset_name)
+    return score
+

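A hypothetical call to the new helper, assuming the extension is installed and analytics have been loaded for the current and previous month (``weekly_fuel_prices`` is just the example name from the docstrings above)::

    from ckanext.ga_report.ga_model import get_score_for_dataset

    # Returns an int; larger means more views per complete recorded day,
    # with last month's contribution discounted by half.
    score = get_score_for_dataset('weekly_fuel_prices')
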
--- a/ckanext/ga_report/helpers.py
+++ b/ckanext/ga_report/helpers.py
@@ -60,7 +60,8 @@
         if not dataset:
             return None
     dataset_dict = get_action('package_show')({'model': model,
-                                               'session': model.Session},
+                                               'session': model.Session,
+                                               'validate': False},
                                               {'id':dataset.id})
     return dataset_dict
 
@@ -105,7 +106,7 @@
             if not p in datasets:
                 datasets[p] = {'views':0, 'visits': 0}
             datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews)
-            datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visitors)
+            datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits)
 
     results = []
     for k, v in datasets.iteritems():

--- a/ckanext/ga_report/templates/ga_report/notes.html
+++ b/ckanext/ga_report/templates/ga_report/notes.html
@@ -6,11 +6,10 @@
     <li class="widget-container boxed widget_text">
       <h4>Notes</h4>
       <ul>
-          <li>'Views' is the number of sessions during which that page was viewed one or more times ('Unique Pageviews').</li>
-<!--          <li>'Visits' is the number of individual sessions initiated by all the visitors to your site, counted once for each visitor for each session.</li>-->
-          <li>'Visitors' is the number of unique users visiting the site (whether once or more times).</li>
+          <li>"Views" is the number of times a page was loaded in users' browsers.</li>
+          <li>"Visits" is the number of unique user visits to a page, counted once for each visitor for each of their browsing sessions.</li>
           <li>These usage statistics are confined to users with javascript enabled, which excludes web crawlers and API calls.</li>
-          <li>The results for only small numbers of views/visits are not shown. Where these relate to site pages, then they are available in full in the CSV download. Where these relate to users' web browser information, they are not disclosed, for privacy reasons.</li>
+          <li>The results are not shown when the number of views/visits is tiny. Where these relate to site pages, results are available in full in the CSV download. Where these relate to users' web browser information, results are not disclosed, for privacy reasons.</li>
       </ul>
     </li>
 </html>

--- a/ckanext/ga_report/templates/ga_report/publisher/index.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/index.html
@@ -41,14 +41,14 @@
        <table class="table table-condensed table-bordered table-striped">
 	 <tr>
 	   <th>Publisher</th>
-	   <th>Dataset Visits</th>
+<!--	   <th>Dataset Visits</th>-->
 	   <th>Dataset Views</th>
 	 </tr>
         <py:for each="publisher, views, visits in c.top_publishers">
 	  <tr>
 	    <td>${h.link_to(publisher.title, h.url_for(controller='ckanext.ga_report.controller:GaDatasetReport', action='read_publisher', id=publisher.name))}
 	    </td>
-	    <td>${visits}</td>
+<!--	    <td>${visits}</td> -->
 	    <td>${views}</td>
 	  </tr>
         </py:for>

--- a/ckanext/ga_report/templates/ga_report/publisher/read.html
+++ b/ckanext/ga_report/templates/ga_report/publisher/read.html
@@ -47,14 +47,14 @@
      <table py:if="c.top_packages" class="table table-condensed table-bordered table-striped">
 	 <tr>
 	   <th>Dataset</th>
-	   <th>Visits</th>
+<!--	   <th>Visits</th> -->
 	   <th>Views</th>
 	 </tr>
         <py:for each="package, views, visits in c.top_packages">
 	  <tr>
 	    <td>${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))}
 	    </td>
-	    <td>${visits}</td>
+<!--	    <td>${visits}</td> -->
 	    <td>${views}</td>
 	  </tr>
         </py:for>

--- /dev/null
+++ b/ckanext/ga_report/tests/test_model.py
@@ -0,0 +1,18 @@
+from nose.tools import assert_equal
+
+from ckanext.ga_report.ga_model import _normalize_url
+
+class TestNormalizeUrl:
+    def test_normal(self):
+        assert_equal(_normalize_url('http://data.gov.uk/dataset/weekly_fuel_prices'),
+                     '/dataset/weekly_fuel_prices')
+
+    def test_www_dot(self):
+        assert_equal(_normalize_url('http://www.data.gov.uk/dataset/weekly_fuel_prices'),
+                     '/dataset/weekly_fuel_prices')
+
+    def test_https(self):
+        assert_equal(_normalize_url('https://data.gov.uk/dataset/weekly_fuel_prices'),
+                     '/dataset/weekly_fuel_prices')
+
+