Horribly hacky links to publishers and datasets, nobody tell Tom

--- a/ckanext/ga_report/command.py
+++ b/ckanext/ga_report/command.py
@@ -115,6 +115,7 @@
                                default=False,
                                dest='skip_url_stats',
                                help='Skip the download of URL data - just do site-wide stats')
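+        # Raw OAuth access token for direct requests to the GA API;
+        # populated from init_service() in command().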
+        self.token = ""
 
     def command(self):
         self._load_config()
@@ -129,14 +130,14 @@
             return
 
         try:
-            svc = init_service(ga_token_filepath, None)
+            self.token, svc = init_service(ga_token_filepath, None)
         except TypeError:
             print ('Have you correctly run the getauthtoken task and '
                    'specified the correct token file in the CKAN config under '
                    '"googleanalytics.token.filepath"?')
             return
 
-        downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc),
+        downloader = DownloadAnalytics(svc, self.token, profile_id=get_profile_id(svc),
                                        delete_first=self.options.delete_first,
                                        skip_url_stats=self.options.skip_url_stats)
 

--- a/ckanext/ga_report/controller.py
+++ b/ckanext/ga_report/controller.py
@@ -211,13 +211,10 @@
             graph_dict = {}
             for stat in graph_query:
                 graph_dict[ stat.key ] = graph_dict.get(stat.key,{
-                    'name':stat.key, 
-                    'data': []
+                    'name':stat.key,
+                    'raw': {}
                     })
-                graph_dict[ stat.key ]['data'].append({
-                    'x':_get_unix_epoch(stat.period_name),
-                    'y':float(stat.value)
-                    })
+                graph_dict[ stat.key ]['raw'][stat.period_name] = float(stat.value)
             stats_in_table = [x[0] for x in entries]
             stats_not_in_table = set(graph_dict.keys()) - set(stats_in_table)
             stats = stats_in_table + sorted(list(stats_not_in_table))
@@ -252,7 +249,7 @@
         writer = csv.writer(response)
         writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"])
 
-        top_publishers, top_publishers_graph = _get_top_publishers(None)
+        top_publishers = _get_top_publishers(limit=None)
 
         for publisher,view,visit in top_publishers:
             writer.writerow([publisher.title.encode('utf-8'),
@@ -274,7 +271,7 @@
             if not c.publisher:
                 abort(404, 'A publisher with that name could not be found')
 
-        packages = self._get_packages(c.publisher)
+        packages = self._get_packages(publisher=c.publisher, month=c.month)
         response.headers['Content-Type'] = "text/csv; charset=utf-8"
         response.headers['Content-Disposition'] = \
             str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,))
@@ -303,15 +300,18 @@
         if c.month:
             c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month])
 
-        c.top_publishers, graph_data = _get_top_publishers()
+        c.top_publishers = _get_top_publishers()
+        graph_data = _get_top_publishers_graph()
         c.top_publishers_graph = json.dumps( _to_rickshaw(graph_data) )
 
         return render('ga_report/publisher/index.html')
 
-    def _get_packages(self, publisher=None, count=-1):
+    def _get_packages(self, publisher=None, month='', count=-1):
         '''Returns the datasets in order of views'''
         have_download_data = True
-        month = c.month or 'All'
+        month = month or 'All'
         if month != 'All':
             have_download_data = month >= DOWNLOADS_AVAILABLE_FROM
 
@@ -388,28 +388,25 @@
         entry = q.filter(GA_Url.period_name==c.month).first()
         c.publisher_page_views = entry.pageviews if entry else 0
 
-        c.top_packages = self._get_packages(c.publisher, 20)
+        c.top_packages = self._get_packages(publisher=c.publisher, count=20, month=c.month)
 
         # Graph query
-        top_package_names = [ x[0].name for x in c.top_packages ]
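+        # Use the all-time top datasets for the graph, so the same set of
+        # series is shown regardless of which month is being viewed.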
+        top_packages_all_time = self._get_packages(publisher=c.publisher, count=20, month='All')
+        top_package_names = [ x[0].name for x in top_packages_all_time ]
         graph_query = model.Session.query(GA_Url,model.Package)\
             .filter(model.Package.name==GA_Url.package_id)\
             .filter(GA_Url.url.like('/dataset/%'))\
             .filter(GA_Url.package_id.in_(top_package_names))
-        graph_dict = {}
+        all_series = {}
         for entry,package in graph_query:
             if not package: continue
             if entry.period_name=='All': continue
-            graph_dict[package.name] = graph_dict.get(package.name,{
+            all_series[package.name] = all_series.get(package.name,{
                 'name':package.title,
-                'data':[]
+                'raw': {}
                 })
-            graph_dict[package.name]['data'].append({
-                'x':_get_unix_epoch(entry.period_name),
-                'y':int(entry.pageviews),
-                })
-        graph = [ graph_dict[x] for x in top_package_names ]
-
+            all_series[package.name]['raw'][entry.period_name] = int(entry.pageviews)
+        graph = [ all_series[series_name] for series_name in top_package_names ]
         c.graph_data = json.dumps( _to_rickshaw(graph) )
 
         return render('ga_report/publisher/read.html')
@@ -417,53 +414,45 @@
 def _to_rickshaw(data, percentageMode=False):
     if data==[]:
         return data
-    # Create a consistent x-axis between all series
-    num_points = [ len(series['data']) for series in data ]
-    ideal_index = num_points.index( max(num_points) )
-    x_axis = []
+    # The x-axis is every month in c.months. Note that data might not exist
+    # for the entire history, e.g. for recently-added datasets.
+    x_axis = [x[0] for x in c.months]
+    x_axis.reverse() # Ascending order
+    x_axis = x_axis[:-1] # Remove the latest month; its analytics are incomplete
+    totals = {}
     for series in data:
+        series['data'] = []
+        for x_string in x_axis:
+            x = _get_unix_epoch( x_string )
+            y = series['raw'].get(x_string,0)
+            series['data'].append({'x':x,'y':y})
+            totals[x] = totals.get(x,0)+y
+    if not percentageMode:
+        return data
+    # Turn all data into percentages
+    # Roll insignificant series into a catch-all
+    THRESHOLD = 1  # percent; series that never exceed this are rolled into 'Other'
+    raw_data = data
+    data = []
+    for series in raw_data:
         for point in series['data']:
-            x_axis.append(point['x'])
-    x_axis = sorted( list( set(x_axis) ) )
-    # Zero pad any missing values
-    for series in data:
-        xs = [ point['x'] for point in series['data'] ]
-        for x in set(x_axis).difference(set(xs)):
-            series['data'].append( {'x':x, 'y':0} )
-    if percentageMode:
-        def get_totals(series_list):
-            totals = {}
-            for series in series_list:
-                for point in series['data']:
-                    totals[point['x']] = totals.get(point['x'],0) + point['y']
-            return totals
-        # Transform data into percentage stacks
-        totals = get_totals(data)
-        # Roll insignificant series into a catch-all
-        THRESHOLD = 0.01
-        raw_data = data
-        data = []
-        for series in raw_data:
-            for point in series['data']:
-                fraction = float(point['y']) / totals[point['x']]
-                if not (series in data) and fraction>THRESHOLD:
-                    data.append(series)
-        # Overwrite data with a set of interesting series
-        others = [ x for x in raw_data if not (x in data) ]
-        if len(others):
-            data.append({ 
-                'name':'Other',
-                'data': [ {'x':x,'y':y} for x,y in get_totals(others).items() ] 
-                })
-        # Turn each point into a percentage
-        for series in data:
-            for point in series['data']:
-                point['y'] = (point['y']*100) / totals[point['x']]
-    # Sort the points
-    for series in data:
-        series['data'] = sorted( series['data'], key=lambda x:x['x'] )
-        # Strip the latest month's incomplete analytics
-        series['data'] = series['data'][:-1]
+            percentage = (100*float(point['y'])) / totals[point['x']]
+            if not (series in data) and percentage>THRESHOLD:
+                data.append(series)
+            point['y'] = percentage
+    others = [ x for x in raw_data if not (x in data) ]
+    if len(others):
+        data_other = []
+        for i in range(len(x_axis)):
+            x = _get_unix_epoch(x_axis[i])
+            y = 0
+            for series in others:
+                y += series['data'][i]['y']
+            data_other.append({'x':x,'y':y})
+        data.append({
+            'name':'Other',
+            'data': data_other
+            })
     return data
 
 
@@ -488,39 +477,51 @@
 
     top_publishers = []
     res = connection.execute(q, month)
-    department_ids = []
     for row in res:
         g = model.Group.get(row[0])
         if g:
-            department_ids.append(row[0])
             top_publishers.append((g, row[1], row[2]))
-
-    graph = []
-    if limit is not None:
-        # Query for a history graph of these publishers
-        q = model.Session.query(
-                GA_Url.department_id, 
-                GA_Url.period_name, 
-                func.sum(cast(GA_Url.pageviews,sqlalchemy.types.INT)))\
-            .filter( GA_Url.department_id.in_(department_ids) )\
-            .filter( GA_Url.period_name!='All' )\
-            .filter( GA_Url.url.like('/dataset/%') )\
-            .filter( GA_Url.package_id!='' )\
-            .group_by( GA_Url.department_id, GA_Url.period_name )
-        graph_dict = {}
-        for dept_id,period_name,views in q:
-            graph_dict[dept_id] = graph_dict.get( dept_id, {
-                'name' : model.Group.get(dept_id).title,
-                'data' : []
-                })
-            graph_dict[dept_id]['data'].append({
-                'x': _get_unix_epoch(period_name),
-                'y': views
-                })
-        # Sort dict into ordered list
-        for id in department_ids:
-            graph.append( graph_dict[id] )
-    return top_publishers, graph
+    return top_publishers
+
+
+def _get_top_publishers_graph(limit=20):
+    '''
+    Returns a time series of dataset views for each of the top publishers,
+    ready for graphing. (The number of publishers can be varied with
+    'limit'.)
+    '''
+    connection = model.Session.connection()
+    q = """
+        select department_id, sum(pageviews::int) views
+        from ga_url
+        where department_id <> ''
+          and package_id <> ''
+          and url like '/dataset/%%'
+          and period_name='All'
+        group by department_id order by views desc
+        """
+    if limit:
+        q = q + " limit %s;" % (limit)
+
+    res = connection.execute(q)
+    department_ids = [ row[0] for row in res ]
+
+    # Query for a history graph of these department ids
+    q = model.Session.query(
+            GA_Url.department_id,
+            GA_Url.period_name,
+            func.sum(cast(GA_Url.pageviews,sqlalchemy.types.INT)))\
+        .filter( GA_Url.department_id.in_(department_ids) )\
+        .filter( GA_Url.url.like('/dataset/%') )\
+        .filter( GA_Url.package_id!='' )\
+        .group_by( GA_Url.department_id, GA_Url.period_name )
+    graph_dict = {}
+    for dept_id,period_name,views in q:
+        graph_dict[dept_id] = graph_dict.get( dept_id, {
+            'name' : model.Group.get(dept_id).title,
+            'raw' : {}
+            })
+        graph_dict[dept_id]['raw'][period_name] = views
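+    # Return the series in the same order as the view-count query above
+    # (highest total views first).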
+    return [ graph_dict[id] for id in department_ids ]
 
 
 def _get_publishers():
@@ -530,7 +531,7 @@
     '''
     publishers = []
     for pub in model.Session.query(model.Group).\
-               filter(model.Group.type=='publisher').\
+               filter(model.Group.type=='organization').\
                filter(model.Group.state=='active').\
                order_by(model.Group.name):
         publishers.append((pub.name, pub.title))
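
For reference, the reshaped graph series above carry a 'raw' mapping of
{period_name: value} instead of precomputed point lists; _to_rickshaw then
pads every series against the shared month axis so all series align. A
standalone sketch of that padding step, where the month list and epoch helper
are simplified stand-ins for c.months and the controller's _get_unix_epoch:

    import calendar
    import datetime

    def _get_unix_epoch(period_name):
        # Simplified stand-in: '2013-02' -> epoch seconds for 2013-02-01 UTC.
        year, month = map(int, period_name.split('-'))
        return calendar.timegm(datetime.datetime(year, month, 1).timetuple())

    months = ['2013-01', '2013-02', '2013-03']  # ascending, latest already dropped
    series = {'name': 'example-dataset', 'raw': {'2013-02': 42.0}}

    # Zero-pad the series against the shared x-axis, as _to_rickshaw now does.
    series['data'] = [{'x': _get_unix_epoch(m), 'y': series['raw'].get(m, 0)}
                      for m in months]
    # series['data'] == [{'x': 1356998400, 'y': 0},
    #                    {'x': 1359676800, 'y': 42.0},
    #                    {'x': 1362096000, 'y': 0}]

Missing months become zero-valued points, which keeps the stacked Rickshaw
graphs from misaligning when a dataset only has data for part of the history.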

--- a/ckanext/ga_report/download_analytics.py
+++ b/ckanext/ga_report/download_analytics.py
@@ -1,7 +1,10 @@
 import os
 import logging
 import datetime
 import collections
+import requests
+import json
+from ga_auth import init_service
 from pylons import config
 from ga_model import _normalize_url
 import ga_model
@@ -18,13 +21,14 @@
 class DownloadAnalytics(object):
     '''Downloads and stores analytics info'''
 
-    def __init__(self, service=None, profile_id=None, delete_first=False,
+    def __init__(self, service=None, token=None, profile_id=None, delete_first=False,
                  skip_url_stats=False):
         self.period = config['ga-report.period']
         self.service = service
         self.profile_id = profile_id
         self.delete_first = delete_first
         self.skip_url_stats = skip_url_stats
+        self.token = token
 
     def specific_month(self, date):
         import calendar
@@ -149,17 +153,27 @@
         metrics = 'ga:entrances'
         sort = '-ga:entrances'
 
-        # Supported query params at
-        # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 filters=query,
-                                 start_date=start_date,
-                                 metrics=metrics,
-                                 sort=sort,
-                                 dimensions="ga:landingPagePath,ga:socialNetwork",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = dict(ids='ga:' + self.profile_id,
+                        filters=query,
+                        metrics=metrics,
+                        sort=sort,
+                        dimensions="ga:landingPagePath,ga:socialNetwork")
+            args['max-results'] = 10000
+            args['start-date'] = start_date
+            args['end-date'] = end_date
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         data = collections.defaultdict(list)
         rows = results.get('rows',[])
         for row in rows:
@@ -178,15 +192,32 @@
 
         # Supported query params at
         # https://developers.google.com/analytics/devguides/reporting/core/v3/reference
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 filters=query,
-                                 start_date=start_date,
-                                 metrics=metrics,
-                                 sort=sort,
-                                 dimensions="ga:pagePath",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            headers = {'authorization': 'Bearer ' + self.token}
+
+            args = {}
+            args["sort"] = "-ga:pageviews"
+            args["max-results"] = 100000
+            args["dimensions"] = "ga:pagePath"
+            args["start-date"] = start_date
+            args["end-date"] = end_date
+            args["metrics"] = metrics
+            args["ids"] = "ga:" + self.profile_id
+            args["filters"] = query
+            args["alt"] = "json"
+
+            r = requests.get("https://www.googleapis.com/analytics/v3/data/ga", params=args, headers=headers)
+            if r.status_code != 200:
+                raise Exception("Request with params: %s failed" % args)
+
+            results = json.loads(r.content)
+        except Exception, e:
+            log.exception(e)
+            raise
 
         packages = []
         log.info("There are %d results" % results['totalResults'])
@@ -226,25 +257,83 @@
             data[key] = data.get(key,0) + result[1]
         return data
 
+    def _get_json(self, params, prev_fail=False):
+        if prev_fail:
+            # The access token has probably expired - re-run auth to get a
+            # fresh one before retrying.
+            ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', ''))
+            if not ga_token_filepath:
+                print 'ERROR: In the CKAN config you need to specify the filepath of the ' \
+                      'Google Analytics token file under key: googleanalytics.token.filepath'
+                return dict(rows=[])
+
+            try:
+                self.token, svc = init_service(ga_token_filepath, None)
+            except TypeError:
+                print ('Have you correctly run the getauthtoken task and '
+                       'specified the correct token file in the CKAN config under '
+                       '"googleanalytics.token.filepath"?')
+
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            headers = {'authorization': 'Bearer ' + self.token}
+            r = requests.get("https://www.googleapis.com/analytics/v3/data/ga", params=params, headers=headers)
+            if r.status_code != 200:
+                log.info("STATUS: %s" % (r.status_code,))
+                log.info("CONTENT: %s" % (r.content,))
+                raise Exception("Request with params: %s failed" % params)
+
+            return json.loads(r.content)
+        except Exception, e:
+            if not prev_fail:
+                log.warning(e)
+                # Retry once with a refreshed token.
+                return self._get_json(params, prev_fail=True)
+            else:
+                log.exception(e)
+
+        return dict(rows=[])
+
     def _totals_stats(self, start_date, end_date, period_name, period_complete_day):
         """ Fetches distinct totals, total pageviews etc """
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviews',
-                                 sort='-ga:pageviews',
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            args = {}
+            args["max-results"] = 100000
+            args["start-date"] = start_date
+            args["end-date"] = end_date
+            args["ids"] = "ga:" + self.profile_id
+
+            args["metrics"] = "ga:pageviews"
+            args["sort"] = "-ga:pageviews"
+            args["alt"] = "json"
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]},
             period_complete_day)
 
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits',
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = {}
+            args["max-results"] = 100000
+            args["start-date"] = start_date
+            args["end-date"] = end_date
+            args["ids"] = "ga:" + self.profile_id
+
+            args["metrics"] = "ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits"
+            args["alt"] = "json"
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         data = {
             'Pages per visit': result_data[0][0],
@@ -257,14 +346,28 @@
         # Bounces from / or another configurable page.
         path = '/%s%s' % (config.get('googleanalytics.account'),
                           config.get('ga-report.bounce_url', '/'))
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 filters='ga:pagePath==%s' % (path,),
-                                 start_date=start_date,
-                                 metrics='ga:visitBounceRate',
-                                 dimensions='ga:pagePath',
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = {}
+            args["max-results"] = 100000
+            args["start-date"] = start_date
+            args["end-date"] = end_date
+            args["ids"] = "ga:" + self.profile_id
+
+            args["filters"] = 'ga:pagePath==%s' % (path,)
+            args["dimensions"] = 'ga:pagePath'
+            args["metrics"] = "ga:visitBounceRate"
+            args["alt"] = "json"
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         if not result_data or len(result_data) != 1:
             log.error('Could not pinpoint the bounces for path: %s. Got results: %r',
@@ -280,14 +383,28 @@
 
     def _locale_stats(self, start_date, end_date, period_name, period_complete_day):
         """ Fetches stats about language and country """
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviews',
-                                 sort='-ga:pageviews',
-                                 dimensions="ga:language,ga:country",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = {}
+            args["max-results"] = 100000
+            args["start-date"] = start_date
+            args["end-date"] = end_date
+            args["ids"] = "ga:" + self.profile_id
+
+            args["dimensions"] = "ga:language,ga:country"
+            args["metrics"] = "ga:pageviews"
+            args["sort"] = "-ga:pageviews"
+            args["alt"] = "json"
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         data = {}
         for result in result_data:
@@ -308,15 +425,27 @@
 
         data = {}
 
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 filters='ga:eventAction==download',
-                                 metrics='ga:totalEvents',
-                                 sort='-ga:totalEvents',
-                                 dimensions="ga:eventLabel",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = {}
+            args["max-results"] = 100000
+            args["start-date"] = start_date
+            args["end-date"] = end_date
+            args["ids"] = "ga:" + self.profile_id
+
+            args["filters"] = 'ga:eventAction==download'
+            args["dimensions"] = "ga:eventLabel"
+            args["metrics"] = "ga:totalEvents"
+            args["alt"] = "json"
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         if not result_data:
             # We may not have data for this time period, so we need to bail
@@ -355,15 +484,25 @@
         log.info('Associating downloads of resource URLs with their respective datasets')
         process_result_data(results.get('rows'))
 
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 filters='ga:eventAction==download-cache',
-                                 metrics='ga:totalEvents',
-                                 sort='-ga:totalEvents',
-                                 dimensions="ga:eventLabel",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = dict( ids='ga:' + self.profile_id,
+                         filters='ga:eventAction==download-cache',
+                         metrics='ga:totalEvents',
+                         sort='-ga:totalEvents',
+                         dimensions="ga:eventLabel")
+            args['max-results'] = 10000
+            args['start-date'] = start_date
+            args['end-date'] = end_date
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         log.info('Associating downloads of cache resource URLs with their respective datasets')
         process_result_data(results.get('rows'), cached=False)
 
@@ -372,14 +511,25 @@
 
     def _social_stats(self, start_date, end_date, period_name, period_complete_day):
         """ Finds out which social sites people are referred from """
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviews',
-                                 sort='-ga:pageviews',
-                                 dimensions="ga:socialNetwork,ga:referralPath",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = dict( ids='ga:' + self.profile_id,
+                         metrics='ga:pageviews',
+                         sort='-ga:pageviews',
+                         dimensions="ga:socialNetwork,ga:referralPath")
+            args['max-results'] = 10000
+            args['start-date'] = start_date
+            args['end-date'] = end_date
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         data = {}
         for result in result_data:
@@ -391,14 +541,24 @@
 
     def _os_stats(self, start_date, end_date, period_name, period_complete_day):
         """ Operating system stats """
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviews',
-                                 sort='-ga:pageviews',
-                                 dimensions="ga:operatingSystem,ga:operatingSystemVersion",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = dict( ids='ga:' + self.profile_id,
+                         metrics='ga:pageviews',
+                         sort='-ga:pageviews',
+                         dimensions="ga:operatingSystem,ga:operatingSystemVersion")
+            args['max-results'] = 10000
+            args['start-date'] = start_date
+            args['end-date'] = end_date
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         data = {}
         for result in result_data:
@@ -416,14 +576,27 @@
 
     def _browser_stats(self, start_date, end_date, period_name, period_complete_day):
         """ Information about browsers and browser versions """
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviews',
-                                 sort='-ga:pageviews',
-                                 dimensions="ga:browser,ga:browserVersion",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = dict( ids='ga:' + self.profile_id,
+                         metrics='ga:pageviews',
+                         sort='-ga:pageviews',
+                         dimensions="ga:browser,ga:browserVersion")
+            args['max-results'] = 10000
+            args['start-date'] = start_date
+            args['end-date'] = end_date
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
+
         result_data = results.get('rows')
         # e.g. [u'Firefox', u'19.0', u'20']
 
@@ -465,14 +638,24 @@
     def _mobile_stats(self, start_date, end_date, period_name, period_complete_day):
         """ Info about mobile devices """
 
-        results = self.service.data().ga().get(
-                                 ids='ga:' + self.profile_id,
-                                 start_date=start_date,
-                                 metrics='ga:pageviews',
-                                 sort='-ga:pageviews',
-                                 dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo",
-                                 max_results=10000,
-                                 end_date=end_date).execute()
+        try:
+            # Because of issues with invalid responses, we make these
+            # requests ourselves rather than through the client library.
+            args = dict( ids='ga:' + self.profile_id,
+                         metrics='ga:pageviews',
+                         sort='-ga:pageviews',
+                         dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo")
+            args['max-results'] = 10000
+            args['start-date'] = start_date
+            args['end-date'] = end_date
+
+            results = self._get_json(args)
+        except Exception, e:
+            log.exception(e)
+            results = dict(rows=[])
 
         result_data = results.get('rows')
         data = {}
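
Taken together, the request pattern repeated above reduces to: build the
Core Reporting API v3 query params, GET
https://www.googleapis.com/analytics/v3/data/ga with an 'Authorization:
Bearer <token>' header, and retry once with a fresh token if that fails. A
condensed sketch of that pattern - refresh_token here is a hypothetical
callable standing in for re-running init_service:

    import json
    import requests

    GA_ENDPOINT = "https://www.googleapis.com/analytics/v3/data/ga"

    def fetch_ga_report(token, params, refresh_token=None):
        # One direct attempt, then a single retry with a refreshed token -
        # the same shape as _get_json above, minus the CKAN config plumbing.
        for attempt in range(2):
            headers = {'Authorization': 'Bearer ' + token}
            r = requests.get(GA_ENDPOINT, params=params, headers=headers)
            if r.status_code == 200:
                return json.loads(r.content)
            if attempt == 0 and refresh_token is not None:
                token = refresh_token()  # hypothetical; e.g. re-run init_service
            else:
                raise Exception("GA request failed (%s): %s" % (r.status_code, params))

Note the hyphenated parameter names ('start-date', 'end-date', 'max-results')
the v3 API expects - which is why the code assembles args as explicit dict
entries rather than Python keyword arguments.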