From: Tom Rees Date: Wed, 30 Oct 2013 01:00:28 +0000 Subject: Merge remote-tracking branch 'origin/777-responsive' X-Git-Url: https://maxious.lambdacomplex.org/git/?p=ckanext-ga-report.git&a=commitdiff&h=feb2cc9d9a19e66049d8a546ad654af7ac2921a6 --- Merge remote-tracking branch 'origin/777-responsive' --- --- a/ckanext/ga_report/command.py +++ b/ckanext/ga_report/command.py @@ -115,6 +115,7 @@ default=False, dest='skip_url_stats', help='Skip the download of URL data - just do site-wide stats') + self.token = "" def command(self): self._load_config() @@ -129,14 +130,14 @@ return try: - svc = init_service(ga_token_filepath, None) + self.token, svc = init_service(ga_token_filepath, None) except TypeError: print ('Have you correctly run the getauthtoken task and ' 'specified the correct token file in the CKAN config under ' '"googleanalytics.token.filepath"?') return - downloader = DownloadAnalytics(svc, profile_id=get_profile_id(svc), + downloader = DownloadAnalytics(svc, self.token, profile_id=get_profile_id(svc), delete_first=self.options.delete_first, skip_url_stats=self.options.skip_url_stats) --- a/ckanext/ga_report/controller.py +++ b/ckanext/ga_report/controller.py @@ -191,25 +191,11 @@ q = model.Session.query(GA_Stat).\ filter(GA_Stat.stat_name==k).\ order_by(GA_Stat.period_name) - # Run the query on all months to gather graph data - graph = {} - for stat in q: - graph[ stat.key ] = graph.get(stat.key,{ - 'name':stat.key, - 'data': [] - }) - graph[ stat.key ]['data'].append({ - 'x':_get_unix_epoch(stat.period_name), - 'y':float(stat.value) - }) - setattr(c, v+'_graph', json.dumps( _to_rickshaw(graph.values(),percentageMode=True) )) - # Buffer the tabular data if c.month: entries = [] q = q.filter(GA_Stat.period_name==c.month).\ order_by('ga_stat.value::int desc') - d = collections.defaultdict(int) for e in q.all(): d[e.key] += int(e.value) @@ -218,6 +204,23 @@ entries.append((key,val,)) entries = sorted(entries, key=operator.itemgetter(1), reverse=True) + # Run a query on all months to gather graph data + graph_query = model.Session.query(GA_Stat).\ + filter(GA_Stat.stat_name==k).\ + order_by(GA_Stat.period_name) + graph_dict = {} + for stat in graph_query: + graph_dict[ stat.key ] = graph_dict.get(stat.key,{ + 'name':stat.key, + 'raw': {} + }) + graph_dict[ stat.key ]['raw'][stat.period_name] = float(stat.value) + stats_in_table = [x[0] for x in entries] + stats_not_in_table = set(graph_dict.keys()) - set(stats_in_table) + stats = stats_in_table + sorted(list(stats_not_in_table)) + graph = [graph_dict[x] for x in stats] + setattr(c, v+'_graph', json.dumps( _to_rickshaw(graph,percentageMode=True) )) + # Get the total for each set of values and then set the value as # a percentage of the total if k == 'Social sources': @@ -246,7 +249,7 @@ writer = csv.writer(response) writer.writerow(["Publisher Title", "Publisher Name", "Views", "Visits", "Period Name"]) - top_publishers, top_publishers_graph = _get_top_publishers(None) + top_publishers = _get_top_publishers(limit=None) for publisher,view,visit in top_publishers: writer.writerow([publisher.title.encode('utf-8'), @@ -268,7 +271,7 @@ if not c.publisher: abort(404, 'A publisher with that name could not be found') - packages = self._get_packages(c.publisher) + packages = self._get_packages(publisher=c.publisher, month=c.month) response.headers['Content-Type'] = "text/csv; charset=utf-8" response.headers['Content-Disposition'] = \ str('attachment; filename=datasets_%s_%s.csv' % (c.publisher_name, month,)) @@ -297,15 +300,18 @@ if c.month: 
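Note on the controller changes above: every graph is now assembled in one intermediate form, a dict per series holding a display name plus a 'raw' mapping of period name to value, which _to_rickshaw() later expands onto the shared month axis. A minimal illustrative builder of that shape (the helper name and the (key, period, value) tuples are stand-ins, not part of the patch):

    def build_series(rows):
        # rows: iterable of (key, period_name, value) tuples, e.g. drawn from GA_Stat.
        series = {}
        for key, period_name, value in rows:
            series[key] = series.get(key, {'name': key, 'raw': {}})
            series[key]['raw'][period_name] = float(value)
        return series

    # build_series([('Google', '2013-09', 120), ('Google', '2013-10', 90)])
    # => {'Google': {'name': 'Google', 'raw': {'2013-09': 120.0, '2013-10': 90.0}}}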
c.month_desc = ''.join([m[1] for m in c.months if m[0]==c.month]) - c.top_publishers, graph_data = _get_top_publishers() - c.top_publishers_graph = json.dumps( _to_rickshaw(graph_data.values()) ) - - return render('ga_report/publisher/index.html') - - def _get_packages(self, publisher=None, count=-1): + c.top_publishers = _get_top_publishers() + graph_data = _get_top_publishers_graph() + c.top_publishers_graph = json.dumps( _to_rickshaw(graph_data) ) + + x = render('ga_report/publisher/index.html') + + return x + + def _get_packages(self, publisher=None, month='', count=-1): '''Returns the datasets in order of views''' have_download_data = True - month = c.month or 'All' + month = month or 'All' if month != 'All': have_download_data = month >= DOWNLOADS_AVAILABLE_FROM @@ -382,78 +388,71 @@ entry = q.filter(GA_Url.period_name==c.month).first() c.publisher_page_views = entry.pageviews if entry else 0 - c.top_packages = self._get_packages(c.publisher, 20) + c.top_packages = self._get_packages(publisher=c.publisher, count=20, month=c.month) # Graph query - top_package_names = [ x[0].name for x in c.top_packages ] + top_packages_all_time = self._get_packages(publisher=c.publisher, count=20, month='All') + top_package_names = [ x[0].name for x in top_packages_all_time ] graph_query = model.Session.query(GA_Url,model.Package)\ .filter(model.Package.name==GA_Url.package_id)\ .filter(GA_Url.url.like('/dataset/%'))\ .filter(GA_Url.package_id.in_(top_package_names)) - graph_data = {} + all_series = {} for entry,package in graph_query: if not package: continue if entry.period_name=='All': continue - graph_data[package.id] = graph_data.get(package.id,{ + all_series[package.name] = all_series.get(package.name,{ 'name':package.title, - 'data':[] + 'raw': {} }) - graph_data[package.id]['data'].append({ - 'x':_get_unix_epoch(entry.period_name), - 'y':int(entry.pageviews), - }) - - c.graph_data = json.dumps( _to_rickshaw(graph_data.values()) ) + all_series[package.name]['raw'][entry.period_name] = int(entry.pageviews) + graph = [ all_series[series_name] for series_name in top_package_names ] + c.graph_data = json.dumps( _to_rickshaw(graph) ) return render('ga_report/publisher/read.html') def _to_rickshaw(data, percentageMode=False): if data==[]: return data - # Create a consistent x-axis - num_points = [ len(package['data']) for package in data ] - ideal_index = num_points.index( max(num_points) ) - x_axis = [ point['x'] for point in data[ideal_index]['data'] ] - for package in data: - xs = [ point['x'] for point in package['data'] ] - assert set(xs).issubset( set(x_axis) ), (xs, x_axis) - # Zero pad any missing values - for x in set(x_axis).difference(set(xs)): - package['data'].append( {'x':x, 'y':0} ) - assert len(package['data'])==len(x_axis), (len(package['data']),len(x_axis),package['data'],x_axis,set(x_axis).difference(set(xs))) - if percentageMode: - # Transform data into percentage stacks - totals = {} - for x in x_axis: - for package in data: - for point in package['data']: - totals[ point['x'] ] = totals.get(point['x'],0) + point['y'] - # Roll insignificant series into a catch-all - THRESHOLD = 0.01 - significant_series = [] - for package in data: - for point in package['data']: - fraction = float(point['y']) / totals[point['x']] - if fraction>THRESHOLD and not (package in significant_series): - significant_series.append(package) - temp = {} - for package in data: - if package in significant_series: continue - for point in package['data']: - temp[point['x']] = temp.get(point['x'],0) + point['y'] 
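The replacement _to_rickshaw() below drops this zero-padding and percentage-stacking code in favour of a fixed x-axis taken from c.months: each series' 'raw' mapping is expanded onto that axis, missing months become zero, and the latest (incomplete) month is discarded. The expansion step on its own looks roughly like this (a sketch only: months stands in for c.months and period names are assumed to be 'YYYY-MM' strings):

    import time, datetime

    def expand_series(series_list, months):
        # months: 'YYYY-MM' strings in ascending order, latest month already removed.
        for series in series_list:
            series['data'] = []
            for month in months:
                epoch = int(time.mktime(datetime.datetime.strptime(month, '%Y-%m').timetuple()))
                series['data'].append({'x': epoch, 'y': series['raw'].get(month, 0)})
        return series_list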
- catch_all = { 'name':'Other','data': [ {'x':x,'y':y} for x,y in temp.items() ] } - # Roll insignificant series into one - data = significant_series - data.append(catch_all) - # Turn each point into a percentage - for package in data: - for point in package['data']: - point['y'] = (point['y']*100) / totals[point['x']] - # Sort the points - for package in data: - package['data'] = sorted( package['data'], key=lambda x:x['x'] ) - # Strip the latest month's incomplete analytics - package['data'] = package['data'][:-1] + # x-axis is every month in c.months. Note that data might not exist + # for entire history, eg. for recently-added datasets + x_axis = [x[0] for x in c.months] + x_axis.reverse() # Ascending order + x_axis = x_axis[:-1] # Remove latest month + totals = {} + for series in data: + series['data'] = [] + for x_string in x_axis: + x = _get_unix_epoch( x_string ) + y = series['raw'].get(x_string,0) + series['data'].append({'x':x,'y':y}) + totals[x] = totals.get(x,0)+y + if not percentageMode: + return data + # Turn all data into percentages + # Roll insignificant series into a catch-all + THRESHOLD = 1 + raw_data = data + data = [] + for series in raw_data: + for point in series['data']: + percentage = (100*float(point['y'])) / totals[point['x']] + if not (series in data) and percentage>THRESHOLD: + data.append(series) + point['y'] = percentage + others = [ x for x in raw_data if not (x in data) ] + if len(others): + data_other = [] + for i in range(len(x_axis)): + x = _get_unix_epoch(x_axis[i]) + y = 0 + for series in others: + y += series['data'][i]['y'] + data_other.append({'x':x,'y':y}) + data.append({ + 'name':'Other', + 'data': data_other + }) return data @@ -478,35 +477,51 @@ top_publishers = [] res = connection.execute(q, month) - department_ids = [] for row in res: g = model.Group.get(row[0]) if g: - department_ids.append(row[0]) top_publishers.append((g, row[1], row[2])) - - graph = {} - if limit is not None: - # Query for a history graph of these publishers - q = model.Session.query( - GA_Url.department_id, - GA_Url.period_name, - func.sum(cast(GA_Url.pageviews,sqlalchemy.types.INT)))\ - .filter( GA_Url.department_id.in_(department_ids) )\ - .filter( GA_Url.period_name!='All' )\ - .filter( GA_Url.url.like('/dataset/%') )\ - .filter( GA_Url.package_id!='' )\ - .group_by( GA_Url.department_id, GA_Url.period_name ) - for dept_id,period_name,views in q: - graph[dept_id] = graph.get( dept_id, { - 'name' : model.Group.get(dept_id).title, - 'data' : [] - }) - graph[dept_id]['data'].append({ - 'x': _get_unix_epoch(period_name), - 'y': views - }) - return top_publishers, graph + return top_publishers + + +def _get_top_publishers_graph(limit=20): + ''' + Returns a list of the top 20 publishers by dataset visits. 
+ (The number to show can be varied with 'limit') + ''' + connection = model.Session.connection() + q = """ + select department_id, sum(pageviews::int) views + from ga_url + where department_id <> '' + and package_id <> '' + and url like '/dataset/%%' + and period_name='All' + group by department_id order by views desc + """ + if limit: + q = q + " limit %s;" % (limit) + + res = connection.execute(q) + department_ids = [ row[0] for row in res ] + + # Query for a history graph of these department ids + q = model.Session.query( + GA_Url.department_id, + GA_Url.period_name, + func.sum(cast(GA_Url.pageviews,sqlalchemy.types.INT)))\ + .filter( GA_Url.department_id.in_(department_ids) )\ + .filter( GA_Url.url.like('/dataset/%') )\ + .filter( GA_Url.package_id!='' )\ + .group_by( GA_Url.department_id, GA_Url.period_name ) + graph_dict = {} + for dept_id,period_name,views in q: + graph_dict[dept_id] = graph_dict.get( dept_id, { + 'name' : model.Group.get(dept_id).title, + 'raw' : {} + }) + graph_dict[dept_id]['raw'][period_name] = views + return [ graph_dict[id] for id in department_ids ] def _get_publishers(): @@ -516,7 +531,7 @@ ''' publishers = [] for pub in model.Session.query(model.Group).\ - filter(model.Group.type=='publisher').\ + filter(model.Group.type=='organization').\ filter(model.Group.state=='active').\ order_by(model.Group.name): publishers.append((pub.name, pub.title)) --- a/ckanext/ga_report/download_analytics.py +++ b/ckanext/ga_report/download_analytics.py @@ -1,7 +1,10 @@ import os import logging import datetime +import httplib import collections +import requests +import json from pylons import config from ga_model import _normalize_url import ga_model @@ -18,13 +21,14 @@ class DownloadAnalytics(object): '''Downloads and stores analytics info''' - def __init__(self, service=None, profile_id=None, delete_first=False, + def __init__(self, service=None, token=None, profile_id=None, delete_first=False, skip_url_stats=False): self.period = config['ga-report.period'] self.service = service self.profile_id = profile_id self.delete_first = delete_first self.skip_url_stats = skip_url_stats + self.token = token def specific_month(self, date): import calendar @@ -149,17 +153,27 @@ metrics = 'ga:entrances' sort = '-ga:entrances' - # Supported query params at - # https://developers.google.com/analytics/devguides/reporting/core/v3/reference - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - filters=query, - start_date=start_date, - metrics=metrics, - sort=sort, - dimensions="ga:landingPagePath,ga:socialNetwork", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. 
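From here on the patch swaps service.data().ga().get(...).execute() for a plain HTTP GET against the Core Reporting API with a Bearer token, wrapped up in the _get_json() helper added further down. A standalone sketch of that request (ga_query is an illustrative name; token and profile_id are assumed to come from the caller):

    import json
    import requests

    def ga_query(token, profile_id, start_date, end_date, metrics, **extra):
        params = {'ids': 'ga:' + profile_id,
                  'metrics': metrics,
                  'max-results': 10000,
                  'alt': 'json'}
        params['start-date'] = start_date
        params['end-date'] = end_date
        params.update(extra)  # e.g. dimensions=..., filters=..., sort=...
        headers = {'authorization': 'Bearer ' + token}
        r = requests.get('https://www.googleapis.com/analytics/v3/data/ga',
                         params=params, headers=headers)
        if r.status_code != 200:
            raise Exception('GA request with params %s failed (HTTP %s)' % (params, r.status_code))
        return json.loads(r.content)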
+ headers = {'authorization': 'Bearer ' + self.token} + + args = dict(ids='ga:' + self.profile_id, + filters=query, + metrics=metrics, + sort=sort, + dimensions="ga:landingPagePath,ga:socialNetwork", + max_results=10000) + + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + + data = collections.defaultdict(list) rows = results.get('rows',[]) for row in rows: @@ -178,15 +192,23 @@ # Supported query params at # https://developers.google.com/analytics/devguides/reporting/core/v3/reference - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - filters=query, - start_date=start_date, - metrics=metrics, - sort=sort, - dimensions="ga:pagePath", - max_results=10000, - end_date=end_date).execute() + try: + args = {} + args["sort"] = "-ga:pageviews" + args["max-results"] = 100000 + args["dimensions"] = "ga:pagePath" + args["start-date"] = start_date + args["end-date"] = end_date + args["metrics"] = metrics + args["ids"] = "ga:" + self.profile_id + args["filters"] = query + args["alt"] = "json" + + results = self._get_json(args) + + except Exception, e: + log.exception(e) + return dict(url=[]) packages = [] log.info("There are %d results" % results['totalResults']) @@ -226,25 +248,78 @@ data[key] = data.get(key,0) + result[1] return data + def _get_json(self, params, prev_fail=False): + ga_token_filepath = os.path.expanduser(config.get('googleanalytics.token.filepath', '')) + if not ga_token_filepath: + print 'ERROR: In the CKAN config you need to specify the filepath of the ' \ + 'Google Analytics token file under key: googleanalytics.token.filepath' + return + + log.info("Trying to refresh our OAuth token") + try: + from ga_auth import init_service + self.token, svc = init_service(ga_token_filepath, None) + log.info("OAuth token refreshed") + except Exception, auth_exception: + log.error("Oauth refresh failed") + log.exception(auth_exception) + return + + try: + headers = {'authorization': 'Bearer ' + self.token} + r = requests.get("https://www.googleapis.com/analytics/v3/data/ga", params=params, headers=headers) + if r.status_code != 200: + log.info("STATUS: %s" % (r.status_code,)) + log.info("CONTENT: %s" % (r.content,)) + raise Exception("Request with params: %s failed" % params) + + return json.loads(r.content) + except Exception, e: + log.exception(e) + + return dict(url=[]) + def _totals_stats(self, start_date, end_date, period_name, period_complete_day): """ Fetches distinct totals, total pageviews etc """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - max_results=10000, - end_date=end_date).execute() + try: + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["metrics"] = "ga:pageviews" + args["sort"] = "-ga:pageviews" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') ga_model.update_sitewide_stats(period_name, "Totals", {'Total page views': result_data[0][0]}, period_complete_day) - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits', - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of 
invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["metrics"] = "ga:pageviewsPerVisit,ga:avgTimeOnSite,ga:percentNewVisits,ga:visits" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = { 'Pages per visit': result_data[0][0], @@ -257,14 +332,28 @@ # Bounces from / or another configurable page. path = '/%s%s' % (config.get('googleanalytics.account'), config.get('ga-report.bounce_url', '/')) - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - filters='ga:pagePath==%s' % (path,), - start_date=start_date, - metrics='ga:visitBounceRate', - dimensions='ga:pagePath', - max_results=10000, - end_date=end_date).execute() + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["filters"] = 'ga:pagePath==%s' % (path,) + args["dimensions"] = 'ga:pagePath' + args["metrics"] = "ga:visitBounceRate" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') if not result_data or len(result_data) != 1: log.error('Could not pinpoint the bounces for path: %s. Got results: %r', @@ -280,14 +369,28 @@ def _locale_stats(self, start_date, end_date, period_name, period_complete_day): """ Fetches stats about language and country """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:language,ga:country", - max_results=10000, - end_date=end_date).execute() + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["dimensions"] = "ga:language,ga:country" + args["metrics"] = "ga:pageviews" + args["sort"] = "-ga:pageviews" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} for result in result_data: @@ -308,15 +411,27 @@ data = {} - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - filters='ga:eventAction==download', - metrics='ga:totalEvents', - sort='-ga:totalEvents', - dimensions="ga:eventLabel", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. 
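Each of these blocks builds its Bearer header from self.token, and _get_json() (added a little earlier in this file) refreshes that token by re-running ga_auth.init_service(), which after this patch returns an (access_token, service) pair. That refresh step in isolation, assuming the OAuth credentials file configured under googleanalytics.token.filepath:

    import os
    from pylons import config
    from ga_auth import init_service

    def refresh_access_token():
        # Re-authorise against the stored OAuth credentials and return a fresh
        # access token together with the rebuilt Analytics v3 service object.
        token_file = os.path.expanduser(config.get('googleanalytics.token.filepath', ''))
        if not token_file:
            raise Exception('googleanalytics.token.filepath is not set in the CKAN config')
        return init_service(token_file, None)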
+ headers = {'authorization': 'Bearer ' + self.token} + + args = {} + args["max-results"] = 100000 + args["start-date"] = start_date + args["end-date"] = end_date + args["ids"] = "ga:" + self.profile_id + + args["filters"] = 'ga:eventAction==download' + args["dimensions"] = "ga:eventLabel" + args["metrics"] = "ga:totalEvents" + args["alt"] = "json" + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') if not result_data: # We may not have data for this time period, so we need to bail @@ -355,15 +470,25 @@ log.info('Associating downloads of resource URLs with their respective datasets') process_result_data(results.get('rows')) - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - filters='ga:eventAction==download-cache', - metrics='ga:totalEvents', - sort='-ga:totalEvents', - dimensions="ga:eventLabel", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + filters='ga:eventAction==download-cache', + metrics='ga:totalEvents', + sort='-ga:totalEvents', + dimensions="ga:eventLabel", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + log.info('Associating downloads of cache resource URLs with their respective datasets') process_result_data(results.get('rows'), cached=False) @@ -372,14 +497,25 @@ def _social_stats(self, start_date, end_date, period_name, period_complete_day): """ Finds out which social sites people are referred from """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:socialNetwork,ga:referralPath", - max_results=10000, - end_date=end_date).execute() + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:socialNetwork,ga:referralPath", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} for result in result_data: @@ -391,14 +527,24 @@ def _os_stats(self, start_date, end_date, period_name, period_complete_day): """ Operating system stats """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:operatingSystem,ga:operatingSystemVersion", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. 
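All of these reports come back in the same JSON shape: a 'rows' list whose entries are lists of strings, dimensions first and metrics last, which is why every block ends with a small fold over results.get('rows'). An illustrative version of that fold for a one-dimension, one-metric query (the helper name is not part of the patch):

    def sum_rows(results):
        # Each row looks like [u'/dataset/foo', u'42'] for, say, ga:pagePath + ga:pageviews.
        totals = {}
        for row in results.get('rows', []):
            key, value = row[0], int(row[1])
            totals[key] = totals.get(key, 0) + value
        return totals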
+ headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:operatingSystem,ga:operatingSystemVersion", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} for result in result_data: @@ -416,14 +562,27 @@ def _browser_stats(self, start_date, end_date, period_name, period_complete_day): """ Information about browsers and browser versions """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:browser,ga:browserVersion", - max_results=10000, - end_date=end_date).execute() + + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:browser,ga:browserVersion", + max_results=10000) + + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + + result_data = results.get('rows') # e.g. [u'Firefox', u'19.0', u'20'] @@ -465,14 +624,24 @@ def _mobile_stats(self, start_date, end_date, period_name, period_complete_day): """ Info about mobile devices """ - results = self.service.data().ga().get( - ids='ga:' + self.profile_id, - start_date=start_date, - metrics='ga:pageviews', - sort='-ga:pageviews', - dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo", - max_results=10000, - end_date=end_date).execute() + try: + # Because of issues of invalid responses, we are going to make these requests + # ourselves. + headers = {'authorization': 'Bearer ' + self.token} + + args = dict( ids='ga:' + self.profile_id, + metrics='ga:pageviews', + sort='-ga:pageviews', + dimensions="ga:mobileDeviceBranding, ga:mobileDeviceInfo", + max_results=10000) + args['start-date'] = start_date + args['end-date'] = end_date + + results = self._get_json(args) + except Exception, e: + log.exception(e) + results = dict(url=[]) + result_data = results.get('rows') data = {} --- a/ckanext/ga_report/ga_auth.py +++ b/ckanext/ga_report/ga_auth.py @@ -36,7 +36,7 @@ credentials = _prepare_credentials(token_file, credentials_file) http = credentials.authorize(http) # authorize the http object - return build('analytics', 'v3', http=http) + return credentials.access_token, build('analytics', 'v3', http=http) def get_profile_id(service): --- a/ckanext/ga_report/ga_model.py +++ b/ckanext/ga_report/ga_model.py @@ -125,7 +125,7 @@ dataset_ref = dataset_match.groups()[0] dataset = model.Package.get(dataset_ref) if dataset: - publisher_groups = dataset.get_groups('publisher') + publisher_groups = dataset.get_groups('organization') if publisher_groups: return dataset_ref,publisher_groups[0].name return dataset_ref, None @@ -214,7 +214,7 @@ 'pageviews': views[key], 'visits': visits[key], 'department_id': publisher, - 'package_id': publisher + 'package_id': package } model.Session.add(GA_Url(**values)) model.Session.commit() @@ -227,7 +227,7 @@ stores them in GA_Url under the period and recalculates the totals for the 'All' period. 
''' - progress_total = len(progress_data) + progress_total = len(url_data) progress_count = 0 for url, views, visits in url_data: progress_count += 1 @@ -323,11 +323,11 @@ """ toplevel = get_top_level() publishers = model.Session.query(model.Group).\ - filter(model.Group.type=='publisher').\ + filter(model.Group.type=='organization').\ filter(model.Group.state=='active').all() for publisher in publishers: views, visits, subpub = update_publisher(period_name, publisher, publisher.name) - parent, parents = '', publisher.get_groups('publisher') + parent, parents = '', publisher.get_groups('organization') if parents: parent = parents[0].name item = model.Session.query(GA_Publisher).\ @@ -377,15 +377,12 @@ model.Member.table_name == 'group' and \ model.Member.state == 'active').\ filter(model.Member.id==None).\ - filter(model.Group.type=='publisher').\ + filter(model.Group.type=='organization').\ order_by(model.Group.name).all() def get_children(publisher): - '''Finds child publishers for the given publisher (object). (Not recursive)''' - from ckan.model.group import HIERARCHY_CTE - return model.Session.query(model.Group).\ - from_statement(HIERARCHY_CTE).params(id=publisher.id, type='publisher').\ - all() + '''Finds child publishers for the given publisher (object). (Not recursive i.e. returns one level)''' + return publisher.get_children_groups(type='organization') def go_down_tree(publisher): '''Provided with a publisher object, it walks down the hierarchy and yields each publisher, --- a/ckanext/ga_report/helpers.py +++ b/ckanext/ga_report/helpers.py @@ -71,7 +71,7 @@ def single_popular_dataset_html(top=20): dataset_dict = single_popular_dataset(top) groups = package.get('groups', []) - publishers = [ g for g in groups if g.get('type') == 'publisher' ] + publishers = [ g for g in groups if g.get('type') == 'organization' ] publisher = publishers[0] if publishers else {'name':'', 'title': ''} context = { 'dataset': dataset_dict, @@ -80,7 +80,7 @@ return base.render_snippet('ga_report/ga_popular_single.html', **context) -def most_popular_datasets(publisher, count=20): +def most_popular_datasets(publisher, count=20, preview_image=None): if not publisher: _log.error("No valid publisher passed to 'most_popular_datasets'") @@ -92,7 +92,8 @@ 'dataset_count': len(results), 'datasets': results, - 'publisher': publisher + 'publisher': publisher, + 'preview_image': preview_image } return base.render_snippet('ga_report/publisher/popular.html', **ctx) @@ -106,12 +107,18 @@ for entry in entries: if len(datasets) < count: p = model.Package.get(entry.url[len('/dataset/'):]) + if not p: _log.warning("Could not find Package for {url}".format(url=entry.url)) continue + if not p.state == 'active': + _log.warning("Package {0} is not active, it is {1}".format(p.name, p.state)) + continue + if not p in datasets: datasets[p] = {'views':0, 'visits': 0} + datasets[p]['views'] = datasets[p]['views'] + int(entry.pageviews) datasets[p]['visits'] = datasets[p]['visits'] + int(entry.visits) @@ -121,3 +128,17 @@ return sorted(results, key=operator.itemgetter(1), reverse=True) +def month_option_title(month_iso, months, day): + month_isos = [ iso_code for (iso_code,name) in months ] + try: + index = month_isos.index(month_iso) + except ValueError: + _log.error('Month "%s" not found in list of months.' 
% month_iso) + return month_iso + month_name = months[index][1] + if index==0: + return month_name + (' (up to %s)'%day) + return month_name + + + --- a/ckanext/ga_report/plugin.py +++ b/ckanext/ga_report/plugin.py @@ -5,7 +5,8 @@ from ckanext.ga_report.helpers import (most_popular_datasets, popular_datasets, - single_popular_dataset) + single_popular_dataset, + month_option_title) log = logging.getLogger('ckanext.ga-report') @@ -27,7 +28,8 @@ 'ga_report_installed': lambda: True, 'popular_datasets': popular_datasets, 'most_popular_datasets': most_popular_datasets, - 'single_popular_dataset': single_popular_dataset + 'single_popular_dataset': single_popular_dataset, + 'month_option_title': month_option_title } def after_map(self, map): --- a/ckanext/ga_report/public/css/ga_report.css +++ b/ckanext/ga_report/public/css/ga_report.css @@ -2,6 +2,11 @@ padding: 1px 0 0 0; width: 108px; text-align: center; + /* Hack to hide the momentary flash of text + * before sparklines are fully rendered */ + font-size: 1px; + color: transparent; + overflow: hidden; } .rickshaw_chart_container { position: relative; @@ -16,16 +21,9 @@ bottom: 0; } .rickshaw_legend { - position: absolute; - right: 0; - top: 0; - margin-left: 15px; - padding: 0 5px; background: transparent; - max-width: 150px; - overflow: hidden; - background: rgba(0,0,0,0.05); - border-radius:5px; + width: 100%; + padding-top: 4px; } .rickshaw_y_axis { position: absolute; @@ -38,4 +36,34 @@ color: #000000 !important; font-weight: normal !important; } +.rickshaw_legend .instructions { + color: #000; + margin-bottom: 6px; +} +.rickshaw_legend .line .action { + display: none; +} +.rickshaw_legend .line .swatch { + display: block; + float: left; +} +.rickshaw_legend .line .label { + display: block; + white-space: normal; + float: left; + width: 200px; +} +.rickshaw_legend .line .label:hover { + text-decoration: underline; +} + +.ga-reports-table .td-numeric { + text-align: center; +} +.ga-reports-heading { + padding-right: 10px; + margin-top: 4px; + float: left; +} + --- a/ckanext/ga_report/public/scripts/ckanext_ga_reports.js +++ b/ckanext/ga_report/public/scripts/ckanext_ga_reports.js @@ -1,12 +1,34 @@ - var CKAN = CKAN || {}; CKAN.GA_Reports = {}; CKAN.GA_Reports.render_rickshaw = function( css_name, data, mode, colorscheme ) { + var graphLegends = $('#graph-legend-container'); + + function renderError(alertClass,alertText,legendText) { + $("#chart_"+css_name) + .html( '
<div class="alert '+alertClass+'">'+alertText+'</div>
') + .closest('.rickshaw_chart_container').css('height',50); + var myLegend = $('<div id="legend_'+css_name+'"/>
') + .html(legendText) + .appendTo(graphLegends); + } + + if (!Modernizr.svg) { + renderError('','Your browser does not support vector graphics. No graphs can be rendered.','(Graph cannot be rendered)'); + return; + } + if (data.length==0) { + renderError('alert-info','There is not enough data to render a graph.','(No graph available)'); + return + } + var myLegend = $('
').appendTo(graphLegends); + var palette = new Rickshaw.Color.Palette( { scheme: colorscheme } ); $.each(data, function(i, object) { object['color'] = palette.color(); }); + // Rickshaw renders the legend in reverse order... + data.reverse(); var graphElement = document.querySelector("#chart_"+css_name); @@ -16,27 +38,95 @@ series: data , height: 328 }); - var x_axis = new Rickshaw.Graph.Axis.Time( { graph: graph } ); + var x_axis = new Rickshaw.Graph.Axis.Time( { + graph: graph + } ); var y_axis = new Rickshaw.Graph.Axis.Y( { graph: graph, orientation: 'left', tickFormat: Rickshaw.Fixtures.Number.formatKMBT, - element: document.getElementById('y_axis_'+css_name), + element: document.getElementById('y_axis_'+css_name) } ); var legend = new Rickshaw.Graph.Legend( { element: document.querySelector('#legend_'+css_name), graph: graph } ); - var hoverDetail = new Rickshaw.Graph.HoverDetail( { + var shelving = new Rickshaw.Graph.Behavior.Series.Toggle( { graph: graph, - formatter: function(series, x, y) { - var date = '' + new Date(x * 1000).toUTCString() + ''; - var swatch = ''; - var content = swatch + series.name + ": " + parseInt(y) + '
' + date; - return content; - } + legend: legend } ); + myLegend.prepend('
<div class="instructions">Click on a series below to isolate its graph:</div>
'); graph.render(); }; +CKAN.GA_Reports.bind_sparklines = function() { + /* + * Bind to the 'totals' tab being on screen, when the + * Sparkline graphs should be drawn. + * Note that they cannot be drawn sooner. + */ + var created = false; + $('a[href="#totals"]').on( + 'shown', + function() { + if (!created) { + var sparkOptions = { + enableTagOptions: true, + type: 'line', + width: 100, + height: 26, + chartRangeMin: 0, + spotColor: '', + maxSpotColor: '', + minSpotColor: '', + highlightSpotColor: '#000000', + lineColor: '#3F8E6D', + fillColor: '#B7E66B' + }; + $('.sparkline').sparkline('html',sparkOptions); + created = true; + } + $.sparkline_display_visible(); + } + ); +}; +CKAN.GA_Reports.bind_sidebar = function() { + /* + * Bind to changes in the tab behaviour: + * Show the correct rickshaw graph in the sidebar. + * Not to be called before all graphs load. + */ + $('a[data-toggle="hashtab"]').on( + 'shown', + function(e) { + var href = $(e.target).attr('href'); + var pane = $(href); + if (!pane.length) { console.err('bad href',href); return; } + var legend_name = "none"; + var graph = pane.find('.rickshaw_chart'); + if (graph.length) { + legend_name = graph.attr('id').replace('chart_',''); + } + legend_name = '#legend_'+legend_name; + $('#graph-legend-container > *').hide(); + $('#graph-legend-container .instructions').show(); + $(legend_name).show(); + } + ); + /* The first tab might already have been shown */ + $('li.active > a[data-toggle="hashtab"]').trigger('shown'); +}; + +CKAN.GA_Reports.bind_month_selector = function() { + var handler = function(e) { + var target = $(e.delegateTarget); + var form = target.closest('form'); + var url = form.attr('action')+'?month='+target.val()+window.location.hash; + window.location = url; + }; + var selectors = $('select[name="month"]'); + assert(selectors.length>0); + selectors.bind('change', handler); +}; + --- /dev/null +++ b/ckanext/ga_report/public/scripts/modernizr-2.6.2.custom.js @@ -1,1 +1,815 @@ - +/* Modernizr 2.6.2 (Custom Build) | MIT & BSD + * Build: http://modernizr.com/download/#-fontface-backgroundsize-borderimage-borderradius-boxshadow-flexbox-hsla-multiplebgs-opacity-rgba-textshadow-cssanimations-csscolumns-generatedcontent-cssgradients-cssreflections-csstransforms-csstransforms3d-csstransitions-applicationcache-canvas-canvastext-draganddrop-hashchange-history-audio-video-indexeddb-input-inputtypes-localstorage-postmessage-sessionstorage-websockets-websqldatabase-webworkers-geolocation-inlinesvg-smil-svg-svgclippaths-touch-webgl-shiv-cssclasses-addtest-prefixed-teststyles-testprop-testallprops-hasevent-prefixes-domprefixes-load + */ +; + + + +window.Modernizr = (function( window, document, undefined ) { + + var version = '2.6.2', + + Modernizr = {}, + + enableClasses = true, + + docElement = document.documentElement, + + mod = 'modernizr', + modElem = document.createElement(mod), + mStyle = modElem.style, + + inputElem = document.createElement('input') , + + smile = ':)', + + toString = {}.toString, + + prefixes = ' -webkit- -moz- -o- -ms- '.split(' '), + + + + omPrefixes = 'Webkit Moz O ms', + + cssomPrefixes = omPrefixes.split(' '), + + domPrefixes = omPrefixes.toLowerCase().split(' '), + + ns = {'svg': 'http://www.w3.org/2000/svg'}, + + tests = {}, + inputs = {}, + attrs = {}, + + classes = [], + + slice = classes.slice, + + featureName, + + + injectElementWithStyles = function( rule, callback, nodes, testnames ) { + + var style, ret, node, docOverflow, + div = document.createElement('div'), + body = document.body, + 
fakeBody = body || document.createElement('body'); + + if ( parseInt(nodes, 10) ) { + while ( nodes-- ) { + node = document.createElement('div'); + node.id = testnames ? testnames[nodes] : mod + (nodes + 1); + div.appendChild(node); + } + } + + style = ['­',''].join(''); + div.id = mod; + (body ? div : fakeBody).innerHTML += style; + fakeBody.appendChild(div); + if ( !body ) { + fakeBody.style.background = ''; + fakeBody.style.overflow = 'hidden'; + docOverflow = docElement.style.overflow; + docElement.style.overflow = 'hidden'; + docElement.appendChild(fakeBody); + } + + ret = callback(div, rule); + if ( !body ) { + fakeBody.parentNode.removeChild(fakeBody); + docElement.style.overflow = docOverflow; + } else { + div.parentNode.removeChild(div); + } + + return !!ret; + + }, + + + + isEventSupported = (function() { + + var TAGNAMES = { + 'select': 'input', 'change': 'input', + 'submit': 'form', 'reset': 'form', + 'error': 'img', 'load': 'img', 'abort': 'img' + }; + + function isEventSupported( eventName, element ) { + + element = element || document.createElement(TAGNAMES[eventName] || 'div'); + eventName = 'on' + eventName; + + var isSupported = eventName in element; + + if ( !isSupported ) { + if ( !element.setAttribute ) { + element = document.createElement('div'); + } + if ( element.setAttribute && element.removeAttribute ) { + element.setAttribute(eventName, ''); + isSupported = is(element[eventName], 'function'); + + if ( !is(element[eventName], 'undefined') ) { + element[eventName] = undefined; + } + element.removeAttribute(eventName); + } + } + + element = null; + return isSupported; + } + return isEventSupported; + })(), + + + _hasOwnProperty = ({}).hasOwnProperty, hasOwnProp; + + if ( !is(_hasOwnProperty, 'undefined') && !is(_hasOwnProperty.call, 'undefined') ) { + hasOwnProp = function (object, property) { + return _hasOwnProperty.call(object, property); + }; + } + else { + hasOwnProp = function (object, property) { + return ((property in object) && is(object.constructor.prototype[property], 'undefined')); + }; + } + + + if (!Function.prototype.bind) { + Function.prototype.bind = function bind(that) { + + var target = this; + + if (typeof target != "function") { + throw new TypeError(); + } + + var args = slice.call(arguments, 1), + bound = function () { + + if (this instanceof bound) { + + var F = function(){}; + F.prototype = target.prototype; + var self = new F(); + + var result = target.apply( + self, + args.concat(slice.call(arguments)) + ); + if (Object(result) === result) { + return result; + } + return self; + + } else { + + return target.apply( + that, + args.concat(slice.call(arguments)) + ); + + } + + }; + + return bound; + }; + } + + function setCss( str ) { + mStyle.cssText = str; + } + + function setCssAll( str1, str2 ) { + return setCss(prefixes.join(str1 + ';') + ( str2 || '' )); + } + + function is( obj, type ) { + return typeof obj === type; + } + + function contains( str, substr ) { + return !!~('' + str).indexOf(substr); + } + + function testProps( props, prefixed ) { + for ( var i in props ) { + var prop = props[i]; + if ( !contains(prop, "-") && mStyle[prop] !== undefined ) { + return prefixed == 'pfx' ? 
prop : true; + } + } + return false; + } + + function testDOMProps( props, obj, elem ) { + for ( var i in props ) { + var item = obj[props[i]]; + if ( item !== undefined) { + + if (elem === false) return props[i]; + + if (is(item, 'function')){ + return item.bind(elem || obj); + } + + return item; + } + } + return false; + } + + function testPropsAll( prop, prefixed, elem ) { + + var ucProp = prop.charAt(0).toUpperCase() + prop.slice(1), + props = (prop + ' ' + cssomPrefixes.join(ucProp + ' ') + ucProp).split(' '); + + if(is(prefixed, "string") || is(prefixed, "undefined")) { + return testProps(props, prefixed); + + } else { + props = (prop + ' ' + (domPrefixes).join(ucProp + ' ') + ucProp).split(' '); + return testDOMProps(props, prefixed, elem); + } + } tests['flexbox'] = function() { + return testPropsAll('flexWrap'); + }; tests['canvas'] = function() { + var elem = document.createElement('canvas'); + return !!(elem.getContext && elem.getContext('2d')); + }; + + tests['canvastext'] = function() { + return !!(Modernizr['canvas'] && is(document.createElement('canvas').getContext('2d').fillText, 'function')); + }; + + + + tests['webgl'] = function() { + return !!window.WebGLRenderingContext; + }; + + + tests['touch'] = function() { + var bool; + + if(('ontouchstart' in window) || window.DocumentTouch && document instanceof DocumentTouch) { + bool = true; + } else { + injectElementWithStyles(['@media (',prefixes.join('touch-enabled),('),mod,')','{#modernizr{top:9px;position:absolute}}'].join(''), function( node ) { + bool = node.offsetTop === 9; + }); + } + + return bool; + }; + + + + tests['geolocation'] = function() { + return 'geolocation' in navigator; + }; + + + tests['postmessage'] = function() { + return !!window.postMessage; + }; + + + tests['websqldatabase'] = function() { + return !!window.openDatabase; + }; + + tests['indexedDB'] = function() { + return !!testPropsAll("indexedDB", window); + }; + + tests['hashchange'] = function() { + return isEventSupported('hashchange', window) && (document.documentMode === undefined || document.documentMode > 7); + }; + + tests['history'] = function() { + return !!(window.history && history.pushState); + }; + + tests['draganddrop'] = function() { + var div = document.createElement('div'); + return ('draggable' in div) || ('ondragstart' in div && 'ondrop' in div); + }; + + tests['websockets'] = function() { + return 'WebSocket' in window || 'MozWebSocket' in window; + }; + + + tests['rgba'] = function() { + setCss('background-color:rgba(150,255,150,.5)'); + + return contains(mStyle.backgroundColor, 'rgba'); + }; + + tests['hsla'] = function() { + setCss('background-color:hsla(120,40%,100%,.5)'); + + return contains(mStyle.backgroundColor, 'rgba') || contains(mStyle.backgroundColor, 'hsla'); + }; + + tests['multiplebgs'] = function() { + setCss('background:url(https://),url(https://),red url(https://)'); + + return (/(url\s*\(.*?){3}/).test(mStyle.background); + }; tests['backgroundsize'] = function() { + return testPropsAll('backgroundSize'); + }; + + tests['borderimage'] = function() { + return testPropsAll('borderImage'); + }; + + + + tests['borderradius'] = function() { + return testPropsAll('borderRadius'); + }; + + tests['boxshadow'] = function() { + return testPropsAll('boxShadow'); + }; + + tests['textshadow'] = function() { + return document.createElement('div').style.textShadow === ''; + }; + + + tests['opacity'] = function() { + setCssAll('opacity:.55'); + + return (/^0.55$/).test(mStyle.opacity); + }; + + + tests['cssanimations'] = 
function() { + return testPropsAll('animationName'); + }; + + + tests['csscolumns'] = function() { + return testPropsAll('columnCount'); + }; + + + tests['cssgradients'] = function() { + var str1 = 'background-image:', + str2 = 'gradient(linear,left top,right bottom,from(#9f9),to(white));', + str3 = 'linear-gradient(left top,#9f9, white);'; + + setCss( + (str1 + '-webkit- '.split(' ').join(str2 + str1) + + prefixes.join(str3 + str1)).slice(0, -str1.length) + ); + + return contains(mStyle.backgroundImage, 'gradient'); + }; + + + tests['cssreflections'] = function() { + return testPropsAll('boxReflect'); + }; + + + tests['csstransforms'] = function() { + return !!testPropsAll('transform'); + }; + + + tests['csstransforms3d'] = function() { + + var ret = !!testPropsAll('perspective'); + + if ( ret && 'webkitPerspective' in docElement.style ) { + + injectElementWithStyles('@media (transform-3d),(-webkit-transform-3d){#modernizr{left:9px;position:absolute;height:3px;}}', function( node, rule ) { + ret = node.offsetLeft === 9 && node.offsetHeight === 3; + }); + } + return ret; + }; + + + tests['csstransitions'] = function() { + return testPropsAll('transition'); + }; + + + + tests['fontface'] = function() { + var bool; + + injectElementWithStyles('@font-face {font-family:"font";src:url("https://")}', function( node, rule ) { + var style = document.getElementById('smodernizr'), + sheet = style.sheet || style.styleSheet, + cssText = sheet ? (sheet.cssRules && sheet.cssRules[0] ? sheet.cssRules[0].cssText : sheet.cssText || '') : ''; + + bool = /src/i.test(cssText) && cssText.indexOf(rule.split(' ')[0]) === 0; + }); + + return bool; + }; + + tests['generatedcontent'] = function() { + var bool; + + injectElementWithStyles(['#',mod,'{font:0/0 a}#',mod,':after{content:"',smile,'";visibility:hidden;font:3px/1 a}'].join(''), function( node ) { + bool = node.offsetHeight >= 3; + }); + + return bool; + }; + tests['video'] = function() { + var elem = document.createElement('video'), + bool = false; + + try { + if ( bool = !!elem.canPlayType ) { + bool = new Boolean(bool); + bool.ogg = elem.canPlayType('video/ogg; codecs="theora"') .replace(/^no$/,''); + + bool.h264 = elem.canPlayType('video/mp4; codecs="avc1.42E01E"') .replace(/^no$/,''); + + bool.webm = elem.canPlayType('video/webm; codecs="vp8, vorbis"').replace(/^no$/,''); + } + + } catch(e) { } + + return bool; + }; + + tests['audio'] = function() { + var elem = document.createElement('audio'), + bool = false; + + try { + if ( bool = !!elem.canPlayType ) { + bool = new Boolean(bool); + bool.ogg = elem.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,''); + bool.mp3 = elem.canPlayType('audio/mpeg;') .replace(/^no$/,''); + + bool.wav = elem.canPlayType('audio/wav; codecs="1"') .replace(/^no$/,''); + bool.m4a = ( elem.canPlayType('audio/x-m4a;') || + elem.canPlayType('audio/aac;')) .replace(/^no$/,''); + } + } catch(e) { } + + return bool; + }; + + + tests['localstorage'] = function() { + try { + localStorage.setItem(mod, mod); + localStorage.removeItem(mod); + return true; + } catch(e) { + return false; + } + }; + + tests['sessionstorage'] = function() { + try { + sessionStorage.setItem(mod, mod); + sessionStorage.removeItem(mod); + return true; + } catch(e) { + return false; + } + }; + + + tests['webworkers'] = function() { + return !!window.Worker; + }; + + + tests['applicationcache'] = function() { + return !!window.applicationCache; + }; + + + tests['svg'] = function() { + return !!document.createElementNS && 
!!document.createElementNS(ns.svg, 'svg').createSVGRect; + }; + + tests['inlinesvg'] = function() { + var div = document.createElement('div'); + div.innerHTML = ''; + return (div.firstChild && div.firstChild.namespaceURI) == ns.svg; + }; + + tests['smil'] = function() { + return !!document.createElementNS && /SVGAnimate/.test(toString.call(document.createElementNS(ns.svg, 'animate'))); + }; + + + tests['svgclippaths'] = function() { + return !!document.createElementNS && /SVGClipPath/.test(toString.call(document.createElementNS(ns.svg, 'clipPath'))); + }; + + function webforms() { + Modernizr['input'] = (function( props ) { + for ( var i = 0, len = props.length; i < len; i++ ) { + attrs[ props[i] ] = !!(props[i] in inputElem); + } + if (attrs.list){ + attrs.list = !!(document.createElement('datalist') && window.HTMLDataListElement); + } + return attrs; + })('autocomplete autofocus list placeholder max min multiple pattern required step'.split(' ')); + Modernizr['inputtypes'] = (function(props) { + + for ( var i = 0, bool, inputElemType, defaultView, len = props.length; i < len; i++ ) { + + inputElem.setAttribute('type', inputElemType = props[i]); + bool = inputElem.type !== 'text'; + + if ( bool ) { + + inputElem.value = smile; + inputElem.style.cssText = 'position:absolute;visibility:hidden;'; + + if ( /^range$/.test(inputElemType) && inputElem.style.WebkitAppearance !== undefined ) { + + docElement.appendChild(inputElem); + defaultView = document.defaultView; + + bool = defaultView.getComputedStyle && + defaultView.getComputedStyle(inputElem, null).WebkitAppearance !== 'textfield' && + (inputElem.offsetHeight !== 0); + + docElement.removeChild(inputElem); + + } else if ( /^(search|tel)$/.test(inputElemType) ){ + } else if ( /^(url|email)$/.test(inputElemType) ) { + bool = inputElem.checkValidity && inputElem.checkValidity() === false; + + } else { + bool = inputElem.value != smile; + } + } + + inputs[ props[i] ] = !!bool; + } + return inputs; + })('search tel url email datetime date month week time datetime-local number range color'.split(' ')); + } + for ( var feature in tests ) { + if ( hasOwnProp(tests, feature) ) { + featureName = feature.toLowerCase(); + Modernizr[featureName] = tests[feature](); + + classes.push((Modernizr[featureName] ? '' : 'no-') + featureName); + } + } + + Modernizr.input || webforms(); + + + Modernizr.addTest = function ( feature, test ) { + if ( typeof feature == 'object' ) { + for ( var key in feature ) { + if ( hasOwnProp( feature, key ) ) { + Modernizr.addTest( key, feature[ key ] ); + } + } + } else { + + feature = feature.toLowerCase(); + + if ( Modernizr[feature] !== undefined ) { + return Modernizr; + } + + test = typeof test == 'function' ? test() : test; + + if (typeof enableClasses !== "undefined" && enableClasses) { + docElement.className += ' ' + (test ? '' : 'no-') + feature; + } + Modernizr[feature] = test; + + } + + return Modernizr; + }; + + + setCss(''); + modElem = inputElem = null; + + ;(function(window, document) { + var options = window.html5 || {}; + + var reSkip = /^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i; + + var saveClones = /^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i; + + var supportsHtml5St
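For reference, the payload that the controller hands to CKAN.GA_Reports.render_rickshaw via json.dumps(_to_rickshaw(...)) is a list of named series of {x, y} points, with x a Unix timestamp for the month. A hand-written example of one such payload (the publisher names and figures are invented):

    import json

    example = [
        {'name': 'Cabinet Office',
         'data': [{'x': 1375315200, 'y': 1024},   # 2013-08
                  {'x': 1377993600, 'y': 986}]},  # 2013-09
        {'name': 'Other',
         'data': [{'x': 1375315200, 'y': 143},
                  {'x': 1377993600, 'y': 127}]},
    ]
    print json.dumps(example)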