--- a/about.php +++ b/about.php @@ -5,6 +5,10 @@ <div class="foundation-header"> <h1><a href="about.php">About/FAQ</a></h1> </div> +<a href="http://thenounproject.com/noun/document-dump/">Document Dump icon designed by Iconathon, 2013</a> +Contact us: maxious@lambdacomplex.org + +Exports: <a href="admin/exportAll.csv.php">All Agencies</a> <a href="admin/exportEmployees.csv.php">Agency Employee Headcounts</a> <h2> Attributions </h2> National Archives of Australia, Australian Governments’ Interactive Functions Thesaurus, 2nd edition, September 2005, published at http://www.naa.gov.au/recordkeeping/thesaurus/index.htm <br/> data.gov.au http://data.gov.au/dataset/directory-gov-au-full-data-export/ <br/>
--- a/admin/refreshDesignDoc.php +++ b/admin/refreshDesignDoc.php @@ -42,6 +42,7 @@ $obj->views->byWebServer->map = "function(doc) {\n emit(doc.web_server, doc);\n}"; $obj->views->datasets->map = "function(doc) {\nif (doc.fieldName == \"data\") {\n emit(doc._id, doc);\n}\n}"; +$obj->views->datasetGroups->map = "function(doc) {\nif (doc.fieldName == \"data\") {\n doc.metadata[\"data.gov.au Category\"] && doc.metadata[\"data.gov.au Category\"].forEach(function(tag) {\n emit(tag, doc.url); \n });\n}\n}"; $obj->views->getValidationRequired->map = "function(doc) {\nif (doc.mime_type == \"text/html\" \n&& typeof(doc.validation) == \"undefined\") {\n emit(doc._id, doc._attachments);\n}\n}"; $docdb->save($obj, true);
--- a/charts.php +++ /dev/null @@ -1,133 +1,1 @@ -<?php -include_once('include/common.inc.php'); -include_header('Charts'); -$db = $server->get_db('disclosr-agencies'); -?> -<div class="foundation-header"> - <h1><a href="about.php">Charts</a></h1> - <h4 class="subheader">Lorem ipsum.</h4> -</div> -<div id="scores" style="width:900px;height:500px;"></div> -<div id="employees" style="width:1000px;height:900px;"></div> -<script id="source"> - window.onload = function() { - $(document).ready(function() { - var d1 = []; - var scorelabels = []; - <?php - try { - $rows = $db->get_view("app", "scoreHas?group=true", null, true)->rows; - - $dataValues = Array(); - foreach ($rows as $row) { - $dataValues[$row->value] = $row->key; - } - $i = 0; - ksort($dataValues); - foreach ($dataValues as $value => $key) { - - echo " d1.push([$i, $value]);" . PHP_EOL; - echo " scorelabels.push('$key');" . PHP_EOL; - $i++; - } - } catch (SetteeRestClientException $e) { - setteErrorHandler($e); - } - ?> - function scoretrackformatter(obj) { - if (scorelabels[Math.floor(obj.x)]) { - return (scorelabels[Math.floor(obj.x)])+"="+obj.y; - - } else { - return ""; - } - } - function scoretickformatter(val, axis) { - if (scorelabels[Math.floor(val)]) { - return '<p style="margin-top:8em;-webkit-transform:rotate(-90deg);">'+(scorelabels[Math.floor(val)])+"</b>"; - - } else { - return ""; - } - } - Flotr.draw(document.getElementById("scores"), [ {data: d1}], { - HtmlText: true, - bars : { - show : true - }, - mouse : { - track : true, - relative : true, - trackFormatter: scoretrackformatter - },yaxis: { - autoscaling: true - }, - xaxis: { - autoscaling: true, - minorTickFreq: 0.6, - noTicks : scorelabels.length, - tickFormatter: scoretickformatter - } - }); - - - - - - - - -var emplabels = []; -function emptrackformatter(obj) { - - return (obj.series.label)+" = "+obj.y+" in "+emplabels[Math.floor(obj.x)]; - - } - function emptickformatter(val, axis) { - if (emplabels[Math.floor(val)]) { - return '<p 
style="margin-top:8em;-webkit-transform:rotate(-90deg);">'+(emplabels[Math.floor(val)])+"</b>"; - - } else { - return ""; - } - } -function onDataReceived(series) { - emplabels = series.labels; - Flotr.draw(document.getElementById("employees"), series.data, { - mouse : { - track : true, - relative : true, - trackFormatter: emptrackformatter - },yaxis: { - max: 10000, - scaling: 'logarithmic' - }, - xaxis: { - minorTickFreq: 1, - noTicks: emplabels.length, - showMinorLabels: true, - tickFormatter: emptickformatter - }, - legend: { - show: false - } - }); - } - - $.ajax({ - url: "admin/exportEmployees.csv.php?format=json", - method: 'GET', - dataType: 'json', - success: onDataReceived - }); - - - }); - }; - -</script> - -<?php -include_footer(); -?>
--- /dev/null +++ b/disclosr.iml @@ -1,1 +1,16 @@ +<?xml version="1.0" encoding="UTF-8"?> +<module type="WEB_MODULE" version="4"> + <component name="FacetManager"> + <facet type="Python" name="Python"> + <configuration sdkName="" /> + </facet> + </component> + <component name="NewModuleRootManager" inherit-compiler-output="true"> + <exclude-output /> + <content url="file://$MODULE_DIR$" /> + <orderEntry type="inheritedJdk" /> + <orderEntry type="sourceFolder" forTests="false" /> + </component> +</module> +
--- a/documents/charts.php +++ b/documents/charts.php @@ -13,10 +13,10 @@ ?> <div class="foundation-header"> <h1><a href="about.php">Charts</a></h1> - <h4 class="subheader">Lorem ipsum.</h4> + <h4 class="subheader"></h4> </div> <div id="bydate" style="width:1000px;height:300px;"></div> -<div id="byagency" style="width:1200px;height:300px;"></div> +<div id="byagency" style="width:1200px;height:800px;"></div> <script id="source"> window.onload = function () { $(document).ready(function () { @@ -59,7 +59,7 @@ mode: 'x' }, HtmlText: false, - title: 'Time' + title: 'Disclosure Log entries added by Date' }; // Draw graph with default options, overwriting with passed options @@ -98,12 +98,12 @@ var agencylabels = []; function agencytrackformatter(obj) { - return agencylabels[Math.floor(obj.x)] + " = " + obj.y; + return agencylabels[Math.floor(obj.y)] + " = " + obj.x; } function agencytickformatter(val, axis) { if (agencylabels[Math.floor(val)]) { - return '<p style="margin-top:8em;-webkit-transform:rotate(-90deg);">' + (agencylabels[Math.floor(val)]) + "</b>"; + return (agencylabels[Math.floor(val)]) ; } else { return ""; @@ -117,7 +117,7 @@ $dataValues = Array(); $i = 0; foreach ($rows as $row) { - echo " d2.push([".$i.", $row->value]);" . PHP_EOL; + echo " d2.push([ $row->value,$i]);" . PHP_EOL; echo " agencylabels.push(['".str_replace("'","",$idtoname[$row->key])."']);" . PHP_EOL; $i++; @@ -131,9 +131,10 @@ document.getElementById("byagency"), [d2], { + title: "Disclosure Log entries by Agency", bars: { show: true, - horizontal: false, + horizontal: true, shadowSize: 0, barWidth: 0.5 }, @@ -143,14 +144,14 @@ trackFormatter: agencytrackformatter }, yaxis: { - min: 0, - autoscaleMargin: 1 - }, - xaxis: { minorTickFreq: 1, noTicks: agencylabels.length, showMinorLabels: true, tickFormatter: agencytickformatter + }, + xaxis: { + min: 0, + autoscaleMargin: 1 }, legend: { show: false
--- /dev/null +++ b/documents/datagov-export-groups.py @@ -1,1 +1,81 @@ +import ckanclient +import couchdb +from ckanclient import CkanApiError +import re + +class LoaderError(Exception): + pass + +# Instantiate the CKAN client. +#ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api', api_key='b47b24cd-591d-40c1-8677-d73101d56d1b') +api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc' +ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api', + api_key=api_key) +couch = couchdb.Server('http://127.0.0.1:5984/') +#couch = couchdb.Server('http://192.168.1.113:5984/') + +# https://github.com/okfn/ckanext-importlib +def munge(name): + # convert spaces to underscores + name = re.sub(' ', '_', name).lower() + # convert symbols to dashes + name = re.sub('[:]', '_-', name).lower() + name = re.sub('[/]', '-', name).lower() + # take out not-allowed characters + name = re.sub('[^a-zA-Z0-9-_]', '', name).lower() + # remove double underscores + name = re.sub('__', '_', name).lower() + return name + + +def name_munge(input_name): + return munge(input_name.replace(' ', '').replace('.', '_').replace('&', 'and')) + + +docsdb = couch['disclosr-documents'] + +if __name__ == "__main__": + groups = {} + for doc in docsdb.view('app/datasetGroups'): + group_name = doc.key + if group_name != "Not specified": + pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_', + doc.value.replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]); + if group_name in groups.keys(): + groups[group_name] = list(set(groups[group_name] + [pkg_name])) + else: + groups[group_name] = [pkg_name] + + # add dataset to group(s) + for group_name in groups.keys(): + if group_name != "Not specified": + group_url = name_munge(group_name[:100]) + print group_name + print groups[group_name] + try: + # Update the group details + group_entity = ckan.group_entity_get(group_url) + print "group "+group_name+" exists" + if 'packages' in group_entity.keys(): + 
group_entity['packages'] = list(set(group_entity['packages'] + groups[group_name])) + else: + group_entity['packages'] = groups[group_name] + ckan.group_entity_put(group_entity) + except CkanApiError, e: + if ckan.last_status == 404: + print "group "+group_name+" does not exist, creating" + group_entity = { + 'name': group_url, + 'title': group_name, + 'description': group_name, + 'packages': groups[group_name] + } + #print group_entity + ckan.group_register_post(group_entity) + elif ckan.last_status == 409: + print "group already exists" + else: + raise LoaderError('Unexpected status %s adding to group under \'%s\': %r' % ( + ckan.last_status, pkg_name, e.args)) +
--- a/documents/datagov-export.py +++ b/documents/datagov-export.py @@ -1,44 +1,80 @@ +# coding=utf-8 import ckanclient import couchdb from ckanclient import CkanApiError import re +import html2text # aaronsw :( +import ckanapi # https://github.com/open-data/ckanapi +import scrape +import datetime, os, hashlib +import urllib2 class LoaderError(Exception): pass +import tempfile +def add_package_resource_cachedurl(ckan, package_name, url, name, format, license_id, size,**kwargs): + if "xls" in url: + format = "xls" + if "pdf" in url: + format = "pdf" + if "xlsx" in url: + format = "xlsx" + (returned_url, mime_type, content) = scrape.fetchURL(scrape.docsdb, + url, "dataset_resource", "AGIMO", False) + if mime_type in ["application/vnd.ms-excel","application/msexcel","application/x-msexcel","application/x-ms-excel","application/x-excel","application/x-dos_ms_excel","application/xls","application/x-xls"]: + format = "xls" + if mime_type in ["application/xlsx","application/x-xlsx","application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"]: + format = "xlsx" + + if content != None: + tf = tempfile.NamedTemporaryFile(delete=False) + tfName = os.path.abspath(tf.name) + print tfName + tf.seek(0) + tf.write(content) + tf.flush() + ckan.add_package_resource (package_name, tfName, name=name, format=format, license_id=license_id) + else: + print "fetch error" + return ckan.add_package_resource(package_name, url, name=name, resource_type='data', + format=format, + size=size, mimetype=mime_type, license_id=license_id) + # Instantiate the CKAN client. 
-#ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api', api_key='b47b24cd-591d-40c1-8677-d73101d56d1b') -ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api', api_key='72f90359-0396-438c-804f-a26a24336747') -#couch = couchdb.Server('http://127.0.0.1:5984/') -couch = couchdb.Server('http://192.168.1.113:5984/') - -# http://stackoverflow.com/a/7778368/684978 -from HTMLParser import HTMLParser -import htmlentitydefs - -class HTMLTextExtractor(HTMLParser): - def __init__(self): - HTMLParser.__init__(self) - self.result = [ ] - - def handle_data(self, d): - self.result.append(d) - - def handle_charref(self, number): - codepoint = int(number[1:], 16) if number[0] in (u'x', u'X') else int(number) - self.result.append(unichr(codepoint)) - - def handle_entityref(self, name): - codepoint = htmlentitydefs.name2codepoint[name] - self.result.append(unichr(codepoint)) - - def get_text(self): - return u''.join(self.result) - -def html_to_text(html): - s = HTMLTextExtractor() - s.feed(html) - return s.get_text() +api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc' +server = 'data.disclosurelo.gs' + +ckan = ckanclient.CkanClient(base_location='http://' + server + '/api', + api_key=api_key) +ckandirect = ckanapi.RemoteCKAN('http://' + server, api_key=api_key) +couch = couchdb.Server('http://127.0.0.1:5984/') +#couch = couchdb.Server('http://192.168.1.113:5984/') + +import urllib +import urlparse + + +def url_fix(s, charset='utf-8'): + """Sometimes you get an URL by a user that just isn't a real + URL because it contains unsafe characters like ' ' and so on. This + function can fix some of the problems in a similar way browsers + handle data entered by the user: + + >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)') + 'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29' + + :param charset: The target charset for the URL if the url was + given as unicode string. 
+ """ + if isinstance(s, unicode): + s = s.encode(charset, 'ignore') + if not urlparse.urlparse(s).scheme: + s = "http://" + s + scheme, netloc, path, qs, anchor = urlparse.urlsplit(s) + path = urllib.quote(path, '/%') + qs = urllib.quote_plus(qs, ':&=') + return urlparse.urlunsplit((scheme, netloc, path, qs, anchor)) # http://code.activestate.com/recipes/578019-bytes-to-human-human-to-bytes-converter/ SYMBOLS = { @@ -49,6 +85,7 @@ 'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi', 'zebi', 'yobi'), } + def human2bytes(s): """ @@ -78,6 +115,9 @@ ... ValueError: can't interpret '12 foo' """ + if s == None: + return 0 + s = s.replace(',', '') init = s num = "" while s and s[0:1].isdigit() or s[0:1] == '.': @@ -116,89 +156,163 @@ def name_munge(input_name): return munge(input_name.replace(' ', '').replace('.', '_').replace('&', 'and')) - #return input_name.replace(' ', '').replace('.', '_').replace('&', 'and') - -def get_licence_id(licencename): + + +def get_license_id(licencename): map = { "Creative Commons - Attribution-Share Alike 2.0 Australia (CC-SA)\nThe downloadable version of the database is licensed under CC-BY-SA Creative Commons Attribution Share Alike and contains only the database fields that are released under that license. These fields are object title, object number, object description as well as temporal, spatial and dimension details. 
It also contains a persistent URL for each record.": 'cc-by-sa', "CreativeCommonsAttributionNonCommercial30AustraliaCCBYNC30": 'cc-nc', 'Otherpleasespecify': 'notspecified', '': 'notspecified', "Publicly available data": 'notspecified', - "CreativeCommonsAttributionNoDerivativeWorks30AustraliaCCBYND30": "other-closed", - "CreativeCommonsAttributionNonCommercialNoDerivs30AustraliaCCBYNCND30": "other-closed", + "CreativeCommonsAttributionNoDerivativeWorks30AustraliaCCBYND30": "cc-by-nd", + "CreativeCommonsAttributionNonCommercialNoDerivs30AustraliaCCBYNCND30": "cc-nc-nd", 'CreativeCommonsAttribution30AustraliaCCBY30': 'cc-by', "Creative Commons - Attribution 2.5 Australia (CC-BY)": 'cc-by', 'CreativeCommonsAttributionCCBY25': 'cc-by', "PublicDomain": 'other-pd', - } + } if licencename not in map.keys(): - raise Exception(licencename + " not found"); + raise Exception(licencename + " not found"); return map[licencename]; +goodcsvdata = "afl-in-victoria,annual-budget-initiatives-by-suburb-brisbane-city-council,athletics-in-victoria-gfyl,bicycle-racks-mosman-municipal-council,boat-ramps-brisbane-city-council,brisbane-access-ratings-database,bus-stops-brisbane-city-council,cemeteries-brisbane-city-council,cfa-locations,citycycle-stations-brisbane-city-council,community-gardens-brisbane-city-council,community-halls-brisbane-city-council,cooking-classes-gfyl,court-locations-victoria,customer-service-centres-brisbane-city-council,dance-in-victoria-gfyl,disability-activity-gfyl,dog-parks-brisbane-city-council,ferry-terminals-brisbane-city-council,fishing-club-in-victoria-gfyl,fitness-centres-in-victoria-gfyl,gardens-reserves-gfyl,golf-courses-brisbane-city-council,gymnastics-in-victoria-gfyl,historic-cemeteries-brisbane-city-council,ice-skating-centres-gfyl,immunisation-clinics-brisbane-city-council,libraries-brisbane-city-council,licenced-venues-victoria,lifesaving-locations-victoria,loading-zones-brisbane-city-council,major-projects-victoria,markets-in-victoria,martial-arts
-in-victoria-gfyl,melbourne-water-use-by-postcode,members-of-parliament-both-houses-nsw,members-of-the-legislative-assembly-nsw,members-of-the-legislative-council-nsw,mfb-locations-vic,ministers-of-the-nsw-parliament,mosman-local-government-area,mosman-rider-route,mosman-wwii-honour-roll,neighbourhood-houses-gfyl,news-feeds-mosman-municipal-council,off-street-car-parks-mosman-municipal-council,orienteering-clubs-gfyl,parking-meter-areas-brisbane-city-council,parks-and-reserves-mosman-municipal-council,parks-brisbane-city-council,personal-training-gfyl,picnic-areas-brisbane-city-council,playgrounds-brisbane-city-council,playgrounds-mosman-municipal-council,police-region-crime-statistics-victoria,police-service-area-crime-statistics-victoria,pony-clubs-in-victoria-gfyl,prison-locations-victoria,public-amenities-maintained-by-mosman-council,public-art-brisbane-city-council,public-internet-locations-vic,public-toilets-brisbane-city-council,racecourse-locations-victoria,recent-development-applications-mosman-municipal-council,recreation-groups-gfyl,recreational-fishing-spots,regional-business-centres-brisbane-city-council,reports-of-swooping-birds-mosman-municipal-council,restricted-parking-areas-brisbane-city-council,rollerskating-centres-in-victoria-gfyl,sailing-clubs-gfyl,school-locations-victoria,shadow-ministers-of-the-nsw-parliament,skate-parks-gfyl,sporting-clubs-and-organisations-gfyl,stakeboard-parks-brisbane-city-council,state-bodies-gfyl,street-names-brisbane-city-council,suburbs-and-adjoining-suburbs-brisbane-city-council,swimming-pools-brisbane-city-council,swimming-pools-gfyl,tennis-courts-brisbane-city-council,top-40-book-club-reads-brisbane-city-council,tracks-and-trails-gfyl,triathlon-clubs-gfyl,urban-water-restrictions-victoria,veterinary-services-in-mosman,victorian-microbreweries,volunteering-centres-services-and-groups-victoria,walking-groups-gfyl,ward-offices-brisbane-city-council,waste-collection-days-brisbane-city-council,waste-transfer-stations-b
risbane-city-council,water-consumption-in-melbourne,water-sports-in-victoria-gfyl,wifi-hot-spots-brisbane-city-council,yoga-pilates-and-tai-chi-in-victoria-gfyl,2809cycling-in-new-south-wales-what-the-data-tells-us2809-and-related-data,act-barbecue-bbq-locations,act-tafe-locations,ausindustry-locations,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,australian-gas-light-company-maps,australian-gas-light-company-maps,australian-ports,australian-public-service-statistical-bulletin-2011-12,australian-public-service-statistical-bulletin-snapshot-at-december-31-2011,australian-public-service-statistical-bulletin-tables-0910,austrics-timetable-set,capital-works-call-tender-schedule,collection-item-usage-state-library-of-victoria,country-and-commodity-trade-data-spreadsheet,country-and-commodity-trade-data-spreadsheet-2,country-by-level-of-processing-trade-data-spreadsheet,crime-incident-type-and-frequency-by-capital-city-and-nationally,csiro-locations,data-from-the-oaic-public-sector-information-survey-2012,data-from-the-oaic-public-sector-information-survey-2012,data-from-the-oaic-public-sector-information-survey-2012,department-of-finance-and-deregulation-office-locations,digitised-maps,diisr-division-locations-excluding-ausindustry-enterprise-connect-and-nmi,diisr-locations,diisr-portfolio-agency-locations-excluding-csiro,distance-to-legal-service-providers-from-disadvantaged-suburbs,enterprise-connect-locations,fire-insurance-maps-sydney-block-plans-1919-1940,fire-insurance-maps-sydney-block-plans-1919-1940,first-fleet-collection,first-fleet-collection,first-fleet-maps,first-fleet-maps,freedom-of-information-annual-estimated-costs-and-staff-time-statistical-data-2011-12,freedom-of-information-quarterly-request-and-review-statistical-data-2011-12,freedom-of
-information-requests-estimated-costs-and-charges-collected-1982-83-to-2011-12,higher-education-course-completions,higher-education-enrolments,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,journey-planner-data-nt,library-catalogue-search-terms-state-library-of-victoria,location-of-act-schools,location-of-centrelink-offices,location-of-european-wasps-nests,location-of-lawyers-and-legal-service-providers-by-town,location-of-legal-assistance-service-providers,location-of-medicare-offices,location-of-medicare-offices,maps-of-the-southern-hemisphere-16th-18th-centuries,maps-of-the-southern-hemisphere-16th-18th-centuries,music-queensland,national-measurement-institute-locations,new-south-wales-officers-and-men-of-the-australian-imperial-force-a-i-f-and-the-australian-naval-for,new-south-wales-officers-and-men-of-the-australian-imperial-force-a-i-f-and-the-australian-naval-for,photographs-of-nsw-life-pre-1955,photographs-of-nsw-life-pre-1955,photographs-of-sydney-before-1885,photographs-of-sydney-before-1885,picture-queensland,plgr-28093-playgrounds-act,police-station-locations,queensland-public-libraries,rare-printed-books,rare-printed-books,real-estate-maps,regional-australia-funding-projects,sa-memory-state-library-of-south-australia,search-engine-terms-state-library-of-victoria,south-australian-photographs-state-library-of
-south-australia,south-australian-sheet-music-state-library-of-south-australia,sydney-bond-store-maps-1894,sydney-bond-store-maps-1894,sydney-maps-1917,sydney-maps-1917,tafe-institute-locations-victoria,tafe-sa-campus-locations,tolt-public-toilets-act,victorian-public-library-branches-state-library-of-victoria,western-australia-public-library-network,world-war-one-photographs-by-frank-hurley,world-war-one-photographs-by-frank-hurley,citycat-timetables-brisbane-city-council,cityferry-timetables-brisbane-city-council,cost-of-salinity-to-local-infrastructure-1996-97-summary-of-component-costs-of-salinity-by-reporting,cost-of-salinity-to-local-infrastructure-1996-97-summary-of-component-costs-of-salinity-by-reporting,downstream-cost-calculator-model-and-data-for-199697-or-2001-prices,economics-of-australian-soil-conditions-199697-limiting-factor-or-relative-yield-min-of-ry_salt2000-,geographical-names-register-gnr-of-nsw,victorian-dryland-salinity-assessment-2000-d01cac_ramsar_final-xls,victorian-dryland-salinity-assessment-2000-d02cac_fauna_final-xls,victorian-dryland-salinity-assessment-2000-d03cac_fauna_dist_final-xls,victorian-dryland-salinity-assessment-2000-dc04cac_hydrol_final-xls,victorian-dryland-salinity-assessment-2000-dc05cac_wetland_final-xls,victorian-dryland-salinity-assessment-2000-dc06cac_util_final-xls,victorian-dryland-salinity-assessment-2000-dc07cac_road_final-xls,victorian-dryland-salinity-assessment-2000-dc08cac_towns_final-xls,victorian-dryland-salinity-assessment-2000-dc09cac_flora_final-xls,victorian-dryland-salinity-assessment-2000-dc10cac_flora_dist_final-xls,victorian-dryland-salinity-assessment-2000-dc12cac_infrastructure-xls,victorian-dryland-salinity-assessment-2000-dc13cac_natural_envt-xls,victorian-dryland-salinity-assessment-2000-dc14cac_agriculture-xls,victorian-dryland-salinity-assessment-2000-dc16cac_agric_cost-xls,victorian-dryland-salinity-assessment-2000-dc17cac_shallow_wt-xls,victorian-dryland-salinity-assessment-2000-dc18cac_ag
ric_cost_time-xls,victorian-dryland-salinity-assessment-2000-dc21cac_water_resources_new-xls,victorian-dryland-salinity-assessment-2000-dc22cac_risk-xls,licensed-broadcasting-transmitter-data,nsw-crime-data,recorded-crime-dataset-nsw,crime-statistics-in-nsw-by-month,2001-02-to-2007-08-local-government-survey-victoria,2009-green-light-report,annual-statistical-reports-fire-brigades-nsw-200304,annual-statistical-reports-fire-brigades-nsw-200405,annual-statistical-reports-fire-brigades-nsw-200506,annual-statistical-reports-fire-brigades-nsw-200607,arts-on-the-map,assets-and-liabilities-of-australian-located-operations,assets-of-australian-located-operations,assets-of-australian-located-operations-by-country,assets-of-financial-institutions,back-issues-of-monthly-banking-statistics,banks-assets,banks-consolidated-group-capital,banks-consolidated-group-impaired-assets,banks-consolidated-group-off-balance-sheet-business,banks-liabilities,building-societies-selected-assets-and-liabilities,byteback2842-locations-vic,cash-management-trusts,city-of-melbourne-street-furniture-database,community-services-nsw,consolidated-exposures-immediate-and-ultimate-risk-basis,consolidated-exposures-immediate-risk-basis-foreign-claims-by-country,consolidated-exposures-immediate-risk-basis-international-claims-by-country,consolidated-exposures-ultimate-risk-basis,consolidated-exposures-ultimate-risk-basis-foreign-claims-by-country,cosolidated-exposures-immediate-risk-basis,credit-unions-selected-assets-and-liabilities,daily-net-foreign-exchange-transactions,detox-your-home,education-national-assessment-program-literacy-and-numeracy-nsw,employment-data-by-nsw-regions,excise-beer-clearance-data-updated-each-month-beer-clearance-summary-data,finance-companies-and-general-financiers-selected-assets-and-liabilities,foreign-exchange-transactions-and-holdings-of-official-reserve-assets,half-yearly-life-insurance-bulletin-december-2010,health-behaviours-in-nsw,international-liabilities-by-country-of
-the-australian-located-operations-of-banks-and-rfcs,liabilities-and-assets-monthly,liabilities-and-assets-weekly,liabilities-of-australian-located-operations,life-insurance-offices-statutory-funds,managed-funds,monetary-policy-changes,money-market-corporations-selected-assets-and-liabilities,monthly-airport-traffic-data-for-top-ten-airports-january-1985-to-december-2008,monthly-banking-statistics-april-2011,monthly-banking-statistics-june-2011,monthly-banking-statistics-may-2011,open-market-operations-2009-to-current,projected-households-vic-rvic-msd-2006-2056,projected-population-by-age-and-sex-vic-rvic-msd-2006-2056,public-unit-trust,quarterly-bank-performance-statistics,quarterly-general-insurance-performance-statistics-march-2011,quarterly-superannuation-performance-march-2011,recorded-crime-dataset-nsw,residential-land-bulletin,resourcesmart-retailers,resourcesmart-retailers-vic,road-fatalities-nsw,securitisation-vehicles,selected-asset-and-liabilities-of-the-private-non-financial-sectors,seperannuation-funds-outside-life-offices,solar-report-vic,towns-in-time-victoria,vif2008-projected-population-by-5-year-age-groups-and-sex-sla-lga-ssd-sd-2006-2026,vif2008-projected-population-totals-and-components-vic-rvic-msd-2006-2056,vif2008-projected-population-totals-sla-lga-ssd-sd-2006-2026,arts-festivals-victoria,arts-organisations-victoria,arts-spaces-and-places-victoria,ausgrid-average-electricity-use,collecting-institutions-victoria,indigenous-arts-organisations-victoria,latest-coastal-weather-observations-for-coolangatta-qld,top-10-fiction-books-brisbane-city-council".split(",") +goodotherdata = 
"abc-local-stations,abc-local-stations,abc-local-stations,act-emergency-services-agency-esa-28093-current-incidents,act-government-news-and-events,act-government-summaries-of-cabinet-outcomes,act-magistrates-court-judgements,act-supreme-court-judgements,act-supreme-court-sentences,action-bus-service-gtfs-feed-act,actpla-latest-news,agricultural-commodities-for-199697-linked-to-profit-function-surfaces,agricultural-structure-classification,agricultural-structure-classification,all-vacant-act-government-jobs,annual-family-income-1996-1997-to-1998-1999-three-year-average,apvma-pubcris-dataset-for-registered-agricultural-and-veterinary-chemical-products-and-approved-acti,argus-newspaper-collection-of-photographs-state-library-of-victoria,assessment-of-terrestrial-biodiversity-2002-biodiversity-audit-data-entry-system-bades,assessment-of-terrestrial-biodiversity-2002-database,assisted-immigration-1848-1912-index,ausgrid-average-electricity-use,ausgrid-average-electricity-use-2011,ausindustry-locations,ausindustry-locations,austender-contract-notice-export,australian-broadband-guarantee,australian-broadband-guarantee,australian-data-access,australian-dryland-salinity-assessment-spatial-data-12500000-nlwra-2001,australian-dryland-salinity-assessment-spatial-data-12500000-nlwra-2001,australian-groundwater-flow-systems-national-land-and-water-resources-audit-january-2000,australian-groundwater-flow-systems-national-land-and-water-resources-audit-january-2000,australian-irrigation-areas-raster-version-1a-national-land-and-water-resources-audit,australian-irrigation-areas-raster-version-1a-national-land-and-water-resources-audit,australian-irrigation-areas-vector-version-1a-national-land-and-water-resources-audit,australian-irrigation-areas-vector-version-1a-national-land-and-water-resources-audit,australian-public-service-statistical-bulletin-2010-11,australian-water-resources-assessment-2000-database,australiana-index-state-library-of-victoria,available-water-capacity-for-au
stralian-areas-of-intensive-agriculture-of-layer-1-a-horizon-top-soil,bicycle-racks-mosman-municipal-council,bikeways-briisbane-city-council,bikeways-briisbane-city-council,boreholes-in-the-murray-basin-southeastern-australia,boreholes-in-the-murray-basin-southeastern-australia,british-convict-transportation-registers,calculated-annual-and-monthly-potential-evaporation-mm,calculated-annual-and-monthly-potential-evaporation-mm,canberra-suburb-boundaries,catchment-and-subcatchments-grid,cemeteries-brisbane-city-council,cemeteries-brisbane-city-council,coal-fields-in-the-murray-basin-southeastern-australia,coal-fields-in-the-murray-basin-southeastern-australia,commonwealth-agencies,commonwealth-electoral-boundaries-archive-2009,commonwealth-electoral-boundaries-archive-2009,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-all-infrastructure-buildings-road-rail-a,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-all-infrastructure-buildings-road-rail-a,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-increase-to-local-infrastructure-based-o,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-the-general-infrastructure-component-,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-the-rail-component-of-infrastructure-,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-the-general-infrastructure-component-bui,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-the-road-component-of-infrastructure-bas,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-the-road-component-of-infrastructure-bas,cost-o
f-salinity-to-local-infrastructure-1996-97-total-cost-to-the-bridge-component-of-infrastructur,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-to-the-bridge-component-of-infrastructur,country-by-level-of-processing-trade-data-spreadsheet-2,country-by-level-of-processing-trade-data-spreadsheet-2011-12,crime-incidents-data-2004-international-crime-victimisation-survey-icvs-australian-component,cropping-management-practices-1998-1999,csiro-locations,csiro-locations,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,cybersafety-outreach-program,cybersafety-outreach-program,data-source-for-polygonal-data-used-by-the-asris-project-in-generation-of-modelled-surfaces,department-of-finance-and-deregulation-office-locations,department-of-finance-and-deregulation-office-locations,depositional-path-length,digital-enterprise,digital-hubs,digitised-maps,diisr-division-locations-excluding-ausindustry-enterprise-connect-and-nmi,diisr-division-locations-excluding-ausindustry-enterprise-connect-and-nmi,diisr-locations,diisr-portfolio-agency-locations-excluding-csiro,diisr-portfolio-agency-locations-excluding-csiro,directory-gov-au-full-data-export,distance-to-ridges,economics-of-australian-soil-conditions-199697-factor-most-limiting-yield-aciditysodicitysalinity,economics-of-australian-soil-conditions-199697-gross-benefit-acidity-hayr,economics-of-australian-soil-conditions-199697-gross-benefit-of-the-limiting-factor-hayr,economics-of-australian-soil-conditions-199697-gross-benefit-salinity-hayr,economics-of-australian-soil-conditions-199697-gross-benefit-sodicity-hayr,economics-of-australian-soil-
conditions-199697-impact-cost-of-salinity-2000-2020-hayr,economics-of-australian-soil-conditions-199697-relative-yield-from-acidity,economics-of-australian-soil-conditions-199697-relative-yield-from-salinity-in-2000,economics-of-australian-soil-conditions-199697-relative-yield-from-salinity-in-2020,economics-of-australian-soil-conditions-199697-relative-yield-from-sodicity,edd-media-releases,edd-news-and-events,egovernment-resource-centre-website-analytics,elevation-of-the-pre-tertiary-basement-in-the-murray-basin,elevation-of-the-pre-tertiary-basement-in-the-murray-basin,enterprise-connect-locations,enterprise-connect-locations,equivalent-fresh-water-head-difference-between-the-shallowest-and-deepest-aquifers,equivalent-fresh-water-head-difference-between-the-shallowest-and-deepest-aquifers,erosion-gully-density,erosion-path-length,estimated-proportion-of-farms-carrying-out-landcare-related-work-1998-1999,estimated-value-of-agricultural-operations-evao-1996-1997,farm-equity-ratio-1996-1997-to-1998-1999-three-year-average,farm-family-cash-income-1196-1997-to-1998-1999-three-year-average,farmer-population-1996,farms-with-significant-degradation-problems-irrigation-salinity-1998-1999,farms-with-significant-degradation-problems-irrigation-salinity-1998-1999-2,farms-with-significant-degradation-problems-soil-acidity-1998-1999,forests-of-australia-2003,freedom-of-information-foi-summaries,geology-lithology-12-500-000-scale,glenorchy-city-council-building-footprints,glenorchy-city-council-building-footprints,glenorchy-city-council-building-footprints,glenorchy-city-council-kerbs,glenorchy-city-council-kerbs,glenorchy-city-council-kerbs,glenorchy-city-council-stormwater-pipes,glenorchy-city-council-stormwater-pipes,glenorchy-city-council-stormwater-pipes,glenorchy-city-council-stormwater-pits,glenorchy-city-council-stormwater-pits,glenorchy-city-council-stormwater-pits,groundwater-sdl-resource-units,groundwater-sdl-resource-units,groundwater-sdl-resource-units,higher-quali
fications-of-farmers-and-farm-managers-1996,historical-australian-government-contract-data,historical-australian-government-contract-data,hydrologic-indicator-sites,hydrologic-indicator-sites,immigration-land-orders-1861-1874,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-biota-condition-sub-in,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-catchment-condition-in,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-feral-animal-density,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-human-population-densi,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-impoundment-density,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-industrial-point-sourc,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-intensive-agricultural,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-land-condition-sub-ind,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-native-vegetation-frag,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-nutrient-point-source-,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-pesticide-hazard,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-predicted-2050-salinit,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-protected-areas,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-rivers-in-acidificatio,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-rivers-in-salt-hazard,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-rivers-through-forests,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-soil-acidification-haz,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australi
a-soil-degradation-hazar,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-suspended-sediment-loa,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-weed-density,integrated-vegetation-cover-2003-version-1,john-t-collins-collection-state-library-of-victoria,journal-of-the-h-m-s-endeavour-1768-1771,journey-planner-data-act,krantz-sheldon-architectural-images,land-use-of-australia-version-3-28093-20012002,lands-surveys-historic-map-series-western-australia,latest-coastal-weather-observations-for-coolangatta-qld,launceston-city-council-addresses,launceston-city-council-building-footprints,launceston-city-council-contours,launceston-city-council-detail-survey-drawing-file,launceston-city-council-drainage,launceston-city-council-fences,launceston-city-council-pavement,launceston-city-council-railway,launceston-city-council-roads,libraries-act-announcements,licensed-broadcasting-transmitter-data,linc-tasmania,look-up-table-of-auslig-river-basins-of-australia-1997,major-water-resources-infrastructure-part-of-the-australian-water-resources-assessment-2000-database,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-pre-1788-scenario,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-pre-1788-scenario,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-present-day-scenario,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-present-day-scenario,mean-annual-deep-drainage-mm-y-in-the-pre-1788-scenario,mean-annual-deep-drainage-mm-y-in-the-pre-1788-scenario,mean-annual-deep-drainage-mm-y-in-the-present-day-scenario,mean-annual-deep-drainage-mm-y-in-the-present-day-scenario,mean-annual-transpiration-from-the-plant-canopy-for-the-pre-1788-scenario,mean-annual-transpiration-from-the-plant-canopy-for-the-pre-1788-scenario,mean-annual-transpiration-from-the-plant-canopy-for-the-present-day-scenario,mean-annual-transpiration-from-t
he-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-april-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-april-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-february-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-february-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-pre-1788-scenario,mea
n-transpiration-in-march-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-present-day-scenario-2,mean-transpiration-in-march-from-the-plant-canopy-for-the-present-day-scenario-2,mean-transpiration-in-may-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-may-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-may-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-may-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-present-day-scenario,mildenhall-photographs-of-early-canberra,mobility-map-brisbane-city,mobility-map-mt-coot-tha,mosman-local-government-area,mosman-rider-route,mosman-wwii-honour-roll,mosman-wwii-honour-roll,murray-darling-basin-water-resource-plan-areas-groundwater,murray-darling-basin-water-resource-plan-areas-groundwater,murray-darling-basin-water-resou
rce-plan-areas-surface-water,murray-darling-basin-water-resource-plan-areas-surface-water,music-queensland,national-broadband-network,national-broadband-network,national-broadband-network-2011-10,national-broadband-network-2011-10,national-broadband-network-2011-12,national-broadband-network-2011-12,national-broadband-network-2012,national-broadband-network-28093-august-2011,national-broadband-network-28093-august-2011,national-broadband-network-28093-july-2011,national-broadband-network-28093-july-2011,national-broadband-network-february-2012,national-broadband-network-february-2012,national-broadband-network-september-2011,national-broadband-network-september-2011,national-library-of-australia-sheet-music-collection,national-measurement-institute-locations,national-parks-and-asset-locations-south-australia,national-public-toilet-map,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of
-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,nsw-newspapers-catalogue-data,nsw-rural-fire-service-current-incidents,nsw-rural-fire-service-major-updates,off-street-car-parks-mosman-municipal-council,open-database-brisbane-city-council,ost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-the-rail-component-of-infrastructure-b,parking-areas-brisbane-city-council,parking-areas-brisbane-city-council,parks-and-reserves-mosman-municipal-council,parks-brisbane-city-council,parks-brisbane-city-council,picture-australia-metadata,picture-queensland,picture-queensland,playgrounds-mosman-municipal-council,police-station-locations,police-station-locations,port-phillip-papers-state-library-of-victoria,precis-forecast-national,precis-forecast-national,precis-forecast-new-south-wales,precis-forecast-new-south-wales,precis-forecast-new-south-wales,precis-forecast-northern-territory,precis-forecast-northern-territory,precis-forecast-queensland,precis-forecast-queensland,precis-forecast-south-australia,precis-forecast-south-australia,precis-forecast-south-australia,precis-forecast-tasmania,precis-forecast-tasmania,precis-forecast-tasmania,precis-forecast-victoria,precis-forecast-victoria,precis-forecast-victoria,precis-forecast-western-australia,precis-forecast-western-austral
ia,public-amenities-maintained-by-mosman-council,radio-and-television-broadcasting-stations-book-internet-edition,real-estate-maps,recent-earthquakes,regional-development-australia,regional-development-australia-2011-september-2011,regional-development-australia-may-2012,reports-of-swooping-birds-mosman-municipal-council,sentinel-hotspots,sentinel-hotspots,slq-catalogue-searches,slq-catalogue-searches,slv-rural-water,slv-shipping,slwa-digital-photographic-collection,south-australian-boat-ramp-locator,south-australian-road-crash-statistics,state-library-of-victoria-online-image-collection,state-library-of-victoria-online-image-collection-inc-high-res,state-of-the-service-report-2010-11-australian-public-service-employee-survey-results,state-of-the-service-report-2010-11-australian-public-service-employee-survey-results,statistical-local-areas-1996-for-agricultural-structure-classification,surface-water-gauging-stations-part-of-the-australian-water-resources-assessment-2000-database,surface-water-gauging-stations-part-of-the-australian-water-resources-assessment-2000-database,surface-water-sdl-resource-units,surface-water-sdl-resource-units,tasmanian-herbarium,tasmanian-museum-and-art-gallery-faunal-collection".split(",") + + docsdb = couch['disclosr-documents'] if __name__ == "__main__": + orgs_list = [] + orgs_ids = {} for doc in docsdb.view('app/datasets'): + print " --- " print doc.id - if doc.value['url'] != "http://data.gov.au/data/": + + if doc.value['url'] != "http://data.gov.au/data/" and doc.value['agencyID'] != "qld": + + # Collect the package metadata. 
- pkg_name = name_munge(doc.value['metadata']['DCTERMS.Title'][:100]) - tags = doc.value['metadata']["Keywords / Tags"] - if not hasattr(tags, '__iter__'): - tags = [tags] - [re.sub('[^a-zA-Z0-9-_()]', '', tag).replace('&', 'and').lower() for tag in tags] - package_entity = { - 'name': pkg_name, - 'title': doc.value['metadata']['DCTERMS.Title'], - 'url': doc.value['metadata']['DCTERMS.Source.URI'], - - 'author': doc.value['metadata']["DCTERMS.Creator"], - 'maintainer': doc.value['metadata']["DCTERMS.Creator"], - 'licence_id': get_licence_id(doc.value['metadata']['DCTERMS.License']), - 'notes': html_to_text(doc.value['metadata']['Description']), - } - if len(tags) > 0: - package_entity['tags'] = tags - print tags - try: - #print doc.id - ckan.package_register_post(package_entity) - except CkanApiError, e: - if ckan.last_status == 409: - print "already exists" - else: - raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % ( - ckan.last_status, pkg_name, e.args)) - - print package_entity - #todo add to organisation (author/creator/maintainer) http://docs.ckan.org/en/latest/apiv3.html#examples ckan.logic.action.update.package_owner_org_update - #if 'data.gov.au Category' in doc.value['metadata'].keys(): #todo add to group - if 'Download' in doc.value['metadata'].keys(): + pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_', + doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]); + print pkg_name + if pkg_name != "": + + #add to or create organization using direct API + agency = doc.value['metadata']["Agency"] + if agency == "APS": + agency = "Australian Public Service Commission" + if agency == "Department of Broadband, Communications and the Digital Ecomomy": + agency = "Department of Broadband, Communications and the Digital Economy" + if agency == "Shared Services, Treasury Directorate": + agency = "Shared Services Procurement, Treasury Directorate" + if agency == "Treasury - Shared 
Services": + agency = "Shared Services Procurement, Treasury Directorate" + if agency == "Territory and Municipal Services (TAMS)": + agency = "Territory and Municipal Services Directorate" + if agency == "State Library of NSW": + agency = "State Library of New South Wales" + org_name = name_munge(agency[:100]) + if org_name not in orgs_list: + orgs_list = ckandirect.action.organization_list()['result'] + #print orgs_list + if org_name not in orgs_list: + try: + print "org not found, creating " + org_name + ckandirect.action.organization_create(name=org_name, title=agency, + description=agency) + orgs_list.append(org_name) + except ckanapi.ValidationError, e: + print e + raise LoaderError('Unexpected status') + else: + print "org found, adding dataset to " + org_name + + # cache org names -> id mapping + if org_name not in orgs_ids: + org = ckandirect.action.organization_show(id=org_name) + orgs_ids[org_name] = org["result"]["id"] + org_id = orgs_ids[org_name] + print "org id is " + org_id + tags = [] + creator = doc.value['metadata']["DCTERMS.Creator"] + if doc.value['agencyID'] == "AGIMO": + if len(doc.value['metadata']["Keywords / Tags"]) > 0: + if hasattr(doc.value['metadata']["Keywords / Tags"], '__iter__'): + tags = tags + doc.value['metadata']["Keywords / Tags"] + else: + tags = tags + [doc.value['metadata']["Keywords / Tags"]] + + tags = [re.sub('[^a-zA-Z0-9-_.]', '', tag.replace('&', 'and')).lower() for tag in tags if tag] + #print tags + extras = [] + + for extra_key in doc.value['metadata'].keys(): + if extra_key not in ["Description", "Content-Language", "DCTERMS.Description", + "Keywords / Tags", + "data.gov.au Category", "Download", "Permalink", "DCTERMS.Identifier"]: + if doc.value['metadata'][extra_key] != None and doc.value['metadata'][extra_key] != "": + extras.append([extra_key, doc.value['metadata'][extra_key]]) + + package_entity = { + 'name': pkg_name, + 'title': doc.value['metadata']['DCTERMS.Title'], + 'url': 
doc.value['metadata']['DCTERMS.Source.URI'], + 'tags': tags, #tags are mandatory? + 'author': creator, + 'maintainer': creator, + 'license_id': get_license_id(doc.value['metadata']['DCTERMS.License']), + 'notes': html2text.html2text(doc.value['metadata']['Description']).replace('AC/a!a','-').replace('AC/a!aC/',"'").replace("AC/a!E",":")replace("A "," "), + 'owner_org': org_id, + 'extras': extras, + 'private': (pkg_name not in goodcsvdata and pkg_name not in goodotherdata) + } + try: - pkg = ckan.package_entity_get(pkg_name) - resources = pkg.get('resources', []) - if len(resources) < len(doc.value['metadata']['Download']): - for resource in doc.value['metadata']['Download']: - print resource - # http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html - # (KML/KMZ) / (Shapefile) /(Other) - format = "plain" - if resource['format'] == '(XML)': - format = 'xml' - if resource['format'] == '(CSV/XLS)': - format = 'csv' - name = resource['href'] - if 'name' in resource.keys(): - name = resource['name'] - ckan.add_package_resource(pkg_name, resource['href'], name=name, resource_type='data', - format=format, size=human2bytes(resource['size'].replace(',', ''))) + #print package_entity + ckan.package_register_post(package_entity) + except CkanApiError, e: + if ckan.last_message == "{\"name\": [\"That URL is already in use.\"]}": + print "package already exists" else: - print "resources already exist" - except CkanApiError, e: - if ckan.last_status == 404: - print "parent dataset does not exist" - else: + print ckan.last_message raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % ( ckan.last_status, pkg_name, e.args)) - + pkg = ckan.package_entity_get(pkg_name) + + + # add resources (downloadable data files) + if 'Download' in doc.value['metadata'].keys(): + try: + + resources = pkg.get('resources', []) + if len(resources) < len(doc.value['metadata']['Download']): + for resource in doc.value['metadata']['Download']: + + # 
http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html + # (KML/KMZ) / (Shapefile) /(Other) + format = "plain" + if resource['format'] == '(XML)': + format = 'xml' + if resource['format'] == '(CSV/XLS)': + format = 'csv' + if resource['format'] == '(Shapefile)': + format = 'shp' + if resource['format'] == '(KML/KMZ)': + format = 'kml' + name = resource['href'] + if 'name' in resource.keys(): + name = resource['name'] + print resource + add_package_resource_cachedurl(ckan, pkg_name, url_fix(resource['href']), name, + format, get_license_id(doc.value['metadata']['DCTERMS.License']), + human2bytes(resource.get('size', '0B'))) + else: + print "resources already exist" + except CkanApiError, e: + if ckan.last_status == 404: + print "parent dataset does not exist" + else: + raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % ( + ckan.last_status, pkg_name, e.args)) +
--- /dev/null +++ b/documents/datagov-merge.php @@ -1,1 +1,26 @@ +<?php +include_once("../include/common.inc.php"); + + +setlocale(LC_CTYPE, 'C'); + +$db = $server->get_db('disclosr-documents'); +$datasets = Array(); +try { + $rows = $db->get_view("app", "datasets", null, true)->rows; + + foreach ($rows as $row) { + //print_r($row); + if ($row->value->url != "http://data.gov.au/data/") + $datasets[str_replace(Array("http://data.gov.au/dataset/","/"),"",$row->value->url)] = $row->id; + } +} catch (SetteeRestClientException $e) { + setteErrorHandler($e); +} +ksort($datasets); +foreach ($datasets as $datasetname => $datasetkey) { + print "$datasetname => $datasetkey<br>\n"; +} +?> +
--- /dev/null +++ b/documents/datagov-resourcereport.py @@ -1,1 +1,81 @@ +import couchdb +couch = couchdb.Server('http://127.0.0.1:5984/') +#couch = couchdb.Server('http://192.168.1.113:5984/') +import urllib +import urlparse +import httplib2 +import httplib +import csv + + +def url_fix(s, charset='utf-8'): + """Sometimes you get an URL by a user that just isn't a real + URL because it contains unsafe characters like ' ' and so on. This + function can fix some of the problems in a similar way browsers + handle data entered by the user: + + :param charset: The target charset for the URL if the url was + given as unicode string. + """ + if isinstance(s, unicode): + s = s.encode(charset, 'ignore') + if not urlparse.urlparse(s).scheme: + s = "http://"+s + scheme, netloc, path, qs, anchor = urlparse.urlsplit(s) + path = urllib.quote(path, '/%') + qs = urllib.quote_plus(qs, ':&=') + return urlparse.urlunsplit((scheme, netloc, path, qs, anchor)) + +# http://code.activestate.com/recipes/578019-bytes-to-human-human-to-bytes-converter/ +SYMBOLS = { + 'customary': ('B', 'KB', 'MB', 'GB', 'T', 'P', 'E', 'Z', 'Y'), + 'customary_ext': ('byte', 'kilo', 'mega', 'giga', 'tera', 'peta', 'exa', + 'zetta', 'iotta'), + 'iec': ('Bi', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'), + 'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi', + 'zebi', 'yobi'), +} + + +docsdb = couch['disclosr-documents'] +out = csv.writer(open("output.csv","w"), delimiter=',',quoting=csv.QUOTE_ALL) +if __name__ == "__main__": + for doc in docsdb.view('app/datasets'): + if doc.value['url'] != "http://data.gov.au/data/" and doc.value['agencyID'] != "qld": + # Collect the package metadata. 
+ pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_', + doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]); + if 'Download' in doc.value['metadata'].keys() and len(doc.value['metadata']['Download']) > 0: + for resource in doc.value['metadata']['Download']: + # http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html + # (KML/KMZ) / (Shapefile) /(Other) + format = "plain" + if resource['format'] == '(XML)': + format = 'xml' + if resource['format'] == '(CSV/XLS)': + format = 'csv' + if resource['format'] == '(Shapefile)': + format = 'shp' + if resource['format'] == '(KML/KMZ)': + format = 'kml' + name = resource['href'] + if 'name' in resource.keys(): + name = resource['name'] + if resource['href'].startswith("ftp"): + out.writerow([pkg_name, url_fix(resource['href']), name,format, "ftp", ""]) + else: + try: + h = httplib2.Http(disable_ssl_certificate_validation=True) + resp = h.request(url_fix(resource['href']), 'HEAD') + content_type = resp[0]['content-type'] if 'content-type' in resp[0].keys() else "" + out.writerow([pkg_name.encode('ascii', 'ignore'), url_fix(resource['href']).encode('ascii', 'ignore'), name.encode('ascii', 'ignore'),format, resp[0]['status'], content_type]) + except httplib2.ServerNotFoundError: + out.writerow([pkg_name.encode('ascii', 'ignore'), url_fix(resource['href']).encode('ascii', 'ignore'), name.encode('ascii', 'ignore'),format, "500","badurl"]) + except httplib.InvalidURL: + out.writerow([pkg_name.encode('ascii', 'ignore'), url_fix(resource['href']).encode('ascii', 'ignore'), name.encode('ascii', 'ignore'),format, "500","badurl"]) + except httplib2.RelativeURIError: + out.writerow([pkg_name.encode('ascii', 'ignore'), url_fix(resource['href']).encode('ascii', 'ignore'), name.encode('ascii', 'ignore'),format, "500","badurl"]) + else: + out.writerow([pkg_name.encode('ascii', 'ignore')]) +
--- a/documents/datagov.py +++ b/documents/datagov.py @@ -39,7 +39,9 @@ link = item.find("a") format = item.find(property="dc:format") linkobj = {"href":link['href'].replace("/bye?","").strip(), - "format": format.string.strip(), "size": format.next_sibling.string.strip()} + "format": format.string.strip()} + if format.next_sibling.string != None: + linkobj["size"] = format.next_sibling.string.strip() if link.string != None: linkobj["name"] = link.string.strip() doc['metadata'][last_title].append(linkobj)
--- /dev/null +++ b/documents/dataqld.py @@ -1,1 +1,28 @@ +import sys, os +import time +import scrape +from bs4 import BeautifulSoup +from unidecode import unidecode +import ckanclient + +# Instantiate the CKAN client. +ckan = ckanclient.CkanClient(base_location='https://data.qld.gov.au/api') + +# Get the package list. +package_list = ckan.package_register_get() +for package_name in package_list: +# Get the details of a package. + (url, mime_type, html) = scrape.fetchURL(scrape.docsdb, + "https://data.qld.gov.au/dataset/"+package_name , "data", "qld", False) + hash = scrape.mkhash(scrape.canonurl(url)) + print hash + doc = scrape.docsdb.get(hash) + if "metadata" not in doc.keys() or True: + ckan.package_entity_get(package_name) + package_entity = ckan.last_message + doc['type'] = "dataset" + doc['metadata'] = package_entity + print package_entity + scrape.docsdb.save(doc) +
--- a/documents/gazette.py +++ b/documents/gazette.py @@ -5,20 +5,53 @@ from unidecode import unidecode -listurl = "http://gazettes.ag.gov.au/portal/govgazonline.nsf/publications?OpenView&Start=3960" -(url, mime_type, listhtml) = scrape.fetchURL(scrape.docsdb, - listurl, "gazette", "AGD") -soup = BeautifulSoup(listhtml) -for row in soup.find_all('tr'): - if row.has_key('valign'): - for col in tr.find_all('td'): - print col.string - #url = scrape.fullurl(listurl, atag['href']) - #(url, mime_type, html) = scrape.fetchURL(scrape.docsdb, - # url, "data", "AGIMO") - #hash = scrape.mkhash(scrape.canonurl(url)) - #doc = scrape.docsdb.get(hash) - #print doc['metadata'] - #scrape.docsdb.save(doc) - #time.sleep(2) +items = 3950 +items = 1 +while True: + print str(items) + " (" +str(items/25) +" screens to go)" + listurl = "http://gazettes.ag.gov.au/portal/govgazonline.nsf/publications?OpenView&Start=" + str(items) + (listurl, mime_type, listhtml) = scrape.fetchURL(scrape.docsdb, + listurl, "gazette", "AGD", False) + for line in listhtml.split('\n'): + soup = BeautifulSoup(line) + #print line + for row in soup.find_all('tr'): + print line + if row.has_key('valign'): + i = 0 + date = "" + id = "" + type = "" + description = "" + name = "" + url = "" + for col in soup.find_all('td'): + #print ''.join(col.stripped_strings) + if i == 0: + date = ''.join(col.stripped_strings) + if i == 1: + id = ''.join(col.stripped_strings) + if i == 2: + type = ''.join(col.stripped_strings) + if i == 3: + description = ''.join(col.stripped_strings) + for link in col.findAll('a'): + if link.has_key("href"): + url = link['href'] + name = ''.join(link.stripped_strings) + print str(items) + " (" +str(items/25) +" screens to go)" + print [date, id, type, description, name, url] + itemurl = scrape.fullurl(listurl, url) + (itemurl, mime_type, html) = scrape.fetchURL(scrape.docsdb, + itemurl, "gazette", "AGD", False) + hash = scrape.mkhash(scrape.canonurl(itemurl)) + doc = scrape.docsdb.get(hash) + 
doc['metadata'] = {"date": date, "id": id, "type":type, "description":description,"name": name,"url": url} + scrape.docsdb.save(doc) + #time.sleep(2) + i = i + 1; + items = items - 25 + if items <= 0: + break +
--- a/documents/genericScrapers.py +++ b/documents/genericScrapers.py @@ -72,7 +72,8 @@ edate = date.today().strftime("%Y-%m-%d") doc = {'_id': dochash, 'agencyID': self.getAgencyID() , 'url': self.getURL(), 'docID': dochash, - "date": edate, "title": "Disclosure Log Updated", "description": self.remove_control_chars(description), "diff": diff} + "date": edate, "title": "Disclosure Log Updated", + "description": self.remove_control_chars(description), "diff": self.remove_control_chars(diff)} foidocsdb.save(doc) else: print "already saved" @@ -199,11 +200,16 @@ return table.find_all('tr') def getDate(self, content, entry, doc): - date = ''.join(content.stripped_strings).strip() - (a, b, c) = date.partition("(") - date = self.remove_control_chars(a.replace("Octber", "October").replace("1012","2012")) - print date - edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d") + strdate = ''.join(content.stripped_strings).strip() + (a, b, c) = strdate.partition("(") + strdate = self.remove_control_chars(a.replace("Octber", "October").replace("1012","2012").replace("Janrurary", "January").replace("1012","2012")) + print strdate + try: + edate = parse(strdate, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d") + except ValueError: + print >> sys.stderr, "ERROR date invalid %s " % strdate + print >> sys.stderr, "ERROR date originally %s " % ''.join(content.stripped_strings).strip() + edate = date.today().strftime("%Y-%m-%d") print edate doc.update({'date': edate}) return @@ -266,8 +272,7 @@ 'Summary of FOIrequest received by agency/minister', 'Summary of FOI request received', 'Description of FOI Request', "FOI request", 'Results 1 to 67 of 67'] - if doc['title'] not in badtitles\ - and doc['description'] != '': + if doc['title'] not in badtitles and 'description' in doc.keys() and doc['description'] != '': print "saving" foidocsdb.save(doc) else: @@ -277,6 +282,6 @@ print "header row" else: - print "ERROR number of columns incorrect" + print >> sys.stderr, "ERROR 
number of columns incorrect" print row
--- a/documents/runScrapers.sh +++ b/documents/runScrapers.sh @@ -1,10 +1,22 @@ -for f in scrapers/*.py; - do echo "Processing $f file.."; - python $f; +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +cd $DIR +echo "" > /tmp/disclosr-error +for f in scrapers/*.py; do + echo "Processing $f file.."; + md5=`md5sum /tmp/disclosr-error` + python $f 3>&1 1>&2 2>&3 | tee --append /tmp/disclosr-error; + md52=`md5sum /tmp/disclosr-error` + if [ "$md5" != "$md52" ]; then + echo "^^^^^^^^^^^^^^ $f" >> /tmp/disclosr-error; + fi if [ "$?" -ne "0" ]; then echo "error"; - sleep 2; + sleep 1; fi done +if [ -s /tmp/disclosr-error ] ; then + echo "emailling logs.."; + mail -E -s "Disclosr errors" maxious@lambdacomplex.org < /tmp/disclosr-error ; +fi
--- a/documents/scrape.py +++ b/documents/scrape.py @@ -7,14 +7,15 @@ from urlparse import urljoin import time import os +import sys import mimetypes import urllib import urlparse import socket #couch = couchdb.Server('http://192.168.1.148:5984/') -couch = couchdb.Server('http://192.168.1.113:5984/') -#couch = couchdb.Server('http://127.0.0.1:5984/') +#couch = couchdb.Server('http://192.168.1.113:5984/') +couch = couchdb.Server('http://127.0.0.1:5984/') def mkhash(input): @@ -89,7 +90,7 @@ def getLastAttachment(docsdb, url): hash = mkhash(url) doc = docsdb.get(hash) - if doc != None: + if doc != None and "_attachments" in doc.keys(): last_attachment_fname = doc["_attachments"].keys()[-1] last_attachment = docsdb.get_attachment(doc, last_attachment_fname) return last_attachment @@ -103,7 +104,7 @@ req = urllib2.Request(url) print "Fetching %s (%s)" % (url, hash) if url.startswith("mailto") or url.startswith("javascript") or url.startswith("#") or url == None or url == "": - print "Not a valid HTTP url" + print >> sys.stderr, "Not a valid HTTP url" return (None, None, None) doc = docsdb.get(hash) if doc == None: @@ -111,10 +112,15 @@ else: if (('page_scraped' in doc) and ((time.time() - doc['page_scraped']) < 60 * 24 * 14) or (scrape_again == False)): print "Uh oh, trying to scrape URL again too soon!" 
+ hash - last_attachment_fname = doc["_attachments"].keys()[-1] - last_attachment = docsdb.get_attachment(doc, last_attachment_fname) - content = last_attachment - return (doc['url'], doc['mime_type'], content.read()) + if (not doc.has_key('file_size') or doc["file_size"] != "0") and "_attachments" in doc.keys(): + last_attachment_fname = doc["_attachments"].keys()[-1] + last_attachment = docsdb.get_attachment(doc, last_attachment_fname) + content = last_attachment.read() + mime_type = doc['mime_type'] + else: + content = None + mime_type = None + return (doc['url'], mime_type, content) req.add_header("User-Agent", "Mozilla/4.0 (compatible; Prometheus webspider; owner maxious@lambdacomplex.org)") #if there is a previous version stored in couchdb, load caching helper tags @@ -159,13 +165,13 @@ #store as attachment epoch-filename except (urllib2.URLError, socket.timeout) as e: - print "error!" + print >> sys.stderr,"error!" error = "" if hasattr(e, 'reason'): error = "error %s in downloading %s" % (str(e.reason), url) elif hasattr(e, 'code'): error = "error %s in downloading %s" % (e.code, url) - print error + print >> sys.stderr, error doc['error'] = error docsdb.save(doc) return (None, None, None)
--- a/documents/scrapers/0049d35216493c545ef5f7f000e6b252.py +++ b/documents/scrapers/0049d35216493c545ef5f7f000e6b252.py @@ -42,7 +42,6 @@ 'data': {'request': '', 'session': '', 'more': ''} } - - amonpy.exception(data) + #amonpy.exception(data) pass
--- a/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py +++ b/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py @@ -18,13 +18,13 @@ if mime_type == "text/html" or mime_type == "application/xhtml+xml" or mime_type =="application/xml": # http://www.crummy.com/software/BeautifulSoup/documentation.html soup = BeautifulSoup(htcontent) - for row in soup.find(class_ = "ms-rteTable-GreyAlternating").find_all('tr'): + rowtitle = soup.find(class_ = "wc-title").find("h1").string + if rowtitle != None: + description = rowtitle + ": " + for row in soup.find(class_ ="wc-content").find_all('td'): if row != None: - rowtitle = row.find('th').string - if rowtitle != None: - description = description + "\n" + rowtitle + ": " - for text in row.find('td').stripped_strings: - description = description + text + for text in row.stripped_strings: + description = description + text + "\n" for atag in row.find_all("a"): if atag.has_key('href'): links.append(scrape.fullurl(link,atag['href'])) @@ -37,7 +37,7 @@ def getColumnCount(self): return 2 def getTable(self,soup): - return soup.find(class_ = "ms-rteTable-GreyAlternating") + return soup.find(class_ = "ms-rteTable-default") def getColumns(self,columns): (date, title) = columns return (title, date, title, title, None)
--- a/documents/scrapers/24bd71114d3975ed9a63ad29624c62c9.py +++ b/documents/scrapers/24bd71114d3975ed9a63ad29624c62c9.py @@ -7,7 +7,7 @@ #http://www.doughellmann.com/PyMOTW/abc/ class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper): def getTable(self,soup): - return soup.find(id = "inner_content") + return soup.find(class_="tborder") def getColumnCount(self): return 2 def getColumns(self,columns):
--- a/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py +++ b/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py @@ -8,40 +8,14 @@ from datetime import * #http://www.doughellmann.com/PyMOTW/abc/ -class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper): - def getTable(self,soup): - return soup.find(class_ = "inner-column").table - def getRows(self,table): - return table.tbody.find_all('tr',recursive=False) +class ScraperImplementation(genericScrapers.GenericHTMLDisclogScraper): def getColumnCount(self): - return 3 - def getColumns(self,columns): - (date, title, description) = columns - return (date, date, title, description, None) - def getDate(self, content, entry, doc): - i = 0 - date = "" - for string in content.stripped_strings: - if i ==1: - date = string - i = i+1 - edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d") - print edate - doc.update({'date': edate}) - return - def getTitle(self, content, entry, doc): - i = 0 - title = "" - for string in content.stripped_strings: - if i < 2: - title = title + string - i = i+1 - doc.update({'title': title}) - #print title - return + return 0 if __name__ == '__main__': - print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper) - print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper) +#http://www.csiro.au/Portals/About-CSIRO/How-we-work/Governance/FOI-Request-Disclosure-Log-2012-13.aspx +#http://www.csiro.au/Portals/About-CSIRO/How-we-work/Governance/FOI-Request-Disclosure-Log-2011-12.aspx + print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericHTMLDisclogScraper) + print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericHTMLDisclogScraper) ScraperImplementation().doScrape()
--- a/documents/scrapers/6fa04af95fbe7de96daa2c7560e0aad3.py +++ b/documents/scrapers/6fa04af95fbe7de96daa2c7560e0aad3.py @@ -6,8 +6,6 @@ #http://www.doughellmann.com/PyMOTW/abc/ class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper): - def getTable(self,soup): - return soup.find(id = "content_div_50269").table def getColumns(self,columns): (id, date, title, description, notes) = columns return (id, date, title, description, notes)
--- a/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py +++ b/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py @@ -21,11 +21,15 @@ d.make_links_absolute(base_url = self.getURL()) for table in d('table').items(): title= table('thead').text() - print title + print self.remove_control_chars(title) (idate,descA,descB,link,deldate,notes) = table('tbody tr').map(lambda i, e: pq(e).children().eq(1).text()) links = table('a').map(lambda i, e: pq(e).attr('href')) description = descA+" "+descB - edate = parse(idate[:12], dayfirst=True, fuzzy=True).strftime("%Y-%m-%d") + try: + edate = parse(idate[:12], dayfirst=True, fuzzy=True).strftime("%Y-%m-%d") + except ValueError: + edate = date.today().strftime("%Y-%m-%d") + pass print edate dochash = scrape.mkhash(self.remove_control_chars(title)) doc = foidocsdb.get(dochash)
--- a/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py +++ b/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py @@ -18,10 +18,10 @@ if mime_type == "text/html" or mime_type == "application/xhtml+xml" or mime_type =="application/xml": # http://www.crummy.com/software/BeautifulSoup/documentation.html soup = BeautifulSoup(htcontent) - for text in soup.find(id="divFullWidthColumn").stripped_strings: + for text in soup.find(class_ = "mainContent").stripped_strings: description = description + text.encode('ascii', 'ignore') - for atag in soup.find(id="divFullWidthColumn").find_all("a"): + for atag in soup.find(id="SortingTable").find_all("a"): if atag.has_key('href'): links.append(scrape.fullurl(link,atag['href']))
--- a/documents/scrapers/a687a9eaab9e10e9e118d3fd7cf0e13a.py +++ b/documents/scrapers/a687a9eaab9e10e9e118d3fd7cf0e13a.py @@ -7,11 +7,11 @@ #http://www.doughellmann.com/PyMOTW/abc/ class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper): def getTable(self,soup): - return soup.find(id="ctl00_ContentPlaceHolderMainNoAjax_EdtrTD1494_2").table + return soup.find(id="int-content").table def getColumnCount(self): - return 4 + return 3 def getColumns(self,columns): - (blank,id, title,date) = columns + (id, title,date) = columns return (id, date, title, title, None) if __name__ == '__main__':
--- /dev/null +++ b/documents/scrapers/b0ca7fddcd1c965787daea47f2d32e0a.py @@ -1,1 +1,17 @@ +import sys,os +sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../')) +import genericScrapers +import scrape +from bs4 import BeautifulSoup +#http://www.doughellmann.com/PyMOTW/abc/ +class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper): + def getColumns(self,columns): + (id, date, title, description, notes) = columns + return (id, date, title, description, notes) + +if __name__ == '__main__': + print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper) + print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper) + ScraperImplementation().doScrape() +
--- a/documents/scrapers/dfd7414bb0c21a0076ab559901ae0588.py +++ b/documents/scrapers/dfd7414bb0c21a0076ab559901ae0588.py @@ -10,7 +10,7 @@ (id, date, title, description, notes) = columns return (id, date, title, description, notes) def getTable(self,soup): - return soup.find(class_ = "content") + return soup.find(class_ = "simpletable") if __name__ == '__main__': print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- a/documents/scrapers/f2ab2908d8ee56ed8d995ef4187e75e6.py +++ b/documents/scrapers/f2ab2908d8ee56ed8d995ef4187e75e6.py @@ -10,7 +10,7 @@ (id, date, title, description, notes) = columns return (id, date, title, description, notes) def getTable(self,soup): - return soup.find(id = "content").table + return soup.find("table") if __name__ == '__main__': print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- a/documents/template.inc.php +++ b/documents/template.inc.php @@ -58,11 +58,11 @@ <div class="navbar navbar-inverse navbar-fixed-top"> <div class="navbar-inner"> <div class="container-fluid"> - <a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse"> + <!--<a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse"> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> - </a> + </a> --> <a class="brand" href="#">Australian Disclosure Logs</a> <div class="nav-collapse collapse"> @@ -79,6 +79,7 @@ <li><a href="agency.php">By Agency</a></li> <li><a href="date.php">By Date</a></li> <li><a href="disclogsList.php">List of Disclosure Logs</a></li> + <li><a href="charts.php">Charts</a></li> <li><a href="about.php">About</a></li> </ul>
--- /dev/null +++ b/employees-fail.php @@ -1,1 +1,134 @@ +<?php +include_once('include/common.inc.php'); +include_header('Charts'); +$db = $server->get_db('disclosr-agencies'); +?> +<div class="foundation-header"> + <h1><a href="about.php">Charts</a></h1> + <h4 class="subheader">Lorem ipsum.</h4> +</div> +<div id="scores" style="width:900px;height:500px;"></div> +<script id="source"> + window.onload = function() { + $(document).ready(function() { + var d1 = []; + var scorelabels = []; + <?php + try { + $rows = $db->get_view("app", "scoreHas?group=true", null, true)->rows; + + $dataValues = Array(); + foreach ($rows as $row) { + $dataValues[$row->value] = $row->key; + } + $i = 0; + ksort($dataValues); + foreach ($dataValues as $value => $key) { + + echo " d1.push([$i, $value]);" . PHP_EOL; + echo " scorelabels.push('$key');" . PHP_EOL; + $i++; + } + } catch (SetteeRestClientException $e) { + setteErrorHandler($e); + } + ?> + function scoretrackformatter(obj) { + if (scorelabels[Math.floor(obj.x)]) { + return (scorelabels[Math.floor(obj.x)])+"="+obj.y; + + } else { + return ""; + } + } + function scoretickformatter(val, axis) { + if (scorelabels[Math.floor(val)]) { + return '<p style="margin-top:8em;-webkit-transform:rotate(-90deg);">'+(scorelabels[Math.floor(val)])+"</b>"; + + } else { + return ""; + } + } + Flotr.draw(document.getElementById("scores"), [ {data: d1}], { + HtmlText: true, + bars : { + show : true + }, + mouse : { + track : true, + relative : true, + trackFormatter: scoretrackformatter + },yaxis: { + autoscaling: true + }, + xaxis: { + autoscaling: true, + minorTickFreq: 0.6, + noTicks : scorelabels.length, + tickFormatter: scoretickformatter + } + }); + + + + + + + + +<div id="employees" style="width:1000px;height:900px;"></div> +var emplabels = []; +function emptrackformatter(obj) { + + return (obj.series.label)+" = "+obj.y+" in "+emplabels[Math.floor(obj.x)]; + + } + function emptickformatter(val, axis) { + if (emplabels[Math.floor(val)]) { + 
return '<p style="margin-top:8em;-webkit-transform:rotate(-90deg);">'+(emplabels[Math.floor(val)])+"</b>"; + + } else { + return ""; + } + } +function onDataReceived(series) { + emplabels = series.labels; + Flotr.draw(document.getElementById("employees"), series.data, { + mouse : { + track : true, + relative : true, + trackFormatter: emptrackformatter + },yaxis: { + max: 10000, + scaling: 'logarithmic' + }, + xaxis: { + minorTickFreq: 1, + noTicks: emplabels.length, + showMinorLabels: true, + tickFormatter: emptickformatter + }, + legend: { + show: false + } + }); + } + + $.ajax({ + url: "admin/exportEmployees.csv.php?format=json", + method: 'GET', + dataType: 'json', + success: onDataReceived + }); + + + }); + }; + +</script> + +<?php +include_footer(); +?> +
--- a/getAgency.php +++ b/getAgency.php @@ -2,11 +2,12 @@ include_once('include/common.inc.php'); -function displayValue($key, $value, $mode) { +function displayValue($key, $value, $mode) +{ global $db, $schemas; - $ignoreKeys = Array("metadata" ,"metaTags", "statistics","rtkURLs","rtkDescriptions"); + $ignoreKeys = Array("metadata", "metaTags", "statistics", "rtkURLs", "rtkDescriptions"); if ($mode == "view") { - if (strpos($key, "_") === 0 || in_array($key,$ignoreKeys)) + if (strpos($key, "_") === 0 || in_array($key, $ignoreKeys)) return; echo "<tr>"; @@ -22,7 +23,8 @@ echo "<li "; if (isset($schemas['agency']["properties"][$key]['x-property'])) { echo ' property="' . $schemas['agency']["properties"][$key]['x-property'] . '" '; - } if (isset($schemas['agency']["properties"][$key]['x-itemprop'])) { + } + if (isset($schemas['agency']["properties"][$key]['x-itemprop'])) { echo ' itemprop="' . $schemas['agency']["properties"][$key]['x-itemprop'] . '" '; } echo " >"; @@ -62,7 +64,7 @@ </div>"; } else { if (strpos($key, "_") === 0) { - echo"<input type='hidden' id='$key' name='$key' value='$value'/>"; + echo "<input type='hidden' id='$key' name='$key' value='$value'/>"; } else if ($key == "parentOrg") { echo "<label for='$key'>$key</label><select id='$key' name='$key'><option value=''> Select... </option>"; $rows = $db->get_view("app", "byDeptStateName")->rows; @@ -70,7 +72,7 @@ foreach ($rows as $row) { echo "<option value='{$row->value}'" . (($row->value == $value) ? "SELECTED" : "") . " >" . str_replace("Department of ", "", $row->key) . 
"</option>"; } - echo" </select>"; + echo " </select>"; } else { echo "<label>$key</label><input class='input-text' type='text' id='$key' name='$key' value='$value'/>"; if ((strpos($key, "URL") > 0 || $key == 'website') && $value != "") { @@ -85,7 +87,8 @@ // } -function addDefaultFields($row) { +function addDefaultFields($row) +{ global $schemas; $defaultFields = array_keys($schemas['agency']['properties']); foreach ($defaultFields as $defaultField) { @@ -119,103 +122,103 @@ // by name = startkey="Ham"&endkey="Ham\ufff0" // edit? - $obj = $db->get($_REQUEST['id']); - include_header(isset($obj->name) ? $obj->name : ""); +$obj = $db->get($_REQUEST['id']); +include_header(isset($obj->name) ? $obj->name : ""); //print_r($row); - if (sizeof($_POST) > 0) { +if (sizeof($_POST) > 0) { //print_r($_POST); - foreach ($_POST as $postkey => $postvalue) { - if ($postvalue == "") { + foreach ($_POST as $postkey => $postvalue) { + if ($postvalue == "") { + unset($_POST[$postkey]); + } + if (is_array($postvalue)) { + if (count($postvalue) == 1 && $postvalue[0] == "") { unset($_POST[$postkey]); - } - if (is_array($postvalue)) { - if (count($postvalue) == 1 && $postvalue[0] == "") { - unset($_POST[$postkey]); - } else { - foreach ($_POST[$postkey] as $key => &$value) { - if ($value == "") { - unset($_POST[$postkey][$key]); - } + } else { + foreach ($_POST[$postkey] as $key => &$value) { + if ($value == "") { + unset($_POST[$postkey][$key]); } } } } - if (isset($_POST['_id']) && $db->get_rev($_POST['_id']) == $_POST['_rev']) { - echo "Edited version was latest version, continue saving"; - $newdoc = $_POST; - $newdoc['metadata']['lastModified'] = time(); - $obj = $db->save($newdoc); - } else { - echo "ALERT doc revised by someone else while editing. 
Document not saved."; - } - } - - $mode = "view"; - $rowArray = object_to_array($obj); - ksort($rowArray); - if ($mode == "edit") { - $row = addDefaultFields($rowArray); + } + if (isset($_POST['_id']) && $db->get_rev($_POST['_id']) == $_POST['_rev']) { + echo "Edited version was latest version, continue saving"; + $newdoc = $_POST; + $newdoc['metadata']['lastModified'] = time(); + $obj = $db->save($newdoc); } else { - $row = $rowArray; - } - - if ($mode == "view") { - echo ' <div class="container-fluid"> + echo "ALERT doc revised by someone else while editing. Document not saved."; + } +} + +$mode = "view"; +$rowArray = object_to_array($obj); +ksort($rowArray); +if ($mode == "edit") { + $row = addDefaultFields($rowArray); +} else { + $row = $rowArray; +} + +if ($mode == "view") { + echo ' <div class="container-fluid"> <div class="row-fluid"> <div class="span3"> <div class="well sidebar-nav"> <ul class="nav nav-list"> <li class="nav-header">Statistics</li>'; - - if (isset($row['statistics']['employees'])) { - echo '<div><i class="icon-user" style="float:left"></i><p style="margin-left:16px;">'; - $keys = array_keys($row['statistics']['employees']); - $lastkey = $keys[count($keys)-1]; - echo $row['statistics']['employees'][$lastkey]['value'].' employees <small>('.$lastkey.')</small>'; - echo '</div>'; - } - if (isset($row['statistics']['budget'])) { - echo '<div><i class="icon-shopping-cart" style="float:left"></i><p style="margin-left:16px;">'; - $keys = array_keys($row['statistics']['budget']); - $lastkey = $keys[count($keys)-1]; - echo "$".number_format(floatval($row['statistics']['budget'][$lastkey]['value'])).' <small>('.$lastkey.' budget)</small>'; - echo '</div>'; - } + + if (isset($row['statistics']['employees'])) { + echo '<div><i class="icon-user" style="float:left"></i><p style="margin-left:16px;">'; + $keys = array_keys($row['statistics']['employees']); + $lastkey = $keys[count($keys) - 1]; + echo $row['statistics']['employees'][$lastkey]['value'] . 
' employees <small>(' . $lastkey . ')</small>'; + echo '</div>'; + } + if (isset($row['statistics']['budget'])) { + echo '<div><i class="icon-shopping-cart" style="float:left"></i><p style="margin-left:16px;">'; + $keys = array_keys($row['statistics']['budget']); + $lastkey = $keys[count($keys) - 1]; + echo "$" . number_format(floatval($row['statistics']['budget'][$lastkey]['value'])) . ' <small>(' . $lastkey . ' budget)</small>'; + echo '</div>'; + } echo ' </ul> </div><!--/.well --> </div><!--/span--> <div class="span9">'; - echo '<div itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization" about="#' . $row['_id'] . '">'; - echo '<div class="hero-unit"> + echo '<div itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization" about="#' . $row['_id'] . '">'; + echo '<div class="hero-unit"> <h1 itemprop="name">' . $row['name'] . '</h1>'; - if (isset($row['description'])) { - echo '<p>'.$row['description'].'</p>'; - } - echo '</div><table width="100%">'; - echo "<tr><th>Field Name</th><th>Field Value</th></tr>"; - } - if ($mode == "edit") { - ?> - <input id="addfield" type="button" value="Add Field"/> - <script> - window.onload = function() { - $(document).ready(function() { - // put all your jQuery goodness in here. - // http://charlie.griefer.com/blog/2009/09/17/jquery-dynamically-adding-form-elements/ - $('#addfield').click(function() { - var field_name=window.prompt("fieldname?",""); - if (field_name !="") { - $('#submitbutton').before($('<span></span>') - .append("<label>"+field_name+"</label>") - .append("<input class='input-text' type='text' id='"+field_name+"' name='"+field_name+"'/>") - ); - } - }); - }); - }; - </script> - <form id="editform" class="nice" method="post"> - <?php + if (isset($row['description'])) { + echo '<p>' . $row['description'] . 
'</p>'; + } + echo '</div><table width="100%">'; + echo "<tr><th>Field Name</th><th>Field Value</th></tr>"; +} +if ($mode == "edit") { +?> +<input id="addfield" type="button" value="Add Field"/> +<script> + window.onload = function () { + $(document).ready(function () { + // put all your jQuery goodness in here. + // http://charlie.griefer.com/blog/2009/09/17/jquery-dynamically-adding-form-elements/ + $('#addfield').click(function () { + var field_name = window.prompt("fieldname?", ""); + if (field_name != "") { + $('#submitbutton').before($('<span></span>') + .append("<label>" + field_name + "</label>") + .append("<input class='input-text' type='text' id='" + field_name + "' name='" + field_name + "'/>") + ); + } + }); + }); + }; +</script> +<form id="editform" class="nice" method="post"> + <?php } foreach ($row as $key => $value) { @@ -223,7 +226,7 @@ } if ($mode == "view") { echo "</table></div>"; - echo ' </div><!--/span--> + echo ' </div><!--/span--> </div><!--/row--> </div><!--/span--> </div><!--/row-->'; @@ -231,16 +234,16 @@ if ($mode == "edit") { echo '<input id="submitbutton" type="submit"/></form>'; } -} else { + } else { // show all list - include_header('Agencies'); - echo ' <div class="container-fluid"> + include_header('Agencies'); + echo ' <div class="container-fluid"> <div class="row-fluid"> <div class="span3"> <div class="well sidebar-nav"> <ul class="nav nav-list"> <li class="nav-header">Sidebar</li>'; - echo ' </ul> + echo ' </ul> </div><!--/.well --> </div><!--/span--> <div class="span9"> @@ -251,28 +254,28 @@ </div> <div class="row-fluid"> <div class="span4">'; - try { - $rows = $db->get_view("app", "byCanonicalName")->rows; - //print_r($rows); - $rowCount = count($rows); - foreach ($rows as $i => $row) { - if ($i % ($rowCount/3) == 0 && $i != 0 && $i != $rowCount -2 ) echo '</div><div class="span4">'; - // print_r($row); - echo '<span itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization 
foaf:Organization" about="getAgency.php?id=' . $row->value->_id . '"> + try { + $rows = $db->get_view("app", "byCanonicalName")->rows; + //print_r($rows); + $rowCount = count($rows); + foreach ($rows as $i => $row) { + if ($i % ($rowCount / 3) == 0 && $i != 0 && $i != $rowCount - 2) echo '</div><div class="span4">'; + // print_r($row); + echo '<span itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization foaf:Organization" about="getAgency.php?id=' . $row->value->_id . '"> <a href="getAgency.php?id=' . $row->value->_id . '" rel="schema:url foaf:page" property="schema:name foaf:name" itemprop="url"><span itemprop="name">' . - (isset($row->value->name) ? $row->value->name : "ERROR NAME MISSING") - . '</span></a></span><br><br>'; - } - - } catch (SetteeRestClientException $e) { - setteErrorHandler($e); - } - echo ' </div><!--/span--> + (isset($row->value->name) ? $row->value->name : "ERROR NAME MISSING") + . '</span></a></span><br><br>'; + } + + } catch (SetteeRestClientException $e) { + setteErrorHandler($e); + } + echo ' </div><!--/span--> </div><!--/row--> </div><!--/span--> </div><!--/row-->'; -} - -include_footer(); -?> - + } + + include_footer(); + ?> +
--- a/include/common.inc.php +++ b/include/common.inc.php @@ -19,7 +19,7 @@ Requests::register_autoloader(); $ENV = "DEV"; -if (isset($_SERVER['SERVER_NAME']) && $_SERVER['SERVER_NAME'] != 'localhost') { +if (false && isset($_SERVER['SERVER_NAME']) && $_SERVER['SERVER_NAME'] != 'localhost') { require $basePath."lib/amon-php/amon.php"; Amon::config(array('address'=> 'http://127.0.0.1:2464',
--- a/include/couchdb.inc.php +++ b/include/couchdb.inc.php @@ -14,8 +14,8 @@ } else if (php_uname('n') == "ikurt-20") { - $serverAddr = 'http://192.168.1.113:5984/'; - //$serverAddr = 'http://127.0.0.1:5984/'; + //$serverAddr = 'http://192.168.1.113:5984/'; + $serverAddr = 'http://127.0.0.1:5984/'; } else { $serverAddr = 'http://127.0.0.1:5984/'; }
--- a/include/template.inc.php +++ b/include/template.inc.php @@ -28,6 +28,9 @@ .sidebar-nav { padding: 9px 0; } + .flotr-dummy-div { + margin-left: -999px; + } </style> <link href="<?php echo $basePath ?>css/bootstrap-responsive.min.css" rel="stylesheet"> <!--[if lt IE 9]> @@ -54,6 +57,7 @@ <div class="nav-collapse collapse"> <ul class="nav"> <li><a href="getAgency.php">Agencies</a></li> + <li><a href="ranking.php">Open Gov Ranking</a></li> <li><a href="headcount.php">Employee Headcount Graph</a></li> <li><a href="budget.php">Budget Graph</a></li> <li><a href="about.php">About/FAQ</a></li>
--- a/js/bubbletree +++ b/js/bubbletree
--- a/js/flotr2 +++ b/js/flotr2
--- a/js/sigma +++ b/js/sigma
--- a/lib/phpquery +++ b/lib/phpquery
--- /dev/null +++ b/ranking.php @@ -1,1 +1,213 @@ - +<?php +include_once('include/common.inc.php'); +include_header('Open Gov Rankings'); +$db = $server->get_db('disclosr-agencies'); +?> +<div class="foundation-header"> + <h1><a href="about.php">Open Government Rankings</a></h1> + <h4 class="subheader"></h4> +</div> +<table> + <?php + $agenciesdb = $server->get_db('disclosr-agencies'); + //$docsdb = $server->get_db('disclosr-documents'); + $scoredagencies = Array(); + $scores = Array(); + $columnKeys = Array(); + + try { + $rows = $agenciesdb->get_view("app", "all", null, true)->rows; + + + if ($rows) { + foreach ($rows as $row) { + $columns = Array(); + foreach ($row->value as $key => $value) { + if ((strstr($key, "has") || strstr($key, "URL")) && $key != "rtkURLs") { + //echo "$key<br>"; + $columns[$key] = $value; + } + } + //print_r(array_keys($columns)); + $columnKeys = array_unique(array_merge($columnKeys, array_keys($columns))); + //print_r($columnKeys); + $score = count($columns); + $scores[$score]++; + $scoredagencies[] = Array("id"=> $row->key, "website"=> $row->value->website, "name" => $row->value->name, "columns" => $columns, "score" => $score); + } + } + + } catch (SetteeRestClientException $e) { + setteErrorHandler($e); + } + function cmp($a, $b) + { + if ($a['score'] == $b['score']) { + return strcmp($a['name'], $b['name']); + } + return ($a['score'] > $b['score']) ? -1 : 1; + } + + usort($scoredagencies, "cmp"); + echo "<tr>"; + echo "<th>Agency Name</th>"; + echo "<th>Score</th>"; + foreach ($columnKeys as $columnID) { + echo "<th>" . (isset($schemas['agency']["properties"][$columnID]['x-title']) ? $schemas['agency']["properties"][$columnID]['x-title'] : "<i>$columnID</i>") . "</th>"; + } + echo "</tr>"; + foreach ($scoredagencies as $scoredagency) { + echo "<tr>"; + echo "<td><b><a href='getAgency.php?id=" . $scoredagency['id'] . "'>". $scoredagency['name'] . "</a></b></td>"; + echo "<td><b>" . $scoredagency['score'] . 
"</b></td>"; + foreach ($columnKeys as $key) { + echo "<td style='text-align: center;'>"; + if (isset($scoredagency['columns'][$key])) { + $value = $scoredagency['columns'][$key]; + if (is_array($value)) { + if (count($value) == 1) { + $href = $value[0]; + } else { + $href = $value[0]; + } + + } else { + $href = $value; + } + if ($href[0] == "@") { + $href = str_replace("@","https://twitter.com/",$href); + } + //$href= urlencode($href); + + echo "<font color='lightgreen'>"; + + if (strstr($href, "http")) { + echo "<a title='Yes' href='$href' style='color:lightgreen;'>✓</a>"; + } else { + echo "✓"; + } + + echo "</font>"; + } else { + echo "<font color='orange'><abbr title='No'>✘</abbr></font>"; + } + echo "</td>"; + } + echo "</tr>\n"; + } + ?> +</table><br> +<div id="criteria" style="width:500px;height:900px;"></div> +<div id="scores" style="width:900px;height:500px;"></div> +<script id="source"> + window.onload = function () { + $(document).ready(function () { + var d1 = []; + var scorelabels = []; + <?php + try { + $rows = $db->get_view("app", "scoreHas?group=true", null, true)->rows; + + + $dataValues = Array(); + foreach ($rows as $row) { + $dataValues[$row->value] = $row->key; + } + $i = 0; + ksort($dataValues); + foreach ($dataValues as $value => $key) { + + echo " d1.push([$value, $i]);" . PHP_EOL; + echo " scorelabels.push('$key');" . 
PHP_EOL; + $i++; + } + } catch (SetteeRestClientException $e) { + setteErrorHandler($e); + } + ?> + function scoretrackformatter(obj) { + if (scorelabels[Math.floor(obj.y)]) { + return (scorelabels[Math.floor(obj.y)]) + "=" + obj.x; + + } else { + return ""; + } + } + + function scoretickformatter(val, axis) { + if (scorelabels[Math.floor(val)]) { + return (scorelabels[Math.floor(val)]) ; + + } else { + return ""; + } + } + + Flotr.draw(document.getElementById("criteria"), [ + {data: d1} + ], { + title: 'Total count of agencies with criteria', + HtmlText: true, + bars: { + show: true, + horizontal: true + }, + mouse: { + track: true, + relative: true, + trackFormatter: scoretrackformatter + }, yaxis: { + autoscaling: true, + minorTickFreq: 0.6, + noTicks: scorelabels.length, + tickFormatter: scoretickformatter + }, + xaxis: { + autoscaling: true + + } + }); + + var d2 = []; + <?php + try { + + ksort($scores); + foreach ($scores as $key => $value) { + + echo " d2.push([$key,$value]);" . PHP_EOL; + $i++; + } + } catch (SetteeRestClientException $e) { + setteErrorHandler($e); + } + ?> + + + Flotr.draw(document.getElementById("scores"), [ + {data: d2} + ], { + title: 'Frequency distribution of Scores', + HtmlText: true, + bars: { + show: true + }, + mouse: { + track: true, + relative: true + }, yaxis: { + autoscaling: true + }, + xaxis: { + autoscaling: true + + } + }); + + }); + }; +</script> +<?php +include_footer(); +?> +