load datasets into scraper then into CKAN filestore


Former-commit-id: ef39f297007c1ad1e7edee2c2819723b076ae3f4

--- a/admin/refreshDesignDoc.php
+++ b/admin/refreshDesignDoc.php
@@ -40,6 +40,9 @@
 $obj->views->byURL->map = "function(doc) {\n  emit(doc.url, doc);\n}";
 $obj->views->agency->map = "function(doc) {\n  emit(doc.agencyID, doc);\n}";
 $obj->views->byWebServer->map = "function(doc) {\n  emit(doc.web_server, doc);\n}";
+
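+// datasets: one row per scraped document whose fieldName is "data";
+// datasetGroups: one row per "data.gov.au Category" tag, keyed by tag with the dataset URL as the value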
+$obj->views->datasets->map = "function(doc) {\nif (doc.fieldName == \"data\") {\n  emit(doc._id, doc);\n}\n}";
+$obj->views->datasetGroups->map = "function(doc) {\nif (doc.fieldName == \"data\") {\n  doc.metadata[\"data.gov.au Category\"] && doc.metadata[\"data.gov.au Category\"].forEach(function(tag) {\n emit(tag, doc.url); \n  });\n}\n}";
 $obj->views->getValidationRequired->map = "function(doc) {\nif (doc.mime_type == \"text/html\" \n&& typeof(doc.validation) == \"undefined\") {\n  emit(doc._id, doc._attachments);\n}\n}";
 $docdb->save($obj, true);
 

file:b/disclosr.iml (new)
--- /dev/null
+++ b/disclosr.iml
@@ -1,1 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="WEB_MODULE" version="4">
+  <component name="FacetManager">
+    <facet type="Python" name="Python">
+      <configuration sdkName="" />
+    </facet>
+  </component>
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
 
+

--- /dev/null
+++ b/documents/datagov-export-groups.py
@@ -1,1 +1,81 @@
+import ckanclient
+import couchdb
+from ckanclient import CkanApiError
+import re
 
+
+class LoaderError(Exception):
+    pass
+
+# Instantiate the CKAN client.
+#ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api',    api_key='b47b24cd-591d-40c1-8677-d73101d56d1b')
+api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc'
+ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api',
+                             api_key=api_key)
+couch = couchdb.Server('http://127.0.0.1:5984/')
+#couch = couchdb.Server('http://192.168.1.113:5984/')
+
+# https://github.com/okfn/ckanext-importlib
+def munge(name):
+    # convert spaces to underscores
+    name = re.sub(' ', '_', name).lower()
+    # convert symbols to dashes
+    name = re.sub('[:]', '_-', name).lower()
+    name = re.sub('[/]', '-', name).lower()
+    # take out not-allowed characters
+    name = re.sub('[^a-zA-Z0-9-_]', '', name).lower()
+    # remove double underscores
+    name = re.sub('__', '_', name).lower()
+    return name
+
+
+def name_munge(input_name):
+    return munge(input_name.replace(' ', '').replace('.', '_').replace('&', 'and'))
+
+
+docsdb = couch['disclosr-documents']
+
+if __name__ == "__main__":
+    groups = {}
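+    # fold the datasetGroups view into {category name: [CKAN package slugs]} so each
+    # group is registered once with its full package list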
+    for doc in docsdb.view('app/datasetGroups'):
+            group_name = doc.key
+            if group_name != "Not specified":
+                pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
+                                  doc.value.replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
+                if group_name in groups.keys():
+                    groups[group_name] = list(set(groups[group_name] + [pkg_name]))
+                else:
+                    groups[group_name] = [pkg_name]
+
+    # add dataset to group(s)
+    for group_name in groups.keys():
+        if group_name != "Not specified":
+            group_url = name_munge(group_name[:100])
+            print group_name
+            print groups[group_name]
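+            # try to fetch the existing group and merge in the new packages; a 404 from
+            # the API means the group does not exist yet and must be created instead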
+            try:
+                # Update the group details
+                group_entity = ckan.group_entity_get(group_url)
+                print "group "+group_name+" exists"
+                if 'packages' in group_entity.keys():
+                    group_entity['packages'] = list(set(group_entity['packages'] + groups[group_name]))
+                else:
+                    group_entity['packages'] = groups[group_name]
+                ckan.group_entity_put(group_entity)
+            except CkanApiError, e:
+                if ckan.last_status == 404:
+                    print "group "+group_name+" does not exist, creating"
+                    group_entity = {
+                        'name': group_url,
+                        'title': group_name,
+                        'description': group_name,
+                        'packages': groups[group_name]
+                    }
+                    #print group_entity
+                    ckan.group_register_post(group_entity)
+                elif ckan.last_status == 409:
+                    print "group already exists"
+                else:
+                    raise LoaderError('Unexpected status %s adding to group under \'%s\': %r' % (
+                        ckan.last_status, pkg_name, e.args))
+

--- /dev/null
+++ b/documents/datagov-export.py
@@ -1,1 +1,329 @@
-
+# coding=utf-8
+import ckanclient
+import couchdb
+from ckanclient import CkanApiError
+import re
+import html2text # aaronsw :(
+import ckanapi # https://github.com/open-data/ckanapi
+import scrape
+import datetime, os, hashlib
+
+
+class LoaderError(Exception):
+    pass
+
+
+def add_package_resource_cachedurl(ckan, package_name, url, name, format, size, **kwargs):
+    # fileupload
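+    # rough flow: ask CKAN storage for a one-off upload form, pull the file through the
+    # scraper cache (scrape.fetchURL), multipart-POST it into the FileStore, then attach
+    # the resulting FileStore URL to the package as a resource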
+    ts = datetime.datetime.isoformat(datetime.datetime.now()).replace(':', '').split('.')[0]
+
+    file_key = os.path.join(ts, name)
+
+    auth_dict = ckan.storage_auth_get('/form/' + file_key, {})
+
+    fields = [(kv['name'].encode('ascii'), kv['value'].encode('ascii'))
+              for kv in auth_dict['fields']]
+    (url, mime_type, content) = scrape.fetchURL(scrape.docsdb,
+                                                url, "dataset_resource", "AGIMO", False)
+
+    files = [('file', os.path.basename(file_key), content)]
+
+    errcode, body = ckan._post_multipart(auth_dict['action'].encode('ascii'), fields, files)
+
+    if errcode == 200:
+        file_metadata = ckan.storage_metadata_get(file_key)
+        (url, msg) = file_metadata['_location'], ''
+    else:
+        (url, msg) = '', body
+        # fileupload done
+
+    if url == '':
+        raise CkanApiError(msg)
+    m = hashlib.sha1(content)
+    #todo mime-type detection based on content
+    r = dict(name=name,
+             mimetype=mime_type,
+             format=format,
+             hash=m.hexdigest(), size=size, url=url)
+
+    r.update(kwargs)
+    if not r.has_key('name'): r['name'] = url
+
+    p = ckan.package_entity_get(package_name)
+    p['resources'].append(r)
+    return ckan.package_entity_put(p)
+
+
+# Instantiate the CKAN client.
+api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc'
+server = 'data.disclosurelo.gs'
+
+ckan = ckanclient.CkanClient(base_location='http://' + server + '/api',
+                             api_key=api_key)
+ckandirect = ckanapi.RemoteCKAN('http://' + server, api_key=api_key)
+couch = couchdb.Server('http://127.0.0.1:5984/')
+#couch = couchdb.Server('http://192.168.1.113:5984/')
+
+import urllib
+import urlparse
+
+
+def url_fix(s, charset='utf-8'):
+    """Sometimes you get an URL by a user that just isn't a real
+    URL because it contains unsafe characters like ' ' and so on.  This
+    function can fix some of the problems in a similar way browsers
+    handle data entered by the user:
+
+    >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)')
+    'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
+
+    :param charset: The target charset for the URL if the url was
+                    given as unicode string.
+    """
+    if isinstance(s, unicode):
+        s = s.encode(charset, 'ignore')
+    if not urlparse.urlparse(s).scheme:
+        s = "http://" + s
+    scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
+    path = urllib.quote(path, '/%')
+    qs = urllib.quote_plus(qs, ':&=')
+    return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))
+
+# http://code.activestate.com/recipes/578019-bytes-to-human-human-to-bytes-converter/
+SYMBOLS = {
+    'customary': ('B', 'KB', 'MB', 'GB', 'T', 'P', 'E', 'Z', 'Y'),
+    'customary_ext': ('byte', 'kilo', 'mega', 'giga', 'tera', 'peta', 'exa',
+                      'zetta', 'iotta'),
+    'iec': ('Bi', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
+    'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi',
+                'zebi', 'yobi'),
+}
+
+
+def human2bytes(s):
+    """
+    Attempts to guess the string format based on default symbols
+    set and return the corresponding bytes as an integer.
+    When unable to recognize the format ValueError is raised.
+
+      >>> human2bytes('0 B')
+      0
+      >>> human2bytes('1 KB')
+      1024
+      >>> human2bytes('1 MB')
+      1048576
+      >>> human2bytes('1 Gi')
+      1073741824
+      >>> human2bytes('1 tera')
+      1099511627776
+
+      >>> human2bytes('0.5kilo')
+      512
+      >>> human2bytes('0.1  byte')
+      0
+      >>> human2bytes('1 k')  # k is treated as an alias for KB
+      1024
+      >>> human2bytes('12 foo')
+      Traceback (most recent call last):
+          ...
+      ValueError: can't interpret '12 foo'
+    """
+    if s is None:
+        return 0
+    s = s.replace(',', '')
+    init = s
+    num = ""
+    while s and (s[0:1].isdigit() or s[0:1] == '.'):
+        num += s[0]
+        s = s[1:]
+    num = float(num)
+    letter = s.strip()
+    for name, sset in SYMBOLS.items():
+        if letter in sset:
+            break
+    else:
+        if letter == 'k':
+            # treat 'k' as an alias for 'KB' (http://goo.gl/kTQMs); this table uses 'KB' rather than 'K'
+            sset = SYMBOLS['customary']
+            letter = 'KB'
+        else:
+            raise ValueError("can't interpret %r" % init)
+    prefix = {sset[0]: 1}
+    for i, s in enumerate(sset[1:]):
+        prefix[s] = 1 << (i + 1) * 10
+    return int(num * prefix[letter])
+
+# https://github.com/okfn/ckanext-importlib
+def munge(name):
+    # convert spaces to underscores
+    name = re.sub(' ', '_', name).lower()
+    # convert symbols to dashes
+    name = re.sub('[:]', '_-', name).lower()
+    name = re.sub('[/]', '-', name).lower()
+    # take out not-allowed characters
+    name = re.sub('[^a-zA-Z0-9-_]', '', name).lower()
+    # remove double underscores
+    name = re.sub('__', '_', name).lower()
+    return name
+
+
+def name_munge(input_name):
+    return munge(input_name.replace(' ', '').replace('.', '_').replace('&', 'and'))
+
+
+def get_licence_id(licencename):
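+    # map the licence strings scraped from data.gov.au onto CKAN licence ids;
+    # anything unrecognised raises so it can be added to the table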
+    licence_map = {
+        "Creative Commons - Attribution-Share Alike 2.0 Australia (CC-SA)\nThe downloadable version of the database is licensed under CC-BY-SA Creative Commons Attribution Share Alike and contains only the database fields that are released under that license. These fields are object title, object number, object description as well as temporal, spatial and dimension details. It also contains a persistent URL for each record.": 'cc-by-sa',
+        "CreativeCommonsAttributionNonCommercial30AustraliaCCBYNC30": 'cc-nc',
+        'Otherpleasespecify': 'notspecified',
+        '': 'notspecified',
+        "Publicly available data": 'notspecified',
+        "CreativeCommonsAttributionNoDerivativeWorks30AustraliaCCBYND30": "other-closed",
+        "CreativeCommonsAttributionNonCommercialNoDerivs30AustraliaCCBYNCND30": "other-closed",
+        'CreativeCommonsAttribution30AustraliaCCBY30': 'cc-by',
+        "Creative Commons - Attribution 2.5 Australia (CC-BY)": 'cc-by',
+        'CreativeCommonsAttributionCCBY25': 'cc-by',
+        "PublicDomain": 'other-pd',
+    }
+    if licencename not in licence_map:
+        raise Exception(licencename + " not found")
+    return licence_map[licencename]
+
+
+gooddata = ["afl-in-victoria", "annual-budget-initiatives-by-suburb-brisbane-city-council"]
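+# whitelist of dataset slugs to load in this run; the commented-out line below lists further slugs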
+#athletics-in-victoria-gfyl,bicycle-racks-mosman-municipal-council,boat-ramps-brisbane-city-council,brisbane-access-ratings-database,bus-stops-brisbane-city-council,cemeteries-brisbane-city-council,cfa-locations,citycycle-stations-brisbane-city-council,community-gardens-brisbane-city-council,community-halls-brisbane-city-council,cooking-classes-gfyl,court-locations-victoria,customer-service-centres-brisbane-city-council,dance-in-victoria-gfyl,disability-activity-gfyl,dog-parks-brisbane-city-council,ferry-terminals-brisbane-city-council,fishing-club-in-victoria-gfyl,fitness-centres-in-victoria-gfyl,gardens-reserves-gfyl,golf-courses-brisbane-city-council,gymnastics-in-victoria-gfyl,historic-cemeteries-brisbane-city-council,ice-skating-centres-gfyl,immunisation-clinics-brisbane-city-council,libraries-brisbane-city-council,licenced-venues-victoria,lifesaving-locations-victoria,loading-zones-brisbane-city-council,major-projects-victoria,markets-in-victoria,martial-arts-in-victoria-gfyl,melbourne-water-use-by-postcode,members-of-parliament-both-houses-nsw,members-of-the-legislative-assembly-nsw,members-of-the-legislative-council-nsw,mfb-locations-vic,ministers-of-the-nsw-parliament,mosman-local-government-area,mosman-rider-route,mosman-wwii-honour-roll,neighbourhood-houses-gfyl,news-feeds-mosman-municipal-council,off-street-car-parks-mosman-municipal-council,orienteering-clubs-gfyl,parking-meter-areas-brisbane-city-council,parks-and-reserves-mosman-municipal-council,parks-brisbane-city-council,personal-training-gfyl,picnic-areas-brisbane-city-council,playgrounds-brisbane-city-council,playgrounds-mosman-municipal-council,police-region-crime-statistics-victoria,police-service-area-crime-statistics-victoria,pony-clubs-in-victoria-gfyl,prison-locations-victoria,public-amenities-maintained-by-mosman-council,public-art-brisbane-city-council,public-internet-locations-vic,public-toilets-brisbane-city-council,racecourse-locations-victoria,recent-development-applications-mosman-municipal-council,recreation-groups-gfyl,recreational-fishing-spots,regional-business-centres-brisbane-city-council,reports-of-swooping-birds-mosman-municipal-council,restricted-parking-areas-brisbane-city-council,rollerskating-centres-in-victoria-gfyl,sailing-clubs-gfyl,school-locations-victoria,shadow-ministers-of-the-nsw-parliament,skate-parks-gfyl,sporting-clubs-and-organisations-gfyl,stakeboard-parks-brisbane-city-council,state-bodies-gfyl,street-names-brisbane-city-council,suburbs-and-adjoining-suburbs-brisbane-city-council,swimming-pools-brisbane-city-council,swimming-pools-gfyl,tennis-courts-brisbane-city-council,top-40-book-club-reads-brisbane-city-council,tracks-and-trails-gfyl,triathlon-clubs-gfyl,urban-water-restrictions-victoria,veterinary-services-in-mosman,victorian-microbreweries,volunteering-centres-services-and-groups-victoria,walking-groups-gfyl,ward-offices-brisbane-city-council,waste-collection-days-brisbane-city-council,waste-transfer-stations-brisbane-city-council,water-consumption-in-melbourne,water-sports-in-victoria-gfyl,wifi-hot-spots-brisbane-city-council,yoga-pilates-and-tai-chi-in-victoria-gfyl,2809cycling-in-new-south-wales-what-the-data-tells-us2809-and-related-data,act-barbecue-bbq-locations,act-tafe-locations,ausindustry-locations,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,australian-gas-light-company-maps,australian-gas-light-company-maps,australian-ports,australian-public-service-statistical-bulletin-2011-12,australian-public-service-statistical-bulletin-snapshot-at-december-31-2011,australian-public-service-statistical-bulletin-tables-0910,austrics-timetable-set,capital-works-call-tender-schedule,collection-item-usage-state-library-of-victoria,country-and-commodity-trade-data-spreadsheet,country-and-commodity-trade-data-spreadsheet-2,country-by-level-of-processing-trade-data-spreadsheet,crime-incident-type-and-frequency-by-capital-city-and-nationally,csiro-locations,data-from-the-oaic-public-sector-information-survey-2012,data-from-the-oaic-public-sector-information-survey-2012,data-from-the-oaic-public-sector-information-survey-2012,department-of-finance-and-deregulation-office-locations,digitised-maps,diisr-division-locations-excluding-ausindustry-enterprise-connect-and-nmi,diisr-locations,diisr-portfolio-agency-locations-excluding-csiro,distance-to-legal-service-providers-from-disadvantaged-suburbs,enterprise-connect-locations,fire-insurance-maps-sydney-block-plans-1919-1940,fire-insurance-maps-sydney-block-plans-1919-1940,first-fleet-collection,first-fleet-collection,first-fleet-maps,first-fleet-maps,freedom-of-information-annual-estimated-costs-and-staff-time-statistical-data-2011-12,freedom-of-information-quarterly-request-and-review-statistical-data-2011-12,freedom-of-information-requests-estimated-costs-and-charges-collected-1982-83-to-2011-12,higher-education-course-completions,higher-education-enrolments,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,journey-planner-data-nt,library-catalogue-search-terms-state-library-of-victoria,location-of-act-schools,location-of-centrelink-offices,location-of-european-wasps-nests,location-of-lawyers-and-legal-service-providers-by-town,location-of-legal-assistance-service-providers,location-of-medicare-offices,location-of-medicare-offices,maps-of-the-southern-hemisphere-16th-18th-centuries,maps-of-the-southern-hemisphere-16th-18th-centuries,music-queensland,national-measurement-institute-locations,new-south-wales-officers-and-men-of-the-australian-imperial-force-a-i-f-and-the-australian-naval-for,new-south-wales-officers-and-men-of-the-australian-imperial-force-a-i-f-and-the-australian-naval-for,photographs-of-nsw-life-pre-1955,photographs-of-nsw-life-pre-1955,photographs-of-sydney-before-1885,photographs-of-sydney-before-1885,picture-queensland,plgr-28093-playgrounds-act,police-station-locations,queensland-public-libraries,rare-printed-books,rare-printed-books,real-estate-maps,regional-australia-funding-projects,sa-memory-state-library-of-south-australia,search-engine-terms-state-library-of-victoria,south-australian-photographs-state-library-of-south-australia,south-australian-sheet-music-state-library-of-south-australia,sydney-bond-store-maps-1894,sydney-bond-store-maps-1894,sydney-maps-1917,sydney-maps-1917,tafe-institute-locations-victoria,tafe-sa-campus-locations,tolt-public-toilets-act,victorian-public-library-branches-state-library-of-victoria,western-australia-public-library-network,world-war-one-photographs-by-frank-hurley,world-war-one-photographs-by-frank-hurley,citycat-timetables-brisbane-city-council,cityferry-timetables-brisbane-city-council,cost-of-salinity-to-local-infrastructure-1996-97-summary-of-component-costs-of-salinity-by-reporting,cost-of-salinity-to-local-infrastructure-1996-97-summary-of-component-costs-of-salinity-by-reporting,downstream-cost-calculator-model-and-data-for-199697-or-2001-prices,economics-of-australian-soil-conditions-199697-limiting-factor-or-relative-yield-min-of-ry_salt2000-,geographical-names-register-gnr-of-nsw,victorian-dryland-salinity-assessment-2000-d01cac_ramsar_final-xls,victorian-dryland-salinity-assessment-2000-d02cac_fauna_final-xls,victorian-dryland-salinity-assessment-2000-d03cac_fauna_dist_final-xls,victorian-dryland-salinity-assessment-2000-dc04cac_hydrol_final-xls,victorian-dryland-salinity-assessment-2000-dc05cac_wetland_final-xls,victorian-dryland-salinity-assessment-2000-dc06cac_util_final-xls,victorian-dryland-salinity-assessment-2000-dc07cac_road_final-xls,victorian-dryland-salinity-assessment-2000-dc08cac_towns_final-xls,victorian-dryland-salinity-assessment-2000-dc09cac_flora_final-xls,victorian-dryland-salinity-assessment-2000-dc10cac_flora_dist_final-xls,victorian-dryland-salinity-assessment-2000-dc12cac_infrastructure-xls,victorian-dryland-salinity-assessment-2000-dc13cac_natural_envt-xls,victorian-dryland-salinity-assessment-2000-dc14cac_agriculture-xls,victorian-dryland-salinity-assessment-2000-dc16cac_agric_cost-xls,victorian-dryland-salinity-assessment-2000-dc17cac_shallow_wt-xls,victorian-dryland-salinity-assessment-2000-dc18cac_agric_cost_time-xls,victorian-dryland-salinity-assessment-2000-dc21cac_water_resources_new-xls,victorian-dryland-salinity-assessment-2000-dc22cac_risk-xls,licensed-broadcasting-transmitter-data,nsw-crime-data,recorded-crime-dataset-nsw,crime-statistics-in-nsw-by-month,2001-02-to-2007-08-local-government-survey-victoria,2009-green-light-report,annual-statistical-reports-fire-brigades-nsw-200304,annual-statistical-reports-fire-brigades-nsw-200405,annual-statistical-reports-fire-brigades-nsw-200506,annual-statistical-reports-fire-brigades-nsw-200607,arts-on-the-map,assets-and-liabilities-of-australian-located-operations,assets-of-australian-located-operations,assets-of-australian-located-operations-by-country,assets-of-financial-institutions,back-issues-of-monthly-banking-statistics,banks-assets,banks-consolidated-group-capital,banks-consolidated-group-impaired-assets,banks-consolidated-group-off-balance-sheet-business,banks-liabilities,building-societies-selected-assets-and-liabilities,byteback2842-locations-vic,cash-management-trusts,city-of-melbourne-street-furniture-database,community-services-nsw,consolidated-exposures-immediate-and-ultimate-risk-basis,consolidated-exposures-immediate-risk-basis-foreign-claims-by-country,consolidated-exposures-immediate-risk-basis-international-claims-by-country,consolidated-exposures-ultimate-risk-basis,consolidated-exposures-ultimate-risk-basis-foreign-claims-by-country,cosolidated-exposures-immediate-risk-basis,credit-unions-selected-assets-and-liabilities,daily-net-foreign-exchange-transactions,detox-your-home,education-national-assessment-program-literacy-and-numeracy-nsw,employment-data-by-nsw-regions,excise-beer-clearance-data-updated-each-month-beer-clearance-summary-data,finance-companies-and-general-financiers-selected-assets-and-liabilities,foreign-exchange-transactions-and-holdings-of-official-reserve-assets,half-yearly-life-insurance-bulletin-december-2010,health-behaviours-in-nsw,international-liabilities-by-country-of-the-australian-located-operations-of-banks-and-rfcs,liabilities-and-assets-monthly,liabilities-and-assets-weekly,liabilities-of-australian-located-operations,life-insurance-offices-statutory-funds,managed-funds,monetary-policy-changes,money-market-corporations-selected-assets-and-liabilities,monthly-airport-traffic-data-for-top-ten-airports-january-1985-to-december-2008,monthly-banking-statistics-april-2011,monthly-banking-statistics-june-2011,monthly-banking-statistics-may-2011,open-market-operations-2009-to-current,projected-households-vic-rvic-msd-2006-2056,projected-population-by-age-and-sex-vic-rvic-msd-2006-2056,public-unit-trust,quarterly-bank-performance-statistics,quarterly-general-insurance-performance-statistics-march-2011,quarterly-superannuation-performance-march-2011,recorded-crime-dataset-nsw,residential-land-bulletin,resourcesmart-retailers,resourcesmart-retailers-vic,road-fatalities-nsw,securitisation-vehicles,selected-asset-and-liabilities-of-the-private-non-financial-sectors,seperannuation-funds-outside-life-offices,solar-report-vic,towns-in-time-victoria,vif2008-projected-population-by-5-year-age-groups-and-sex-sla-lga-ssd-sd-2006-2026,vif2008-projected-population-totals-and-components-vic-rvic-msd-2006-2056,vif2008-projected-population-totals-sla-lga-ssd-sd-2006-2026,arts-festivals-victoria,arts-organisations-victoria,arts-spaces-and-places-victoria,ausgrid-average-electricity-use,collecting-institutions-victoria,indigenous-arts-organisations-victoria,latest-coastal-weather-observations-for-coolangatta-qld,top-10-fiction-books-brisbane-city-council];
+
+
+docsdb = couch['disclosr-documents']
+
+if __name__ == "__main__":
+    orgs_list = []
+    orgs_ids = {}
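+    # for each dataset document (skipping the data.gov.au front page and qld entries),
+    # process only whitelisted slugs: ensure the owning organisation exists in CKAN,
+    # register the package, then upload its Download resources into the FileStore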
+    for doc in docsdb.view('app/datasets'):
+        print "   ---   "
+        print doc.id
+
+        if doc.value['url'] != "http://data.gov.au/data/" and doc.value['agencyID'] != "qld":
+
+
+            # Collect the package metadata.
+            pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
+                              doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
+            print pkg_name
+            if pkg_name in gooddata:
+
+                #add to or create organization using direct API
+                agency = doc.value['metadata']["Agency"]
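+                # normalise agency names that appear under multiple spellings so they
+                # all map to a single CKAN organisation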
+                if agency == "APS":
+                    agency = "Australian Public Service Commission"
+                if agency == "Shared Services, Treasury Directorate":
+                    agency = "Shared Services Procurement, Treasury Directorate"
+                if agency == "Treasury - Shared Services":
+                    agency = "Shared Services Procurement, Treasury Directorate"
+                if agency == "Territory and Municipal Services (TAMS)":
+                    agency = "Territory and Municipal Services Directorate"
+                if agency == "State Library of NSW":
+                    agency = "State Library of New South Wales"
+                org_name = name_munge(agency[:100])
+                if org_name not in orgs_list:
+                    orgs_list = ckandirect.action.organization_list()['result']
+                    #print orgs_list
+                    if org_name not in orgs_list:
+                        try:
+                            print "org not found, creating " + org_name
+                            ckandirect.action.organization_create(name=org_name, title=agency,
+                                                                  description=agency)
+                            orgs_list.append(org_name)
+                        except ckanapi.ValidationError, e:
+                            print e
+                            raise LoaderError('Unexpected status')
+                    else:
+                        print "org found, adding dataset to " + org_name
+
+                # cache org names -> id mapping
+                if org_name not in orgs_ids:
+                    org = ckandirect.action.organization_show(id=org_name)
+                    orgs_ids[org_name] = org["result"]["id"]
+                org_id = orgs_ids[org_name]
+                print "org id is " + org_id
+                tags = []
+                creator = doc.value['metadata']["DCTERMS.Creator"]
+                if doc.value['agencyID'] == "AGIMO":
+                    if len(doc.value['metadata']["Keywords / Tags"]) > 0:
+                        if hasattr(doc.value['metadata']["Keywords / Tags"], '__iter__'):
+                            tags = tags + doc.value['metadata']["Keywords / Tags"]
+                        else:
+                            tags = tags + [doc.value['metadata']["Keywords / Tags"]]
+
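+                    # keep only characters CKAN accepts in tag names (alphanumerics, -, _, .)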
+                    tags = [re.sub('[^a-zA-Z0-9-_.]', '', tag.replace('&', 'and')).lower() for tag in tags if tag]
+                    #print tags
+                    extras = []
+
+                    for extra_key in doc.value['metadata'].keys():
+                        if extra_key not in ["Description", "Content-Language", "DCTERMS.Description",
+                                             "Keywords / Tags",
+                                             "data.gov.au Category", "Download", "Permalink", "DCTERMS.Identifier"]:
+                            if doc.value['metadata'][extra_key] != None and doc.value['metadata'][extra_key] != "":
+                                extras.append([extra_key, doc.value['metadata'][extra_key]])
+
+                    package_entity = {
+                        'name': pkg_name,
+                        'title': doc.value['metadata']['DCTERMS.Title'],
+                        'url': doc.value['metadata']['DCTERMS.Source.URI'],
+                        'tags': tags, #tags are mandatory?
+                        'author': creator,
+                        'maintainer': creator,
+                        'license_id': get_licence_id(doc.value['metadata']['DCTERMS.License']),
+                        'notes': html2text.html2text(doc.value['metadata']['Description']),
+                        'owner_org': org_id,
+                        'extras': extras
+                    }
+
+                try:
+                    #print package_entity
+                    ckan.package_register_post(package_entity)
+                except CkanApiError, e:
+                    if ckan.last_message == "{\"name\": [\"That URL is already in use.\"]}":
+                        print "package already exists"
+                    else:
+                        print ckan.last_message
+                        raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % (
+                            ckan.last_status, pkg_name, e.args))
+                pkg = ckan.package_entity_get(pkg_name)
+
+
+                # add resources (downloadable data files)
+                if 'Download' in doc.value['metadata'].keys():
+                    try:
+
+                        resources = pkg.get('resources', [])
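+                        # only upload when CKAN currently lists fewer resources than the scraped
+                        # metadata, so re-running the export does not duplicate files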
+                        if len(resources) < len(doc.value['metadata']['Download']):
+                            for resource in doc.value['metadata']['Download']:
+
+                                # http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html
+                                # (KML/KMZ) / (Shapefile) /(Other)
+                                format = "plain"
+                                if resource['format'] == '(XML)':
+                                    format = 'xml'
+                                if resource['format'] == '(CSV/XLS)':
+                                    format = 'csv'
+                                if resource['format'] == '(Shapefile)':
+                                    format = 'shp'
+                                if resource['format'] == '(KML/KMZ)':
+                                    format = 'kml'
+                                name = resource['href']
+                                if 'name' in resource.keys():
+                                    name = resource['name']
+                                print resource
+                                add_package_resource_cachedurl(ckan, pkg_name, url_fix(resource['href']), name,
+                                                          format,
+                                                          human2bytes(resource.get('size', '0B')),
+                                                          resource_type='data')
+                        else:
+                            print "resources already exist"
+                    except CkanApiError, e:
+                        if ckan.last_status == 404:
+                            print "parent dataset does not exist"
+                        else:
+                            raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % (
+                                ckan.last_status, pkg_name, e.args))
+

--- /dev/null
+++ b/documents/datagov-merge.php
@@ -1,1 +1,26 @@
+<?php
 
+include_once("../include/common.inc.php");
+
+
+setlocale(LC_CTYPE, 'C');
+
+$db = $server->get_db('disclosr-documents');
+$datasets = Array();
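+// build a sorted map of data.gov.au dataset slug => CouchDB document id and print it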
+try {
+    $rows = $db->get_view("app", "datasets", null, true)->rows;
+
+    foreach ($rows as $row) {
+        //print_r($row);
+        if ($row->value->url != "http://data.gov.au/data/")
+            $datasets[str_replace(Array("http://data.gov.au/dataset/","/"),"",$row->value->url)] = $row->id;
+    }
+} catch (SetteeRestClientException $e) {
+    setteErrorHandler($e);
+}
+ksort($datasets);
+foreach ($datasets as $datasetname => $datasetkey) {
+    print "$datasetname => $datasetkey<br>\n";
+}
+?>
+

--- /dev/null
+++ b/documents/datagov-resourcereport.py
@@ -1,1 +1,81 @@
+import couchdb
+couch = couchdb.Server('http://127.0.0.1:5984/')
+#couch = couchdb.Server('http://192.168.1.113:5984/')
 
+import urllib
+import urlparse
+import httplib2
+import httplib
+import csv
+
+
+def url_fix(s, charset='utf-8'):
+    """Sometimes you get an URL by a user that just isn't a real
+    URL because it contains unsafe characters like ' ' and so on.  This
+    function can fix some of the problems in a similar way browsers
+    handle data entered by the user:
+
+    :param charset: The target charset for the URL if the url was
+                    given as unicode string.
+    """
+    if isinstance(s, unicode):
+        s = s.encode(charset, 'ignore')
+    if not urlparse.urlparse(s).scheme:
+        s = "http://" + s
+    scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
+    path = urllib.quote(path, '/%')
+    qs = urllib.quote_plus(qs, ':&=')
+    return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))
+
+# http://code.activestate.com/recipes/578019-bytes-to-human-human-to-bytes-converter/
+SYMBOLS = {
+    'customary': ('B', 'KB', 'MB', 'GB', 'T', 'P', 'E', 'Z', 'Y'),
+    'customary_ext': ('byte', 'kilo', 'mega', 'giga', 'tera', 'peta', 'exa',
+                      'zetta', 'iotta'),
+    'iec': ('Bi', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
+    'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi',
+                'zebi', 'yobi'),
+}
+
+
+docsdb = couch['disclosr-documents']
+out = csv.writer(open("output.csv","w"), delimiter=',',quoting=csv.QUOTE_ALL)
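+# one CSV row per Download resource: package slug, fixed URL, resource name, format,
+# then "ftp" for ftp links or header information from a HEAD request for http(s) links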
+if __name__ == "__main__":
+    for doc in docsdb.view('app/datasets'):
+        if doc.value['url'] != "http://data.gov.au/data/" and doc.value['agencyID'] != "qld":
+            # Collect the package metadata.
+            pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
+                              doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
+            if 'Download' in doc.value['metadata'].keys() and len(doc.value['metadata']['Download']) > 0:
+                        for resource in doc.value['metadata']['Download']:
+                            # http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html
+                            # (KML/KMZ) / (Shapefile) /(Other)
+                            format = "plain"
+                            if resource['format'] == '(XML)':
+                                format = 'xml'
+                            if resource['format'] == '(CSV/XLS)':
+                                format = 'csv'
+                            if resource['format'] == '(Shapefile)':
+                                format = 'shp'
+                            if resource['format'] == '(KML/KMZ)':
+                                format = 'kml'
+                            name = resource['href']
+                            if 'name' in resource.keys():
+                                name = resource['name']
+                            if resource['href'].startswith("ftp"):
+                                out.writerow([pkg_name, url_fix(resource['href']), name, format, "ftp", ""])
+                            else:
+                                try:
+                                    h = httplib2.Http(disable_ssl_certificate_validation=True)
+                                    resp = h.request(url_fix(resource['href']), 'HEAD')
+                                    content_type = resp[0]['