handle invalid input in file size and description markup for datagov import


Former-commit-id: 6c34fd9f8a95d5075ab5fb36f9ee2721736dcaec

--- a/admin/refreshDesignDoc.php
+++ b/admin/refreshDesignDoc.php
@@ -42,6 +42,7 @@
 $obj->views->byWebServer->map = "function(doc) {\n  emit(doc.web_server, doc);\n}";
 
 $obj->views->datasets->map = "function(doc) {\nif (doc.fieldName == \"data\") {\n  emit(doc._id, doc);\n}\n}";
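+// map view: emit (data.gov.au category, dataset url) pairs for the group import script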
+$obj->views->datasetGroups->map = "function(doc) {\nif (doc.fieldName == \"data\") {\n  doc.metadata[\"data.gov.au Category\"] && doc.metadata[\"data.gov.au Category\"].forEach(function(tag) {\n emit(tag, doc.url); \n  });\n}\n}";
 $obj->views->getValidationRequired->map = "function(doc) {\nif (doc.mime_type == \"text/html\" \n&& typeof(doc.validation) == \"undefined\") {\n  emit(doc._id, doc._attachments);\n}\n}";
 $docdb->save($obj, true);
 

file:b/disclosr.iml (new)
--- /dev/null
+++ b/disclosr.iml
@@ -1,1 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="WEB_MODULE" version="4">
+  <component name="FacetManager">
+    <facet type="Python" name="Python">
+      <configuration sdkName="" />
+    </facet>
+  </component>
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
 
+

--- /dev/null
+++ b/documents/datagov-export-groups.py
@@ -1,1 +1,81 @@
+import ckanclient
+import couchdb
+from ckanclient import CkanApiError
+import re
 
+
+class LoaderError(Exception):
+    pass
+
+# Instantiate the CKAN client.
+#ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api',    api_key='b47b24cd-591d-40c1-8677-d73101d56d1b')
+api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc'
+ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api',
+                             api_key=api_key)
+couch = couchdb.Server('http://127.0.0.1:5984/')
+#couch = couchdb.Server('http://192.168.1.113:5984/')
+
+# https://github.com/okfn/ckanext-importlib
+def munge(name):
+    # convert spaces to underscores
+    name = re.sub(' ', '_', name).lower()
+    # convert symbols to dashes
+    name = re.sub('[:]', '_-', name).lower()
+    name = re.sub('[/]', '-', name).lower()
+    # take out not-allowed characters
+    name = re.sub('[^a-zA-Z0-9-_]', '', name).lower()
+    # remove double underscores
+    name = re.sub('__', '_', name).lower()
+    return name
+
+
+def name_munge(input_name):
+    return munge(input_name.replace(' ', '').replace('.', '_').replace('&', 'and'))
+
+
+docsdb = couch['disclosr-documents']
+
+if __name__ == "__main__":
+    groups = {}
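+    # build a mapping of data.gov.au category name -> list of dataset package slugs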
+    for doc in docsdb.view('app/datasetGroups'):
+            group_name = doc.key
+            if group_name != "Not specified":
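+                # reduce the dataset url to a CKAN-safe package slug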
+                pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
+                                  doc.value.replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
+                if group_name in groups.keys():
+                    groups[group_name] = list(set(groups[group_name] + [pkg_name]))
+                else:
+                    groups[group_name] = [pkg_name]
+
+    # add dataset to group(s)
+    for group_name in groups.keys():
+        if group_name != "Not specified":
+            group_url = name_munge(group_name[:100])
+            print group_name
+            print groups[group_name]
+            try:
+                # Update the group details
+                group_entity = ckan.group_entity_get(group_url)
+                print "group "+group_name+" exists"
+                if 'packages' in group_entity.keys():
+                    group_entity['packages'] = list(set(group_entity['packages'] + groups[group_name]))
+                else:
+                    group_entity['packages'] = groups[group_name]
+                ckan.group_entity_put(group_entity)
+            except CkanApiError, e:
+                if ckan.last_status == 404:
+                    print "group "+group_name+" does not exist, creating"
+                    group_entity = {
+                        'name': group_url,
+                        'title': group_name,
+                        'description': group_name,
+                        'packages': groups[group_name]
+                    }
+                    #print group_entity
+                    ckan.group_register_post(group_entity)
+                elif ckan.last_status == 409:
+                    print "group already exists"
+                else:
+                    raise LoaderError('Unexpected status %s adding to group \'%s\': %r' % (
+                        ckan.last_status, group_name, e.args))
+

--- a/documents/datagov-export.py
+++ b/documents/datagov-export.py
@@ -1,19 +1,80 @@
+# coding=utf-8
 import ckanclient
 import couchdb
 from ckanclient import CkanApiError
 import re
 import html2text # aaronsw :(
-
+import ckanapi # https://github.com/open-data/ckanapi
+import scrape
+import datetime, os, hashlib
+import urllib2
 
 class LoaderError(Exception):
     pass
 
+import tempfile
+def add_package_resource_cachedurl(ckan, package_name, url, name, format, license_id, size, **kwargs):
+    # guess the format from the url before fetching
+    if "xls" in url:
+        format = "xls"
+    if "pdf" in url:
+        format = "pdf"
+    if "xlsx" in url:
+        format = "xlsx"
+    (returned_url, mime_type, content) = scrape.fetchURL(scrape.docsdb,
+                                                         url, "dataset_resource", "AGIMO", False)
+    # the served mime type is more reliable than the url suffix
+    if mime_type in ["application/vnd.ms-excel", "application/msexcel", "application/x-msexcel",
+                     "application/x-ms-excel", "application/x-excel", "application/x-dos_ms_excel",
+                     "application/xls", "application/x-xls"]:
+        format = "xls"
+    if mime_type in ["application/xlsx", "application/x-xlsx",
+                     "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"]:
+        format = "xlsx"
+
+    if content is not None:
+        # upload the cached copy rather than linking to the original url
+        tf = tempfile.NamedTemporaryFile(delete=False)
+        tfName = os.path.abspath(tf.name)
+        print tfName
+        tf.seek(0)
+        tf.write(content)
+        tf.flush()
+        ckan.add_package_resource(package_name, tfName, name=name, format=format, license_id=license_id)
+    else:
+        print "fetch error"
+        return ckan.add_package_resource(package_name, url, name=name, resource_type='data',
+                                         format=format, size=size, mimetype=mime_type,
+                                         license_id=license_id)
+
 # Instantiate the CKAN client.
-#ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api',    api_key='b47b24cd-591d-40c1-8677-d73101d56d1b')
-ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api',
-    api_key='482a9dd2-a976-4adf-ac77-d71d92a98a52')
-#couch = couchdb.Server('http://127.0.0.1:5984/')
-couch = couchdb.Server('http://192.168.1.113:5984/')
+api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc'
+server = 'data.disclosurelo.gs'
+
+ckan = ckanclient.CkanClient(base_location='http://' + server + '/api',
+                             api_key=api_key)
+ckandirect = ckanapi.RemoteCKAN('http://' + server, api_key=api_key)
+couch = couchdb.Server('http://127.0.0.1:5984/')
+#couch = couchdb.Server('http://192.168.1.113:5984/')
+
+import urllib
+import urlparse
+
+
+def url_fix(s, charset='utf-8'):
+    """Sometimes you get an URL by a user that just isn't a real
+    URL because it contains unsafe characters like ' ' and so on.  This
+    function can fix some of the problems in a similar way browsers
+    handle data entered by the user:
+
+    >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)')
+    'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
+
+    :param charset: The target charset for the URL if the url was
+                    given as unicode string.
+    """
+    if isinstance(s, unicode):
+        s = s.encode(charset, 'ignore')
+    if not urlparse.urlparse(s).scheme:
+        s = "http://" + s
+    scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
+    path = urllib.quote(path, '/%')
+    qs = urllib.quote_plus(qs, ':&=')
+    return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))
 
 # http://code.activestate.com/recipes/578019-bytes-to-human-human-to-bytes-converter/
 SYMBOLS = {
@@ -24,6 +85,7 @@
     'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi',
                 'zebi', 'yobi'),
 }
+
 
 def human2bytes(s):
     """
@@ -53,6 +115,9 @@
           ...
       ValueError: can't interpret '12 foo'
     """
+    if s is None:
+        return 0
+    # sizes are scraped as human-readable strings like "1,024 KB"
+    s = s.replace(',', '')
     init = s
     num = ""
     while s and s[0:1].isdigit() or s[0:1] == '.':
@@ -91,18 +156,17 @@
 
 def name_munge(input_name):
     return munge(input_name.replace(' ', '').replace('.', '_').replace('&', 'and'))
-    #return input_name.replace(' ', '').replace('.', '_').replace('&', 'and')
-
-
-def get_licence_id(licencename):
+
+
+def get_license_id(licencename):
     map = {
         "Creative Commons - Attribution-Share Alike 2.0 Australia (CC-SA)\nThe downloadable version of the database is licensed under CC-BY-SA Creative Commons Attribution Share Alike and contains only the database fields that are released under that license. These fields are object title, object number, object description as well as temporal, spatial and dimension details. It also contains a persistent URL for each record.": 'cc-by-sa',
         "CreativeCommonsAttributionNonCommercial30AustraliaCCBYNC30": 'cc-nc',
         'Otherpleasespecify': 'notspecified',
         '': 'notspecified',
         "Publicly available data": 'notspecified',
-        "CreativeCommonsAttributionNoDerivativeWorks30AustraliaCCBYND30": "other-closed",
-        "CreativeCommonsAttributionNonCommercialNoDerivs30AustraliaCCBYNCND30": "other-closed",
+        "CreativeCommonsAttributionNoDerivativeWorks30AustraliaCCBYND30": "cc-by-nd",
+        "CreativeCommonsAttributionNonCommercialNoDerivs30AustraliaCCBYNCND30": "cc-nc-nd",
         'CreativeCommonsAttribution30AustraliaCCBY30': 'cc-by',
         "Creative Commons - Attribution 2.5 Australia (CC-BY)": 'cc-by',
         'CreativeCommonsAttributionCCBY25': 'cc-by',
@@ -112,108 +176,143 @@
         raise Exception(licencename + " not found");
     return map[licencename];
 
+goodcsvdata = "afl-in-victoria,annual-budget-initiatives-by-suburb-brisbane-city-council,athletics-in-victoria-gfyl,bicycle-racks-mosman-municipal-council,boat-ramps-brisbane-city-council,brisbane-access-ratings-database,bus-stops-brisbane-city-council,cemeteries-brisbane-city-council,cfa-locations,citycycle-stations-brisbane-city-council,community-gardens-brisbane-city-council,community-halls-brisbane-city-council,cooking-classes-gfyl,court-locations-victoria,customer-service-centres-brisbane-city-council,dance-in-victoria-gfyl,disability-activity-gfyl,dog-parks-brisbane-city-council,ferry-terminals-brisbane-city-council,fishing-club-in-victoria-gfyl,fitness-centres-in-victoria-gfyl,gardens-reserves-gfyl,golf-courses-brisbane-city-council,gymnastics-in-victoria-gfyl,historic-cemeteries-brisbane-city-council,ice-skating-centres-gfyl,immunisation-clinics-brisbane-city-council,libraries-brisbane-city-council,licenced-venues-victoria,lifesaving-locations-victoria,loading-zones-brisbane-city-council,major-projects-victoria,markets-in-victoria,martial-arts-in-victoria-gfyl,melbourne-water-use-by-postcode,members-of-parliament-both-houses-nsw,members-of-the-legislative-assembly-nsw,members-of-the-legislative-council-nsw,mfb-locations-vic,ministers-of-the-nsw-parliament,mosman-local-government-area,mosman-rider-route,mosman-wwii-honour-roll,neighbourhood-houses-gfyl,news-feeds-mosman-municipal-council,off-street-car-parks-mosman-municipal-council,orienteering-clubs-gfyl,parking-meter-areas-brisbane-city-council,parks-and-reserves-mosman-municipal-council,parks-brisbane-city-council,personal-training-gfyl,picnic-areas-brisbane-city-council,playgrounds-brisbane-city-council,playgrounds-mosman-municipal-council,police-region-crime-statistics-victoria,police-service-area-crime-statistics-victoria,pony-clubs-in-victoria-gfyl,prison-locations-victoria,public-amenities-maintained-by-mosman-council,public-art-brisbane-city-council,public-internet-locations-vic,public-toilets-brisbane-city-council,racecourse-locations-victoria,recent-development-applications-mosman-municipal-council,recreation-groups-gfyl,recreational-fishing-spots,regional-business-centres-brisbane-city-council,reports-of-swooping-birds-mosman-municipal-council,restricted-parking-areas-brisbane-city-council,rollerskating-centres-in-victoria-gfyl,sailing-clubs-gfyl,school-locations-victoria,shadow-ministers-of-the-nsw-parliament,skate-parks-gfyl,sporting-clubs-and-organisations-gfyl,stakeboard-parks-brisbane-city-council,state-bodies-gfyl,street-names-brisbane-city-council,suburbs-and-adjoining-suburbs-brisbane-city-council,swimming-pools-brisbane-city-council,swimming-pools-gfyl,tennis-courts-brisbane-city-council,top-40-book-club-reads-brisbane-city-council,tracks-and-trails-gfyl,triathlon-clubs-gfyl,urban-water-restrictions-victoria,veterinary-services-in-mosman,victorian-microbreweries,volunteering-centres-services-and-groups-victoria,walking-groups-gfyl,ward-offices-brisbane-city-council,waste-collection-days-brisbane-city-council,waste-transfer-stations-brisbane-city-council,water-consumption-in-melbourne,water-sports-in-victoria-gfyl,wifi-hot-spots-brisbane-city-council,yoga-pilates-and-tai-chi-in-victoria-gfyl,2809cycling-in-new-south-wales-what-the-data-tells-us2809-and-related-data,act-barbecue-bbq-locations,act-tafe-locations,ausindustry-locations,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract-notice-export,austender-contract
-notice-export,austender-contract-notice-export,austender-contract-notice-export,australian-gas-light-company-maps,australian-gas-light-company-maps,australian-ports,australian-public-service-statistical-bulletin-2011-12,australian-public-service-statistical-bulletin-snapshot-at-december-31-2011,australian-public-service-statistical-bulletin-tables-0910,austrics-timetable-set,capital-works-call-tender-schedule,collection-item-usage-state-library-of-victoria,country-and-commodity-trade-data-spreadsheet,country-and-commodity-trade-data-spreadsheet-2,country-by-level-of-processing-trade-data-spreadsheet,crime-incident-type-and-frequency-by-capital-city-and-nationally,csiro-locations,data-from-the-oaic-public-sector-information-survey-2012,data-from-the-oaic-public-sector-information-survey-2012,data-from-the-oaic-public-sector-information-survey-2012,department-of-finance-and-deregulation-office-locations,digitised-maps,diisr-division-locations-excluding-ausindustry-enterprise-connect-and-nmi,diisr-locations,diisr-portfolio-agency-locations-excluding-csiro,distance-to-legal-service-providers-from-disadvantaged-suburbs,enterprise-connect-locations,fire-insurance-maps-sydney-block-plans-1919-1940,fire-insurance-maps-sydney-block-plans-1919-1940,first-fleet-collection,first-fleet-collection,first-fleet-maps,first-fleet-maps,freedom-of-information-annual-estimated-costs-and-staff-time-statistical-data-2011-12,freedom-of-information-quarterly-request-and-review-statistical-data-2011-12,freedom-of-information-requests-estimated-costs-and-charges-collected-1982-83-to-2011-12,higher-education-course-completions,higher-education-enrolments,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,historical-australian-government-contract-data,journey-planner-data-nt,library-catalogue-search-terms-state-library-of-victoria,location-of-act-schools,location-of-centrelink-offices,location-of-european-wasps-nests,location-of-lawyers-and-legal-service-providers-by-town,location-of-legal-assistance-service-providers,location-of-medicare-offices,location-of-medicare-offices,maps-of-the-southern-hemisphere-16th-18th-centuries,maps-of-the-southern-hemisphere-16th-18th-centuries,music-queensland,national-measurement-institute-locations,new-south-wales-officers-and-men-of-the-australian-imperial-force-a-i-f-and-the-australian-naval-for,new-south-wales-officers-and-men-of-the-australian-imperial-force-a-i-f-and-the-australian-naval-for,photographs-of-nsw-life-pre-1955,photographs-of-nsw-life-pre-1955,photographs-of-sydney-before-1885,photographs-of-sydney-before-1885,picture-queensland,plgr-28093-playgrounds-act,police-station-locations,queensland-public-libraries,rare-printed-books,rare-printed-books,real-estate-maps,regional-australia-funding-projects,sa-memory-state-library-of-south-australia,search-engine-terms-state-library-of-victoria,south-australian-photographs-state-library-of-south-australia,south-australian-sheet-mu
sic-state-library-of-south-australia,sydney-bond-store-maps-1894,sydney-bond-store-maps-1894,sydney-maps-1917,sydney-maps-1917,tafe-institute-locations-victoria,tafe-sa-campus-locations,tolt-public-toilets-act,victorian-public-library-branches-state-library-of-victoria,western-australia-public-library-network,world-war-one-photographs-by-frank-hurley,world-war-one-photographs-by-frank-hurley,citycat-timetables-brisbane-city-council,cityferry-timetables-brisbane-city-council,cost-of-salinity-to-local-infrastructure-1996-97-summary-of-component-costs-of-salinity-by-reporting,cost-of-salinity-to-local-infrastructure-1996-97-summary-of-component-costs-of-salinity-by-reporting,downstream-cost-calculator-model-and-data-for-199697-or-2001-prices,economics-of-australian-soil-conditions-199697-limiting-factor-or-relative-yield-min-of-ry_salt2000-,geographical-names-register-gnr-of-nsw,victorian-dryland-salinity-assessment-2000-d01cac_ramsar_final-xls,victorian-dryland-salinity-assessment-2000-d02cac_fauna_final-xls,victorian-dryland-salinity-assessment-2000-d03cac_fauna_dist_final-xls,victorian-dryland-salinity-assessment-2000-dc04cac_hydrol_final-xls,victorian-dryland-salinity-assessment-2000-dc05cac_wetland_final-xls,victorian-dryland-salinity-assessment-2000-dc06cac_util_final-xls,victorian-dryland-salinity-assessment-2000-dc07cac_road_final-xls,victorian-dryland-salinity-assessment-2000-dc08cac_towns_final-xls,victorian-dryland-salinity-assessment-2000-dc09cac_flora_final-xls,victorian-dryland-salinity-assessment-2000-dc10cac_flora_dist_final-xls,victorian-dryland-salinity-assessment-2000-dc12cac_infrastructure-xls,victorian-dryland-salinity-assessment-2000-dc13cac_natural_envt-xls,victorian-dryland-salinity-assessment-2000-dc14cac_agriculture-xls,victorian-dryland-salinity-assessment-2000-dc16cac_agric_cost-xls,victorian-dryland-salinity-assessment-2000-dc17cac_shallow_wt-xls,victorian-dryland-salinity-assessment-2000-dc18cac_agric_cost_time-xls,victorian-dryland-salinity-assessment-2000-dc21cac_water_resources_new-xls,victorian-dryland-salinity-assessment-2000-dc22cac_risk-xls,licensed-broadcasting-transmitter-data,nsw-crime-data,recorded-crime-dataset-nsw,crime-statistics-in-nsw-by-month,2001-02-to-2007-08-local-government-survey-victoria,2009-green-light-report,annual-statistical-reports-fire-brigades-nsw-200304,annual-statistical-reports-fire-brigades-nsw-200405,annual-statistical-reports-fire-brigades-nsw-200506,annual-statistical-reports-fire-brigades-nsw-200607,arts-on-the-map,assets-and-liabilities-of-australian-located-operations,assets-of-australian-located-operations,assets-of-australian-located-operations-by-country,assets-of-financial-institutions,back-issues-of-monthly-banking-statistics,banks-assets,banks-consolidated-group-capital,banks-consolidated-group-impaired-assets,banks-consolidated-group-off-balance-sheet-business,banks-liabilities,building-societies-selected-assets-and-liabilities,byteback2842-locations-vic,cash-management-trusts,city-of-melbourne-street-furniture-database,community-services-nsw,consolidated-exposures-immediate-and-ultimate-risk-basis,consolidated-exposures-immediate-risk-basis-foreign-claims-by-country,consolidated-exposures-immediate-risk-basis-international-claims-by-country,consolidated-exposures-ultimate-risk-basis,consolidated-exposures-ultimate-risk-basis-foreign-claims-by-country,cosolidated-exposures-immediate-risk-basis,credit-unions-selected-assets-and-liabilities,daily-net-foreign-exchange-transactions,detox-your-home,education-national-asse
ssment-program-literacy-and-numeracy-nsw,employment-data-by-nsw-regions,excise-beer-clearance-data-updated-each-month-beer-clearance-summary-data,finance-companies-and-general-financiers-selected-assets-and-liabilities,foreign-exchange-transactions-and-holdings-of-official-reserve-assets,half-yearly-life-insurance-bulletin-december-2010,health-behaviours-in-nsw,international-liabilities-by-country-of-the-australian-located-operations-of-banks-and-rfcs,liabilities-and-assets-monthly,liabilities-and-assets-weekly,liabilities-of-australian-located-operations,life-insurance-offices-statutory-funds,managed-funds,monetary-policy-changes,money-market-corporations-selected-assets-and-liabilities,monthly-airport-traffic-data-for-top-ten-airports-january-1985-to-december-2008,monthly-banking-statistics-april-2011,monthly-banking-statistics-june-2011,monthly-banking-statistics-may-2011,open-market-operations-2009-to-current,projected-households-vic-rvic-msd-2006-2056,projected-population-by-age-and-sex-vic-rvic-msd-2006-2056,public-unit-trust,quarterly-bank-performance-statistics,quarterly-general-insurance-performance-statistics-march-2011,quarterly-superannuation-performance-march-2011,recorded-crime-dataset-nsw,residential-land-bulletin,resourcesmart-retailers,resourcesmart-retailers-vic,road-fatalities-nsw,securitisation-vehicles,selected-asset-and-liabilities-of-the-private-non-financial-sectors,seperannuation-funds-outside-life-offices,solar-report-vic,towns-in-time-victoria,vif2008-projected-population-by-5-year-age-groups-and-sex-sla-lga-ssd-sd-2006-2026,vif2008-projected-population-totals-and-components-vic-rvic-msd-2006-2056,vif2008-projected-population-totals-sla-lga-ssd-sd-2006-2026,arts-festivals-victoria,arts-organisations-victoria,arts-spaces-and-places-victoria,ausgrid-average-electricity-use,collecting-institutions-victoria,indigenous-arts-organisations-victoria,latest-coastal-weather-observations-for-coolangatta-qld,top-10-fiction-books-brisbane-city-council".split(",")
+goodotherdata = "abc-local-stations,abc-local-stations,abc-local-stations,act-emergency-services-agency-esa-28093-current-incidents,act-government-news-and-events,act-government-summaries-of-cabinet-outcomes,act-magistrates-court-judgements,act-supreme-court-judgements,act-supreme-court-sentences,action-bus-service-gtfs-feed-act,actpla-latest-news,agricultural-commodities-for-199697-linked-to-profit-function-surfaces,agricultural-structure-classification,agricultural-structure-classification,all-vacant-act-government-jobs,annual-family-income-1996-1997-to-1998-1999-three-year-average,apvma-pubcris-dataset-for-registered-agricultural-and-veterinary-chemical-products-and-approved-acti,argus-newspaper-collection-of-photographs-state-library-of-victoria,assessment-of-terrestrial-biodiversity-2002-biodiversity-audit-data-entry-system-bades,assessment-of-terrestrial-biodiversity-2002-database,assisted-immigration-1848-1912-index,ausgrid-average-electricity-use,ausgrid-average-electricity-use-2011,ausindustry-locations,ausindustry-locations,austender-contract-notice-export,australian-broadband-guarantee,australian-broadband-guarantee,australian-data-access,australian-dryland-salinity-assessment-spatial-data-12500000-nlwra-2001,australian-dryland-salinity-assessment-spatial-data-12500000-nlwra-2001,australian-groundwater-flow-systems-national-land-and-water-resources-audit-january-2000,australian-groundwater-flow-systems-national-land-and-water-resources-audit-january-2000,australian-irrigation-areas-raster-version-1a-national-land-and-water-resources-audit,australian-irrigation-areas-raster-version-1a-national-land-and-water-resources-audit,australian-irrigation-areas-vector-version-1a-national-land-and-water-resources-audit,australian-irrigation-areas-vector-version-1a-national-land-and-water-resources-audit,australian-public-service-statistical-bulletin-2010-11,australian-water-resources-assessment-2000-database,australiana-index-state-library-of-victoria,available-water-capacity-for-australian-areas-of-intensive-agriculture-of-layer-1-a-horizon-top-soil,bicycle-racks-mosman-municipal-council,bikeways-briisbane-city-council,bikeways-briisbane-city-council,boreholes-in-the-murray-basin-southeastern-australia,boreholes-in-the-murray-basin-southeastern-australia,british-convict-transportation-registers,calculated-annual-and-monthly-potential-evaporation-mm,calculated-annual-and-monthly-potential-evaporation-mm,canberra-suburb-boundaries,catchment-and-subcatchments-grid,cemeteries-brisbane-city-council,cemeteries-brisbane-city-council,coal-fields-in-the-murray-basin-southeastern-australia,coal-fields-in-the-murray-basin-southeastern-australia,commonwealth-agencies,commonwealth-electoral-boundaries-archive-2009,commonwealth-electoral-boundaries-archive-2009,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-all-infrastructure-buildings-road-rail-a,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-all-infrastructure-buildings-road-rail-a,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-increase-to-local-infrastructure-based-o,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-all-infrastructure-buildings-road-rai,cost-of-salinity-to-local-infrastructure-1996-97-
total-cost-of-the-general-infrastructure-component-,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-the-rail-component-of-infrastructure-,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-the-general-infrastructure-component-bui,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-the-road-component-of-infrastructure-bas,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-the-road-component-of-infrastructure-bas,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-to-the-bridge-component-of-infrastructur,cost-of-salinity-to-local-infrastructure-1996-97-total-cost-to-the-bridge-component-of-infrastructur,country-by-level-of-processing-trade-data-spreadsheet-2,country-by-level-of-processing-trade-data-spreadsheet-2011-12,crime-incidents-data-2004-international-crime-victimisation-survey-icvs-australian-component,cropping-management-practices-1998-1999,csiro-locations,csiro-locations,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,cybersafety-outreach-program,cybersafety-outreach-program,data-source-for-polygonal-data-used-by-the-asris-project-in-generation-of-modelled-surfaces,department-of-finance-and-deregulation-office-locations,department-of-finance-and-deregulation-office-locations,depositional-path-length,digital-enterprise,digital-hubs,digitised-maps,diisr-division-locations-excluding-ausindustry-enterprise-connect-and-nmi,diisr-division-locations-excluding-ausindustry-enterprise-connect-and-nmi,diisr-locations,diisr-portfolio-agency-locations-excluding-csiro,diisr-portfolio-agency-locations-excluding-csiro,directory-gov-au-full-data-export,distance-to-ridges,economics-of-australian-soil-conditions-199697-factor-most-limiting-yield-aciditysodicitysalinity,economics-of-australian-soil-conditions-199697-gross-benefit-acidity-hayr,economics-of-australian-soil-conditions-199697-gross-benefit-of-the-limiting-factor-hayr,economics-of-australian-soil-conditions-199697-gross-benefit-salinity-hayr,economics-of-australian-soil-conditions-199697-gross-benefit-sodicity-hayr,economics-of-australian-soil-conditions-199697-impact-cost-of-salinity-2000-2020-hayr,economics-of-australian-soil-conditions-199697-relative-yield-from-acidity,economics-of-australian-soil-conditions-199697-relative-yield-from-salinity-in-2000,economics-of-australian-soil-conditions-199697-relative-yield-from-salinity-in-2020,economics-of-australian-soil-conditions-199697-relative-yield-from-sodicity,edd-media-releases,edd-news-and-events,egovernment-resource-centre-website-analytics,elevation-of-the-pre-tertiary-basement-in-the-murray-basin,elevation-of-the-pre-tertiary-basement-in-the-murray-basin,enterprise-connect-locations,enterprise-connect-locations,equivalent-fresh-water-head-difference-between-the-shallowest-and-deepest-aquifers,equivalent-fresh-water-head-difference-between-the-shallowest-and-deepest-aquifers,erosion-gully-density,erosion-path-length,estimated-proportion-of-farms-carrying-out-landcare-related-work-1998-1999,estimated-value-of-agricultural-operations-evao-1996-1997,farm-equity-ratio-1996-1997-to-1998-1999-three-year-average,farm-family-cash-income-1196-1997-to-1998-1999-three-ye
ar-average,farmer-population-1996,farms-with-significant-degradation-problems-irrigation-salinity-1998-1999,farms-with-significant-degradation-problems-irrigation-salinity-1998-1999-2,farms-with-significant-degradation-problems-soil-acidity-1998-1999,forests-of-australia-2003,freedom-of-information-foi-summaries,geology-lithology-12-500-000-scale,glenorchy-city-council-building-footprints,glenorchy-city-council-building-footprints,glenorchy-city-council-building-footprints,glenorchy-city-council-kerbs,glenorchy-city-council-kerbs,glenorchy-city-council-kerbs,glenorchy-city-council-stormwater-pipes,glenorchy-city-council-stormwater-pipes,glenorchy-city-council-stormwater-pipes,glenorchy-city-council-stormwater-pits,glenorchy-city-council-stormwater-pits,glenorchy-city-council-stormwater-pits,groundwater-sdl-resource-units,groundwater-sdl-resource-units,groundwater-sdl-resource-units,higher-qualifications-of-farmers-and-farm-managers-1996,historical-australian-government-contract-data,historical-australian-government-contract-data,hydrologic-indicator-sites,hydrologic-indicator-sites,immigration-land-orders-1861-1874,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-biota-condition-sub-in,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-catchment-condition-in,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-feral-animal-density,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-human-population-densi,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-impoundment-density,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-industrial-point-sourc,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-intensive-agricultural,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-land-condition-sub-ind,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-native-vegetation-frag,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-nutrient-point-source-,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-pesticide-hazard,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-predicted-2050-salinit,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-protected-areas,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-rivers-in-acidificatio,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-rivers-in-salt-hazard,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-rivers-through-forests,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-soil-acidification-haz,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-soil-degradation-hazar,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-suspended-sediment-loa,indicators-of-catchment-condition-in-the-intensive-land-use-zone-of-australia-weed-density,integrated-vegetation-cover-2003-version-1,john-t-collins-collection-state-library-of-victoria,journal-of-the-h-m-s-endeavour-1768-1771,journey-planner-data-act,krantz-sheldon-architectural-images,land-use-of-australia-version-3-28093-20012002,lands-surveys-historic-map-series-western-australia,latest-coastal-weather-observations-for-coolangatta-qld,launceston-city-council-addresses,launceston-city-council-building-footprints,l
aunceston-city-council-contours,launceston-city-council-detail-survey-drawing-file,launceston-city-council-drainage,launceston-city-council-fences,launceston-city-council-pavement,launceston-city-council-railway,launceston-city-council-roads,libraries-act-announcements,licensed-broadcasting-transmitter-data,linc-tasmania,look-up-table-of-auslig-river-basins-of-australia-1997,major-water-resources-infrastructure-part-of-the-australian-water-resources-assessment-2000-database,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-pre-1788-scenario,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-pre-1788-scenario,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-present-day-scenario,mean-annual-concentration-of-mineral-nitrogen-in-soil-water-mgn-kgh20-in-the-present-day-scenario,mean-annual-deep-drainage-mm-y-in-the-pre-1788-scenario,mean-annual-deep-drainage-mm-y-in-the-pre-1788-scenario,mean-annual-deep-drainage-mm-y-in-the-present-day-scenario,mean-annual-deep-drainage-mm-y-in-the-present-day-scenario,mean-annual-transpiration-from-the-plant-canopy-for-the-pre-1788-scenario,mean-annual-transpiration-from-the-plant-canopy-for-the-pre-1788-scenario,mean-annual-transpiration-from-the-plant-canopy-for-the-present-day-scenario,mean-annual-transpiration-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-april-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-april-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-august-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-december-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-february-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-february-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-january-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-july-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-june-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-march-from-the-plant-canopy-f
or-the-present-day-scenario,mean-transpiration-in-march-from-the-plant-canopy-for-the-present-day-scenario-2,mean-transpiration-in-march-from-the-plant-canopy-for-the-present-day-scenario-2,mean-transpiration-in-may-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-may-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-may-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-may-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-november-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-october-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-pre-1788-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-present-day-scenario,mean-transpiration-in-september-from-the-plant-canopy-for-the-present-day-scenario,mildenhall-photographs-of-early-canberra,mobility-map-brisbane-city,mobility-map-mt-coot-tha,mosman-local-government-area,mosman-rider-route,mosman-wwii-honour-roll,mosman-wwii-honour-roll,murray-darling-basin-water-resource-plan-areas-groundwater,murray-darling-basin-water-resource-plan-areas-groundwater,murray-darling-basin-water-resource-plan-areas-surface-water,murray-darling-basin-water-resource-plan-areas-surface-water,music-queensland,national-broadband-network,national-broadband-network,national-broadband-network-2011-10,national-broadband-network-2011-10,national-broadband-network-2011-12,national-broadband-network-2011-12,national-broadband-network-2012,national-broadband-network-28093-august-2011,national-broadband-network-28093-august-2011,national-broadband-network-28093-july-2011,national-broadband-network-28093-july-2011,national-broadband-network-february-2012,national-broadband-network-february-2012,national-broadband-network-september-2011,national-broadband-network-september-2011,national-library-of-australia-sheet-music-collection,national-measurement-institute-locations,national-parks-and-asset-locations-south-australia,national-public-toilet-map,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2000,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,n
ew-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2020,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,new-south-wales-dryland-salinity-assessment-2000-assessmet-of-dryland-salinity-extent-2050,nsw-newspapers-catalogue-data,nsw-rural-fire-service-current-incidents,nsw-rural-fire-service-major-updates,off-street-car-parks-mosman-municipal-council,open-database-brisbane-city-council,ost-of-salinity-to-local-infrastructure-1996-97-total-cost-of-the-rail-component-of-infrastructure-b,parking-areas-brisbane-city-council,parking-areas-brisbane-city-council,parks-and-reserves-mosman-municipal-council,parks-brisbane-city-council,parks-brisbane-city-council,picture-australia-metadata,picture-queensland,picture-queensland,playgrounds-mosman-municipal-council,police-station-locations,police-station-locations,port-phillip-papers-state-library-of-victoria,precis-forecast-national,precis-forecast-national,precis-forecast-new-south-wales,precis-forecast-new-south-wales,precis-forecast-new-south-wales,precis-forecast-northern-territory,precis-forecast-northern-territory,precis-forecast-queensland,precis-forecast-queensland,precis-forecast-south-australia,precis-forecast-south-australia,precis-forecast-south-australia,precis-forecast-tasmania,precis-forecast-tasmania,precis-forecast-tasmania,precis-forecast-victoria,precis-forecast-victoria,precis-forecast-victoria,precis-forecast-western-australia,precis-forecast-western-australia,public-amenities-maintained-by-mosman-council,radio-and-television-broadcasting-stations-book-internet-edition,real-estate-maps,recent-earthquakes,regional-development-australia,regional-development-australia-2011-september-2011,regional-development-australia-may-2012,reports-of-swooping-birds-mosman-municipal-council,sentinel-hotspots,sentinel-hotspots,slq-catalogue-searches,slq-catalogue-searches,slv-rural-water,slv-shipping,slwa-digital-photographic-collection,south-australian-boat-ramp-locator,south-australian-road-crash-statistics,state-library-of-victoria-online-image-collection,state-library-of-victoria-online-image-collection-inc-high-res,state-of-the-service-report-2010-11-australian-public-service-employee-survey-results,state-of-the-service-report-2010-11-australian-public-service-employee-survey-results,statistical-local-areas-1996-for-agricultural-structure-classification,surface-water-gauging-stations-part-of-the-australian-water-resources-assessment-2000-database,surface-water-gauging-stations-part-of-the-australian-water-resources-assessment-2000-database,surface-water-sdl-resource-units,surface-water-sdl-resource-units,tasmanian-herbarium,tasmanian-museum-and-art-gallery-faunal-collection".split(",")
+
+
 docsdb = couch['disclosr-documents']
 
 if __name__ == "__main__":
+    orgs_list = []
+    orgs_ids = {}
     for doc in docsdb.view('app/datasets'):
+        print "   ---   "
         print doc.id
-        if doc.value['url'] != "http://data.gov.au/data/":
+
+        if doc.value['url'] != "http://data.gov.au/data/" and doc.value['agencyID'] != "qld":
+
+
             # Collect the package metadata.
-            pkg_name = doc.value['url'].replace("http://data.gov.au/dataset/",'').replace('/','');
-            tags = []
-            if doc.value['agencyID'] == "AGIMO":
-                if len(doc.value['metadata']["Keywords / Tags"]) > 0:
-                    if hasattr(doc.value['metadata']["Keywords / Tags"], '__iter__'):
-                        tags = tags + doc.value['metadata']["Keywords / Tags"]
+            pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
+                              doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
+            print pkg_name
+            if pkg_name != "":
+
+                #add to or create organization using direct API
+                agency = doc.value['metadata']["Agency"]
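+                # canonicalise agency names that appear under several variants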
+                if agency == "APS":
+                    agency = "Australian Public Service Commission"
+                if agency == "Department of Broadband, Communications and the Digital Ecomomy":
+                    agency = "Department of Broadband, Communications and the Digital Economy"
+                if agency == "Shared Services, Treasury Directorate":
+                    agency = "Shared Services Procurement, Treasury Directorate"
+                if agency == "Treasury - Shared Services":
+                    agency = "Shared Services Procurement, Treasury Directorate"
+                if agency == "Territory and Municipal Services (TAMS)":
+                    agency = "Territory and Municipal Services Directorate"
+                if agency == "State Library of NSW":
+                    agency = "State Library of New South Wales"
+                org_name = name_munge(agency[:100])
+                if org_name not in orgs_list:
+                    orgs_list = ckandirect.action.organization_list()['result']
+                    #print orgs_list
+                    if org_name not in orgs_list:
+                        try:
+                            print "org not found, creating " + org_name
+                            ckandirect.action.organization_create(name=org_name, title=agency,
+                                                                  description=agency)
+                            orgs_list.append(org_name)
+                        except ckanapi.ValidationError, e:
+                            print e
+                            raise LoaderError('Unexpected status')
                     else:
-                        tags = tags + [doc.value['metadata']["Keywords / Tags"]]
-                if 'data.gov.au Category' in doc.value['metadata'].keys() and len(doc.value['metadata']['data.gov.au Category']) > 0:
-                    if hasattr(doc.value['metadata']['data.gov.au Category'], '__iter__'):
-                        tags = tags + doc.value['metadata']['data.gov.au Category']
+                        print "org found, adding dataset to " + org_name
+
+                # cache org names -> id mapping
+                if org_name not in orgs_ids:
+                    org = ckandirect.action.organization_show(id=org_name)
+                    orgs_ids[org_name] = org["result"]["id"]
+                org_id = orgs_ids[org_name]
+                print "org id is " + org_id
+                tags = []
+                creator = doc.value['metadata']["DCTERMS.Creator"]
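+                # AGIMO keywords may arrive as a single string or a list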
+                if doc.value['agencyID'] == "AGIMO":
+                    if len(doc.value['metadata']["Keywords / Tags"]) > 0:
+                        if hasattr(doc.value['metadata']["Keywords / Tags"], '__iter__'):
+                            tags = tags + doc.value['metadata']["Keywords / Tags"]
+                        else:
+                            tags = tags + [doc.value['metadata']["Keywords / Tags"]]
+
+                    tags = [re.sub('[^a-zA-Z0-9-_.]', '', tag.replace('&', 'and')).lower() for tag in tags if tag]
+                    #print tags
+                    extras = []
+
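+                    # pass other metadata fields through as CKAN extras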
+                    for extra_key in doc.value['metadata'].keys():
+                        if extra_key not in ["Description", "Content-Language", "DCTERMS.Description",
+                                             "Keywords / Tags",
+                                             "data.gov.au Category", "Download", "Permalink", "DCTERMS.Identifier"]:
+                            if doc.value['metadata'][extra_key] != None and doc.value['metadata'][extra_key] != "":
+                                extras.append([extra_key, doc.value['metadata'][extra_key]])
+
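+                    # assemble the package; datasets absent from the goodcsvdata/goodotherdata whitelists stay private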
+                    package_entity = {
+                        'name': pkg_name,
+                        'title': doc.value['metadata']['DCTERMS.Title'],
+                        'url': doc.value['metadata']['DCTERMS.Source.URI'],
+                        'tags': tags, #tags are mandatory?
+                        'author': creator,
+                        'maintainer': creator,
+                        'license_id': get_license_id(doc.value['metadata']['DCTERMS.License']),
+                        'notes': html2text.html2text(doc.value['metadata']['Description']).replace('AC/a!a','-').replace('AC/a!aC/',"'").replace("AC/a!E",":").replace("A "," "),
+                        'owner_org': org_id,
+                        'extras': extras,
+                        'private': (pkg_name not in goodcsvdata and pkg_name not in goodotherdata)
+                    }
+
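+                # register the package; a "URL is already in use" error means it already exists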
+                try:
+                    #print package_entity
+                    ckan.package_register_post(package_entity)
+                except CkanApiError, e:
+                    if ckan.last_message == "{\"name\": [\"That URL is already in use.\"]}":
+                        print "package already exists"
                     else:
-                        tags = tags + [doc.value['metadata']['data.gov.au Category']]
-                tags = [re.sub('[^a-zA-Z0-9-_.]', '', tag.replace('&', 'and')).lower() for tag in tags if tag]
-                print tags
-                package_entity = {
-                    'name': pkg_name,
-                    'title': doc.value['metadata']['DCTERMS.Title'],
-                    'url': doc.value['metadata']['DCTERMS.Source.URI'],
-                    'tags': tags, #tags are mandatory?
-                    'author': doc.value['metadata']["DCTERMS.Creator"],
-                    'maintainer': doc.value['metadata']["DCTERMS.Creator"],
-                    'licence_id': get_licence_id(doc.value['metadata']['DCTERMS.License']),
-                    'notes': html2text.html2text(doc.value['metadata']['Description']),
-                }
-            if doc.value['agencyID'] == "qld":
-                package_entity = doc.value['metadata']
-
-            try:
-                print package_entity
-                ckan.package_register_post(package_entity)
-            except CkanApiError, e:
-                if ckan.last_status == 409:
-                    print "package already exists"
-                else:
-                    raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % (
-                        ckan.last_status, pkg_name, e.args))
-
-
-            #add to group
-
-            group_name = name_munge(doc.value['metadata']["Agency"][:100])
-            try:
-                print ckan.group_entity_get(group_name)
-
-                # Update the group details
-                group_entity = ckan.last_message
-                print "group exists"
-                if 'packages' in group_entity.keys():
-                    group_entity['packages'] = list(set(group_entity['packages'] + [pkg_name]))
-                else:
-                    group_entity['packages'] = [pkg_name]
-                ckan.group_entity_put(group_entity)
-            except CkanApiError, e:
-                if ckan.last_status == 404:
-                    print "group does not exist, creating"
-                    group_entity = {
-                        'name': group_name,
-                        'title': doc.value['metadata']["Agency"],
-                        'description': doc.value['metadata']["Agency"],
-                        'packages': [pkg_name],
-                        # 'type': "organization" # not allowed via API, use database query
-                        # update "group" set type = 'organization';
-                        }
-                    print group_entity
-                    ckan.group_register_post(group_entity)
-                else:
-                    raise LoaderError('Unexpected status %s adding to group under \'%s\': %r' % (
-                        ckan.last_status, pkg_name, e.args))
-            if 'Download' in doc.value['metadata'].keys():
-                try:
-                    pkg = ckan.package_entity_get(pkg_name)
-                    resources = pkg.get('resources', [])
-                    if len(resources) < len(doc.value['metadata']['Download']):
-                        for resource in doc.value['metadata']['Download']:
-
-                            # http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html
-                            # (KML/KMZ) / (Shapefile) /(Other)
-                            format = "plain"
-                            if resource['format'] == '(XML)':
-                                format = 'xml'
-                            if resource['format'] == '(CSV/XLS)':
-                                format = 'csv'
-                            name = resource['href']
-                            if 'name' in resource.keys():
-                                name = resource['name']
-                            print resource
-                            ckan.add_package_resource(pkg_name, resource['href'], name=name, resource_type='data',
-                                format=format, size=human2bytes(resource['size'].replace(',', '')))
-                    else:
-                        print "resources already exist"
-                except CkanApiError, e:
-                    if ckan.last_status == 404:
-                        print "parent dataset does not exist"
-                    else:
+                        print ckan.last_message
                         raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % (
                             ckan.last_status, pkg_name, e.args))
-
+                pkg = ckan.package_entity_get(pkg_name)
+
+
+                # add resources (downloadable data files)
+                if 'Download' in doc.value['metadata'].keys():
+                    try:
+
+                        resources = pkg.get('resources', [])
+                        if len(resources) < len(doc.value['metadata']['Download']):
+                            for resource in doc.value['metadata']['Download']:
+
+                                # http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html
+                                # (KML/KMZ) / (Shapefile) /(Other)
+                                format = "plain"
+                                if resource['format'] == '(XML)':
+                                    format = 'xml'
+                                if resource['format'] == '(CSV/XLS)':
+                                    format = 'csv'
+                                if resource['format'] == '(Shapefile)':
+                                    format = 'shp'
+                                if resource['format'] == '(KML/KMZ)':
+                                    format = 'kml'
+                                name = resource['href']
+                                if 'name' in resource.keys():
+                                    name = resource['name']
+                                print resource
+                                add_package_resource_cachedurl(ckan, pkg_name, url_fix(resource['href']), name,
+                                                          format, get_license_id(doc.value['metadata']['DCTERMS.License']),
+                                                          human2bytes(resource.get('size', '0B')))
+                        else:
+                            print "resources already exist"
+                    except CkanApiError, e:
+                        if ckan.last_status == 404:
+                            print "parent dataset does not exist"
+                        else:
+                            raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % (
+                                ckan.last_status, pkg_name, e.args))
+

--- /dev/null
+++ b/documents/datagov-resourcereport.py
@@ -1,1 +1,81 @@
+import couchdb
+couch = couchdb.Server('http://127.0.0.1:5984/')
+#couch = couchdb.Server('http://192.168.1.113:5984/')
 
+import urllib
+import urlparse
+import httplib2
+import httplib
+import csv
+
+
+def url_fix(s, charset='utf-8'):
+    """Sometimes you get an URL by a user that just isn't a real
+    URL because it contains unsafe characters like ' ' and so on.  This
+    function can fix some of the problems in a similar way browsers
+    handle data entered by the user:
+
+    :param charset: The target charset for the URL if the url was
+                    given as unicode string.
+    """
+    if isinstance(s, unicode):
+        s = s.encode(charset, 'ignore')
+    if not urlparse.urlparse(s).scheme:
+        s = "http://" + s
+    scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
+    path = urllib.quote(path, '/%')
+    qs = urllib.quote_plus(qs, ':&=')
+    return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))
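+# a quick illustration with a made-up address: url_fix('www.example.gov.au/my data.csv?q=a b')
+# should come back as 'http://www.example.gov.au/my%20data.csv?q=a+b'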
+
+# http://code.activestate.com/recipes/578019-bytes-to-human-human-to-bytes-converter/
+SYMBOLS = {
+    'customary': ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'),
+    'customary_ext': ('byte', 'kilo', 'mega', 'giga', 'tera', 'peta', 'exa',
+                      'zetta', 'iotta'),
+    'iec': ('Bi', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
+    'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi',
+                'zebi', 'yobi'),
+}
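+# SYMBOLS backs the recipe's human2bytes converter (not referenced again in this
+# script); under the customary symbols above, '5.2 MB' would convert to
+# int(5.2 * (1 << 20)) = 5452595 bytes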
+
+
+docsdb = couch['disclosr-documents']
+out = csv.writer(open("output.csv", "w"), delimiter=',', quoting=csv.QUOTE_ALL)
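+# each row: package name, fixed URL, resource name, format,
+# HTTP status (or "ftp"/"500"), content type (or ""/"badurl")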
+if __name__ == "__main__":
+    for doc in docsdb.view('app/datasets'):
+        if doc.value['url'] != "http://data.gov.au/data/" and doc.value['agencyID'] != "qld":
+            # Collect the package metadata.
+            pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
+                              doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100])
+            if 'Download' in doc.value['metadata'].keys() and len(doc.value['metadata']['Download']) > 0:
+                for resource in doc.value['metadata']['Download']:
+                    # http://docs.ckan.org/en/ckan-1.7/domain-model-resource.html
+                    # (KML/KMZ) / (Shapefile) /(Other)
+                    format = "plain"
+                    if resource['format'] == '(XML)':
+                        format = 'xml'
+                    if resource['format'] == '(CSV/XLS)':
+                        format = 'csv'
+                    if resource['format'] == '(Shapefile)':
+                        format = 'shp'
+                    if resource['format'] == '(KML/KMZ)':
+                        format = 'kml'
+                    name = resource['href']
+                    if 'name' in resource.keys():
+                        name = resource['name']
+                    if resource['href'].startswith("ftp"):
+                        out.writerow([pkg_name, url_fix(resource['href']), name, format, "ftp", ""])
+                    else:
+                        try:
+                            h = httplib2.Http(disable_ssl_certificate_validation=True)
+                            resp = h.request(url_fix(resource['href']), 'HEAD')
+                            content_type = resp[0]['content-type'] if 'content-type' in resp[0].keys() else ""
+                            out.writerow([pkg_name.encode('ascii', 'ignore'),
+                                          url_fix(resource['href']).encode('ascii', 'ignore'),
+                                          name.encode('ascii', 'ignore'),
+                                          format, resp[0]['status'], content_type])
+                        except (httplib2.ServerNotFoundError, httplib.InvalidURL,
+                                httplib2.RelativeURIError):
+                            out.writerow([pkg_name.encode('ascii', 'ignore'),
+                                          url_fix(resource['href']).encode('ascii', 'ignore'),
+                                          name.encode('ascii', 'ignore'),
+                                          format, "500", "badurl"])
+            else:
+                out.writerow([pkg_name.encode('ascii', 'ignore')])
+

--- a/documents/datagov.py
+++ b/documents/datagov.py
@@ -39,7 +39,9 @@
                                     link = item.find("a")
                                     format = item.find(property="dc:format")
                                     linkobj = {"href":link['href'].replace("/bye?","").strip(),
-                                            "format": format.string.strip(), "size": format.next_sibling.string.strip()}
+                                            "format": format.string.strip()}
+                                    # size is sometimes missing from data.gov.au download links
+                                    if format.next_sibling.string is not None:
+                                        linkobj["size"] = format.next_sibling.string.strip()
                                     if link.string != None:
                                         linkobj["name"] = link.string.strip()
                                     doc['metadata'][last_title].append(linkobj)

--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -200,11 +200,16 @@
         return table.find_all('tr')
 
     def getDate(self, content, entry, doc):
-        date = ''.join(content.stripped_strings).strip()
-        (a, b, c) = date.partition("(")
-        date = self.remove_control_chars(a.replace("Octber", "October").replace("1012","2012"))
-        print date
-        edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+        strdate = ''.join(content.stripped_strings).strip()
+        (a, b, c) = strdate.partition("(")
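+        # e.g. (illustrative): "3 Octber 1012 (PDF)" partitions to "3 Octber 1012 ",
+        # which the typo fixes below turn into "3 October 2012" -> "2012-10-03"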
+        strdate = self.remove_control_chars(a.replace("Octber", "October")
+                                             .replace("Janrurary", "January")
+                                             .replace("1012", "2012"))
+        print strdate
+        try:
+            edate = parse(strdate, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+        except ValueError:
+            print >> sys.stderr, "ERROR date invalid %s " % strdate
+            print >> sys.stderr, "ERROR date originally %s " % ''.join(content.stripped_strings).strip()
+            edate = date.today().strftime("%Y-%m-%d")
         print edate
         doc.update({'date': edate})
         return
@@ -267,8 +272,7 @@
                                          'Summary of FOIrequest received by agency/minister',
                                          'Summary of FOI request received', 'Description of    FOI Request',
                                          "FOI request", 'Results 1 to 67 of 67']
-                            if doc['title'] not in badtitles\
-                            and doc['description'] != '':
+                            if doc['title'] not in badtitles and 'description' in doc.keys() and doc['description'] != '':
                                 print "saving"
                                 foidocsdb.save(doc)
                         else:

--- a/documents/runScrapers.sh
+++ b/documents/runScrapers.sh
@@ -1,3 +1,5 @@
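+# run from the script's own directory so the scrapers/*.py glob below resolves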
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd "$DIR"
 echo "" > /tmp/disclosr-error
 for f in scrapers/*.py; do
 	echo "Processing $f file..";

--- a/documents/scrape.py
+++ b/documents/scrape.py
@@ -90,7 +90,7 @@
 def getLastAttachment(docsdb, url):
     hash = mkhash(url)
     doc = docsdb.get(hash)
-    if doc != None:
+    if doc is not None and "_attachments" in doc:
         last_attachment_fname = doc["_attachments"].keys()[-1]
         last_attachment = docsdb.get_attachment(doc, last_attachment_fname)
         return last_attachment
@@ -112,10 +112,15 @@
     else:
         if (('page_scraped' in doc) and ((time.time() - doc['page_scraped']) < 60 * 24 * 14) or (scrape_again == False)):
             print "Uh oh, trying to scrape URL again too soon!" + hash
-            last_attachment_fname = doc["_attachments"].keys()[-1]
-            last_attachment = docsdb.get_attachment(doc, last_attachment_fname)
-            content = last_attachment
-            return (doc['url'], doc['mime_type'], content.read())
+            if ('file_size' not in doc or doc["file_size"] != "0") and "_attachments" in doc:
+                last_attachment_fname = doc["_attachments"].keys()[-1]
+                last_attachment = docsdb.get_attachment(doc, last_attachment_fname)
+                content = last_attachment.read()
+                mime_type = doc['mime_type']
+            else:
+                # no usable cached copy; hand back None so callers can skip it
+                content = None
+                mime_type = None
+            return (doc['url'], mime_type, content)
 
     req.add_header("User-Agent", "Mozilla/4.0 (compatible; Prometheus webspider; owner maxious@lambdacomplex.org)")
     #if there is a previous version stored in couchdb, load caching helper tags

--- a/documents/scrapers/0049d35216493c545ef5f7f000e6b252.py
+++ b/documents/scrapers/0049d35216493c545ef5f7f000e6b252.py
@@ -42,7 +42,6 @@
                         'data': {'request': '', 'session': '', 'more': ''}
 
                 }
-
-	        amonpy.exception(data)
+		#amonpy.exception(data)
 	pass
 

--- a/documents/scrapers/24bd71114d3975ed9a63ad29624c62c9.py
+++ b/documents/scrapers/24bd71114d3975ed9a63ad29624c62c9.py
@@ -7,7 +7,7 @@
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
         def getTable(self,soup):
-                return soup.find(id = "inner_content")       
+                return soup.find(class_="tborder")       
         def getColumnCount(self):
                 return 2
         def getColumns(self,columns):

--- a/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py
+++ b/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py
@@ -8,40 +8,14 @@
 from datetime import *
 
 #http://www.doughellmann.com/PyMOTW/abc/
-class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
-        def getTable(self,soup):
-                return soup.find(class_ = "inner-column").table       
-        def getRows(self,table):
-                return table.tbody.find_all('tr',recursive=False)
+class ScraperImplementation(genericScrapers.GenericHTMLDisclogScraper):
         def getColumnCount(self):
-                return 3
-        def getColumns(self,columns):
-                (date, title, description) = columns
-                return (date, date, title, description, None)
-        def getDate(self, content, entry, doc):
-		i = 0
-		date = ""
-		for string in content.stripped_strings:
-    			if i ==1:
-				date = string
-			i = i+1
-                edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
-                print edate
-                doc.update({'date': edate})
-                return
-   	def getTitle(self, content, entry, doc):
-		i = 0
-		title = ""
-		for string in content.stripped_strings:
-    			if i < 2:
-				title = title + string
-			i = i+1
-                doc.update({'title': title})
-		#print title
-                return
+                return 0
 
 if __name__ == '__main__':
-    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
-    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+#http://www.csiro.au/Portals/About-CSIRO/How-we-work/Governance/FOI-Request-Disclosure-Log-2012-13.aspx
+#http://www.csiro.au/Portals/About-CSIRO/How-we-work/Governance/FOI-Request-Disclosure-Log-2011-12.aspx
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericHTMLDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericHTMLDisclogScraper)
     ScraperImplementation().doScrape()
 

--- a/documents/scrapers/6fa04af95fbe7de96daa2c7560e0aad3.py
+++ b/documents/scrapers/6fa04af95fbe7de96daa2c7560e0aad3.py
@@ -6,8 +6,6 @@
 
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
-        def getTable(self,soup):
-                return soup.find(id = "content_div_50269").table
         def getColumns(self,columns):
                 (id, date, title, description, notes) = columns
                 return (id, date, title, description, notes)

--- a/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
+++ b/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
@@ -21,11 +21,15 @@
         d.make_links_absolute(base_url = self.getURL())
         for table in d('table').items():
             title= table('thead').text()
-            print title
+            print self.remove_control_chars(title)
             (idate,descA,descB,link,deldate,notes) = table('tbody tr').map(lambda i, e: pq(e).children().eq(1).text())
             links = table('a').map(lambda i, e: pq(e).attr('href'))
             description = descA+" "+descB
-            edate = parse(idate[:12], dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+            try:
+                edate = parse(idate[:12], dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+            except ValueError:
+                # unparseable dates fall back to today rather than aborting the scrape
+                edate = date.today().strftime("%Y-%m-%d")
             print edate
             dochash = scrape.mkhash(self.remove_control_chars(title))
             doc = foidocsdb.get(dochash)

--- a/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py
+++ b/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py
@@ -18,10 +18,10 @@
                                         if mime_type == "text/html" or mime_type == "application/xhtml+xml" or mime_type =="application/xml":
                                         # http://www.crummy.com/software/BeautifulSoup/documentation.html
                                                 soup = BeautifulSoup(htcontent)
-                                                for text in soup.find(id="divFullWidthColumn").stripped_strings:
+                                                for text in soup.find(class_ = "mainContent").stripped_strings:
                                                     description = description + text.encode('ascii', 'ignore')
 
-                                                for atag in soup.find(id="divFullWidthColumn").find_all("a"):
+                                                for atag in soup.find(id="SortingTable").find_all("a"):
                                                       	if atag.has_key('href'):
                                                               	links.append(scrape.fullurl(link,atag['href']))
 

--- a/documents/scrapers/a687a9eaab9e10e9e118d3fd7cf0e13a.py
+++ b/documents/scrapers/a687a9eaab9e10e9e118d3fd7cf0e13a.py
@@ -7,11 +7,11 @@
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
         def getTable(self,soup):
-                return soup.find(id="ctl00_ContentPlaceHolderMainNoAjax_EdtrTD1494_2").table       
+                return soup.find(id="int-content").table       
         def getColumnCount(self):
-                return 4
+                return 3
         def getColumns(self,columns):
-                (blank,id, title,date) = columns
+                (id, title,date) = columns
                 return (id, date, title, title, None)
 
 if __name__ == '__main__':

--- /dev/null
+++ b/documents/scrapers/b0ca7fddcd1c965787daea47f2d32e0a.py
@@ -1,1 +1,17 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
 
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+        def getColumns(self,columns):
+                (id, date, title, description, notes) = columns
+                return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+    ScraperImplementation().doScrape()
+

--- a/documents/scrapers/dfd7414bb0c21a0076ab559901ae0588.py
+++ b/documents/scrapers/dfd7414bb0c21a0076ab559901ae0588.py
@@ -10,7 +10,7 @@
                 (id, date, title, description, notes) = columns
                 return (id, date, title, description, notes)
         def getTable(self,soup):
-                return soup.find(class_ = "content")
+                return soup.find(class_ = "simpletable")
 
 if __name__ == '__main__':
     print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)