From: Alex Sadleir
Date: Sun, 14 Apr 2013 09:42:53 +0000
Subject: cache org ids while importing datagov
X-Git-Url: http://maxious.lambdacomplex.org/git/?p=disclosr.git&a=commitdiff&h=f5133c3240e293b5bbc84ff2cb6a00d1ea8bd900
---
cache org ids while importing datagov

Former-commit-id: 3bb1449efe6725785ffb74bfe250e27913e47885
---
--- a/documents/datagov-export.py
+++ b/documents/datagov-export.py
@@ -3,6 +3,7 @@
 from ckanclient import CkanApiError
 import re
 import html2text # aaronsw :(
+import ckanapi # https://github.com/open-data/ckanapi


 class LoaderError(Exception):
@@ -10,10 +11,12 @@

 # Instantiate the CKAN client.
 #ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api', api_key='b47b24cd-591d-40c1-8677-d73101d56d1b')
+api_key = 'b3ab75e4-afbb-465b-a09d-8171c8c69a7a'
 ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api',
-                             api_key='482a9dd2-a976-4adf-ac77-d71d92a98a52')
-#couch = couchdb.Server('http://127.0.0.1:5984/')
-couch = couchdb.Server('http://192.168.1.113:5984/')
+                             api_key=api_key)
+ckandirect = ckanapi.RemoteCKAN('http://data.disclosurelo.gs', api_key=api_key)
+couch = couchdb.Server('http://127.0.0.1:5984/')
+#couch = couchdb.Server('http://192.168.1.113:5984/')

 # http://code.activestate.com/recipes/578019-bytes-to-human-human-to-bytes-converter/
 SYMBOLS = {
@@ -24,6 +27,7 @@
     'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi', 'zebi', 'yobi'),
 }

+
 def human2bytes(s):
     """
@@ -91,7 +95,6 @@

 def name_munge(input_name):
     return munge(input_name.replace(' ', '').replace('.', '_').replace('&', 'and'))
-    #return input_name.replace(' ', '').replace('.', '_').replace('&', 'and')


 def get_licence_id(licencename):
@@ -112,14 +115,46 @@
         raise Exception(licencename + " not found");
     return map[licencename];

+
 docsdb = couch['disclosr-documents']

 if __name__ == "__main__":
+    orgs_list = []
+    orgs_ids = {}
     for doc in docsdb.view('app/datasets'):
+        print " --- "
         print doc.id
-        if doc.value['url'] != "http://data.gov.au/data/":
+
+        if doc.value['url'] != "http://data.gov.au/data/" and doc.value['agencyID'] != "qld":
+
             # Collect the package metadata.
-            pkg_name = doc.value['url'].replace("http://data.gov.au/dataset/",'').replace('/','');
+            pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
+                              doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
+            print pkg_name
+            #add to or create organization using direct API
+            org_name = name_munge(doc.value['metadata']["Agency"][:100])
+            if org_name not in orgs_list:
+                orgs_list = ckandirect.action.organization_list()['result']
+                #print orgs_list
+                if org_name not in orgs_list:
+                    try:
+                        print "org not found, creating " + org_name
+                        ckandirect.action.organization_create(name=org_name, title=doc.value['metadata']["Agency"],
+                                                              description=doc.value['metadata']["Agency"])
+                        orgs_list.append(org_name)
+                    except ckanapi.ValidationError, e:
+                        print e
+                        raise LoaderError('Unexpected status')
+                else:
+                    print "org found, adding dataset to " + org_name
+
+            # cache org names -> id mapping
+            if org_name not in orgs_ids:
+                org = ckandirect.action.organization_show(id=org_name)
+                orgs_ids[org_name] = org["result"]["id"]
+            org_id = orgs_ids[org_name]
+            print "org id is "+org_id
+
             tags = []
             if doc.value['agencyID'] == "AGIMO":
                 if len(doc.value['metadata']["Keywords / Tags"]) > 0:
@@ -127,13 +162,9 @@
                         tags = tags + doc.value['metadata']["Keywords / Tags"]
                     else:
                         tags = tags + [doc.value['metadata']["Keywords / Tags"]]
-                if 'data.gov.au Category' in doc.value['metadata'].keys() and len(doc.value['metadata']['data.gov.au Category']) > 0:
-                    if hasattr(doc.value['metadata']['data.gov.au Category'], '__iter__'):
-                        tags = tags + doc.value['metadata']['data.gov.au Category']
-                    else:
-                        tags = tags + [doc.value['metadata']['data.gov.au Category']]
+
             tags = [re.sub('[^a-zA-Z0-9-_.]', '', tag.replace('&', 'and')).lower() for tag in tags if tag]
-            print tags
+            #print tags
             package_entity = {
                 'name': pkg_name,
                 'title': doc.value['metadata']['DCTERMS.Title'],
@@ -143,54 +174,65 @@
                 'maintainer': doc.value['metadata']["DCTERMS.Creator"],
                 'licence_id': get_licence_id(doc.value['metadata']['DCTERMS.License']),
                 'notes': html2text.html2text(doc.value['metadata']['Description']),
+                'owner_org': org_id
+                #todo add missing key values like jurisdiction
             }
         if doc.value['agencyID'] == "qld":
             package_entity = doc.value['metadata']

         try:
-            print package_entity
+            #print package_entity
             ckan.package_register_post(package_entity)
         except CkanApiError, e:
-            if ckan.last_status == 409:
+            if ckan.last_message == "{\"name\": [\"That URL is already in use.\"]}":
                 print "package already exists"
             else:
+                print ckan.last_message
                 raise LoaderError('Unexpected status %s checking for package under \'%s\': %r' % (
                     ckan.last_status, pkg_name, e.args))
-
-
-            #add to group
-
-            group_name = name_munge(doc.value['metadata']["Agency"][:100])
-            try:
-                print ckan.group_entity_get(group_name)
-
-                # Update the group details
-                group_entity = ckan.last_message
-                print "group exists"
-                if 'packages' in group_entity.keys():
-                    group_entity['packages'] = list(set(group_entity['packages'] + [pkg_name]))
+        pkg = ckan.package_entity_get(pkg_name)
+
+        # add dataset to group(s)
+        groups = []
+        if 'data.gov.au Category' in doc.value['metadata'].keys() and len(
+                doc.value['metadata']['data.gov.au Category']) > 0:
+            if hasattr(doc.value['metadata']['data.gov.au Category'], '__iter__'):
+                groups = groups + doc.value['metadata']['data.gov.au Category']
             else:
-                    group_entity['packages'] = [pkg_name]
-                ckan.group_entity_put(group_entity)
-            except CkanApiError, e:
-                if ckan.last_status == 404:
-                    print "group does not exist, creating"
-                    group_entity = {
-                        'name': group_name,
-                        'title': doc.value['metadata']["Agency"],
-                        'description': doc.value['metadata']["Agency"],
-                        'packages': [pkg_name],
-                        # 'type': "organization" # not allowed via API, use database query
-                        # update "group" set type = 'organization';
+                groups = groups + [doc.value['metadata']['data.gov.au Category']]
+
+        for group_name in groups:
+            group_url = name_munge(group_name[:100])
+            try:
+                # Update the group details
+                group_entity = ckan.group_entity_get(group_url)
+                print "group "+group_name+" exists"
+                if 'packages' in group_entity.keys():
+                    group_entity['packages'] = list(set(group_entity['packages'] + [pkg_name]))
+                else:
+                    group_entity['packages'] = [pkg_name]
+                ckan.group_entity_put(group_entity)
+            except CkanApiError, e:
+                if ckan.last_status == 404:
+                    print "group "+group_name+" does not exist, creating"
+                    group_entity = {
+                        'name': group_url,
+                        'title': group_name,
+                        'description': group_name,
+                        'packages': [pkg_name]
                     }
-                    print group_entity
-                    ckan.group_register_post(group_entity)
-                else:
-                    raise LoaderError('Unexpected status %s adding to group under \'%s\': %r' % (
-                        ckan.last_status, pkg_name, e.args))
+                    #print group_entity
+                    ckan.group_register_post(group_entity)
+                elif ckan.last_status == 409:
+                    print "group already exists"
+                else:
+                    raise LoaderError('Unexpected status %s adding to group under \'%s\': %r' % (
+                        ckan.last_status, pkg_name, e.args))
+
+        # add resources (downloadable data files)
         if 'Download' in doc.value['metadata'].keys():
             try:
-                pkg = ckan.package_entity_get(pkg_name)
+                resources = pkg.get('resources', [])
                 if len(resources) < len(doc.value['metadata']['Download']):
                     for resource in doc.value['metadata']['Download']:
@@ -207,7 +249,8 @@
                             name = resource['name']
                         print resource
                         ckan.add_package_resource(pkg_name, resource['href'], name=name, resource_type='data',
-                                                  format=format, size=human2bytes(resource['size'].replace(',', '')))
+                                                  format=format,
+                                                  size=human2bytes(resource['size'].replace(',', '')))
                 else:
                     print "resources already exist"
             except CkanApiError, e:
--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -202,7 +202,7 @@
     def getDate(self, content, entry, doc):
         strdate = ''.join(content.stripped_strings).strip()
         (a, b, c) = strdate.partition("(")
-        strdate = self.remove_control_chars(a.replace("Octber", "October").replace("Janrurary", "January").replace("1012","2012"))
+        strdate = self.remove_control_chars(a.replace("Octber", "October").replace("1012", "2012").replace("Janrurary", "January"))
         print strdate
         try:
             edate = parse(strdate, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
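
Note on the orgs_ids cache introduced in datagov-export.py above: organization_show is only called the first time an agency appears, and every later dataset for that agency reuses the cached id, saving one CKAN API round trip per dataset. A minimal standalone sketch of the same lookup-then-cache pattern follows; the portal URL is a placeholder rather than a value from this patch, no API key is passed because organization_show is a read-only action, and the result handling is hedged because the patch reads org["result"]["id"] while current ckanapi releases return the organization dict directly.

import ckanapi

# Hypothetical portal URL for illustration only; organization_show is a read action,
# so no API key is required here.
ckandirect = ckanapi.RemoteCKAN('http://data.example.org')

orgs_ids = {}  # cache: organization name -> organization id


def get_org_id(org_name):
    # Only the first request for an organization hits the API; repeats are served
    # from the dictionary, mirroring the caching done in the import loop above.
    if org_name not in orgs_ids:
        org = ckandirect.action.organization_show(id=org_name)
        # The patch indexes org["result"]["id"]; newer ckanapi versions return the
        # organization dict directly, so cope with both shapes here.
        orgs_ids[org_name] = org["result"]["id"] if "result" in org else org["id"]
    return orgs_ids[org_name]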