From: Maxious
Date: Sat, 28 Jan 2012 06:59:01 +0000
Subject: Scrape required and chart of complied features views
X-Git-Url: https://maxious.lambdacomplex.org/git/?p=disclosr.git&a=commitdiff&h=3732ed2e5b49d875223990d6390ef382c5e27764
---
Scrape required and chart of complied features views

Former-commit-id: 1f70b43713b7686e9f7a8a0f6a5aced655d53221
---
--- a/alaveteli/exportAgencies.csv.php
+++ b/alaveteli/exportAgencies.csv.php
@@ -1,6 +1,21 @@
 $agencies = $db->get_view("app", "byCanonicalName", null, true)->rows;
 //print_r($rows);
 foreach ($agencies as $agency) {
-    // print_r($agency);
-
+    // print_r($agency);
+
     if (isset($agency->value->foiEmail) && $agency->value->foiEmail != "null" && !isset($agency->value->status)) {
         $row = Array();
         $row["#id"] = $agency->id;
@@ -58,9 +75,7 @@
         if (isset($agency->value->shortName)) {
             $row["short_name"] = $agency->value->shortName;
         } else {
-            $out = Array();
-            preg_match_all('/[A-Z]/', trim($agency->value->name), $out);
-            $row["short_name"] = implode("", $out[0]);
+            $row["short_name"] = shortName($agency->value->name);
         }
         $row["notes"] = "";
         $row["publication_scheme"] = (isset($agency->value->infoPublicationSchemeURL) ? $agency->value->infoPublicationSchemeURL : "");
@@ -75,8 +90,8 @@
         if (isset($agency->value->foiBodies)) {
             foreach ($agency->value->foiBodies as $foiBody) {
-                $row['name'] = $foiBody;
-                $row['short_name'] = "";
+                $row['name'] = iconv("UTF-8", "ASCII//TRANSLIT", $foiBody);
+                $row["short_name"] = shortName($foiBody);
                 fputcsv($fp, array_values($row));
             }
         }
--- a/alaveteli/exportCategories.rb.php
+++ b/alaveteli/exportCategories.rb.php
@@ -1,19 +1,20 @@
 $db = $server->get_db('disclosr-agencies');
 try {
     $rows = $db->get_view("app", "byDeptStateName", null, true)->rows;
     //print_r($rows);
     foreach ($rows as $row) {
-        echo ' [ "'.phrase_to_tag(dept_to_portfolio($row->key)).'","'. dept_to_portfolio($row->key).'","part of the '.dept_to_portfolio($row->key).' portfolio" ],'.PHP_EOL;
+        echo ' [ "' . phrase_to_tag(dept_to_portfolio($row->key)) . '","' . dept_to_portfolio($row->key) . '","part of the ' . dept_to_portfolio($row->key) . ' portfolio" ],' .
+        PHP_EOL;
     }
 } catch (SetteeRestClientException $e) {
     setteErrorHandler($e);
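The exportAgencies.csv.php change above factors the inline acronym logic into a shared shortName() helper: a short name is simply the capital letters of the full agency name. A minimal sketch of the same rule in Python (illustration only, not part of the commit):

# Illustration only: mirrors the preg_match_all('/[A-Z]/', ...) rule
# that the new shortName() PHP helper wraps.
import re

def short_name(name):
    return "".join(re.findall(r'[A-Z]', name.strip()))

print short_name("Department of Human Services")  # -> DHS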
--- a/include/common.inc.php
+++ b/include/common.inc.php
@@ -62,7 +62,3 @@
     }
 }
-?>
-
-
-
--- a/include/couchdb.inc.php
+++ b/include/couchdb.inc.php
@@ -1,8 +1,8 @@
 $obj->language = "javascript";
     $obj->views->all->map = "function(doc) { emit(doc._id, doc); };";
     $obj->views->byABN->map = "function(doc) { emit(doc.abn, doc); };";
-    $obj->views->byCanonicalName->map = "function(doc) {
+    $obj->views->byCanonicalName->map = "function(doc) {
         if (doc.parentOrg || doc.orgType == 'FMA-DepartmentOfState') {
             emit(doc.name, doc);
         }
     };";
-    $obj->views->byDeptStateName->map = "function(doc) {
+    $obj->views->byDeptStateName->map = "function(doc) {
         if (doc.orgType == 'FMA-DepartmentOfState') {
             emit(doc.name, doc._id);
         }
     };";
-    $obj->views->parentOrgs->map = "function(doc) {
+    $obj->views->parentOrgs->map = "function(doc) {
         if (doc.parentOrg) {
             emit(doc._id, doc.parentOrg);
         }
@@ -34,15 +34,25 @@
         }
     }
 };";
-
-    $obj->views->foiEmails->map = "function(doc) {
+
+    $obj->views->foiEmails->map = "function(doc) {
         emit(doc._id, doc.foiEmail);
     };";
-
+
     $obj->views->byLastModified->map = "function(doc) { emit(doc.metadata.lastModified, doc); }";
     $obj->views->getActive->map = 'function(doc) { if (doc.status == "active") { emit(doc._id, doc); } };';
    $obj->views->getSuspended->map = 'function(doc) { if (doc.status == "suspended") { emit(doc._id, doc); } };';
-    $obj->views->getScrapeRequired->map = "function(doc) { emit(doc.abn, doc); };";
+    $obj->views->getScrapeRequired->map = "function(doc) {
+        // Date.parse() returns milliseconds since the epoch (or NaN), not a Date object
+        var lastScrape = Date.parse(doc.metadata.lastScraped);
+        var today = new Date();
+        // emit agencies that have never been scraped, or not scraped recently
+        if (!lastScrape || lastScrape + 1000 < today.getTime()) {
+            emit(doc._id, doc);
+        }
+    };";
     $obj->views->showNamesABNs->map = "function(doc) { emit(doc._id, {name: doc.name, abn: doc.abn}); };";
     $obj->views->getConflicts->map = "function(doc) {
         if (doc._conflicts) {
@@ -50,6 +60,26 @@
         }
     }";
     // http://stackoverflow.com/questions/646628/javascript-startswith
+    $obj->views->scoreHas->map = 'if(!String.prototype.startsWith){
+    String.prototype.startsWith = function (str) {
+        return !this.indexOf(str);
+    }
+}
+if(!String.prototype.endsWith){
+    String.prototype.endsWith = function(suffix) {
+        return this.indexOf(suffix, this.length - suffix.length) !== -1;
+    };
+}
+function(doc) {
+    if (typeof(doc["status"]) == "undefined" || doc["status"] != "suspended") {
+        for(var propName in doc) {
+            if(typeof(doc[propName]) != "undefined" && (propName.startsWith("has") || propName.endsWith("URL"))) {
+                emit(propName, 1);
+            }
+        }
+        emit("total", 1);
+    }
+}';
     $obj->views->score->map = 'if(!String.prototype.startsWith){
     String.prototype.startsWith = function (str) {
         return !this.indexOf(str);
     }
@@ -72,19 +102,17 @@
     return $db->save($obj, true);
 }
-if( php_uname('n') == "vanille") {
-
-$server = new SetteeServer('http://192.168.178.21:5984');
-} else
-    if( php_uname('n') == "KYUUBEY") {
-
-$server = new SetteeServer('http://192.168.1.148:5984');
+if (php_uname('n') == "vanille") {
+    $server = new SetteeServer('http://192.168.178.21:5984');
+} else
+if (php_uname('n') == "KYUUBEY") {
+    $server = new SetteeServer('http://192.168.1.148:5984');
 } else {
     $server = new SetteeServer('http://127.0.0.1:5984');
 }
+
 function setteErrorHandler($e) {
     echo $e->getMessage() . "<br>" . PHP_EOL;
 }
-?>
-
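Two of the new views drive the features named in the commit message: getScrapeRequired emits agencies whose metadata.lastScraped is missing or stale, and scoreHas emits a (property name, 1) pair for every "has*" or "*URL" field on a non-suspended agency, plus a "total" row, which is enough to chart complied features. A minimal sketch of reading both views with the same python-couchdb client scrape.py uses; the server URL here is an assumption:

# Illustration only: the server URL is an assumption, the view and
# database names are the ones defined in the design doc above.
import couchdb

couch = couchdb.Server('http://127.0.0.1:5984/')
agencydb = couch['disclosr-agencies']

# agencies the getScrapeRequired map emitted (never or not recently scraped)
for row in agencydb.view('app/getScrapeRequired'):
    print row.id

# tally scoreHas emits client-side (the view defines no reduce function)
counts = {}
for row in agencydb.view('app/scoreHas'):
    counts[row.key] = counts.get(row.key, 0) + row.value
print counts  # {property name: agencies with that feature, ..., 'total': agencies counted}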
" . PHP_EOL; } -?> - --- a/include/template.inc.php +++ b/include/template.inc.php @@ -74,5 +74,3 @@ - --- /dev/null +++ b/scrape.py @@ -1,1 +1,76 @@ +#http://packages.python.org/CouchDB/client.html +import couchdb +import urllib2 +from BeautifulSoup import BeautifulSoup +import re +#http://diveintopython.org/http_web_services/etags.html +class NotModifiedHandler(urllib2.BaseHandler): + def http_error_304(self, req, fp, code, message, headers): + addinfourl = urllib2.addinfourl(fp, headers, req.get_full_url()) + addinfourl.code = code + return addinfourl + +def scrapeAndStore(URL, depth, agency): + URL = "http://www.google.com" + req = urllib2.Request(URL) + etag = 'y' + last_modified = 'y' + #if there is a previous version sotred in couchdb, load caching helper tags + if etag: + req.add_header("If-None-Match", etag) + if last_modified: + req.add_header("If-Modified-Since", last_modified) + + opener = urllib2.build_opener(NotModifiedHandler()) + url_handle = opener.open(req) + headers = url_handle.info() # the addinfourls have the .info() too + etag = headers.getheader("ETag") + last_modified = headers.getheader("Last-Modified") + web_server = headers.getheader("Server") + file_size = headers.getheader("Content-Length") + mime_type = headers.getheader("Content-Type") + + if hasattr(url_handle, 'code'): + if url_handle.code == 304: + print "the web page has not been modified" + else: + #do scraping + html = url_handle.read() + # http://www.crummy.com/software/BeautifulSoup/documentation.html + soup = BeautifulSoup(html) + links = soup.findAll('a') # soup.findAll('a', id=re.compile("^p-")) + for link in links: + print link['href'] + #for each unique link + #if html mimetype + # go down X levels, + # diff with last stored attachment, store in document + #if not + # remember to save parentURL and title (link text that lead to document) + + #store as attachment epoch-filename + else: + print "error %s in downloading %s", url_handle.code, URL + #record/alert error to error database + + + + + + + + + + + +couch = couchdb.Server('http://192.168.1.148:5984/') + +# select database +agencydb = couch['disclosr-agencies'] + +for row in agencydb.view('app/getScrapeRequired'): #not recently scraped agencies view? + agency = agencydb.get(row.id) + print agency['name'] +scrapeAndStore("A",1,1) + --- a/unimplemented/scrape.py +++ /dev/null @@ -1,64 +1,1 @@ -#http://packages.python.org/CouchDB/client.html -import couchdb -import urllib2 -from BeautifulSoup import BeautifulSoup -import re -couch = couchdb.Server() # Assuming localhost:5984 -# If your CouchDB server is running elsewhere, set it up like this: -# couch = couchdb.Server('http://example.com:5984/') - -# select database -agencydb = couch['disclosr-agencies'] - -for row in agencydb.view('app/getScrapeRequired'): #not recently scraped agencies view? 
--- a/unimplemented/scrape.py
+++ /dev/null
@@ -1,64 +1,1 @@
-#http://packages.python.org/CouchDB/client.html
-import couchdb
-import urllib2
-from BeautifulSoup import BeautifulSoup
-import re
-couch = couchdb.Server() # Assuming localhost:5984
-# If your CouchDB server is running elsewhere, set it up like this:
-# couch = couchdb.Server('http://example.com:5984/')
-
-# select database
-agencydb = couch['disclosr-agencies']
-
-for row in agencydb.view('app/getScrapeRequired'): #not recently scraped agencies view?
-    agency = agencydb.get(row.id)
-    print agency['agencyName']
-
-#http://diveintopython.org/http_web_services/etags.html
-class NotModifiedHandler(urllib2.BaseHandler):
-    def http_error_304(self, req, fp, code, message, headers):
-        addinfourl = urllib2.addinfourl(fp, headers, req.get_full_url())
-        addinfourl.code = code
-        return addinfourl
-
-def scrapeAndStore(URL, depth, agency):
-    URL = "http://www.hole.fi/jajvirta/weblog/"
-    req = urllib2.Request(URL)
-
-    #if there is a previous version sotred in couchdb, load caching helper tags
-    if etag:
-        req.add_header("If-None-Match", etag)
-    if last_modified:
-        req.add_header("If-Modified-Since", last_modified)
-
-    opener = urllib2.build_opener(NotModifiedHandler())
-    url_handle = opener.open(req)
-    headers = url_handle.info() # the addinfourls have the .info() too
-    etag = headers.getheader("ETag")
-    last_modified = headers.getheader("Last-Modified")
-    web_server = headers.getheader("Server")
-    file_size = headers.getheader("Content-Length")
-    mime_type = headers.getheader("Content-Type")
-
-    if hasattr(url_handle, 'code') and url_handle.code == 304:
-        print "the web page has not been modified"
-    else:
-        print "error %s in downloading %s", url_handle.code, URL
-        #record/alert error to error database
-
-    #do scraping
-    html = ?
-    # http://www.crummy.com/software/BeautifulSoup/documentation.html
-    soup = BeautifulSoup(html)
-links = soup.findAll('a') # soup.findAll('a', id=re.compile("^p-"))
-for link in links:
-    print link['href']
-    #for each unique link
-    #if html mimetype
-    # go down X levels,
-    # diff with last stored attachment, store in document
-    #if not
-    # remember to save parentURL and title (link text that lead to document)
-
-    #store as attachment epoch-filename
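Both versions of the scraper carry the same TODO: diff each fetched page against the last stored attachment and "store as attachment epoch-filename". A sketch of that storage step with python-couchdb; the disclosr-documents database name and the store_page() helper are assumptions, not part of the commit:

# Illustration only: 'disclosr-documents' and store_page() are
# hypothetical; only the epoch-named-attachment idea comes from the TODOs.
import time
import couchdb

couch = couchdb.Server('http://127.0.0.1:5984/')  # assumed local server
docsdb = couch['disclosr-documents']  # hypothetical database name

def store_page(doc_id, html, mime_type):
    doc = docsdb.get(doc_id)
    if doc is None:
        docsdb[doc_id] = {'type': 'page'}  # create a stub document first
        doc = docsdb[doc_id]
    filename = "%d-page.html" % int(time.time())  # "epoch-filename"
    docsdb.put_attachment(doc, html, filename, mime_type)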