Make get_view()'s reduce parameter tri-state; parse ACMA disclosure log tables
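
The $reduce argument to get_view() is now tri-state: null (the default) sends no
reduce parameter, so CouchDB falls back to the view's own behaviour, while an
explicit true or false appends reduce=true or reduce=false to the query string.
A rough usage sketch; $db stands for a SetteeDatabase instance and the design
doc / view names are placeholders:

    $db->get_view("docs", "by_date");                          // sends no reduce parameter
    $db->get_view("docs", "by_date", null, false, 10, false);  // appends &reduce=false&limit=10

The ACMA disclosure log scraper now walks each table on the page, reads the
date, description, link and notes cells, parses the date with dateutil, and
saves one document per table keyed by a hash of the table heading.
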
Former-commit-id: 5633b69c577c7553ef393e89754b6647eedbf014
--- a/couchdb/settee/src/classes/SetteeDatabase.class.php
+++ b/couchdb/settee/src/classes/SetteeDatabase.class.php
@@ -251,7 +251,7 @@
*
* @return void
*/
- function get_view($design_doc, $view_name, $key = null, $descending = false, $limit = false, $reduce=false) {
+ function get_view($design_doc, $view_name, $key = null, $descending = false, $limit = false, $reduce=null) {
$id = "_design/" . urlencode($design_doc);
$view_name = urlencode($view_name);
$id .= "/_view/$view_name";
@@ -269,10 +269,12 @@
if ($descending) {
$data .= "&descending=true";
}
- if ($reduce) {
+ if ($reduce !== null) {
+ if ($reduce == true) {
$data .= "&reduce=true";
} else {
$data .= "&reduce=false";
+ }
}
if ($limit) {
$data .= "&limit=".$limit;
--- a/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
+++ b/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
@@ -7,6 +7,8 @@
from pyquery import PyQuery as pq
from lxml import etree
import urllib
+import dateutil
+from dateutil.parser import parse
class ACMADisclogScraper(genericScrapers.GenericDisclogScraper):
@@ -14,23 +16,30 @@
foidocsdb = scrape.couch['disclosr-foidocuments']
(url, mime_type, content) = scrape.fetchURL(scrape.docsdb,
self.getURL(), "foidocuments", self.getAgencyID())
-
- d = pq(content)
- d.make_links_absolute()
- d.table.filter('.ncTAF_DataTABLE')
- print [i.text() for i in d.items('span')]
- description = ""
- dochash = scrape.mkhash(description)
- doc = foidocsdb.get(dochash)
- if doc is None:
- print "saving " + dochash
- edate = date.today().strftime("%Y-%m-%d")
- doc = {'_id': dochash, 'agencyID': self.getAgencyID()
- , 'url': self.getURL(), 'docID': dochash,
- "date": edate, "title": "Disclosure Log Updated", "description": description}
- #foidocsdb.save(doc)
- else:
- print "already saved"
+
+ d = pq(content.read())
+ d.make_links_absolute(base_url=self.getURL())
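+ # one CouchDB document per table on the disclosure log page, keyed by a hash of the table heading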
+ for table in d('table').items():
+ title = table('thead').text()
+ print title
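+ # expects six rows (date, two description cells, link, deletion date, notes); take the second cell of each row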
+ (idate, descA, descB, link, deldate, notes) = table('tbody tr').map(lambda i, e: pq(e).children().eq(1).text())
+ links = table('a').map(lambda i, e: pq(e).attr('href'))
+ description = descA + " " + descB
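+ # parse the leading 12 characters of the date cell, day-first, letting dateutil skip any stray text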
+ edate = parse(idate[:12], dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+ print edate
+ dochash = scrape.mkhash(self.remove_control_chars(title))
+ doc = foidocsdb.get(dochash)
+ if doc is None:
+ print "saving " + dochash
+ doc = {'_id': dochash, 'agencyID': self.getAgencyID()
+ , 'url': self.getURL(), 'docID': dochash,
+ "links": links,
+ "date": edate, "notes": notes, "title": "Disclosure Log Updated", "description": description}
+ #print doc
+ foidocsdb.save(doc)
+ else:
+ print "already saved"
if __name__ == '__main__':
--- a/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-acma style