pyquery scraper beginnings
diff --git a/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py b/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
--- a/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
+++ b/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
@@ -7,6 +7,8 @@
 from pyquery import PyQuery as pq
 from lxml import etree
 import urllib
+import dateutil
+from dateutil.parser import parse
 
 class ACMADisclogScraper(genericScrapers.GenericDisclogScraper):
 
@@ -16,21 +18,32 @@
              self.getURL(), "foidocuments", self.getAgencyID())
 
         d = pq(content)
-        d.make_links_absolute()
-        d.table.filter('.ncTAF_DataTABLE')
-        print [i.text() for i in d.items('span')]
-        description = ""
-        dochash = scrape.mkhash(description)
-        doc = foidocsdb.get(dochash)
-        if doc is None:
-            print "saving " + dochash
-            edate = date.today().strftime("%Y-%m-%d")
-            doc = {'_id': dochash, 'agencyID': self.getAgencyID()
-            , 'url': self.getURL(), 'docID': dochash,
-            "date": edate, "title": "Disclosure Log Updated", "description": description}
-            #foidocsdb.save(doc)
-        else:
-            print "already saved"
+        d.make_links_absolute(base_url=self.getURL())
+        for table in d('table').items():
+            title = table('thead').text()
+            print self.remove_control_chars(title)
+            # second cell of each body row: date, two description parts, link, deletion date, notes
+            (idate, descA, descB, link, deldate, notes) = table('tbody tr').map(lambda i, e: pq(e).children().eq(1).text())
+            links = table('a').map(lambda i, e: pq(e).attr('href'))
+            description = descA + " " + descB
+            try:
+                edate = parse(idate[:12], dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+            except ValueError:
+                edate = date.today().strftime("%Y-%m-%d")  # cell held no parseable date
+            print edate
+            # key the record on a hash of the title so reruns don't resave it
+            dochash = scrape.mkhash(self.remove_control_chars(title))
+            doc = foidocsdb.get(dochash)
+            if doc is None:
+                print "saving " + dochash
+                doc = {'_id': dochash, 'agencyID': self.getAgencyID(),
+                       'url': self.getURL(), 'docID': dochash,
+                       'links': links, 'date': edate, 'notes': notes,
+                       'title': title, 'description': description}
+                #print doc
+                foidocsdb.save(doc)
+            else:
+                print "already saved"
 
 
 if __name__ == '__main__':
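
The loop added above walks every table on the page with pyquery: .items() yields each match as a PyQuery object, and .map() collects one value per row. A minimal sketch of that traversal, run against a hypothetical table laid out the way the scraper assumes, with six body rows whose second cell carries the data (the markup and values below are invented for illustration):

from pyquery import PyQuery as pq

html = """
<table>
  <thead><tr><th>FOI disclosure log entry</th></tr></thead>
  <tbody>
    <tr><td>Date</td><td>1 March 2012</td></tr>
    <tr><td>Description</td><td>Request for briefing</td></tr>
    <tr><td>Documents</td><td>Documents released in part</td></tr>
    <tr><td>Link</td><td><a href="/foi/1.pdf">PDF</a></td></tr>
    <tr><td>Deletion date</td><td>n/a</td></tr>
    <tr><td>Notes</td><td>none</td></tr>
  </tbody>
</table>
"""

d = pq(html)
for table in d('table').items():          # .items() yields each match as a PyQuery object
    title = table('thead').text()
    # map() passes (index, element); take the text of each row's second cell
    fields = table('tbody tr').map(lambda i, e: pq(e).children().eq(1).text())
    links = table('a').map(lambda i, e: pq(e).attr('href'))
    print(title)   # FOI disclosure log entry
    print(fields)  # ['1 March 2012', 'Request for briefing', ...]
    print(links)   # ['/foi/1.pdf']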
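
The date handling feeds only the first twelve characters of the date cell to dateutil and falls back to today's date when nothing parses. A small sketch of that path; the sample strings are invented:

from datetime import date
from dateutil.parser import parse

def extract_edate(idate):
    # Trim to 12 characters as the scraper does; fuzzy=True skips stray text
    # around the date and dayfirst=True reads 1/03/2012 as 1 March.
    try:
        return parse(idate[:12], dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
    except ValueError:
        # nothing date-like in the cell: fall back to today's date
        return date.today().strftime("%Y-%m-%d")

print(extract_edate("1 March 2012 to 30 March 2012"))  # 2012-03-01
print(extract_edate("not yet decided"))                # today's date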
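
Saving is keyed on a hash of the table title, so a re-run skips entries it has already stored. A rough sketch of that pattern, with a plain dict standing in for the CouchDB foidocuments database and hashlib standing in for scrape.mkhash (both stand-ins are assumptions, not the project's actual helpers):

import hashlib

foidocsdb = {}  # stand-in for the CouchDB database

def mkhash(text):
    # stand-in for scrape.mkhash: any stable content hash works here
    return hashlib.md5(text.encode('utf-8')).hexdigest()

def save_entry(title, description):
    dochash = mkhash(title)
    if foidocsdb.get(dochash) is None:
        print("saving " + dochash)
        foidocsdb[dochash] = {'_id': dochash, 'docID': dochash,
                              'title': title, 'description': description}
    else:
        print("already saved")

save_entry("Request for briefing documents", "Documents released in part")
save_entry("Request for briefing documents", "Documents released in part")  # already saved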