fix dept of finance scraper

Switch the Dept of Finance scraper from the generic RSS disclog scraper to the
OAIC disclog scraper, with a getDescription() that follows each entry's links
and pulls the FOI details out of the linked pages. Also add an admin script to
mass-delete one agency's documents from CouchDB, sort the agency listing by
agency name, credit the maintainer on the About page, and comment out the
"By Date" navigation link.


Former-commit-id: 8111dc99a1087b94bd5e1fae0d1099b4022a9ddf

--- /dev/null
+++ b/admin/massdelete.sh
@@ -1,1 +1,10 @@
+for line in `curl "http://localhost:5984/disclosr-foidocuments/_design/app/_view/byAgencyID?reduce=false&keys=%5B\"5716ce0aacfe98f7d638b7a66b7f1040\"%5D&limit=600" | xargs -L1`; do
+#	echo $line
+	id=`echo $line | grep -Po '_id:.*?[^\\\],' | perl -pe 's/_id://; s/^//; s/,$//'`
+	rev=`echo $line | grep -Po 'rev:.*?[^\\\],'| perl -pe 's/rev://; s/^//; s/,$//'`
+	if [ -n "$id" ]; then
+		echo "curl -X DELETE http://localhost:5984/disclosr-foidocuments/$id?rev=$rev"
+		curl -X DELETE http://localhost:5984/disclosr-foidocuments/$id?rev=$rev
+	fi
+done;
 

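For reference, a minimal Python sketch of what admin/massdelete.sh does,
assuming the couchdb package (the shell script itself just shells out to curl
and picks the _id/_rev pairs apart with grep and perl); the database, view and
agency key below are taken straight from the script:

    import couchdb  # assumed dependency; not used by the shell script

    server = couchdb.Server('http://localhost:5984')
    db = server['disclosr-foidocuments']
    # fetch up to 600 rows for the hard-coded agency id, then delete each doc
    for row in db.view('app/byAgencyID', reduce=False,
                       keys=['5716ce0aacfe98f7d638b7a66b7f1040'], limit=600):
        doc = db.get(row.id)
        if doc is not None:
            print 'deleting', row.id
            db.delete(doc)
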
--- a/documents/about.php
+++ b/documents/about.php
@@ -5,6 +5,7 @@
 include_once('../include/common.inc.php');
 ?>
 <h1>About</h1>
+Written and managed by Alex Sadleir (maxious [at] lambdacomplex.org) 
 <?php
 include_footer_documents();
 ?>

--- a/documents/agency.php
+++ b/documents/agency.php
@@ -31,6 +31,12 @@
     } else {
         $rows = $foidocsdb->get_view("app", "byAgencyID?group=true", null, false, false, true)->rows;
         if ($rows) {
+            function cmp($a, $b)
+            {
+                global $idtoname;
+                return strcmp($idtoname[$a->key], $idtoname[$b->key]);
+            }
+            usort($rows, "cmp");
             foreach ($rows as $row) {
                 echo '<a href="agency.php?id=' . $row->key . '">' . $idtoname[$row->key] . " (" . $row->value . " records)</a> <br>\n";
             }

--- a/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
+++ b/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
@@ -1,16 +1,54 @@
 import sys,os
 sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
 import genericScrapers
-#RSS feed not detailed
+import dateutil
+from dateutil.parser import *
+from datetime import *
+import scrape
+from bs4 import BeautifulSoup
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
 
-#http://www.doughellmann.com/PyMOTW/abc/
-class ScraperImplementation(genericScrapers.GenericRSSDisclogScraper):
-        def getColumns(self,columns):
-                (id, date, title, description, notes) = columns
-                return (id, date, title, description, notes)
+    def __init__(self):
+        super(ScraperImplementation, self).__init__()
+    def getTable(self, soup):
+        return soup.find(id='content')
+
+    def getDescription(self,content, entry,doc):
+        link = None
+        links = []
+        description = ""
+        for atag in entry.find_all('a'):
+            if atag.has_attr('href'):
+                link = scrape.fullurl(self.getURL(), atag['href'])
+                (url, mime_type, htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
+                if htcontent != None:
+                    if mime_type == "text/html" or mime_type == "application/xhtml+xml" or mime_type =="application/xml":
+                        soup = BeautifulSoup(htcontent)
+                        row  = soup.find(id="foidetails")
+                        if row == None:
+                            row = soup.find(id="content").table
+                        if row == None:
+                            row = soup.find(id="content")
+                        description = ''.join(row.stripped_strings)
+                        for atag in row.find_all("a"):
+                            if atag.has_attr('href'):
+                                links.append(scrape.fullurl(link, atag['href']))
+
+        if links != []:
+            doc.update({'links': links})
+        if description != "":
+            doc.update({ 'description': description})
+
+    def getColumnCount(self):
+        return 3
+
+    def getColumns(self, columns):
+        (id, title, date) = columns
+        return (id, date, title, title, None)
+
 
 if __name__ == '__main__':
-    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericRSSDisclogScraper)
-    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericRSSDisclogScraper)
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
     ScraperImplementation().doScrape()
 
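The linked detail pages don't all share the same markup, so the new
getDescription() tries the id="foidetails" pane first, then the table inside
id="content", then the content div itself; the title also now doubles as the
description, since the disclog table only carries three columns (id, title,
date). A small sketch of that fallback chain against made-up markup (the real
pages are whatever scrape.fetchURL returns for each link):

    from bs4 import BeautifulSoup

    # hypothetical detail page: no foidetails pane, just a content table
    html = '<div id="content"><table><tr><td>FOI 12-345</td><td>released</td></tr></table></div>'
    soup = BeautifulSoup(html)
    row = soup.find(id="foidetails")
    if row is None:
        row = soup.find(id="content").table
    if row is None:
        row = soup.find(id="content")
    print ''.join(row.stripped_strings)  # -> FOI 12-345released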

--- a/documents/template.inc.php
+++ b/documents/template.inc.php
@@ -77,7 +77,7 @@
                     </p>
                     <ul class="nav">
                         <li><a href="agency.php">By Agency</a></li>
-                        <li><a href="date.php">By Date</a></li>
+<!--                        <li><a href="date.php">By Date</a></li> -->
                         <li><a href="disclogsList.php">List of Disclosure Logs</a></li>
                         <li><a href="charts.php">Charts</a></li>
                         <li><a href="about.php">About</a></li>