FOI stats importer fixed


Former-commit-id: 81a6a149848e27565b7a7052d2a7ff4e5aaa9310

--- a/admin/importOAICFOIrequests.php
+++ b/admin/importOAICFOIrequests.php
@@ -17,13 +17,13 @@
         if ($row >= 1) {
             //          print_r($data);
             $name = trim($data[2]);
-            echo "$name <br>";
+//            echo "$name <br>";
             if ($data[0] != "TOTALS" && $data[0] != "") {
                 if (isset($nametoid[$name])) {
                     $id = $nametoid[$name];
                     $timePeriod = $data[0] . "-Q" . $data[1];
 
-                    echo "$timePeriod <br>";
+//                    echo "$timePeriod <br>";
                     unset($data[0]);
                     unset($data[1]);
                     unset($data[2]);
@@ -38,10 +38,13 @@
                     $result = Array("source" => "http://data.gov.au/dataset/freedom-of-information-quarterly-request-and-review-statistical-data-2011-12/");
                     foreach ($data as $key => $datum) {
                         if ($datum != 0) {
+// TODO prefix header with "FOI"
+if (isset($stats[$id][$timePeriod][$key])) $datum += $stats[$id][$timePeriod][$key];
                             $result[trim($headers[$key])] = $datum;
                         }
                     }
                     $stats[$id][$timePeriod] = $result;
+// TODO merge if already exists
                     //print_r($stats);
                 } else {
                     echo "<br>ERROR NAME MISSING FROM ID LIST<br><bR> $row" . PHP_EOL;
@@ -57,21 +60,24 @@
     }
     fclose($handle);
 }
+echo "all stats loaded successfully";
 foreach ($stats as $id => $stat) {
     echo $id . "<br>" . PHP_EOL;
-    $doc = $db->get($id);
+    $doc = $db->get($id); 
     echo $doc->name . "<br>" . PHP_EOL;
-    print_r($stat);
-    die();
+//    print_r($stat);
     // print_r($doc);
     $changed = false;
     if (!isset($doc->statistics)) {
         $changed = true;
         $doc->statistics = Array();
+    } else {
+	$doc->statistics = object_to_array($doc->statistics);
     }
     foreach ($stat as $timePeriod => $value) {
-        if (!isset($doc->statistics->foiRequests->$timePeriod)
-                || $doc->statistics->foiRequests->$timePeriod != $value) {
+        if (!isset($doc->statistics["foiRequests"][$timePeriod])
+                || $doc->statistics["foiRequests"][$timePeriod] != $value
+		) {
             $changed = true;
             $doc->statistics["foiRequests"][$timePeriod] = $value;
         }
@@ -81,6 +87,7 @@
     } else {
         echo "not changed" . "<br>" . PHP_EOL;
     }
+//print_r($doc);die();
 }
 ?>
 

--- a/admin/refreshDesignDoc.php
+++ b/admin/refreshDesignDoc.php
@@ -112,15 +112,25 @@
   }
 }";
 $obj->views->getStatistics->map = 
-"function(doc) {
-  if (doc.statistics) {
-	for (var statisticSet in doc.statistics)  {
-for (var statisticPeriod in doc.statistics[statisticSet])  {
-    emit([statisticSet,statisticPeriod], doc.statistics[statisticSet][statisticPeriod]['value']);
+"
+function (doc) {
+    if (doc.statistics) {
+        for (var statisticSet in doc.statistics) {
+            for (var statisticPeriod in doc.statistics[statisticSet]) {
+                if (doc.statistics[statisticSet][statisticPeriod]['value']) {
+                    emit([statisticSet, statisticPeriod], doc.statistics[statisticSet][statisticPeriod]['value']);
+                } else {
+                    for (var statisticSubSet in doc.statistics[statisticSet][statisticPeriod]) {
+                        if (statisticSubSet != 'source' && statisticSubSet != 'value') {
+                            emit([statisticSubSet, statisticPeriod], doc.statistics[statisticSet][statisticPeriod][statisticSubSet]);
+                        }
+                    }
+                }
+            }
+        }
+    }
 }
-}
-  }
-}";
+";
 $obj->views->getStatistics->reduce = '_sum';
 // http://stackoverflow.com/questions/646628/javascript-startswith
 $obj->views->score->map = 'if(!String.prototype.startsWith){

--- a/documents/charts.php
+++ b/documents/charts.php
@@ -5,11 +5,20 @@
 $agenciesdb = $server->get_db('disclosr-agencies');
 
 $idtoname = Array();
+$idtofoirequestssuccessful = Array();
 foreach ($agenciesdb->get_view("app", "byCanonicalName")->rows as $row) {
     $idtoname[$row->id] = trim($row->value->name);
+    $foirequestssuccessful = 0;
+if(isset($row->value->statistics->foiRequests)) {
+    foreach ($row->value->statistics->foiRequests as $statperiod) {
+	$statperiod=object_to_array($statperiod);
+	if (isset($statperiod["Requests for other information granted in full"])) $foirequestssuccessful += $statperiod["Requests for other information granted in full"];
+	if (isset($statperiod["Requests for other information granted in part"])) $foirequestssuccessful += $statperiod["Requests for other information granted in part"];
+	}
+}
+   $idtofoirequestssuccessful[$row->id] =$foirequestssuccessful;
 }
 $foidocsdb = $server->get_db('disclosr-foidocuments');
-
 ?>
 <div class="foundation-header">
     <h1><a href="about.php">Charts</a></h1>
@@ -28,7 +37,6 @@
             <?php
                 try {
                     $rows = $foidocsdb->get_view("app", "byDateMonthYear?group=true",null, false,false,true)->rows;
-
 
                     $dataValues = Array();
                     foreach ($rows as $row) {
@@ -95,6 +103,7 @@
     };
 
     var d2 = [];
+    var d3 = [];
     var agencylabels = [];
     function agencytrackformatter(obj) {
 
@@ -112,12 +121,17 @@
     <?php
         try {
             $rows = $foidocsdb->get_view("app", "byAgencyID?group=true",null, false,false,true)->rows;
-
+function cmp($a, $b)
+{
+    return $a->value > $b->value;
+}
+usort($rows, "cmp");
 
             $dataValues = Array();
             $i = 0;
             foreach ($rows as $row) {
                 echo "       d2.push([ $row->value,$i]);" . PHP_EOL;
+                echo "       d3.push([ ".$idtofoirequestssuccessful[$row->key].",$i]);" . PHP_EOL;
                 echo "       agencylabels.push(['".str_replace("'","",$idtoname[$row->key])."']);" . PHP_EOL;
 
                 $i++;
@@ -154,7 +168,7 @@
                 autoscaleMargin: 1
             },
             legend: {
-                show: false
+                show: true
             }
         }
     );

--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -198,6 +198,8 @@
 
     def getRows(self, table):
         return table.find_all('tr')
+    def findColumns(self, row):
+        return row.find_all('td')
 
     def getDocHash(self, id,date, url):
                         if id.string is None:
@@ -245,7 +247,7 @@
                 soup = BeautifulSoup(content)
                 table = self.getTable(soup)
                 for row in self.getRows(table):
-                    columns = row.find_all('td')
+                    columns = self.findColumns(row)
                     if len(columns) is self.getColumnCount():
                         (id, date, title,
                          description, notes) = self.getColumns(columns)

--- a/documents/index.php
+++ b/documents/index.php
@@ -18,6 +18,7 @@
     $idtoname[$row->id] = trim($row->value->name);
 }
 $foidocsdb = $server->get_db('disclosr-foidocuments');
+//print_r($foidocsdb);
 try {
     $rows = $foidocsdb->get_view("app", "byDate", Array($endkey, '0000-00-00'), true, 20, null, $enddocid)->rows;
     if ($rows) {

--- a/documents/rss.xml.php
+++ b/documents/rss.xml.php
@@ -31,11 +31,12 @@
 
 
 //print_r($rows);
+$i =0;
 foreach ($rows as $row) {
     //Create an empty FeedItem
     $newItem = $TestFeed->createNewItem();
     //Add elements to the feed item
-    $newItem->setTitle($row->value->title);
+    $newItem->setTitle(preg_replace('/[\x00-\x1F\x80-\xFF]/', '', $row->value->title));
     $newItem->setLink("http://disclosurelo.gs/view.php?id=" . $row->value->_id);
     $newItem->setDate(strtotime($row->value->date));
     $newItem->setDescription(displayLogEntry($row, $idtoname));
@@ -43,6 +44,8 @@
     $newItem->addElement('guid', "http://disclosurelo.gs/view.php?id=" . $row->value->_id, array('isPermaLink' => 'true'));
     //Now add the feed item
     $TestFeed->addItem($newItem);
+$i++;
+if ($i > 50) break;
 }
 //OK. Everything is done. Now genarate the feed.
 $TestFeed->generateFeed();

--- a/documents/runScrapers.sh
+++ b/documents/runScrapers.sh
@@ -16,6 +16,7 @@
 		sleep 1;
 	fi
 done
+curl "localhost:5984/disclosr-foidocuments/_design/app/_view/byDate?startkey=\"9999-99-99\"&endkey=\"0000-00-00\"&descending=true&limit=20"
 if [ -s /tmp/disclosr-error ] ; then
     echo "emailling logs..";
     mail -E -s "Disclosr errors" maxious@lambdacomplex.org < /tmp/disclosr-error ;

--- a/documents/scrapers/1803322b27286950cab0c543168b5f21.py
+++ b/documents/scrapers/1803322b27286950cab0c543168b5f21.py
@@ -45,14 +45,5 @@
     print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
 
     nsi = ScraperImplementation()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=1"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=2"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=3"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=4"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=5"
     nsi.doScrape()
 

--- a/documents/scrapers/1d404c4934f74feacd00dcb434e7c10a.py
+++ b/documents/scrapers/1d404c4934f74feacd00dcb434e7c10a.py
@@ -6,8 +6,8 @@
 
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
-        #def getTable(self,soup):
-        #        return soup.find(id = "cphMain_C001_Col01").table       
+        def getTable(self,soup):
+                return soup.findAll('table')[1]     
         def getColumnCount(self):
                 return 5
         def getColumns(self,columns):

--- a/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py
+++ b/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py
@@ -33,13 +33,16 @@
                  	doc.update({'links': links})
                 if description != "":
                         doc.update({ 'description': description})
-
+	def getRows(self, table):
+		return table.find_all(class_ = "dl-row");
+	def findColumns(self, table):
+		return table.find_all('div');
 	def getColumnCount(self):
 		return 2
 	def getTable(self,soup):
-		return soup.find(class_ = "ms-rteTable-default")
+		return soup.find(class_ = "foi-dl-list")
 	def getColumns(self,columns):
-		(date, title) = columns
+		(title,date) = columns
 		return (title, date, title, title, None)
 
 if __name__ == '__main__':

--- a/documents/scrapers/41a166419503bb50e410c58be54c102f.py
+++ b/documents/scrapers/41a166419503bb50e410c58be54c102f.py
@@ -8,7 +8,7 @@
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
     def getTable(self,soup):
-        return soup.find(id= "ctl00_MSO_ContentDiv").table
+        return soup.find(class_ = "rgMasterTable")
 
     def getColumns(self,columns):
         (id, title, description, notes) = columns

--- a/documents/scrapers/601aedeef4344638d635bdd761e9fdba.py
+++ b/documents/scrapers/601aedeef4344638d635bdd761e9fdba.py
@@ -6,8 +6,8 @@
 
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
-        #def getTable(self,soup):
-        #        return soup.find(id = "ctl00_PlaceHolderMain_intro2__ControlWrapper_CerRichHtmlField").table       
+        def getTable(self,soup):
+                return soup.find(id = "main").table       
         def getColumnCount(self):
                 return 4
         def getColumns(self,columns):

--- a/documents/scrapers/8c9421f852c441910bf1d93a57b31d64.py
+++ b/documents/scrapers/8c9421f852c441910bf1d93a57b31d64.py
@@ -5,6 +5,8 @@
 
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+	def getTable(self,soup):
+                return soup.find(id = "page_content").table
         def getColumns(self,columns):
                 (id, date, title, description, notes) = columns
                 return (id, date, title, description, notes)

--- a/documents/scrapers/ad033512610d8e36886ab6a795f26561.py
+++ b/documents/scrapers/ad033512610d8e36886ab6a795f26561.py
@@ -6,8 +6,8 @@
 
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
-        def getTable(self,soup):
-                return soup.find(id = "_ctl0__ctl0_MainContentPlaceHolder_MainContentPlaceHolder_ContentSpan").findAll("table")[3]
+#        def getTable(self,soup):
+#                return soup.find(_class = "content").table
         def getColumnCount(self):
                 return 5
         def getColumns(self,columns):

--- a/documents/scrapers/bf16d4ba0d306ee03e5a1d32aaba3da1.py
+++ b/documents/scrapers/bf16d4ba0d306ee03e5a1d32aaba3da1.py
@@ -7,7 +7,7 @@
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
         def getTable(self,soup):
-                return soup.find(summary="This table shows every FOI request to date.")       
+                return soup
         def getColumnCount(self):
                 return 5
         def getColumns(self,columns):

--- a/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
+++ b/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
@@ -1,16 +1,54 @@
 import sys,os
 sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
 import genericScrapers
-#RSS feed not detailed
+import dateutil
+from dateutil.parser import *
+from datetime import *
+import scrape
+from bs4 import BeautifulSoup
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
 
-#http://www.doughellmann.com/PyMOTW/abc/
-class ScraperImplementation(genericScrapers.GenericRSSDisclogScraper):
-        def getColumns(self,columns):
-                (id, date, title, description, notes) = columns
-                return (id, date, title, description, notes)
+    def __init__(self):
+        super(ScraperImplementation, self).__init__()
+    def getTable(self, soup):
+        return soup.find(id='zone-content')
+
+    def getDescription(self,content, entry,doc):
+        link = None
+        links = []
+        description = ""
+        for atag in entry.find_all('a'):
+            if atag.has_attr('href'):
+                link = scrape.fullurl(self.getURL(), atag['href'])
+                (url, mime_type, htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
+                if htcontent != None:
+                    if mime_type == "text/html" or mime_type == "application/xhtml+xml" or mime_type =="application/xml":
+                        soup = BeautifulSoup(htcontent)
+                        row  = soup.find(id="foidetails")
+			if row == None:
+				row  = soup.find(id="content").table
+			if row == None:
+				row  = soup.find(id="content")
+                        description = ''.join(row.stripped_strings)
+                        for atag in row.find_all("a"):
+                                    if atag.has_attr('href'):
+                                        links.append(scrape.fullurl(link, atag['href']))
+
+        if links != []:
+                     doc.update({'links': links})
+        if description != "":
+            doc.update({ 'description': description})
+
+    def getColumnCount(self):
+        return 3
+
+    def getColumns(self, columns):
+        (id, title, date) = columns
+        return (id, date, title, title, None)
+
 
 if __name__ == '__main__':
-    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericRSSDisclogScraper)
-    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericRSSDisclogScraper)
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
     ScraperImplementation().doScrape()