Merge branch 'master' of ssh://apples.lambdacomplex.org/git/disclosr

Conflicts:
admin/logo.svg

Former-commit-id: 850f69e8af3bbfb3467f5708676d7bf99fce2571

--- a/admin/importOAICFOIrequests.php
+++ b/admin/importOAICFOIrequests.php
@@ -17,13 +17,13 @@
         if ($row >= 1) {
             //          print_r($data);
             $name = trim($data[2]);
-            echo "$name <br>";
+//            echo "$name <br>";
             if ($data[0] != "TOTALS" && $data[0] != "") {
                 if (isset($nametoid[$name])) {
                     $id = $nametoid[$name];
                     $timePeriod = $data[0] . "-Q" . $data[1];
 
-                    echo "$timePeriod <br>";
+//                    echo "$timePeriod <br>";
                     unset($data[0]);
                     unset($data[1]);
                     unset($data[2]);
@@ -38,10 +38,13 @@
                     $result = Array("source" => "http://data.gov.au/dataset/freedom-of-information-quarterly-request-and-review-statistical-data-2011-12/");
                     foreach ($data as $key => $datum) {
                         if ($datum != 0) {
+                            // TODO prefix header with "FOI"
+                            if (isset($stats[$id][$timePeriod][$key])) $datum += $stats[$id][$timePeriod][$key];
                             $result[trim($headers[$key])] = $datum;
                         }
                     }
                     $stats[$id][$timePeriod] = $result;
+                    // TODO merge if already exists
                     //print_r($stats);
                 } else {
                     echo "<br>ERROR NAME MISSING FROM ID LIST<br><bR> $row" . PHP_EOL;
@@ -57,21 +60,24 @@
     }
     fclose($handle);
 }
+echo "all stats loaded successfully";
 foreach ($stats as $id => $stat) {
     echo $id . "<br>" . PHP_EOL;
-    $doc = $db->get($id);
+    $doc = $db->get($id); 
     echo $doc->name . "<br>" . PHP_EOL;
-    print_r($stat);
-    die();
+//    print_r($stat);
     // print_r($doc);
     $changed = false;
     if (!isset($doc->statistics)) {
         $changed = true;
         $doc->statistics = Array();
+    } else {
+        $doc->statistics = object_to_array($doc->statistics);
     }
     foreach ($stat as $timePeriod => $value) {
-        if (!isset($doc->statistics->foiRequests->$timePeriod)
-                || $doc->statistics->foiRequests->$timePeriod != $value) {
+        if (!isset($doc->statistics["foiRequests"][$timePeriod])
+                || $doc->statistics["foiRequests"][$timePeriod] != $value
+		) {
             $changed = true;
             $doc->statistics["foiRequests"][$timePeriod] = $value;
         }
@@ -81,6 +87,7 @@
     } else {
         echo "not changed" . "<br>" . PHP_EOL;
     }
+//print_r($doc);die();
 }
 ?>
 

file:a/admin/logo.svg (deleted)
--- a/admin/logo.svg
+++ /dev/null
@@ -1,58 +1,1 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 15.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
-<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
-	 width="100px" height="100px" viewBox="0 0 100 100" enable-background="new 0 0 100 100" xml:space="preserve">
-<g id="docs">
-	<path d="M86.107,11.001l5.229,9.952c0,0-1.832,0.083-5.297,1.95c-2.312,1.249-6.468,6.246-6.468,6.246l-7.745-11.148
-		c0,0,4.2-3.996,6.513-5.242C81.805,10.889,86.107,11.001,86.107,11.001"/>
-	<path d="M65.604,41.642l-3.151-9.868c0,0-5.44,3.56-8.017,4.074c-1.009,0.202-1.931,0.335-2.75,0.425L65.604,41.642z"/>
-	<path d="M72.326,44.231c0.268-0.226,0.537-0.44,0.804-0.616c3.104-2.054,6.14-3.685,6.269-3.755l1.881-1.005l1.369,1.634
-		l2.865,3.417l3.197-4.334L76.68,30.694l-8.74,11.847L72.326,44.231z"/>
-	<path d="M39.918,31.734l4.825,1.86l3.33,0.212c0.04,0.001,0.269,0.015,0.652,0.015c0.91,0,2.798-0.072,5.196-0.551
-		c1.427-0.284,5.007-2.332,7.093-3.695l2.889-1.888l1.05,3.285l2.496,7.812l5.889-7.985l-4.625,0.163l1.349-6.225l-14.928-3.233
-		l-2.095,9.667c-0.531-2.599-1.841-5.727-1.841-5.727l-13.488,1.522c0,0,0.885,2.206,1.586,4.529L39.918,31.734z"/>
-	<path d="M91.232,66.473c-1.102-0.691-2.322-1.143-3.414-1.434l-3.779,9.805c1.932,1.246,5.197,5.738,5.197,5.738l7.336-9.207
-		C96.572,71.377,93.162,67.682,91.232,66.473z"/>
-	<path d="M93.191,53.076l-3.655,1.225c-0.019,0.007-1.779,0.613-4.117,2.068l2.817,4.869l0.625,1.08
-		c3.307-0.562,7.728-1.923,7.728-1.923l-2.332-15.261c0,0-2.934,1.277-5.853,2.221l2.318,2.766L93.191,53.076z"/>
-	<path d="M79.271,46.91l0.865,0.334l0.459,0.801l3.504,6.05c2.646-1.636,4.61-2.287,4.61-2.287l-8.075-9.632
-		c0,0-2.584,1.391-5.376,3.188L79.271,46.91z"/>
-</g>
-<g id="trunk">
-	
-		<radialGradient id="SVGID_1_" cx="66.0195" cy="72.8555" r="7.877" gradientTransform="matrix(1 0 0 2 0 -72.8555)" gradientUnits="userSpaceOnUse">
-		<stop  offset="0.6503" style="stop-color:#E61E24"/>
-		<stop  offset="1" style="stop-color:#9D1F1F"/>
-	</radialGradient>
-	<circle fill="url(#SVGID_1_)" cx="66.02" cy="72.855" r="7.877"/>
-	
-		<radialGradient id="SVGID_2_" cx="22.6929" cy="72.8555" r="7.877" gradientTransform="matrix(1 0 0 2 0 -72.8555)" gradientUnits="userSpaceOnUse">
-		<stop  offset="0.6503" style="stop-color:#E61E24"/>
-		<stop  offset="1" style="stop-color:#9D1F1F"/>
-	</radialGradient>
-	<circle fill="url(#SVGID_2_)" cx="22.693" cy="72.855" r="7.877"/>
-	
-		<radialGradient id="SVGID_3_" cx="15.4717" cy="59.7266" r="12.1478" gradientTransform="matrix(1 0 0 2 0 -59.7266)" gradientUnits="userSpaceOnUse">
-		<stop  offset="0.6503" style="stop-color:#E61E24"/>
-		<stop  offset="1" style="stop-color:#9D1F1F"/>
-	</radialGradient>
-	<path fill="url(#SVGID_3_)" d="M22.693,62.352c1.915,0,3.705,0.521,5.251,1.42V47.912H12.191L3,57.103v14.438h9.281
-		C12.931,66.369,17.347,62.352,22.693,62.352z"/>
-	
-		<radialGradient id="SVGID_4_" cx="43.4023" cy="68.917" r="9.0473" gradientTransform="matrix(1 0 0 2 0 -68.917)" gradientUnits="userSpaceOnUse">
-		<stop  offset="0.6503" style="stop-color:#E61E24"/>
-		<stop  offset="1" style="stop-color:#9D1F1F"/>
-	</radialGradient>
-	<path fill="url(#SVGID_4_)" d="M55.925,69.986l-9.583-3.695H30.88c1.186,1.475,1.978,3.279,2.225,5.252h22.502
-		C55.674,71.01,55.779,70.49,55.925,69.986z"/>
-	
-		<radialGradient id="SVGID_5_" cx="58.7637" cy="54.9512" r="25.2357" gradientTransform="matrix(1 0 0 2 0 -54.9512)" gradientUnits="userSpaceOnUse">
-		<stop  offset="0.6503" style="stop-color:#E61E24"/>
-		<stop  offset="1" style="stop-color:#9D1F1F"/>
-	</radialGradient>
-	<path fill="url(#SVGID_5_)" d="M79.312,48.238L39.961,33.06l-9.384,24.335l26.381,10.174c1.824-3.115,5.198-5.218,9.062-5.218
-		c5.791,0,10.504,4.712,10.504,10.501c0,0.744-0.082,1.473-0.229,2.174l4.713,1.816l5.943-15.411L79.312,48.238z"/>
-</g>
-</svg>
 

--- /dev/null
+++ b/admin/massdelete.sh
@@ -1,1 +1,10 @@
+for line in `curl "http://localhost:5984/disclosr-foidocuments/_design/app/_view/byAgencyID?reduce=false&keys=%5B\"5716ce0aacfe98f7d638b7a66b7f1040\"%5D&limit=600" | xargs -L1`; do
+#	echo $line
+	id=`echo $line | grep -Po '_id:.*?[^\\\],' | perl -pe 's/_id://; s/^//; s/,$//'`
+	rev=`echo $line | grep -Po 'rev:.*?[^\\\],'| perl -pe 's/rev://; s/^//; s/,$//'`
+	if [ -n "$id" ]; then
+		echo "curl -X DELETE http://localhost:5984/disclosr-foidocuments/$id?rev=$rev"
+		curl -X DELETE http://localhost:5984/disclosr-foidocuments/$id?rev=$rev
+	fi
+done;
 

--- a/admin/neo4jimporter/pom.xml
+++ b/admin/neo4jimporter/pom.xml
@@ -52,7 +52,7 @@
                     </execution>
                 </executions>
                 <configuration>
-                    <mainClass>Importer</mainClass>
+                    <mainClass>StAXSample</mainClass>
                 </configuration>
             </plugin>
    <plugin>

--- a/admin/neo4jimporter/src/main/java/StAXSample.java
+++ b/admin/neo4jimporter/src/main/java/StAXSample.java
@@ -35,13 +35,14 @@
     }
 
     public static void main(String[] args) {
-        if (args.length != 1) {
+        /*if (args.length != 1) {
             System.out.println("Usage: StAXSample file.xml");
             System.exit(-1);
-        }
+        } */
 
         StAXSample ss = new StAXSample();
-        ss.setFilename(args[0]);
+        //ss.setFilename(args[0]);
+        ss.setFilename("agency-sample.xml");
         ss.run();
     }
 

--- a/admin/refreshDesignDoc.php
+++ b/admin/refreshDesignDoc.php
@@ -8,21 +8,13 @@
 $obj->_id = "_design/" . urlencode("app");
 $obj->language = "javascript";
 $obj->views->all->map = "function(doc) {   emit(doc._id, doc); };";
-$obj->views->byDate->map = "function(doc) {   emit(doc.date, doc); };";
+$obj->views->byDate->map = "function(doc) {  if (doc.title != \"Disclosure Log Updated\") { emit(doc.date, doc); } };";
 $obj->views->byDateMonthYear->map = "function(doc) {   emit(doc.date, doc); };";
 $obj->views->byDateMonthYear->reduce = "_count";
 $obj->views->byAgencyID->map = "function(doc) {   emit(doc.agencyID, doc); };";
 $obj->views->byAgencyID->reduce = "_count";
-$obj->views->fieldNames->map = '
-function(doc) {
-for(var propName in doc) {
-     	emit(propName, doc._id);
-	}
-
-}';
-$obj->views->fieldNames->reduce = 'function (key, values, rereduce) {
-    return values.length;
-}';
+$obj->views->fieldNames->map = 'function(doc) { for(var propName in doc) {      	emit(propName, doc._id); 	}}';
+$obj->views->fieldNames->reduce = 'function (key, values, rereduce) {      return values.length; }';
 // allow safe updates (even if slightly slower due to extra: rev-detection check).
 $foidb->save($obj, true);
 
@@ -120,15 +112,25 @@
   }
 }";
 $obj->views->getStatistics->map = 
-"function(doc) {
-  if (doc.statistics) {
-	for (var statisticSet in doc.statistics)  {
-for (var statisticPeriod in doc.statistics[statisticSet])  {
-    emit([statisticSet,statisticPeriod], doc.statistics[statisticSet][statisticPeriod]['value']);
+"
+function (doc) {
+    if (doc.statistics) {
+        for (var statisticSet in doc.statistics) {
+            for (var statisticPeriod in doc.statistics[statisticSet]) {
+                if (doc.statistics[statisticSet][statisticPeriod]['value']) {
+                    emit([statisticSet, statisticPeriod], doc.statistics[statisticSet][statisticPeriod]['value']);
+                } else {
+                    for (var statisticSubSet in doc.statistics[statisticSet][statisticPeriod]) {
+                        if (statisticSubSet != 'source' && statisticSubSet != 'value') {
+                            emit([statisticSubSet, statisticPeriod], doc.statistics[statisticSet][statisticPeriod][statisticSubSet]);
+                        }
+                    }
+                }
+            }
+        }
+    }
 }
-}
-  }
-}";
+";
 $obj->views->getStatistics->reduce = '_sum';
 // http://stackoverflow.com/questions/646628/javascript-startswith
 $obj->views->score->map = 'if(!String.prototype.startsWith){

--- a/documents/about.php
+++ b/documents/about.php
@@ -5,6 +5,7 @@
 include_once('../include/common.inc.php');
 ?>
 <h1>About</h1>
+Written and managed by Alex Sadleir (maxious [at] lambdacomplex.org) 
 <?php
 include_footer_documents();
 ?>

--- a/documents/agency.php
+++ b/documents/agency.php
@@ -19,7 +19,7 @@
     <br>
 <?php
 try {
-    if ($_REQUEST['id']) {
+    if (isset($_REQUEST['id'])) {
         $rows = $foidocsdb->get_view("app", "byAgencyID", $_REQUEST['id'], false, false, false)->rows;
         foreach ($rows as $row) {
             //print_r($rows);
@@ -31,6 +31,12 @@
     } else {
         $rows = $foidocsdb->get_view("app", "byAgencyID?group=true", null, false, false, true)->rows;
         if ($rows) {
+            function cmp($a, $b)
+            {
+                global $idtoname;
+                return strcmp($idtoname[$a->key], $idtoname[$b->key]);
+            }
+            usort($rows, "cmp");
             foreach ($rows as $row) {
                 echo '<a href="agency.php?id=' . $row->key . '">' . $idtoname[$row->key] . " (" . $row->value . " records)</a> <br>\n";
             }
@@ -42,3 +48,4 @@
 echo "<a class='btn btn-large btn-primary' href='?end_key=$endkey' style='float:right;'>next page <i class='icon-circle-arrow-right icon-white'></i></a>";
 include_footer_documents();
 ?>
+

--- a/documents/charts.php
+++ b/documents/charts.php
@@ -5,18 +5,27 @@
 $agenciesdb = $server->get_db('disclosr-agencies');
 
 $idtoname = Array();
+$idtofoirequestssuccessful = Array();
 foreach ($agenciesdb->get_view("app", "byCanonicalName")->rows as $row) {
     $idtoname[$row->id] = trim($row->value->name);
+    $foirequestssuccessful = 0;
+    if (isset($row->value->statistics->foiRequests)) {
+        foreach ($row->value->statistics->foiRequests as $statperiod) {
+            $statperiod = object_to_array($statperiod);
+            if (isset($statperiod["Requests for other information granted in full"])) $foirequestssuccessful += $statperiod["Requests for other information granted in full"];
+            if (isset($statperiod["Requests for other information granted in part"])) $foirequestssuccessful += $statperiod["Requests for other information granted in part"];
+        }
+    }
+    $idtofoirequestssuccessful[$row->id] = $foirequestssuccessful;
 }
 $foidocsdb = $server->get_db('disclosr-foidocuments');
-
 ?>
 <div class="foundation-header">
     <h1><a href="about.php">Charts</a></h1>
     <h4 class="subheader"></h4>
 </div>
 <div id="bydate" style="width:1000px;height:300px;"></div>
-<div id="byagency" style="width:1200px;height:800px;"></div>
+<div id="byagency" style="width:1000px;height:1400px;"></div>
 <script id="source">
     window.onload = function () {
         $(document).ready(function () {
@@ -28,7 +37,6 @@
             <?php
                 try {
                     $rows = $foidocsdb->get_view("app", "byDateMonthYear?group=true",null, false,false,true)->rows;
-
 
                     $dataValues = Array();
                     foreach ($rows as $row) {
@@ -95,6 +103,7 @@
     };
 
     var d2 = [];
+    var d3 = [];
     var agencylabels = [];
     function agencytrackformatter(obj) {
 
@@ -112,12 +121,17 @@
     <?php
         try {
             $rows = $foidocsdb->get_view("app", "byAgencyID?group=true",null, false,false,true)->rows;
-
+            function cmp($a, $b)
+            {
+                return $a->value - $b->value;
+            }
+            usort($rows, "cmp");
 
             $dataValues = Array();
             $i = 0;
             foreach ($rows as $row) {
                 echo "       d2.push([ $row->value,$i]);" . PHP_EOL;
+                echo "       d3.push([ ".$idtofoirequestssuccessful[$row->key].",$i]);" . PHP_EOL;
                 echo "       agencylabels.push(['".str_replace("'","",$idtoname[$row->key])."']);" . PHP_EOL;
 
                 $i++;
@@ -154,7 +168,7 @@
                 autoscaleMargin: 1
             },
             legend: {
-                show: false
+                show: true
             }
         }
     );

--- a/documents/datagov-export-groups.py
+++ b/documents/datagov-export-groups.py
@@ -10,7 +10,11 @@
 # Instantiate the CKAN client.
 #ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api',    api_key='b47b24cd-591d-40c1-8677-d73101d56d1b')
 api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc'
-ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api',
+server = 'data.disclosurelo.gs'
+api_key = 'c30eb6f5-0f90-47e0-bf05-9b1b4e3a461a'
+server = 'ckan.data.gov.au'
+
+ckan = ckanclient.CkanClient(base_location='http://' + server + '/api',
                              api_key=api_key)
 couch = couchdb.Server('http://127.0.0.1:5984/')
 #couch = couchdb.Server('http://192.168.1.113:5984/')

--- a/documents/datagov-export.py
+++ b/documents/datagov-export.py
@@ -14,6 +14,7 @@
 
 import tempfile
 def add_package_resource_cachedurl(ckan, package_name, url, name, format, license_id, size,**kwargs):
+    excluded_apis = "recent-earthquakes,sentinel-hotspots,abc-local-stations,action-bus-service-gtfs-feed-act,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,journey-planner-data-act,live-traffic-cameras-nsw,precis-forecast-national,precis-forecast-national,precis-forecast-new-south-wales,precis-forecast-new-south-wales,precis-forecast-northern-territory,precis-forecast-northern-territory,precis-forecast-queensland,precis-forecast-queensland,precis-forecast-south-australia,precis-forecast-south-australia,precis-forecast-tasmania,precis-forecast-tasmania,precis-forecast-victoria,precis-forecast-victoria,precis-forecast-western-australia,precis-forecast-western-australia,register-of-penalty-notices-nsw,sentinel-hotspots,trove-people-and-organisations-data,weather-data-services-radar,abc-local-stations,act-emergency-services-agency-esa-28093-current-incidents,act-emergency-services-agency-esa-news-alerts,act-government-news-and-events,act-government-summaries-of-cabinet-outcomes,act-magistrates-court-judgements,act-supreme-court-judgements,act-supreme-court-sentences,actpla-latest-news,all-vacant-act-government-jobs,community-engagement-current-engagements,community-engagement-news,edd-media-releases,edd-news-and-events,freedom-of-information-foi-summaries,libraries-act-announcements,nsw-rural-fire-service-current-incidents,nsw-rural-fire-service-major-updates,precis-forecast-new-south-wales,precis-forecast-south-australia,precis-forecast-tasmania,precis-forecast-victoria,sentinel-hotspots,south-australian-road-crash-statistics,trove-people-and-organisations-data,weather-warnings-for-new-south-wales-australian-capital-territory,weather-warnings-for-northern-territory,weather-warnings-for-queensland,weather-warnings-for-south-australia,weather-warnings-for-tasmania,weather-warnings-for-victoria,weather-warnings-for-western-australia".split(",")
     if "xls" in url:
 	format = "xls"
     if "pdf" in url:
@@ -27,8 +28,12 @@
     if mime_type in ["application/xlsx","application/x-xlsx","application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"]:
 	format = "xlsx"
 
-    if content != None:
-	    tf = tempfile.NamedTemporaryFile(delete=False)
+    #put file extensions on for windows users downloading files
+    suffix = name.encode("ascii","ignore").replace("/","")
+    if len(suffix) < 5 or (suffix[-4] != "." and suffix[-5] != "."):
+	suffix = suffix + "." + format
+    if content != None and package_name not in excluded_apis:
+	    tf = tempfile.NamedTemporaryFile(suffix=suffix)
 	    tfName = os.path.abspath(tf.name)
 	    print tfName
 	    tf.seek(0)
@@ -196,7 +201,7 @@
             pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
                               doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
             print pkg_name
-            if pkg_name != "":
+            if pkg_name != "" :
 
                 #add to or create organization using direct API
                 agency = doc.value['metadata']["Agency"]
@@ -262,7 +267,7 @@
                         'author': creator,
                         'maintainer': creator,
                         'license_id': get_license_id(doc.value['metadata']['DCTERMS.License']),
-                        'notes': html2text.html2text(doc.value['metadata']['Description']).replace('AC/a!a','-').replace('AC/a!aC/',"'").replace("AC/a!E",":")replace("A "," "),
+                        'notes': html2text.html2text(doc.value['metadata']['Description']).replace('AC/a!a','-').replace('AC/a!aC/',"'").replace("AC/a!E",":").replace("A "," "),
                         'owner_org': org_id,
                         'extras': extras,
                         'private': (pkg_name not in goodcsvdata and pkg_name not in goodotherdata)

--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -198,11 +198,24 @@
 
     def getRows(self, table):
         return table.find_all('tr')
+    def findColumns(self, row):
+        return row.find_all('td')
+
+    def getDocHash(self, id, date, url):
+        if id.string is None:
+            print "no id, using date as hash"
+            return scrape.mkhash(
+                self.remove_control_chars(
+                    url + (''.join(date.stripped_strings))))
+        else:
+            return scrape.mkhash(
+                self.remove_control_chars(
+                    url + (''.join(id.stripped_strings))))
 
     def getDate(self, content, entry, doc):
         strdate = ''.join(content.stripped_strings).strip()
         (a, b, c) = strdate.partition("(")
-        strdate = self.remove_control_chars(a.replace("Octber", "October").replace("1012","2012")replace("Janrurary", "January").replace("1012","2012"))
+        strdate = self.remove_control_chars(a.replace("Octber", "October").replace("1012","2012").replace("Janrurary", "January").replace("1012","2012"))
         print strdate
         try:
 		edate = parse(strdate, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
@@ -234,20 +247,13 @@
                 soup = BeautifulSoup(content)
                 table = self.getTable(soup)
                 for row in self.getRows(table):
-                    columns = row.find_all('td')
+                    columns = self.findColumns(row)
                     if len(columns) is self.getColumnCount():
                         (id, date, title,
                          description, notes) = self.getColumns(columns)
                         print self.remove_control_chars(
                             ''.join(id.stripped_strings))
-                        if id.string is None:
-                            dochash = scrape.mkhash(
-                                self.remove_control_chars(
-                                    url + (''.join(date.stripped_strings))))
-                        else:
-                            dochash = scrape.mkhash(
-                                self.remove_control_chars(
-                                    url + (''.join(id.stripped_strings))))
+                        dochash = self.getDocHash(id,date,url)
                         doc = foidocsdb.get(dochash)
 
                         if doc is None:

--- a/documents/index.php
+++ b/documents/index.php
@@ -18,6 +18,7 @@
     $idtoname[$row->id] = trim($row->value->name);
 }
 $foidocsdb = $server->get_db('disclosr-foidocuments');
+//print_r($foidocsdb);
 try {
     $rows = $foidocsdb->get_view("app", "byDate", Array($endkey, '0000-00-00'), true, 20, null, $enddocid)->rows;
     if ($rows) {

--- a/documents/robots.txt
+++ b/documents/robots.txt
@@ -3,4 +3,5 @@
 
 User-agent: *
 Disallow: /admin/
+Disallow: /viewDocument.php
 Sitemap: http://disclosurelo.gs/sitemap.xml.php

--- a/documents/rss.xml.php
+++ b/documents/rss.xml.php
@@ -31,11 +31,12 @@
 
 
 //print_r($rows);
+$i = 0;
 foreach ($rows as $row) {
     //Create an empty FeedItem
     $newItem = $TestFeed->createNewItem();
     //Add elements to the feed item
-    $newItem->setTitle($row->value->title);
+    $newItem->setTitle(preg_replace('/[\x00-\x1F\x80-\xFF]/', '', $row->value->title));
     $newItem->setLink("http://disclosurelo.gs/view.php?id=" . $row->value->_id);
     $newItem->setDate(strtotime($row->value->date));
     $newItem->setDescription(displayLogEntry($row, $idtoname));
@@ -43,6 +44,8 @@
     $newItem->addElement('guid', "http://disclosurelo.gs/view.php?id=" . $row->value->_id, array('isPermaLink' => 'true'));
     //Now add the feed item
     $TestFeed->addItem($newItem);
+    $i++;
+    if ($i > 50) break;
 }
 //OK. Everything is done. Now genarate the feed.
 $TestFeed->generateFeed();

--- a/documents/runScrapers.sh
+++ b/documents/runScrapers.sh
@@ -1,7 +1,9 @@
+#!/bin/bash
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+echo $DIR
 cd $DIR
 echo "" > /tmp/disclosr-error
-for f in scrapers/*.py; do
+for f in $DIR/scrapers/*.py; do
 	echo "Processing $f file..";
 	md5=`md5sum /tmp/disclosr-error`
 	python $f 3>&1 1>&2 2>&3 | tee --append /tmp/disclosr-error;
@@ -14,6 +16,7 @@
 		sleep 1;
 	fi
 done
+curl "localhost:5984/disclosr-foidocuments/_design/app/_view/byDate?startkey=\"9999-99-99\"&endkey=\"0000-00-00\"&descending=true&limit=20"
 if [ -s /tmp/disclosr-error ] ; then
     echo "emailling logs..";
     mail -E -s "Disclosr errors" maxious@lambdacomplex.org < /tmp/disclosr-error ;

--- a/documents/scrape.py
+++ b/documents/scrape.py
@@ -197,7 +197,7 @@
                 links = soup.findAll('a') # soup.findAll('a', id=re.compile("^p-"))
                 linkurls = set([])
                 for link in links:
-                    if link.has_key("href"):
+                    if link.has_attr("href"):
                         if link['href'].startswith("http"):
                             # lets not do external links for now
                             # linkurls.add(link['href'])

--- a/documents/scrapers/0e46f8bd1414b1fdd4f0543d54a97500.py
+++ b/documents/scrapers/0e46f8bd1414b1fdd4f0543d54a97500.py
@@ -7,7 +7,7 @@
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
         def getTable(self,soup):
-                return soup.find(id = "maincontentcontainer").table
+               return soup.find(class_ = "contentcontainer").table
         def getColumnCount(self):
                 return 5
         def getColumns(self,columns):

--- a/documents/scrapers/1803322b27286950cab0c543168b5f21.py
+++ b/documents/scrapers/1803322b27286950cab0c543168b5f21.py
@@ -16,7 +16,7 @@
         links = []
         description = ""
         for atag in entry.find_all('a'):
-            if atag.has_key('href'):
+            if atag.has_attr('href'):
                 link = scrape.fullurl(self.getURL(), atag['href'])
                 (url, mime_type, htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
                 if htcontent != None:
@@ -25,7 +25,7 @@
                         row  = soup.find(id="content_div_148050")
                         description = ''.join(row.stripped_strings)
                         for atag in row.find_all("a"):
-                                    if atag.has_key('href'):
+                                    if atag.has_attr('href'):
                                         links.append(scrape.fullurl(link, atag['href']))
 
         if links != []:
@@ -45,14 +45,5 @@
     print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
 
     nsi = ScraperImplementation()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=1"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=2"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=3"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=4"
-    nsi.doScrape()
-    nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=5"
     nsi.doScrape()
 

--- a/documents/scrapers/1d404c4934f74feacd00dcb434e7c10a.py
+++ b/documents/scrapers/1d404c4934f74feacd00dcb434e7c10a.py
@@ -6,8 +6,8 @@
 
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
-        #def getTable(self,soup):
-        #        return soup.find(id = "cphMain_C001_Col01").table       
+        def getTable(self,soup):
+                return soup.findAll('table')[1]     
         def getColumnCount(self):