Merge branch 'master' of ssh://apples.lambdacomplex.org/git/disclosr
Conflicts:
admin/logo.svg
Former-commit-id: 850f69e8af3bbfb3467f5708676d7bf99fce2571
--- a/admin/importOAICFOIrequests.php
+++ b/admin/importOAICFOIrequests.php
@@ -17,13 +17,13 @@
if ($row >= 1) {
// print_r($data);
$name = trim($data[2]);
- echo "$name <br>";
+// echo "$name <br>";
if ($data[0] != "TOTALS" && $data[0] != "") {
if (isset($nametoid[$name])) {
$id = $nametoid[$name];
$timePeriod = $data[0] . "-Q" . $data[1];
- echo "$timePeriod <br>";
+// echo "$timePeriod <br>";
unset($data[0]);
unset($data[1]);
unset($data[2]);
@@ -38,10 +38,13 @@
$result = Array("source" => "http://data.gov.au/dataset/freedom-of-information-quarterly-request-and-review-statistical-data-2011-12/");
foreach ($data as $key => $datum) {
if ($datum != 0) {
+// TODO prefix header with "FOI"
+if (isset($stats[$id][$timePeriod][$key])) $datum += $stats[$id][$timePeriod][$key];
$result[trim($headers[$key])] = $datum;
}
}
$stats[$id][$timePeriod] = $result;
+// TODO merge if already exists
//print_r($stats);
} else {
echo "<br>ERROR NAME MISSING FROM ID LIST<br><bR> $row" . PHP_EOL;
@@ -57,21 +60,24 @@
}
fclose($handle);
}
+echo "all stats loaded successfully";
foreach ($stats as $id => $stat) {
echo $id . "<br>" . PHP_EOL;
- $doc = $db->get($id);
+ $doc = $db->get($id);
echo $doc->name . "<br>" . PHP_EOL;
- print_r($stat);
- die();
+// print_r($stat);
// print_r($doc);
$changed = false;
if (!isset($doc->statistics)) {
$changed = true;
$doc->statistics = Array();
+ } else {
+ $doc->statistics = object_to_array($doc->statistics);
}
foreach ($stat as $timePeriod => $value) {
- if (!isset($doc->statistics->foiRequests->$timePeriod)
- || $doc->statistics->foiRequests->$timePeriod != $value) {
+ if (!isset($doc->statistics["foiRequests"][$timePeriod])
+ || $doc->statistics["foiRequests"][$timePeriod] != $value
+ ) {
$changed = true;
$doc->statistics["foiRequests"][$timePeriod] = $value;
}
@@ -81,6 +87,7 @@
} else {
echo "not changed" . "<br>" . PHP_EOL;
}
+//print_r($doc);die();
}
?>
--- a/admin/logo.svg
+++ /dev/null
@@ -1,58 +1,1 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 15.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
-<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
- width="100px" height="100px" viewBox="0 0 100 100" enable-background="new 0 0 100 100" xml:space="preserve">
-<g id="docs">
- <path d="M86.107,11.001l5.229,9.952c0,0-1.832,0.083-5.297,1.95c-2.312,1.249-6.468,6.246-6.468,6.246l-7.745-11.148
- c0,0,4.2-3.996,6.513-5.242C81.805,10.889,86.107,11.001,86.107,11.001"/>
- <path d="M65.604,41.642l-3.151-9.868c0,0-5.44,3.56-8.017,4.074c-1.009,0.202-1.931,0.335-2.75,0.425L65.604,41.642z"/>
- <path d="M72.326,44.231c0.268-0.226,0.537-0.44,0.804-0.616c3.104-2.054,6.14-3.685,6.269-3.755l1.881-1.005l1.369,1.634
- l2.865,3.417l3.197-4.334L76.68,30.694l-8.74,11.847L72.326,44.231z"/>
- <path d="M39.918,31.734l4.825,1.86l3.33,0.212c0.04,0.001,0.269,0.015,0.652,0.015c0.91,0,2.798-0.072,5.196-0.551
- c1.427-0.284,5.007-2.332,7.093-3.695l2.889-1.888l1.05,3.285l2.496,7.812l5.889-7.985l-4.625,0.163l1.349-6.225l-14.928-3.233
- l-2.095,9.667c-0.531-2.599-1.841-5.727-1.841-5.727l-13.488,1.522c0,0,0.885,2.206,1.586,4.529L39.918,31.734z"/>
- <path d="M91.232,66.473c-1.102-0.691-2.322-1.143-3.414-1.434l-3.779,9.805c1.932,1.246,5.197,5.738,5.197,5.738l7.336-9.207
- C96.572,71.377,93.162,67.682,91.232,66.473z"/>
- <path d="M93.191,53.076l-3.655,1.225c-0.019,0.007-1.779,0.613-4.117,2.068l2.817,4.869l0.625,1.08
- c3.307-0.562,7.728-1.923,7.728-1.923l-2.332-15.261c0,0-2.934,1.277-5.853,2.221l2.318,2.766L93.191,53.076z"/>
- <path d="M79.271,46.91l0.865,0.334l0.459,0.801l3.504,6.05c2.646-1.636,4.61-2.287,4.61-2.287l-8.075-9.632
- c0,0-2.584,1.391-5.376,3.188L79.271,46.91z"/>
-</g>
-<g id="trunk">
-
- <radialGradient id="SVGID_1_" cx="66.0195" cy="72.8555" r="7.877" gradientTransform="matrix(1 0 0 2 0 -72.8555)" gradientUnits="userSpaceOnUse">
- <stop offset="0.6503" style="stop-color:#E61E24"/>
- <stop offset="1" style="stop-color:#9D1F1F"/>
- </radialGradient>
- <circle fill="url(#SVGID_1_)" cx="66.02" cy="72.855" r="7.877"/>
-
- <radialGradient id="SVGID_2_" cx="22.6929" cy="72.8555" r="7.877" gradientTransform="matrix(1 0 0 2 0 -72.8555)" gradientUnits="userSpaceOnUse">
- <stop offset="0.6503" style="stop-color:#E61E24"/>
- <stop offset="1" style="stop-color:#9D1F1F"/>
- </radialGradient>
- <circle fill="url(#SVGID_2_)" cx="22.693" cy="72.855" r="7.877"/>
-
- <radialGradient id="SVGID_3_" cx="15.4717" cy="59.7266" r="12.1478" gradientTransform="matrix(1 0 0 2 0 -59.7266)" gradientUnits="userSpaceOnUse">
- <stop offset="0.6503" style="stop-color:#E61E24"/>
- <stop offset="1" style="stop-color:#9D1F1F"/>
- </radialGradient>
- <path fill="url(#SVGID_3_)" d="M22.693,62.352c1.915,0,3.705,0.521,5.251,1.42V47.912H12.191L3,57.103v14.438h9.281
- C12.931,66.369,17.347,62.352,22.693,62.352z"/>
-
- <radialGradient id="SVGID_4_" cx="43.4023" cy="68.917" r="9.0473" gradientTransform="matrix(1 0 0 2 0 -68.917)" gradientUnits="userSpaceOnUse">
- <stop offset="0.6503" style="stop-color:#E61E24"/>
- <stop offset="1" style="stop-color:#9D1F1F"/>
- </radialGradient>
- <path fill="url(#SVGID_4_)" d="M55.925,69.986l-9.583-3.695H30.88c1.186,1.475,1.978,3.279,2.225,5.252h22.502
- C55.674,71.01,55.779,70.49,55.925,69.986z"/>
-
- <radialGradient id="SVGID_5_" cx="58.7637" cy="54.9512" r="25.2357" gradientTransform="matrix(1 0 0 2 0 -54.9512)" gradientUnits="userSpaceOnUse">
- <stop offset="0.6503" style="stop-color:#E61E24"/>
- <stop offset="1" style="stop-color:#9D1F1F"/>
- </radialGradient>
- <path fill="url(#SVGID_5_)" d="M79.312,48.238L39.961,33.06l-9.384,24.335l26.381,10.174c1.824-3.115,5.198-5.218,9.062-5.218
- c5.791,0,10.504,4.712,10.504,10.501c0,0.744-0.082,1.473-0.229,2.174l4.713,1.816l5.943-15.411L79.312,48.238z"/>
-</g>
-</svg>
--- /dev/null
+++ b/admin/massdelete.sh
@@ -1,1 +1,10 @@
+for line in `curl "http://localhost:5984/disclosr-foidocuments/_design/app/_view/byAgencyID?reduce=false&keys=%5B\"5716ce0aacfe98f7d638b7a66b7f1040\"%5D&limit=600" | xargs -L1`; do
+# echo $line
+ id=`echo $line | grep -Po '_id:.*?[^\\\],' | perl -pe 's/_id://; s/^//; s/,$//'`
+ rev=`echo $line | grep -Po 'rev:.*?[^\\\],'| perl -pe 's/rev://; s/^//; s/,$//'`
+ if [ -n "$id" ]; then
+ echo "curl -X DELETE http://localhost:5984/disclosr-foidocuments/$id?rev=$rev"
+ curl -X DELETE http://localhost:5984/disclosr-foidocuments/$id?rev=$rev
+ fi
+done;
--- a/admin/neo4jimporter/pom.xml
+++ b/admin/neo4jimporter/pom.xml
@@ -52,7 +52,7 @@
</execution>
</executions>
<configuration>
- <mainClass>Importer</mainClass>
+ <mainClass>StAXSample</mainClass>
</configuration>
</plugin>
<plugin>
--- a/admin/neo4jimporter/src/main/java/StAXSample.java
+++ b/admin/neo4jimporter/src/main/java/StAXSample.java
@@ -35,13 +35,14 @@
}
public static void main(String[] args) {
- if (args.length != 1) {
+ /*if (args.length != 1) {
System.out.println("Usage: StAXSample file.xml");
System.exit(-1);
- }
+ } */
StAXSample ss = new StAXSample();
- ss.setFilename(args[0]);
+ //ss.setFilename(args[0]);
+ ss.setFilename("agency-sample.xml");
ss.run();
}
--- a/admin/refreshDesignDoc.php
+++ b/admin/refreshDesignDoc.php
@@ -8,21 +8,13 @@
$obj->_id = "_design/" . urlencode("app");
$obj->language = "javascript";
$obj->views->all->map = "function(doc) { emit(doc._id, doc); };";
-$obj->views->byDate->map = "function(doc) { emit(doc.date, doc); };";
+$obj->views->byDate->map = "function(doc) { if (doc.title != \"Disclosure Log Updated\") { emit(doc.date, doc); } };";
$obj->views->byDateMonthYear->map = "function(doc) { emit(doc.date, doc); };";
$obj->views->byDateMonthYear->reduce = "_count";
$obj->views->byAgencyID->map = "function(doc) { emit(doc.agencyID, doc); };";
$obj->views->byAgencyID->reduce = "_count";
-$obj->views->fieldNames->map = '
-function(doc) {
-for(var propName in doc) {
- emit(propName, doc._id);
- }
-
-}';
-$obj->views->fieldNames->reduce = 'function (key, values, rereduce) {
- return values.length;
-}';
+$obj->views->fieldNames->map = 'function(doc) { for(var propName in doc) { emit(propName, doc._id); }}';
+$obj->views->fieldNames->reduce = 'function (key, values, rereduce) { return values.length; }';
// allow safe updates (even if slightly slower due to extra: rev-detection check).
$foidb->save($obj, true);
@@ -120,15 +112,25 @@
}
}";
$obj->views->getStatistics->map =
-"function(doc) {
- if (doc.statistics) {
- for (var statisticSet in doc.statistics) {
-for (var statisticPeriod in doc.statistics[statisticSet]) {
- emit([statisticSet,statisticPeriod], doc.statistics[statisticSet][statisticPeriod]['value']);
+"
+function (doc) {
+ if (doc.statistics) {
+ for (var statisticSet in doc.statistics) {
+ for (var statisticPeriod in doc.statistics[statisticSet]) {
+ if (doc.statistics[statisticSet][statisticPeriod]['value']) {
+ emit([statisticSet, statisticPeriod], doc.statistics[statisticSet][statisticPeriod]['value']);
+ } else {
+ for (var statisticSubSet in doc.statistics[statisticSet][statisticPeriod]) {
+ if (statisticSubSet != 'source' && statisticSubSet != 'value') {
+ emit([statisticSubSet, statisticPeriod], doc.statistics[statisticSet][statisticPeriod][statisticSubSet]);
+ }
+ }
+ }
+ }
+ }
+ }
}
-}
- }
-}";
+";
$obj->views->getStatistics->reduce = '_sum';
// http://stackoverflow.com/questions/646628/javascript-startswith
$obj->views->score->map = 'if(!String.prototype.startsWith){
--- a/documents/about.php
+++ b/documents/about.php
@@ -5,6 +5,7 @@
include_once('../include/common.inc.php');
?>
<h1>About</h1>
+Written and managed by Alex Sadleir (maxious [at] lambdacomplex.org)
<?php
include_footer_documents();
?>
--- a/documents/agency.php
+++ b/documents/agency.php
@@ -19,7 +19,7 @@
<br>
<?php
try {
- if ($_REQUEST['id']) {
+ if (isset($_REQUEST['id'])) {
$rows = $foidocsdb->get_view("app", "byAgencyID", $_REQUEST['id'], false, false, false)->rows;
foreach ($rows as $row) {
//print_r($rows);
@@ -31,6 +31,12 @@
} else {
$rows = $foidocsdb->get_view("app", "byAgencyID?group=true", null, false, false, true)->rows;
if ($rows) {
+function cmp($a, $b)
+{
+ global $idtoname;
+ return strcmp($idtoname[$a->key], $idtoname[$b->key]);
+}
+usort($rows, "cmp");
foreach ($rows as $row) {
echo '<a href="agency.php?id=' . $row->key . '">' . $idtoname[$row->key] . " (" . $row->value . " records)</a> <br>\n";
}
@@ -42,3 +48,4 @@
echo "<a class='btn btn-large btn-primary' href='?end_key=$endkey' style='float:right;'>next page <i class='icon-circle-arrow-right icon-white'></i></a>";
include_footer_documents();
?>
+
--- a/documents/charts.php
+++ b/documents/charts.php
@@ -5,18 +5,27 @@
$agenciesdb = $server->get_db('disclosr-agencies');
$idtoname = Array();
+$idtofoirequestssuccessful = Array();
foreach ($agenciesdb->get_view("app", "byCanonicalName")->rows as $row) {
$idtoname[$row->id] = trim($row->value->name);
+ $foirequestssuccessful = 0;
+if(isset($row->value->statistics->foiRequests)) {
+ foreach ($row->value->statistics->foiRequests as $statperiod) {
+ $statperiod=object_to_array($statperiod);
+ if (isset($statperiod["Requests for other information granted in full"])) $foirequestssuccessful += $statperiod["Requests for other information granted in full"];
+ if (isset($statperiod["Requests for other information granted in part"])) $foirequestssuccessful += $statperiod["Requests for other information granted in part"];
+ }
+}
+ $idtofoirequestssuccessful[$row->id] =$foirequestssuccessful;
}
$foidocsdb = $server->get_db('disclosr-foidocuments');
-
?>
<div class="foundation-header">
<h1><a href="about.php">Charts</a></h1>
<h4 class="subheader"></h4>
</div>
<div id="bydate" style="width:1000px;height:300px;"></div>
-<div id="byagency" style="width:1200px;height:800px;"></div>
+<div id="byagency" style="width:1000px;height:1400px;"></div>
<script id="source">
window.onload = function () {
$(document).ready(function () {
@@ -28,7 +37,6 @@
<?php
try {
$rows = $foidocsdb->get_view("app", "byDateMonthYear?group=true",null, false,false,true)->rows;
-
$dataValues = Array();
foreach ($rows as $row) {
@@ -95,6 +103,7 @@
};
var d2 = [];
+ var d3 = [];
var agencylabels = [];
function agencytrackformatter(obj) {
@@ -112,12 +121,17 @@
<?php
try {
$rows = $foidocsdb->get_view("app", "byAgencyID?group=true",null, false,false,true)->rows;
-
+function cmp($a, $b)
+{
+ return $a->value > $b->value;
+}
+usort($rows, "cmp");
$dataValues = Array();
$i = 0;
foreach ($rows as $row) {
echo " d2.push([ $row->value,$i]);" . PHP_EOL;
+ echo " d3.push([ ".$idtofoirequestssuccessful[$row->key].",$i]);" . PHP_EOL;
echo " agencylabels.push(['".str_replace("'","",$idtoname[$row->key])."']);" . PHP_EOL;
$i++;
@@ -154,7 +168,7 @@
autoscaleMargin: 1
},
legend: {
- show: false
+ show: true
}
}
);
--- a/documents/datagov-export-groups.py
+++ b/documents/datagov-export-groups.py
@@ -10,7 +10,11 @@
# Instantiate the CKAN client.
#ckan = ckanclient.CkanClient(base_location='http://localhost:5000/api', api_key='b47b24cd-591d-40c1-8677-d73101d56d1b')
api_key = 'ff34526e-f794-4068-8235-fcbba38cd8bc'
-ckan = ckanclient.CkanClient(base_location='http://data.disclosurelo.gs/api',
+server = 'data.disclosurelo.gs'
+api_key = 'c30eb6f5-0f90-47e0-bf05-9b1b4e3a461a'
+server = 'ckan.data.gov.au'
+
+ckan = ckanclient.CkanClient(base_location='http://' + server + '/api',
api_key=api_key)
couch = couchdb.Server('http://127.0.0.1:5984/')
#couch = couchdb.Server('http://192.168.1.113:5984/')
--- a/documents/datagov-export.py
+++ b/documents/datagov-export.py
@@ -14,6 +14,7 @@
import tempfile
def add_package_resource_cachedurl(ckan, package_name, url, name, format, license_id, size,**kwargs):
+    excluded_apis = "recent-earthquakes,sentinel-hotspots,abc-local-stations,action-bus-service-gtfs-feed-act,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,current-and-future-road-reports-traffic-restrictions-in-south-australia,journey-planner-data-act,live-traffic-cameras-nsw,precis-forecast-national,precis-forecast-national,precis-forecast-new-south-wales,precis-forecast-new-south-wales,precis-forecast-northern-territory,precis-forecast-northern-territory,precis-forecast-queensland,precis-forecast-queensland,precis-forecast-south-australia,precis-forecast-south-australia,precis-forecast-tasmania,precis-forecast-tasmania,precis-forecast-victoria,precis-forecast-victoria,precis-forecast-western-australia,precis-forecast-western-australia,register-of-penalty-notices-nsw,sentinel-hotspots,trove-people-and-organisations-data,weather-data-services-radar,abc-local-stations,act-emergency-services-agency-esa-28093-current-incidents,act-emergency-services-agency-esa-news-alerts,act-government-news-and-events,act-government-summaries-of-cabinet-outcomes,act-magistrates-court-judgements,act-supreme-court-judgements,act-supreme-court-sentences,actpla-latest-news,all-vacant-act-government-jobs,community-engagement-current-engagements,community-engagement-news,edd-media-releases,edd-news-and-events,freedom-of-information-foi-summaries,libraries-act-announcements,nsw-rural-fire-service-current-incidents,nsw-rural-fire-service-major-updates,precis-forecast-new-south-wales,precis-forecast-south-australia,precis-forecast-tasmania,precis-forecast-victoria,sentinel-hotspots,south-australian-road-crash-statistics,trove-people-and-organisations-data,weather-warnings-for-new-south-wales-australian-capital-territory,weather-warnings-for-northern-territory,weather-warnings-for-queensland,weather-warnings-for-south-australia,weather-warnings-for-tasmania,weather-warnings-for-victoria,weather-warnings-for-western-australia".split(",")
if "xls" in url:
format = "xls"
if "pdf" in url:
@@ -27,8 +28,12 @@
if mime_type in ["application/xlsx","application/x-xlsx","application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"]:
format = "xlsx"
- if content != None:
- tf = tempfile.NamedTemporaryFile(delete=False)
+ #put file extensions on for windows users downloading files
+ suffix = name.encode("ascii","ignore").replace("/","")
+ if len(suffix) < 5 or (suffix[-4] != "." and suffix[-5] != "."):
+ suffix = suffix + "." + format
+ if content != None and package_name not in excluded_apis:
+ tf = tempfile.NamedTemporaryFile(suffix=suffix)
tfName = os.path.abspath(tf.name)
print tfName
tf.seek(0)
@@ -196,7 +201,7 @@
pkg_name = filter(lambda x: x in '0123456789abcdefghijklmnopqrstuvwxyz-_',
doc.value['url'].replace("http://data.gov.au/dataset/", '').replace('/', '')[:100]);
print pkg_name
- if pkg_name != "":
+ if pkg_name != "" :
#add to or create organization using direct API
agency = doc.value['metadata']["Agency"]
@@ -262,7 +267,7 @@
'author': creator,
'maintainer': creator,
'license_id': get_license_id(doc.value['metadata']['DCTERMS.License']),
- 'notes': html2text.html2text(doc.value['metadata']['Description']).replace('AC/a!a','-').replace('AC/a!aC/',"'").replace("AC/a!E",":")replace("A "," "),
+ 'notes': html2text.html2text(doc.value['metadata']['Description']).replace('AC/a!a','-').replace('AC/a!aC/',"'").replace("AC/a!E",":").replace("A "," "),
'owner_org': org_id,
'extras': extras,
'private': (pkg_name not in goodcsvdata and pkg_name not in goodotherdata)
--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -198,11 +198,24 @@
def getRows(self, table):
return table.find_all('tr')
+ def findColumns(self, row):
+ return row.find_all('td')
+
+ def getDocHash(self, id,date, url):
+ if id.string is None:
+ print "no id, using date as hash"
+ return scrape.mkhash(
+ self.remove_control_chars(
+ url + (''.join(date.stripped_strings))))
+ else:
+ return scrape.mkhash(
+ self.remove_control_chars(
+ url + (''.join(id.stripped_strings))))
def getDate(self, content, entry, doc):
strdate = ''.join(content.stripped_strings).strip()
(a, b, c) = strdate.partition("(")
- strdate = self.remove_control_chars(a.replace("Octber", "October").replace("1012","2012")replace("Janrurary", "January").replace("1012","2012"))
+ strdate = self.remove_control_chars(a.replace("Octber", "October").replace("1012","2012").replace("Janrurary", "January").replace("1012","2012"))
print strdate
try:
edate = parse(strdate, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
@@ -234,20 +247,13 @@
soup = BeautifulSoup(content)
table = self.getTable(soup)
for row in self.getRows(table):
- columns = row.find_all('td')
+ columns = self.findColumns(row)
if len(columns) is self.getColumnCount():
(id, date, title,
description, notes) = self.getColumns(columns)
print self.remove_control_chars(
''.join(id.stripped_strings))
- if id.string is None:
- dochash = scrape.mkhash(
- self.remove_control_chars(
- url + (''.join(date.stripped_strings))))
- else:
- dochash = scrape.mkhash(
- self.remove_control_chars(
- url + (''.join(id.stripped_strings))))
+ dochash = self.getDocHash(id,date,url)
doc = foidocsdb.get(dochash)
if doc is None:
--- a/documents/index.php
+++ b/documents/index.php
@@ -18,6 +18,7 @@
$idtoname[$row->id] = trim($row->value->name);
}
$foidocsdb = $server->get_db('disclosr-foidocuments');
+//print_r($foidocsdb);
try {
$rows = $foidocsdb->get_view("app", "byDate", Array($endkey, '0000-00-00'), true, 20, null, $enddocid)->rows;
if ($rows) {
--- a/documents/robots.txt
+++ b/documents/robots.txt
@@ -3,4 +3,5 @@
User-agent: *
Disallow: /admin/
+Disallow: /viewDocument.php
Sitemap: http://disclosurelo.gs/sitemap.xml.php
--- a/documents/rss.xml.php
+++ b/documents/rss.xml.php
@@ -31,11 +31,12 @@
//print_r($rows);
+$i =0;
foreach ($rows as $row) {
//Create an empty FeedItem
$newItem = $TestFeed->createNewItem();
//Add elements to the feed item
- $newItem->setTitle($row->value->title);
+ $newItem->setTitle(preg_replace('/[\x00-\x1F\x80-\xFF]/', '', $row->value->title));
$newItem->setLink("http://disclosurelo.gs/view.php?id=" . $row->value->_id);
$newItem->setDate(strtotime($row->value->date));
$newItem->setDescription(displayLogEntry($row, $idtoname));
@@ -43,6 +44,8 @@
$newItem->addElement('guid', "http://disclosurelo.gs/view.php?id=" . $row->value->_id, array('isPermaLink' => 'true'));
//Now add the feed item
$TestFeed->addItem($newItem);
+$i++;
+if ($i > 50) break;
}
//OK. Everything is done. Now genarate the feed.
$TestFeed->generateFeed();
--- a/documents/runScrapers.sh
+++ b/documents/runScrapers.sh
@@ -1,7 +1,9 @@
+#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+echo $DIR
cd $DIR
echo "" > /tmp/disclosr-error
-for f in scrapers/*.py; do
+for f in $DIR/scrapers/*.py; do
echo "Processing $f file..";
md5=`md5sum /tmp/disclosr-error`
python $f 3>&1 1>&2 2>&3 | tee --append /tmp/disclosr-error;
@@ -14,6 +16,7 @@
sleep 1;
fi
done
+curl "localhost:5984/disclosr-foidocuments/_design/app/_view/byDate?startkey=\"9999-99-99\"&endkey=\"0000-00-00\"&descending=true&limit=20"
if [ -s /tmp/disclosr-error ] ; then
echo "emailling logs..";
mail -E -s "Disclosr errors" maxious@lambdacomplex.org < /tmp/disclosr-error ;
--- a/documents/scrape.py
+++ b/documents/scrape.py
@@ -197,7 +197,7 @@
links = soup.findAll('a') # soup.findAll('a', id=re.compile("^p-"))
linkurls = set([])
for link in links:
- if link.has_key("href"):
+ if link.has_attr("href"):
if link['href'].startswith("http"):
# lets not do external links for now
# linkurls.add(link['href'])
--- a/documents/scrapers/0e46f8bd1414b1fdd4f0543d54a97500.py
+++ b/documents/scrapers/0e46f8bd1414b1fdd4f0543d54a97500.py
@@ -7,7 +7,7 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getTable(self,soup):
- return soup.find(id = "maincontentcontainer").table
+ return soup.find(class_ = "contentcontainer").table
def getColumnCount(self):
return 5
def getColumns(self,columns):
--- a/documents/scrapers/1803322b27286950cab0c543168b5f21.py
+++ b/documents/scrapers/1803322b27286950cab0c543168b5f21.py
@@ -16,7 +16,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(), atag['href'])
(url, mime_type, htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -25,7 +25,7 @@
row = soup.find(id="content_div_148050")
description = ''.join(row.stripped_strings)
for atag in row.find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link, atag['href']))
if links != []:
@@ -45,14 +45,5 @@
print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
nsi = ScraperImplementation()
- nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=1"
- nsi.doScrape()
- nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=2"
- nsi.doScrape()
- nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=3"
- nsi.doScrape()
- nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=4"
- nsi.doScrape()
- nsi.disclogURL = "http://www.dbcde.gov.au/about_us/freedom_of_information_disclosure_log/foi_list?result_146858_result_page=5"
nsi.doScrape()
--- a/documents/scrapers/1d404c4934f74feacd00dcb434e7c10a.py
+++ b/documents/scrapers/1d404c4934f74feacd00dcb434e7c10a.py
@@ -6,8 +6,8 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
- #def getTable(self,soup):
- # return soup.find(id = "cphMain_C001_Col01").table
+ def getTable(self,soup):
+ return soup.findAll('table')[1]
def getColumnCount(self):
return 5
def getColumns(self,columns):
--- a/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py
+++ b/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py
@@ -11,7 +11,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -26,20 +26,23 @@
for text in row.stripped_strings:
description = description + text + "\n"
for atag in row.find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
doc.update({'links': links})
if description != "":
doc.update({ 'description': description})
-
+ def getRows(self, table):
+ return table.find_all(class_ = "dl-row");
+ def findColumns(self, table):
+ return table.find_all('div');
def getColumnCount(self):
return 2
def getTable(self,soup):
- return soup.find(class_ = "ms-rteTable-default")
+ return soup.find(class_ = "foi-dl-list")
def getColumns(self,columns):
- (date, title) = columns
+ (title,date) = columns
return (title, date, title, title, None)
if __name__ == '__main__':
--- a/documents/scrapers/41a166419503bb50e410c58be54c102f.py
+++ b/documents/scrapers/41a166419503bb50e410c58be54c102f.py
@@ -8,7 +8,7 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getTable(self,soup):
- return soup.find(id= "ctl00_MSO_ContentDiv").table
+ return soup.find(class_ = "rgMasterTable")
def getColumns(self,columns):
(id, title, description, notes) = columns
--- a/documents/scrapers/53d2884f8afd026096a27bd5051ec50e.py
+++ b/documents/scrapers/53d2884f8afd026096a27bd5051ec50e.py
@@ -16,7 +16,7 @@
link = None
links = []
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -24,7 +24,7 @@
# http://www.crummy.com/software/BeautifulSoup/documentation.html
soup = BeautifulSoup(htcontent)
for atag in soup.find(class_ = "article-content").find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
--- a/documents/scrapers/5716ce0aacfe98f7d638b7a66b7f1040.py
+++ b/documents/scrapers/5716ce0aacfe98f7d638b7a66b7f1040.py
@@ -6,6 +6,11 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ def getDocHash(self, id,date, url):
+ ''' url changes on ever request so ignore for hash '''
+ return scrape.mkhash(
+ self.remove_control_chars(
+ ''.join(id.stripped_strings)))
def getColumnCount(self):
return 4
def getColumns(self,columns):
--- a/documents/scrapers/601aedeef4344638d635bdd761e9fdba.py
+++ b/documents/scrapers/601aedeef4344638d635bdd761e9fdba.py
@@ -6,8 +6,8 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
- #def getTable(self,soup):
- # return soup.find(id = "ctl00_PlaceHolderMain_intro2__ControlWrapper_CerRichHtmlField").table
+ def getTable(self,soup):
+ return soup.find(id = "main").table
def getColumnCount(self):
return 4
def getColumns(self,columns):
--- a/documents/scrapers/69d59284ef0ccd2677394d82d3292abc.py
+++ b/documents/scrapers/69d59284ef0ccd2677394d82d3292abc.py
@@ -6,8 +6,6 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
- def getTable(self,soup):
- return soup.find(id = "centercontent").table
def getColumnCount(self):
return 5
def getColumns(self,columns):
--- a/documents/scrapers/8c9421f852c441910bf1d93a57b31d64.py
+++ b/documents/scrapers/8c9421f852c441910bf1d93a57b31d64.py
@@ -5,6 +5,8 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ def getTable(self,soup):
+ return soup.find(id = "page_content").table
def getColumns(self,columns):
(id, date, title, description, notes) = columns
return (id, date, title, description, notes)
--- a/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py
+++ b/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py
@@ -11,7 +11,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -22,7 +22,7 @@
description = description + text.encode('ascii', 'ignore')
for atag in soup.find(id="SortingTable").find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
@@ -43,7 +43,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -53,7 +53,7 @@
for text in soup.find(id="content-item").stripped_strings:
description = description + text + " \n"
for atag in soup.find(id="content-item").find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
doc.update({'links': links})
--- a/documents/scrapers/ad033512610d8e36886ab6a795f26561.py
+++ b/documents/scrapers/ad033512610d8e36886ab6a795f26561.py
@@ -6,8 +6,8 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
- def getTable(self,soup):
- return soup.find(id = "_ctl0__ctl0_MainContentPlaceHolder_MainContentPlaceHolder_ContentSpan").findAll("table")[3]
+# def getTable(self,soup):
+# return soup.find(_class = "content").table
def getColumnCount(self):
return 5
def getColumns(self,columns):
--- a/documents/scrapers/be9996f0ac58f71f23d074e82d44ead3.py
+++ b/documents/scrapers/be9996f0ac58f71f23d074e82d44ead3.py
@@ -17,7 +17,7 @@
dldivs = soup.find('div',class_="download")
if dldivs != None:
for atag in dldivs.find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(url,atag['href']))
nodldivs = soup.find('div',class_="incompleteNotification")
if nodldivs != None and nodldivs.stripped_strings != None:
--- a/documents/scrapers/bf16d4ba0d306ee03e5a1d32aaba3da1.py
+++ b/documents/scrapers/bf16d4ba0d306ee03e5a1d32aaba3da1.py
@@ -7,7 +7,7 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getTable(self,soup):
- return soup.find(summary="This table shows every FOI request to date.")
+ return soup
def getColumnCount(self):
return 5
def getColumns(self,columns):
--- a/documents/scrapers/d1296c366287f7a9faedf235c7e6df01.py
+++ b/documents/scrapers/d1296c366287f7a9faedf235c7e6df01.py
@@ -6,8 +6,6 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
- def getTable(self,soup):
- return soup.find(id="main").table
def getColumnCount(self):
return 7
def getColumns(self,columns):
--- a/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
+++ b/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
@@ -1,16 +1,54 @@
import sys,os
sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
import genericScrapers
-#RSS feed not detailed
+import dateutil
+from dateutil.parser import *
+from datetime import *
+import scrape
+from bs4 import BeautifulSoup
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
-#http://www.doughellmann.com/PyMOTW/abc/
-class ScraperImplementation(genericScrapers.GenericRSSDisclogScraper):
- def getColumns(self,columns):
- (id, date, title, description, notes) = columns
- return (id, date, title, description, notes)
+ def __init__(self):
+ super(ScraperImplementation, self).__init__()
+ def getTable(self, soup):
+ return soup.find(id='zone-content')
+
+ def getDescription(self,content, entry,doc):
+ link = None
+ links = []
+ description = ""
+ for atag in entry.find_all('a'):
+ if atag.has_attr('href'):
+ link = scrape.fullurl(self.getURL(), atag['href'])
+ (url, mime_type, htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
+ if htcontent != None:
+ if mime_type == "text/html" or mime_type == "application/xhtml+xml" or mime_type =="application/xml":
+ soup = BeautifulSoup(htcontent)
+ row = soup.find(id="foidetails")
+ if row == None:
+ row = soup.find(id="content").table
+ if row == None:
+ row = soup.find(id="content")
+ description = ''.join(row.stripped_strings)
+ for atag in row.find_all("a"):
+ if atag.has_attr('href'):
+ links.append(scrape.fullurl(link, atag['href']))
+
+ if links != []:
+ doc.update({'links': links})
+ if description != "":
+ doc.update({ 'description': description})
+
+ def getColumnCount(self):
+ return 3
+
+ def getColumns(self, columns):
+ (id, title, date) = columns
+ return (id, date, title, title, None)
+
if __name__ == '__main__':
- print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericRSSDisclogScraper)
- print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericRSSDisclogScraper)
+ print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+ print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
ScraperImplementation().doScrape()
--- a/documents/scrapers/f5ce2d1651739704634eb8ca4b2b46d3.py
+++ b/documents/scrapers/f5ce2d1651739704634eb8ca4b2b46d3.py
@@ -7,12 +7,12 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getTable(self,soup):
- return soup.find(id = "ctl00_PlaceHolderMain_PublishingPageContent__ControlWrapper_RichHtmlField").table
+ return soup.find(id = "block-system-main").table
def getColumnCount(self):
- return 7
+ return 2
def getColumns(self,columns):
- (id, date, title, description,link,deldate, notes) = columns
- return (id, date, title, description, notes)
+ (date, title) = columns
+ return (date, date, title, title, None)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- a/documents/sitemap.xml.php
+++ b/documents/sitemap.xml.php
@@ -1,30 +1,48 @@
<?php
include ('../include/common.inc.php');
-$last_updated = date('Y-m-d', @filemtime('cbrfeed.zip'));
header("Content-Type: text/xml");
echo "<?xml version='1.0' encoding='UTF-8'?>";
echo '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">' . "\n";
echo " <url><loc>" . local_url() . "index.php</loc><priority>1.0</priority></url>\n";
foreach (scandir("./") as $file) {
- if (strpos($file, ".php") !== false && $file != "index.php" && $file != "sitemap.xml.php")
+ if (strpos($file, ".php") !== false && ($file != "index.php" && $file != "sitemap.xml.php" && $file != "viewDocument.php")) {
echo " <url><loc>" . local_url() . "$file</loc><priority>0.6</priority></url>\n";
+ }
}
$agenciesdb = $server->get_db('disclosr-agencies');
+$foidocsdb = $server->get_db('disclosr-foidocuments');
try {
$rows = $agenciesdb->get_view("app", "byCanonicalName")->rows;
foreach ($rows as $row) {
echo '<url><loc>' . local_url() . 'agency.php?id=' . $row->value->_id . "</loc><priority>0.3</priority></url>\n";
}
+ unset($rows);
+ $rows = null;
} catch (SetteeRestClientException $e) {
setteErrorHandler($e);
}
-$foidocsdb = $server->get_db('disclosr-foidocuments');
+
+foreach (range(0, 8) as $number) {
try {
- $rows = $foidocsdb->get_view("app", "all")->rows;
+ $rows = $foidocsdb->get_view("app", "all", Array($number,$number+1))->rows;
foreach ($rows as $row) {
echo '<url><loc>' . local_url() . 'view.php?id=' . $row->value->_id . "</loc><priority>0.3</priority></url>\n";
}
+ unset($rows);
+ $rows = null;
+} catch (SetteeRestClientException $e) {
+ setteErrorHandler($e);
+}
+}
+
+try {
+ $rows = $foidocsdb->get_view("app", "all", Array('9','fffffffff'))->rows;
+ foreach ($rows as $row) {
+ echo '<url><loc>' . local_url() . 'view.php?id=' . $row->value->_id . "</loc><priority>0.3</priority></url>\n";
+ }
+ unset($rows);
+ $rows = null;
} catch (SetteeRestClientException $e) {
setteErrorHandler($e);
}
--- a/documents/template.inc.php
+++ b/documents/template.inc.php
@@ -77,7 +77,7 @@
</p>
<ul class="nav">
<li><a href="agency.php">By Agency</a></li>
- <li><a href="date.php">By Date</a></li>
+<!-- <li><a href="date.php">By Date</a></li> -->
<li><a href="disclogsList.php">List of Disclosure Logs</a></li>
<li><a href="charts.php">Charts</a></li>
<li><a href="about.php">About</a></li>
--- /dev/null
+++ b/exportAgencies.csv.php
@@ -1,1 +1,88 @@
+<?php
+// use https://github.com/okfn/publicbodies/blob/master/data/nz.csv format
+include_once("include/common.inc.php");
+setlocale(LC_CTYPE, 'C');
+
+$headers = Array("title","abbr","key","category","parent","parent_key","description","url","jurisdiction","jurisdiction_code","source","source_url","address","contact","email","tags","created_at","updated_at");
+
+$db = $server->get_db('disclosr-agencies');
+
+
+$foiEmail = Array();
+try {
+ $rows = $db->get_view("app", "foiEmails", null, true)->rows;
+ //print_r($rows);
+ foreach ($rows as $row) {
+ $foiEmail[$row->key] = $row->value;
+ }
+} catch (SetteeRestClientException $e) {
+ setteErrorHandler($e);
+ die();
+}
+
+$fp = fopen('php://output', 'w');
+if ($fp && $db) {
+ header('Content-Type: text/csv; charset=utf-8');
+ header('Content-Disposition: attachment; filename="export.' . date("c") . '.csv"');
+ header('Pragma: no-cache');
+ header('Expires: 0');
+ fputcsv($fp, $headers);
+ try {
+ $agencies = $db->get_view("app", "byCanonicalName", null, true)->rows;
+ //print_r($rows);
+ foreach ($agencies as $agency) {
+ // print_r($agency);
+
+ if (isset($agency->value->foiEmail) && $agency->value->foiEmail != "null" && !isset($agency->value->status)) {
+ $row = Array();
+ $row["title"] = trim($agency->value->name);
+ $row["abbr"] = (isset($agency->value->shortName) ? $agency->value->shortName : "");
+ $row["key"] = (isset($agency->value->shortName) ? "au/".strtolower($agency->value->shortName) : "");
+ $row["category"] ="";
+ $row["parent"] ="";
+ $row["parent_key"] ="";
+ $row["description"] = (isset($agency->value->description) ? $agency->value->description : "");
+ $row["url"] = (isset($agency->value->website) ? $agency->value->website : "");
+ $row["jurisdiction"] = "Australia";
+ $row["jurisdiction_code"] = "au";
+
+ $row["source"] ="";
+ $row["source_url"] ="";
+ $row["address"] ="";
+ $row["contact"] ="";
+
+ $row["email"] = (isset($agency->value->foiEmail) ? $agency->value->foiEmail : "");
+ $row["tags"] ="";
+ $row["created_at"] ="";
+ $row["updated_at"] ="";
+
+
+ $otherBodies = Array();
+ if (isset($agency->value->foiBodies)) {
+ $otherBodies = array_merge($otherBodies, $agency->value->foiBodies);
+ }
+ if (isset($agency->value->positions)) {
+ $positions = Array();
+ foreach ($agency->value->positions as $position) {
+ $positions[] = "Office of the ".$position;
+ }
+ $otherBodies = array_merge($otherBodies, $positions);
+ }
+ sort($otherBodies);
+ if (count($otherBodies) > 0) {
+ $row["description"] .= "<br/> This department also responds to requests for information held by " . implode(", ", $otherBodies);
+ }
+
+
+ fputcsv($fp, array_values($row));
+ }
+ }
+ } catch (SetteeRestClientException $e) {
+ setteErrorHandler($e);
+ }
+
+ die;
+}
+?>
+
--- a/getAgency.php
+++ b/getAgency.php
@@ -188,7 +188,7 @@
</div><!--/.well -->
</div><!--/span-->
<div class="span9">';
- echo '<div itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization" about="#' . $row['_id'] . '">';
+ echo '<div itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization org:Organization" about="#' . $row['_id'] . '">';
echo '<div class="hero-unit">
<h1 itemprop="name">' . $row['name'] . '</h1>';
if (isset($row['description'])) {
--- a/graph.php
+++ b/graph.php
@@ -9,13 +9,13 @@
function add_node($id, $label, $parent="") {
global $format;
if ($format == "html") {
- // echo "nodes[\"$id\"] = graph.newNode({label: \"$label\"});" . PHP_EOL;
+ // echo "nodes[\"$id\"] = graph.newNode({label: \"$label\"});" . PHP_EOL;
}
if ($format == "dot" && $label != "") {
- echo "$id [label=\"$label\"];". PHP_EOL;
+ echo "\"$id\" [label=\"$label\", shape=plaintext];". PHP_EOL;
}
if ($format == "gexf") {
- echo "<node id='$id' label=\"".htmlentities($label,ENT_XML1)."\" ".($parent != ""? "pid='$parent'><viz:size value='1'/>":"><viz:size value='2'/>")
+ echo "<node id='$id' label=\"".htmlentities($label)."\" ".($parent != ""? "pid='$parent'><viz:size value='1'/>":"><viz:size value='2'/>")
."<viz:color b='".rand(0,255)."' g='".rand(0,255)."' r='".rand(0,255)."'/>"
."</node>". PHP_EOL;
}
@@ -27,7 +27,7 @@
// echo "graph.newEdge(nodes[\"$from\"], nodes['$to'], {color: '$color'});" . PHP_EOL;
}
if ($format == "dot") {
- echo "$from -> $to ".($color != ""? "[color=$color]":"").";". PHP_EOL;
+ echo "\"$from\" -> \"$to\" ".($color != ""? "[color=$color]":"").";". PHP_EOL;
}
if ($format == "gexf") {
echo "<edge id='$from$to' source='$from' target='$to' />". PHP_EOL;
@@ -55,7 +55,7 @@
$rows = $db->get_view("app", "byCanonicalName", null, true)->rows;
//print_r($rows);
foreach ($rows as $row) {
- add_node($row->id, $row->key);
+ add_node($row->id, $row->value->name);
}
} catch (SetteeRestClientException $e) {
setteErrorHandler($e);
--- a/include/common.inc.php
+++ b/include/common.inc.php
@@ -10,6 +10,7 @@
|| strstr($_SERVER['PHP_SELF'], "documents/")
|| $_SERVER['SERVER_NAME'] == "disclosurelo.gs"
|| $_SERVER['SERVER_NAME'] == "www.disclosurelo.gs"
+ || $_SERVER['SERVER_NAME'] == "direct.disclosurelo.gs"
)
$basePath = "../";
--- a/include/template.inc.php
+++ b/include/template.inc.php
@@ -44,7 +44,7 @@
<![endif]-->
</head>
- <body xmlns:schema="http://schema.org/" xmlns:foaf="http://xmlns.com/foaf/0.1/">
+ <body xmlns:schema="http://schema.org/" xmlns:foaf="http://xmlns.com/foaf/0.1/" xmlns:org="http://www.w3.org/ns/org#" xmlns:skos="http://www.w3.org/2004/02/skos/core#">
<div class="navbar navbar-inverse navbar-fixed-top">
<div class="navbar-inner">
<div class="container-fluid">
--- a/ranking.php
+++ b/ranking.php
@@ -32,8 +32,12 @@
$columnKeys = array_unique(array_merge($columnKeys, array_keys($columns)));
//print_r($columnKeys);
$score = count($columns);
- $scores[$score]++;
- $scoredagencies[] = Array("id"=> $row->key, "website"=> $row->value->website, "name" => $row->value->name, "columns" => $columns, "score" => $score);
+ if (isset($scores[$score])){
+$scores[$score]++;
+} else {
+$scores[$score] = 1;
+}
+ $scoredagencies[] = Array("id"=> $row->key, "website"=> (isset($row->value->website)?$row->value->website:""), "name" => $row->value->name, "columns" => $columns, "score" => $score);
}
}
@@ -74,7 +78,7 @@
} else {
$href = $value;
}
- if ($href[0] == "@") {
+ if (isset($href[0]) && $href[0] == "@") {
$href = str_replace("@","https://twitter.com/",$href);
}
//$href= urlencode($href);
--- a/robots.txt
+++ b/robots.txt
@@ -3,4 +3,5 @@
User-agent: *
Disallow: /admin/
+Disallow: /viewDocument.php
Sitemap: http://orgs.disclosurelo.gs/sitemap.xml.php
--- a/schemas/agency.json.php
+++ b/schemas/agency.json.php
@@ -4,7 +4,7 @@
"description" => "Representation of government agency and online transparency measures",
"type" => "object",
"properties" => Array(
- "name" => Array("type" => "string", "required" => true, "x-itemprop" => "name", "x-property" => "schema:name foaf:name", "x-title" => "Name", "description" => "Name, most recent and broadest"),
+ "name" => Array("type" => "string", "required" => true, "x-itemprop" => "name", "x-property" => "schema:name foaf:name skos:prefLabel ", "x-title" => "Name", "description" => "Name, most recent and broadest"),
"shortName" => Array("type" => "string", "required" => false, "x-title" => "Short Name", "description" => "Name shortened, usually to an acronym"),
"description" => Array("type" => "string", "required" => false, "x-title" => "Description", "description" => "Description of roles and responsiblities of organisation"),
"foiEmail" => Array("type" => "string", "required" => false, "x-title" => "FOI Contact Email", "x-itemprop" => "email", "description" => "FOI contact email if not foi@"),
@@ -14,15 +14,15 @@
"items" => Array("type" => "string")),
"positions" => Array("type" => "array", "required" => true, "x-title" => "Political Positions", "description" => "Ministers and Parliamentary Secretaries",
"items" => Array("type" => "string")),
- "foiBodies" => Array("type" => "array", "required" => true, "x-title" => "FOI Bodies","x-property"=>"schema:members foaf:knows", "description" => "Organisational units within this agency that are subject to FOI Act but are not autonomous",
+ "foiBodies" => Array("type" => "array", "required" => true, "x-title" => "FOI Bodies","x-property"=>"schema:members foaf:knows org:hasSubOrganization", "description" => "Organisational units within this agency that are subject to FOI Act but are not autonomous",
"items" => Array("type" => "string")),
"legislation" => Array("type" => "array", "required" => true, "x-title" => "Legislation", "description" => "Legislation administered by or created for the establishment of this organisation",
"items" => Array("type" => "string")),
- "orgType" => Array("type" => "string", "required" => true, "x-title" => "Organisation Type", "description" => "Org type based on legal formation via FMA/CAC legislation etc."),
- "parentOrg" => Array("type" => "string", "required" => true, "x-title" => "Parent Organisation", "description" => "Parent organisation, usually a department of state"),
+ "orgType" => Array("type" => "string", "required" => true, "x-title" => "Organisation Type", "x-property" => "org:classification", "description" => "Org type based on legal formation via FMA/CAC legislation etc."),
+ "parentOrg" => Array("type" => "string", "required" => true, "x-title" => "Parent Organisation", "x-property" => "org:subOrganizationOf", "description" => "Parent organisation, usually a department of state"),
"website" => Array("type" => "string", "required" => true, "x-title" => "Website", "x-itemprop" => "url", "x-property" => "schema:url foaf:homepage", "description" => "Website URL"),
- "abn" => Array("type" => "string", "required" => true, "x-title" => "Australian Business Number", "description" => "ABN from business register"),
- "established" => Array("type" => "string", "required" => true, "x-title" => "Date established", "description" => "Date established"),
+ "abn" => Array("type" => "string", "required" => true, "x-title" => "Australian Business Number", "x-property" => "org:identifier", "description" => "ABN from business register"),
+ "established" => Array("type" => "string", "required" => true, "x-title" => "Date established", "x-property" => "schema:foundingDate", "description" => "Date established"),
"employees" => Array("type" => "string", "required" => true, "x-title" => "2010-2011 employees", "description" => "2010-2011 employees"),
"contractListURL" => Array("type" => "string", "required" => true, "x-title" => "Contract Listing", "description" => "Departmental and agency contracts, <a href='http://www.aph.gov.au/senate/pubs/standing_orders/d05.htm'>mandated by the Senate</a>" ),
"budgetURL" => Array("type" => "string", "required" => true,"x-title" => "Budget", "description" => "Portfolio Budget Statements and Portfolio Additional Estimates Statements"),
--- a/sitemap.xml.php
+++ b/sitemap.xml.php
@@ -7,7 +7,7 @@
echo '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">' . "\n";
echo " <url><loc>" . local_url() . "index.php</loc><priority>1.0</priority></url>\n";
foreach (scandir("./") as $file) {
- if (strpos($file, ".php") !== false && $file != "index.php" && $file != "sitemap.xml.php")
+ if (strpos($file, ".php") !== false && $file != "index.php" && $file != "sitemap.xml.php" && $file != "viewDocument.php")
echo " <url><loc>" . local_url() . "$file</loc><priority>0.3</priority></url>\n";
}