fix that darn CASA scraper: hash disclosure log rows on their stable ID instead of the page URL, which changes on every request
Former-commit-id: 288d5ab60033e36608bf898869eecdf64180ba59
--- a/admin/logo.svg
+++ b/admin/logo.svg
@@ -1,34 +1,34 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 15.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
-<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="100px"
- height="100px" viewBox="0 -25.635 100 100" enable-background="new 0 -25.635 100 100" xml:space="preserve">
-<g id="docs">
- <path fill="#C2A385" d="M86.108-9.909l5.229,9.952c0,0-1.832,0.083-5.297,1.95c-2.312,1.249-6.468,6.246-6.468,6.246L71.827-2.909
- c0,0,4.201-3.996,6.513-5.242C81.805-10.022,86.108-9.909,86.108-9.909"/>
- <path fill="#C2A385" d="M65.604,20.731l-3.152-9.868c0,0-5.441,3.56-8.017,4.074c-1.008,0.202-1.93,0.335-2.749,0.425
- L65.604,20.731z"/>
- <path fill="#C2A385" d="M72.326,23.321c0.268-0.226,0.537-0.44,0.804-0.616c3.104-2.054,6.139-3.685,6.268-3.755l1.882-1.005
- l1.369,1.634l2.864,3.417l3.198-4.334L76.68,9.783l-8.74,11.847L72.326,23.321z"/>
- <path fill="#C2A385" d="M39.918,10.823l4.825,1.86l3.33,0.212c0.04,0.001,0.269,0.015,0.652,0.015c0.91,0,2.798-0.072,5.196-0.551
- c1.427-0.284,5.007-2.332,7.092-3.695l2.889-1.888l1.05,3.285l2.496,7.812l5.889-7.985l-4.625,0.163l1.348-6.225L55.133,0.593
- l-2.095,9.667c-0.531-2.599-1.841-5.727-1.841-5.727L37.709,6.055c0,0,0.885,2.206,1.586,4.529L39.918,10.823z"/>
- <path fill="#C2A385" d="M91.233,45.562c-1.102-0.691-2.323-1.142-3.415-1.433l-3.779,9.804c1.932,1.246,5.197,5.738,5.197,5.738
- l7.336-9.206C96.572,50.466,93.162,46.771,91.233,45.562z"/>
- <path fill="#C2A385" d="M93.192,32.166l-3.656,1.224c-0.019,0.007-1.779,0.613-4.117,2.069l2.817,4.868l0.626,1.08
- c3.306-0.562,7.727-1.922,7.727-1.922l-2.332-15.261c0,0-2.934,1.277-5.852,2.221l2.318,2.765L93.192,32.166z"/>
- <path fill="#C2A385" d="M79.272,25.999l0.864,0.334l0.46,0.801l3.503,6.05c2.646-1.636,4.611-2.287,4.611-2.287l-8.075-9.632
- c0,0-2.584,1.391-5.376,3.188L79.272,25.999z"/>
-</g>
-<g id="trunk">
- <circle fill="#C00000" cx="66.019" cy="52.945" r="7.877"/>
- <circle fill="#C00000" cx="22.693" cy="52.945" r="7.877"/>
- <path fill="#C00000" d="M22.693,42.441c1.915,0,3.705,0.522,5.251,1.421V28.001H12.191L3,37.192v14.439h9.281
- C12.931,46.459,17.347,42.441,22.693,42.441z"/>
- <path fill="#C00000" d="M55.925,50.075l-9.583-3.695H30.88c1.186,1.476,1.978,3.28,2.225,5.252h22.502
- C55.674,51.1,55.78,50.58,55.925,50.075z"/>
- <path fill="#C00000" d="M79.312,28.328L39.961,13.149l-9.384,24.335l26.381,10.174c1.824-3.115,5.198-5.218,9.062-5.218
- c5.791,0,10.503,4.712,10.503,10.502c0,0.744-0.081,1.471-0.229,2.173l4.713,1.817L86.95,41.52L79.312,28.328z"/>
-</g>
-</svg>
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Generator: Adobe Illustrator 15.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="100px"
+ height="100px" viewBox="0 -25.635 100 100" enable-background="new 0 -25.635 100 100" xml:space="preserve">
+<g id="docs">
+ <path fill="#C2A385" d="M86.108-9.909l5.229,9.952c0,0-1.832,0.083-5.297,1.95c-2.312,1.249-6.468,6.246-6.468,6.246L71.827-2.909
+ c0,0,4.201-3.996,6.513-5.242C81.805-10.022,86.108-9.909,86.108-9.909"/>
+ <path fill="#C2A385" d="M65.604,20.731l-3.152-9.868c0,0-5.441,3.56-8.017,4.074c-1.008,0.202-1.93,0.335-2.749,0.425
+ L65.604,20.731z"/>
+ <path fill="#C2A385" d="M72.326,23.321c0.268-0.226,0.537-0.44,0.804-0.616c3.104-2.054,6.139-3.685,6.268-3.755l1.882-1.005
+ l1.369,1.634l2.864,3.417l3.198-4.334L76.68,9.783l-8.74,11.847L72.326,23.321z"/>
+ <path fill="#C2A385" d="M39.918,10.823l4.825,1.86l3.33,0.212c0.04,0.001,0.269,0.015,0.652,0.015c0.91,0,2.798-0.072,5.196-0.551
+ c1.427-0.284,5.007-2.332,7.092-3.695l2.889-1.888l1.05,3.285l2.496,7.812l5.889-7.985l-4.625,0.163l1.348-6.225L55.133,0.593
+ l-2.095,9.667c-0.531-2.599-1.841-5.727-1.841-5.727L37.709,6.055c0,0,0.885,2.206,1.586,4.529L39.918,10.823z"/>
+ <path fill="#C2A385" d="M91.233,45.562c-1.102-0.691-2.323-1.142-3.415-1.433l-3.779,9.804c1.932,1.246,5.197,5.738,5.197,5.738
+ l7.336-9.206C96.572,50.466,93.162,46.771,91.233,45.562z"/>
+ <path fill="#C2A385" d="M93.192,32.166l-3.656,1.224c-0.019,0.007-1.779,0.613-4.117,2.069l2.817,4.868l0.626,1.08
+ c3.306-0.562,7.727-1.922,7.727-1.922l-2.332-15.261c0,0-2.934,1.277-5.852,2.221l2.318,2.765L93.192,32.166z"/>
+ <path fill="#C2A385" d="M79.272,25.999l0.864,0.334l0.46,0.801l3.503,6.05c2.646-1.636,4.611-2.287,4.611-2.287l-8.075-9.632
+ c0,0-2.584,1.391-5.376,3.188L79.272,25.999z"/>
+</g>
+<g id="trunk">
+ <circle fill="#C00000" cx="66.019" cy="52.945" r="7.877"/>
+ <circle fill="#C00000" cx="22.693" cy="52.945" r="7.877"/>
+ <path fill="#C00000" d="M22.693,42.441c1.915,0,3.705,0.522,5.251,1.421V28.001H12.191L3,37.192v14.439h9.281
+ C12.931,46.459,17.347,42.441,22.693,42.441z"/>
+ <path fill="#C00000" d="M55.925,50.075l-9.583-3.695H30.88c1.186,1.476,1.978,3.28,2.225,5.252h22.502
+ C55.674,51.1,55.78,50.58,55.925,50.075z"/>
+ <path fill="#C00000" d="M79.312,28.328L39.961,13.149l-9.384,24.335l26.381,10.174c1.824-3.115,5.198-5.218,9.062-5.218
+ c5.791,0,10.503,4.712,10.503,10.502c0,0.744-0.081,1.471-0.229,2.173l4.713,1.817L86.95,41.52L79.312,28.328z"/>
+</g>
+</svg>
--- a/admin/refreshDesignDoc.php
+++ b/admin/refreshDesignDoc.php
@@ -8,21 +8,13 @@
$obj->_id = "_design/" . urlencode("app");
$obj->language = "javascript";
$obj->views->all->map = "function(doc) { emit(doc._id, doc); };";
-$obj->views->byDate->map = "function(doc) { if (doc.title != "Disclosure Log Updated") { emit(doc.date, doc); } };";
+$obj->views->byDate->map = "function(doc) { if (doc.title != \"Disclosure Log Updated\") { emit(doc.date, doc); } };";
$obj->views->byDateMonthYear->map = "function(doc) { emit(doc.date, doc); };";
$obj->views->byDateMonthYear->reduce = "_count";
$obj->views->byAgencyID->map = "function(doc) { emit(doc.agencyID, doc); };";
$obj->views->byAgencyID->reduce = "_count";
-$obj->views->fieldNames->map = '
-function(doc) {
-for(var propName in doc) {
- emit(propName, doc._id);
- }
-
-}';
-$obj->views->fieldNames->reduce = 'function (key, values, rereduce) {
- return values.length;
-}';
+$obj->views->fieldNames->map = 'function(doc) { for(var propName in doc) { emit(propName, doc._id); }}';
+$obj->views->fieldNames->reduce = 'function (key, values, rereduce) { return values.length; }';
// allow safe updates (even if slightly slower due to extra: rev-detection check).
$foidb->save($obj, true);
--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -199,6 +199,17 @@
def getRows(self, table):
return table.find_all('tr')
+ def getDocHash(self, id, date, url):
+ if id.string is None:
+ print "no id, using date as hash"
+ return scrape.mkhash(
+ self.remove_control_chars(
+ url + (''.join(date.stripped_strings))))
+ else:
+ return scrape.mkhash(
+ self.remove_control_chars(
+ url + (''.join(id.stripped_strings))))
+
def getDate(self, content, entry, doc):
strdate = ''.join(content.stripped_strings).strip()
(a, b, c) = strdate.partition("(")
@@ -240,15 +251,7 @@
description, notes) = self.getColumns(columns)
print self.remove_control_chars(
''.join(id.stripped_strings))
- if id.string is None:
- print "no id, using date as hash"
- dochash = scrape.mkhash(
- self.remove_control_chars(
- url + (''.join(date.stripped_strings))))
- else:
- dochash = scrape.mkhash(
- self.remove_control_chars(
- url + (''.join(id.stripped_strings))))
+ dochash = self.getDocHash(id, date, url)
doc = foidocsdb.get(dochash)
if doc is None:
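The row-hashing logic that used to be inlined in the scraping loop is now an overridable getDocHash() hook, so individual scrapers can change how a row is identified without copying the whole loop. A minimal sketch of the default behaviour under simplifying assumptions (plain strings instead of BeautifulSoup cells, and a hashlib stand-in for the project's scrape.mkhash):

```python
import hashlib


def mkhash(text):
    # stand-in for scrape.mkhash; assumed here to be a plain hex digest of its input
    return hashlib.sha1(text.encode("utf-8")).hexdigest()


class BaseScraper(object):
    def getDocHash(self, id, date, url):
        # Default: hash the disclosure log URL plus the row ID, falling back
        # to the date when the ID cell is empty (as the old inline code did).
        return mkhash(url + (id if id else date))


if __name__ == "__main__":
    scraper = BaseScraper()
    print(scraper.getDocHash("FOI 12/34", "1 June 2012", "http://example.gov.au/foi-log"))
    print(scraper.getDocHash("", "1 June 2012", "http://example.gov.au/foi-log"))
```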
--- a/documents/robots.txt
+++ b/documents/robots.txt
@@ -3,4 +3,5 @@
User-agent: *
Disallow: /admin/
+Disallow: /viewDocument.php
Sitemap: http://disclosurelo.gs/sitemap.xml.php
--- a/documents/runScrapers.sh
+++ b/documents/runScrapers.sh
@@ -1,3 +1,4 @@
+#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
echo $DIR
cd $DIR
--- a/documents/scrape.py
+++ b/documents/scrape.py
@@ -197,7 +197,7 @@
links = soup.findAll('a') # soup.findAll('a', id=re.compile("^p-"))
linkurls = set([])
for link in links:
- if link.has_key("href"):
+ if link.has_attr("href"):
if link['href'].startswith("http"):
# lets not do external links for now
# linkurls.add(link['href'])
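The has_key() to has_attr() swaps here and in the scrapers below track BeautifulSoup 4, where Tag.has_key() was deprecated and later removed; has_attr() is the supported way to test for an attribute. A standalone sketch of the replacement:

```python
from bs4 import BeautifulSoup

html = '<a href="http://example.com/doc.pdf">doc</a><a name="anchor">no href</a>'
soup = BeautifulSoup(html, "html.parser")

for link in soup.find_all('a'):
    # has_attr() replaces the old Tag.has_key() attribute test
    if link.has_attr('href'):
        print(link['href'])
```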
--- a/documents/scrapers/0e46f8bd1414b1fdd4f0543d54a97500.py
+++ b/documents/scrapers/0e46f8bd1414b1fdd4f0543d54a97500.py
@@ -7,7 +7,7 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getTable(self,soup):
- return soup.find(id = "maincontentcontainer").table
+ return soup.find(class_ = "contentcontainer").table
def getColumnCount(self):
return 5
def getColumns(self,columns):
--- a/documents/scrapers/1803322b27286950cab0c543168b5f21.py
+++ b/documents/scrapers/1803322b27286950cab0c543168b5f21.py
@@ -16,7 +16,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(), atag['href'])
(url, mime_type, htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -25,7 +25,7 @@
row = soup.find(id="content_div_148050")
description = ''.join(row.stripped_strings)
for atag in row.find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link, atag['href']))
if links != []:
--- a/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py
+++ b/documents/scrapers/227cb6eb7d2c9f8a6e846df7447d6caa.py
@@ -11,7 +11,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -26,7 +26,7 @@
for text in row.stripped_strings:
description = description + text + "\n"
for atag in row.find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
--- a/documents/scrapers/53d2884f8afd026096a27bd5051ec50e.py
+++ b/documents/scrapers/53d2884f8afd026096a27bd5051ec50e.py
@@ -16,7 +16,7 @@
link = None
links = []
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -24,7 +24,7 @@
# http://www.crummy.com/software/BeautifulSoup/documentation.html
soup = BeautifulSoup(htcontent)
for atag in soup.find(class_ = "article-content").find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
--- a/documents/scrapers/5716ce0aacfe98f7d638b7a66b7f1040.py
+++ b/documents/scrapers/5716ce0aacfe98f7d638b7a66b7f1040.py
@@ -6,6 +6,11 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ def getDocHash(self, id, date, url):
+ ''' url changes on every request so ignore it for the hash '''
+ return scrape.mkhash(
+ self.remove_control_chars(
+ ''.join(id.stripped_strings)))
def getColumnCount(self):
return 4
def getColumns(self,columns):
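The getDocHash() override added above drops the URL from the hash entirely because this agency's disclosure log URL differs on every request. A toy illustration of why that matters (the URLs and row ID are invented, and mkhash again stands in for scrape.mkhash):

```python
import hashlib


def mkhash(text):
    # stand-in for scrape.mkhash
    return hashlib.sha1(text.encode("utf-8")).hexdigest()


row_id = "FOI 12/34"
first_visit = "http://example.gov.au/disclosure-log?session=AAA"
second_visit = "http://example.gov.au/disclosure-log?session=BBB"

# Base behaviour (url + id): the same row hashes differently on each run,
# so repeat scrapes insert duplicates instead of updating one document.
print(mkhash(first_visit + row_id) == mkhash(second_visit + row_id))  # False

# Overridden behaviour (id only): the hash is stable across runs.
print(mkhash(row_id) == mkhash(row_id))  # True
```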
--- a/documents/scrapers/69d59284ef0ccd2677394d82d3292abc.py
+++ b/documents/scrapers/69d59284ef0ccd2677394d82d3292abc.py
@@ -6,8 +6,6 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
- def getTable(self,soup):
- return soup.find(id = "centercontent").table
def getColumnCount(self):
return 5
def getColumns(self,columns):
--- a/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py
+++ b/documents/scrapers/8e874a2fde8aa0ccdc6d14573d766540.py
@@ -11,7 +11,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -22,7 +22,7 @@
description = description + text.encode('ascii', 'ignore')
for atag in soup.find(id="SortingTable").find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
@@ -43,7 +43,7 @@
links = []
description = ""
for atag in entry.find_all('a'):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
link = scrape.fullurl(self.getURL(),atag['href'])
(url,mime_type,htcontent) = scrape.fetchURL(scrape.docsdb, link, "foidocuments", self.getAgencyID(), False)
if htcontent != None:
@@ -53,7 +53,7 @@
for text in soup.find(id="content-item").stripped_strings:
description = description + text + " \n"
for atag in soup.find(id="content-item").find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(link,atag['href']))
if links != []:
doc.update({'links': links})
--- a/documents/scrapers/be9996f0ac58f71f23d074e82d44ead3.py
+++ b/documents/scrapers/be9996f0ac58f71f23d074e82d44ead3.py
@@ -17,7 +17,7 @@
dldivs = soup.find('div',class_="download")
if dldivs != None:
for atag in dldivs.find_all("a"):
- if atag.has_key('href'):
+ if atag.has_attr('href'):
links.append(scrape.fullurl(url,atag['href']))
nodldivs = soup.find('div',class_="incompleteNotification")
if nodldivs != None and nodldivs.stripped_strings != None:
--- a/documents/scrapers/d1296c366287f7a9faedf235c7e6df01.py
+++ b/documents/scrapers/d1296c366287f7a9faedf235c7e6df01.py
@@ -6,8 +6,6 @@
#http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
- def getTable(self,soup):
- return soup.find(id="main").table
def getColumnCount(self):
return 7
def getColumns(self,columns):
--- a/documents/sitemap.xml.php
+++ b/documents/sitemap.xml.php
@@ -6,7 +6,7 @@
echo '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">' . "\n";
echo " <url><loc>" . local_url() . "index.php</loc><priority>1.0</priority></url>\n";
foreach (scandir("./") as $file) {
- if (strpos($file, ".php") !== false && $file != "index.php" && $file != "sitemap.xml.php") {
+ if (strpos($file, ".php") !== false && ($file != "index.php" && $file != "sitemap.xml.php" && $file != "viewDocument.php")) {
echo " <url><loc>" . local_url() . "$file</loc><priority>0.6</priority></url>\n";
}
}
--- /dev/null
+++ b/exportAgencies.csv.php
@@ -1,1 +1,88 @@
+<?php
+// use https://github.com/okfn/publicbodies/blob/master/data/nz.csv format
+include_once("include/common.inc.php");
+setlocale(LC_CTYPE, 'C');
+
+$headers = Array("title","abbr","key","category","parent","parent_key","description","url","jurisdiction","jurisdiction_code","source","source_url","address","contact","email","tags","created_at","updated_at");
+
+$db = $server->get_db('disclosr-agencies');
+
+
+$foiEmail = Array();
+try {
+ $rows = $db->get_view("app", "foiEmails", null, true)->rows;
+ //print_r($rows);
+ foreach ($rows as $row) {
+ $foiEmail[$row->key] = $row->value;
+ }
+} catch (SetteeRestClientException $e) {
+ setteErrorHandler($e);
+ die();
+}
+
+$fp = fopen('php://output', 'w');
+if ($fp && $db) {
+ header('Content-Type: text/csv; charset=utf-8');
+ header('Content-Disposition: attachment; filename="export.' . date("c") . '.csv"');
+ header('Pragma: no-cache');
+ header('Expires: 0');
+ fputcsv($fp, $headers);
+ try {
+ $agencies = $db->get_view("app", "byCanonicalName", null, true)->rows;
+ //print_r($rows);
+ foreach ($agencies as $agency) {
+ // print_r($agency);
+
+ if (isset($agency->value->foiEmail) && $agency->value->foiEmail != "null" && !isset($agency->value->status)) {
+ $row = Array();
+ $row["title"] = trim($agency->value->name);
+ $row["abbr"] = (isset($agency->value->shortName) ? $agency->value->shortName : "");
+ $row["key"] = (isset($agency->value->shortName) ? "au/".strtolower($agency->value->shortName) : "");
+ $row["category"] ="";
+ $row["parent"] ="";
+ $row["parent_key"] ="";
+ $row["description"] = (isset($agency->value->description) ? $agency->value->description : "");
+ $row["url"] = (isset($agency->value->website) ? $agency->value->website : "");
+ $row["jurisdiction"] = "Australia";
+ $row["jurisdiction_code"] = "au";
+
+ $row["source"] ="";
+ $row["source_url"] ="";
+ $row["address"] ="";
+ $row["contact"] ="";
+
+ $row["email"] = (isset($agency->value->foiEmail) ? $agency->value->foiEmail : "");
+ $row["tags"] ="";
+ $row["created_at"] ="";
+ $row["updated_at"] ="";
+
+
+ $otherBodies = Array();
+ if (isset($agency->value->foiBodies)) {
+ $otherBodies = array_merge($otherBodies, $agency->value->foiBodies);
+ }
+ if (isset($agency->value->positions)) {
+ $positions = Array();
+ foreach ($agency->value->positions as $position) {
+ $positions[] = "Office of the ".$position;
+ }
+ $otherBodies = array_merge($otherBodies, $positions);
+ }
+ sort($otherBodies);
+ if (count($otherBodies) > 0) {
+ $row["description"] .= "<br/> This department also responds to requests for information held by " . implode(", ", $otherBodies);
+ }
+
+
+ fputcsv($fp, array_values($row));
+ }
+ }
+ } catch (SetteeRestClientException $e) {
+ setteErrorHandler($e);
+ }
+
+ die;
+}
+?>
+
--- a/getAgency.php
+++ b/getAgency.php
@@ -188,7 +188,7 @@
</div><!--/.well -->
</div><!--/span-->
<div class="span9">';
- echo '<div itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization" about="#' . $row['_id'] . '">';
+ echo '<div itemscope itemtype="http://schema.org/GovernmentOrganization" typeof="schema:GovernmentOrganization org:Organization" about="#' . $row['_id'] . '">';
echo '<div class="hero-unit">
<h1 itemprop="name">' . $row['name'] . '</h1>';
if (isset($row['description'])) {
--- a/graph.php
+++ b/graph.php
@@ -9,13 +9,13 @@
function add_node($id, $label, $parent="") {
global $format;
if ($format == "html") {
- // echo "nodes[\"$id\"] = graph.newNode({label: \"$label\"});" . PHP_EOL;
+ // echo "nodes[\"$id\"] = graph.newNode({label: \"$label\"});" . PHP_EOL;
}
if ($format == "dot" && $label != "") {
- echo "$id [label=\"$label\"];". PHP_EOL;
+ echo "\"$id\" [label=\"$label\", shape=plaintext];". PHP_EOL;
}
if ($format == "gexf") {
- echo "<node id='$id' label=\"".htmlentities($label,ENT_XML1)."\" ".($parent != ""? "pid='$parent'><viz:size value='1'/>":"><viz:size value='2'/>")
+ echo "<node id='$id' label=\"".htmlentities($label)."\" ".($parent != ""? "pid='$parent'><viz:size value='1'/>":"><viz:size value='2'/>")
."<viz:color b='".rand(0,255)."' g='".rand(0,255)."' r='".rand(0,255)."'/>"
."</node>". PHP_EOL;
}
@@ -27,7 +27,7 @@
// echo "graph.newEdge(nodes[\"$from\"], nodes['$to'], {color: '$color'});" . PHP_EOL;
}
if ($format == "dot") {
- echo "$from -> $to ".($color != ""? "[color=$color]":"").";". PHP_EOL;
+ echo "\"$from\" -> \"$to\" ".($color != ""? "[color=$color]":"").";". PHP_EOL;
}
if ($format == "gexf") {
echo "<edge id='$from$to' source='$from' target='$to' />". PHP_EOL;
@@ -55,7 +55,7 @@
$rows = $db->get_view("app", "byCanonicalName", null, true)->rows;
//print_r($rows);
foreach ($rows as $row) {
- add_node($row->id, $row->key);
+ add_node($row->id, $row->value->name);
}
} catch (SetteeRestClientException $e) {
setteErrorHandler($e);
--- a/include/template.inc.php
+++ b/include/template.inc.php
@@ -44,7 +44,7 @@
<![endif]-->
</head>
- <body xmlns:schema="http://schema.org/" xmlns:foaf="http://xmlns.com/foaf/0.1/">
+ <body xmlns:schema="http://schema.org/" xmlns:foaf="http://xmlns.com/foaf/0.1/" xmlns:org="http://www.w3.org/ns/org#" xmlns:skos="http://www.w3.org/2004/02/skos/core#">
<div class="navbar navbar-inverse navbar-fixed-top">
<div class="navbar-inner">
<div class="container-fluid">
--- a/ranking.php
+++ b/ranking.php
@@ -32,8 +32,12 @@
$columnKeys = array_unique(array_merge($columnKeys, array_keys($columns)));
//print_r($columnKeys);
$score = count($columns);
- $scores[$score]++;
- $scoredagencies[] = Array("id"=> $row->key, "website"=> $row->value->website, "name" => $row->value->name, "columns" => $columns, "score" => $score);
+ if (isset($scores[$score])) {
+     $scores[$score]++;
+ } else {
+     $scores[$score] = 1;
+ }
+ $scoredagencies[] = Array("id"=> $row->key, "website"=> (isset($row->value->website)?$row->value->website:""), "name" => $row->value->name, "columns" => $columns, "score" => $score);
}
}
@@ -74,7 +78,7 @@
} else {
$href = $value;
}
- if ($href[0] == "@") {
+ if (isset($href[0]) && $href[0] == "@") {
$href = str_replace("@","https://twitter.com/",$href);
}
//$href= urlencode($href);
--- a/robots.txt
+++ b/robots.txt
@@ -3,4 +3,5 @@
User-agent: *
Disallow: /admin/
+Disallow: /viewDocument.php
Sitemap: http://orgs.disclosurelo.gs/sitemap.xml.php