[submodule "couchdb/couchdb-lucene"] | [submodule "couchdb/couchdb-lucene"] |
path = couchdb/couchdb-lucene | path = couchdb/couchdb-lucene |
url = https://github.com/rnewson/couchdb-lucene.git | url = https://github.com/rnewson/couchdb-lucene.git |
[submodule "couchdb/settee"] | [submodule "couchdb/settee"] |
path = couchdb/settee | path = couchdb/settee |
url = https://github.com/inadarei/settee.git | url = https://github.com/inadarei/settee.git |
[submodule "lib/php-diff"] | [submodule "lib/php-diff"] |
path = lib/php-diff | path = lib/php-diff |
url = https://github.com/chrisboulton/php-diff.git | url = https://github.com/chrisboulton/php-diff.git |
[submodule "lib/Requests"] | [submodule "lib/Requests"] |
path = lib/Requests | path = lib/Requests |
url = https://github.com/rmccue/Requests.git | url = https://github.com/rmccue/Requests.git |
[submodule "js/flotr2"] | [submodule "js/flotr2"] |
path = js/flotr2 | path = js/flotr2 |
url = https://github.com/HumbleSoftware/Flotr2.git | url = https://github.com/HumbleSoftware/Flotr2.git |
[submodule "lib/phpquery"] | [submodule "lib/phpquery"] |
path = lib/phpquery | path = lib/phpquery |
url = https://github.com/TobiaszCudnik/phpquery.git | url = https://github.com/TobiaszCudnik/phpquery.git |
[submodule "js/sigma"] | [submodule "js/sigma"] |
path = js/sigma | path = js/sigma |
url = https://github.com/jacomyal/sigma.js.git | url = https://github.com/jacomyal/sigma.js.git |
[submodule "js/bubbletree"] | [submodule "js/bubbletree"] |
path = js/bubbletree | path = js/bubbletree |
url = https://github.com/okfn/bubbletree.git | url = https://github.com/okfn/bubbletree.git |
[submodule "lib/querypath"] | [submodule "lib/querypath"] |
path = lib/querypath | path = lib/querypath |
url = https://github.com/technosophos/querypath.git | url = https://github.com/technosophos/querypath.git |
[submodule "lib/amon-php"] | [submodule "lib/amon-php"] |
path = lib/amon-php | path = lib/amon-php |
url = https://github.com/martinrusev/amon-php.git | url = https://github.com/martinrusev/amon-php.git |
[submodule "documents/lib/parsedatetime"] | [submodule "documents/lib/parsedatetime"] |
path = documents/lib/parsedatetime | path = documents/lib/parsedatetime |
url = git://github.com/bear/parsedatetime.git | url = git://github.com/bear/parsedatetime.git |
[submodule "lib/FeedWriter"] | |
path = lib/FeedWriter | |
url = https://github.com/mibe/FeedWriter | |
*.pyc
<?php
/*
 * Disclosure-log status report.
 * Renders an HTML table listing every agency, whether a disclosure-log
 * (FOI documents) URL is recorded for it, and whether a scraper exists
 * for that URL, followed by percentage summary statistics.
 */
include('template.inc.php');
include_header_documents("");
include_once('../include/common.inc.php');
echo "<table>
    <tr><th>Agency Name</th><th>Disclosure Log URL recorded?</th><th>Do we monitor this URL?</th></tr>";
$agenciesdb = $server->get_db('disclosr-agencies');
$docsdb = $server->get_db('disclosr-documents');
// Tallies for the summary line printed after the table.
$agencies = 0; // total agencies listed
$disclogs = 0; // agencies with a recorded FOIDocumentsURL
$red = 0;      // disclogs with no scraper at all
$green = 0;    // disclogs with a working scraper (.py present)
$orange = 0;   // disclogs with a work-in-progress scraper (.txt present)
try {
    $rows = $agenciesdb->get_view("app", "byCanonicalName", null, true)->rows;
    if ($rows) {
        foreach ($rows as $row) {
            echo "<tr><td><a href='" . $row->value->website . "'><b>" . $row->value->name . "</b></a>";
            if ($ENV == "DEV")
                echo "<br>(" . $row->id . ")";
            echo "</td>\n";
            $agencies++;
            echo "<td>";
            if (isset($row->value->FOIDocumentsURL)) {
                $disclogs++;
                echo '<a href="' . $row->value->FOIDocumentsURL . '">'
                . $row->value->FOIDocumentsURL . '</a>';
                // In dev, also link to the locally cached copy keyed by URL hash.
                if ($ENV == "DEV")
                    echo '<br><small>(<a href="viewDocument.php?hash=' . md5($row->value->FOIDocumentsURL) . '">'
                    . 'view local copy</a>)</small>';
            } else {
                echo "<font color='red'><abbr title='No'>✘</abbr></font>";
            }
            echo "</td>\n<td>";
            if (isset($row->value->FOIDocumentsURL)) {
                // Scraper status: a .py file means working, a .txt means WIP.
                if (file_exists("./scrapers/" . $row->id . '.py')) {
                    echo "<font color='green'><abbr title='Yes'>✔</abbr></font>";
                    $green++;
                } else if (file_exists("./scrapers/" . $row->id . '.txt')) {
                    echo "<font color='orange'><abbr title='Work in progress'><b>▬</b></abbr></font>";
                    $orange++;
                } else {
                    echo "<font color='red'><abbr title='No'>✘</abbr></font>";
                    $red++;
                }
            }
            echo "</td></tr>\n";
        }
    }
} catch (SetteeRestClientException $e) {
    setteErrorHandler($e);
}
echo "</table>";
// Guard against division by zero when no agencies or no disclosure logs exist.
if ($agencies > 0 && $disclogs > 0) {
    echo $agencies . " agencies, " . round(($disclogs / $agencies) * 100) . "% with disclosure logs; "
    . round(($green / $disclogs) * 100) . "% logs with scrapers "
    . round(($red / $disclogs) * 100) . "% logs without scrapers "
    . round(($orange / $disclogs) * 100) . "% logs Work-In-Progress scrapers ";
}
include_footer_documents();
?>
import sys,os | import sys,os |
sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../')) | sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../')) |
import scrape | import scrape |
from bs4 import BeautifulSoup | from bs4 import BeautifulSoup |
from time import mktime | from time import mktime |
import feedparser | import feedparser |
import abc | import abc |
import unicodedata, re | import unicodedata, re |
import dateutil | import dateutil |
from dateutil.parser import * | from dateutil.parser import * |
from datetime import * | from datetime import * |
class GenericDisclogScraper(object):
    """Abstract base class for agency disclosure-log scrapers.

    Concrete subclasses implement doScrape() and getDescription().  The
    agency id is derived lazily from the running script's filename, and
    the disclosure-log URL from that agency's record in the scrape
    agency database.
    """
    # Python 2 style ABC declaration (file uses Python 2 print syntax elsewhere).
    __metaclass__ = abc.ABCMeta
    # Cached agency id (str) — basename of the scraper script without ".py".
    agencyID = None
    # Cached disclosure-log URL (str) — from the agency record's FOIDocumentsURL.
    disclogURL = None

    def remove_control_chars(self, input):
        """Return *input* with every character outside printable ASCII
        (codepoints 32-126 inclusive) removed."""
        # Direct comparison instead of `ord(i) in range(32, 127)`:
        # same result, but O(1) per char rather than a list scan on Python 2.
        return "".join([i for i in input if 32 <= ord(i) <= 126])

    def getAgencyID(self):
        """disclosr agency id — the scraper script's basename sans ".py"."""
        if self.agencyID is None:
            self.agencyID = os.path.basename(sys.argv[0]).replace(".py", "")
        return self.agencyID

    def getURL(self):
        """disclog URL — looked up once from the agency DB record."""
        if self.disclogURL is None:
            agency = scrape.agencydb.get(self.getAgencyID())
            self.disclogURL = agency['FOIDocumentsURL']
        return self.disclogURL

    @abc.abstractmethod
    def doScrape(self):
        """do the scraping"""
        return

    @abc.abstractmethod
    def getDescription(self, content, entry, doc):
        """get description"""
        return
class GenericRSSDisclogScraper(GenericDisclogScraper):
    """Scraper for agencies that publish their disclosure log as an RSS feed."""

    def doScrape(self):
        """Fetch the agency's RSS feed and save any new entries to CouchDB.

        Entries are keyed by a hash of the feed entry id; entries whose
        hash already exists in the database are skipped.
        """
        foidocsdb = scrape.couch['disclosr-foidocuments']
        (url, mime_type, content) = scrape.fetchURL(
            scrape.docsdb, self.getURL(), "foidocuments", self.getAgencyID())
        feed = feedparser.parse(content)
        for entry in feed.entries:
            # Single-argument print() form works identically on Python 2 and 3.
            print(entry.id)
            # Renamed from `hash` to avoid shadowing the builtin.
            entry_hash = scrape.mkhash(entry.id)
            doc = foidocsdb.get(entry_hash)
            if doc is None:
                print("saving " + entry_hash)
                edate = datetime.fromtimestamp(
                    mktime(entry.published_parsed)).strftime("%Y-%m-%d")
                doc = {'_id': entry_hash, 'agencyID': self.getAgencyID(),
                       'url': entry.link, 'docID': entry.id,
                       "date": edate, "title": entry.title}
                self.getDescription(entry, entry, doc)
                foidocsdb.save(doc)
            else:
                print("already saved")

    def getDescription(self, content, entry, doc):
        """get description from rss entry — copies the feed summary into doc."""
        doc.update({'description': content.summary})
        return
class GenericOAICDisclogScraper(GenericDisclogScraper): | class GenericOAICDisclogScraper(GenericDisclogScraper): |
__metaclass__ = abc.ABCMeta | __metaclass__ = abc.ABCMeta |
@abc.abstractmethod | @abc.abstractmethod |
def getColumns(self,colu |