From: Maxious
Date: Tue, 27 Nov 2012 12:08:46 +0000
Subject: add stats
X-Git-Url: http://maxious.lambdacomplex.org/git/?p=disclosr.git&a=commitdiff&h=bb060099996b9864b18d6b67f8263c8f37dded50
---
add stats

Former-commit-id: 7d58ec500723843bb55f285d866ce8d0c0ae41de
---

--- a/documents/disclogsList.php
+++ b/documents/disclogsList.php
@@ -8,6 +8,11 @@
     <tr><th>Agency Name</th><th>Disclosure Log URL recorded?</th><th>Do we monitor this URL?</th></tr>";
 $agenciesdb = $server->get_db('disclosr-agencies');
 $docsdb = $server->get_db('disclosr-documents');
+$agencies = 0;
+$disclogs = 0;
+$red = 0;
+$green = 0;
+$orange = 0;
 try {
     $rows = $agenciesdb->get_view("app", "byCanonicalName", null, true)->rows;
@@ -19,10 +24,11 @@
             if ($ENV == "DEV")
                 echo "(" . $row->id . ")";
             echo "\n";
-
+            $agencies++;
             echo "";
             if (isset($row->value->FOIDocumentsURL)) {
+                $disclogs++;
                 echo '' . $row->value->FOIDocumentsURL . '';
                 if ($ENV == "DEV")
@@ -35,10 +41,13 @@
             if (isset($row->value->FOIDocumentsURL)) {
                 if (file_exists("./scrapers/" . $row->id . '.py')) {
                     echo "";
+                    $green++;
                 } else if (file_exists("./scrapers/" . $row->id . '.txt')) {
                     echo "";
+                    $orange++;
                 } else {
                     echo "";
+                    $red++;
                 }
             }
             echo "\n";
@@ -48,6 +57,8 @@
     setteErrorHandler($e);
 }
 echo "";
+echo $agencies . " agencies, " . (($disclogs / $agencies) * 100) . "% with disclosure logs, " . (($green / $disclogs) * 100) . "% with scrapers, " . (($red / $disclogs) * 100) . "% without scrapers, " . (($orange / $disclogs) * 100) . "% WIP scrapers";
+
 include_footer_documents();
 ?>
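Note: the summary line added above divides by $agencies and $disclogs, so it assumes both counters end up non-zero; against an empty database PHP would emit a division-by-zero warning. A minimal sketch of the same percentage arithmetic with guards, in Python 2 to match the scrapers in this commit (the pct() helper and the sample counts are hypothetical, not part of the codebase):

    def pct(part, whole):
        # avoid ZeroDivisionError when nothing has been counted yet
        if whole == 0:
            return 0.0
        return round(part * 100.0 / whole, 1)

    # hypothetical counts mirroring the PHP counters
    agencies, disclogs, green, orange, red = 104, 56, 30, 12, 14

    print '%d agencies, %.1f%% with disclosure logs, %.1f%% with scrapers, %.1f%% WIP scrapers, %.1f%% without scrapers' % (
        agencies, pct(disclogs, agencies), pct(green, disclogs),
        pct(orange, disclogs), pct(red, disclogs))

The equivalent guard in the PHP would be a ternary on each denominator before echoing.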
--- a/documents/scrape.py
+++ b/documents/scrape.py
@@ -204,12 +204,12 @@
             scrapeAndStore(docsdb, agency[key],0,key,agency['_id'])
         if key == 'website' and False:
             scrapeAndStore(docsdb, agency[key],0,key,agency['_id'])
+            agency['metadata']['lastScraped'] = time.time()
         if key.endswith('URL') and False:
             print key
             depth = 1
             if 'scrapeDepth' in agency.keys():
                 depth = agency['scrapeDepth']
             scrapeAndStore(docsdb, agency[key],depth,key,agency['_id'])
-    agency['metadata']['lastScraped'] = time.time()
     agencydb.save(agency)
--- /dev/null
+++ b/documents/scrapers/0603dfcc930a791efaa64f31ae5fceda.py
@@ -1,1 +1,21 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+    def getTable(self, soup):
+        return soup.find(id="body-content").table
+    def getColumnCount(self):
+        return 5
+    def getColumns(self, columns):
+        (id, date, title, description, notes) = columns
+        return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+    ScraperImplementation().doScrape()
+
--- /dev/null
+++ b/documents/scrapers/1fda9544d2a3fa4cd92aec4b206a6763.py
@@ -1,1 +1,21 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+    def getTable(self, soup):
+        # bs4 spells the class filter keyword class_, not _class
+        return soup.find(class_="article-content").table
+    def getColumnCount(self):
+        return 5
+    def getColumns(self, columns):
+        # this table lists title before date; reorder into the expected tuple
+        (id, title, date, description, notes) = columns
+        return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+    ScraperImplementation().doScrape()
+
--- /dev/null
+++ b/documents/scrapers/3d426eb8c85c8f04b814eee597efd866.py
@@ -1,1 +1,21 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+    def getTable(self, soup):
+        return soup.find(id="primary").table
+    def getColumnCount(self):
+        return 5
+    def getColumns(self, columns):
+        (id, date, title, description, notes) = columns
+        return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+    ScraperImplementation().doScrape()
+
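Each scraper above is the same template with different selectors: getTable() locates the disclosure-log table in the parsed page, getColumnCount() declares how many cells a row should have, and getColumns() maps those cells onto the (id, date, title, description, notes) tuple the generic scraper expects. A rough sketch of the abstract base class this pattern implies, following the PyMOTW abc article linked in the scrapers' comments; this is an assumption about genericScrapers.GenericOAICDisclogScraper, whose real fetching and CouchDB storage code is elided here:

    import abc

    class GenericOAICDisclogScraper(object):
        # sketch only: the real class also fetches the agency's
        # FOIDocumentsURL via scrape.py and stores parsed rows in CouchDB
        __metaclass__ = abc.ABCMeta

        @abc.abstractmethod
        def getTable(self, soup):
            """Return the element that holds the disclosure log table."""

        def getColumnCount(self):
            # default table width; subclasses override as needed
            return 5

        @abc.abstractmethod
        def getColumns(self, columns):
            """Map a row's cells onto (id, date, title, description, notes)."""

        def doScrape(self):
            # outline only: fetch, parse with BeautifulSoup, walk the rows
            # of getTable() through getColumns(), then save each record
            pass

A concrete scraper then only supplies the selectors, which is why most of the files in this commit differ by a single soup.find() call.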
--- /dev/null
+++ b/documents/scrapers/50601505ef69483121a6d130bb0515e4.txt
@@ -1,1 +1,1 @@
-
+apsc has ACMA style disclog
--- /dev/null
+++ b/documents/scrapers/6fa04af95fbe7de96daa2c7560e0aad3.txt
@@ -1,1 +1,19 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+    def getTable(self, soup):
+        return soup.find(id="content_div_50269").table
+    def getColumns(self, columns):
+        (id, date, title, description, notes) = columns
+        return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+    ScraperImplementation().doScrape()
+
--- /dev/null
+++ b/documents/scrapers/bf6e587f166040b63681cd2ff76fbfdf.txt
@@ -1,1 +1,1 @@
-
+no disclog yet
--- /dev/null
+++ b/documents/scrapers/c1302c8d7cbbd911f0d4d8a4128f8079.txt
@@ -1,1 +1,1 @@
-
+uses RET disclog
--- /dev/null
+++ b/documents/scrapers/c25f628f9f38d889485d7a4bff873b23.txt
@@ -1,1 +1,20 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+
+    def getColumnCount(self):
+        return 4
+    def getColumns(self, columns):
+        # four-column table: no notes column, so pad the tuple with None
+        (id, date, title, description) = columns
+        return (id, date, title, description, None)
+
+if __name__ == '__main__':
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+    ScraperImplementation().doScrape()
+
--- /dev/null
+++ b/documents/scrapers/f0caafbcf292c90e7b8ad18ddcf9afc3.txt
@@ -1,1 +1,21 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+    def getTable(self, soup):
+        return soup.find(id="genericContent").table.tbody
+    def getColumnCount(self):
+        return 5
+    def getColumns(self, columns):
+        (id, date, title, description, notes) = columns
+        return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+    ScraperImplementation().doScrape()
+
--- a/include/couchdb.inc.php
+++ b/include/couchdb.inc.php
@@ -10,6 +10,7 @@
 if (php_uname('n') == "KYUUBEY") {
 
     $serverAddr = 'http://192.168.1.148:5984/';
+    $serverAddr = 'http://127.0.0.1:5984/'; // override: use the local CouchDB on KYUUBEY too
 } else {
     $serverAddr = 'http://127.0.0.1:5984/';
 }
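With scrape.py now setting metadata.lastScraped alongside the website scrape rather than unconditionally before every agencydb.save(agency), and couchdb.inc.php pointing the PHP side at the local server, agency documents carry their scrape time in CouchDB. A minimal sketch of that update using the couchdb-python client, assuming the 127.0.0.1 address from couchdb.inc.php and the disclosr-agencies database; the document id here is hypothetical:

    import time
    import couchdb

    couch = couchdb.Server('http://127.0.0.1:5984/')
    agencydb = couch['disclosr-agencies']

    agency = agencydb['0603dfcc930a791efaa64f31ae5fceda']  # hypothetical agency id
    agency.setdefault('metadata', {})['lastScraped'] = time.time()
    agencydb.save(agency)  # writes a new revision carrying the timestamp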