fix that darn CASA scraper: hash rows on their id, not the per-request url
Former-commit-id: 288d5ab60033e36608bf898869eecdf64180ba59
--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -199,6 +199,17 @@
     def getRows(self, table):
         return table.find_all('tr')
 
+    def getDocHash(self, id, date, url):
+        if id.string is None:
+            print "no id, using date as hash"
+            return scrape.mkhash(
+                self.remove_control_chars(
+                    url + (''.join(date.stripped_strings))))
+        else:
+            return scrape.mkhash(
+                self.remove_control_chars(
+                    url + (''.join(id.stripped_strings))))
+
     def getDate(self, content, entry, doc):
         strdate = ''.join(content.stripped_strings).strip()
         (a, b, c) = strdate.partition("(")
@@ -240,15 +251,7 @@
                     (id, date, title,
                      description, notes) = self.getColumns(columns)
                     print self.remove_control_chars(
                         ''.join(id.stripped_strings))
-                    if id.string is None:
-                        print "no id, using date as hash"
-                        dochash = scrape.mkhash(
-                            self.remove_control_chars(
-                                url + (''.join(date.stripped_strings))))
-                    else:
-                        dochash = scrape.mkhash(
-                            self.remove_control_chars(
-                                url + (''.join(id.stripped_strings))))
+                    dochash = self.getDocHash(id, date, url)
                     doc = foidocsdb.get(dochash)
                     if doc is None:
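
A note on the extracted helper: `id` and `date` are BeautifulSoup cell tags, and `.string` is only non-None when a tag wraps exactly one text node, so an empty or nested id cell triggers the date fallback. A minimal sketch of that branch, with the row markup, the url and the md5 stand-in for `scrape.mkhash` all made up for illustration:

    import hashlib
    from bs4 import BeautifulSoup

    url = 'http://example.gov.au/foi.html'  # hypothetical
    row = BeautifulSoup('<tr><td></td><td>21 June 2012</td></tr>',
                        'html.parser')
    (id, date) = row.find_all('td')

    assert id.string is None  # empty cell: no single text node
    # so the hash key falls back to url + date text, as in getDocHash
    dochash = hashlib.md5(url + ''.join(date.stripped_strings)).hexdigest()
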
--- a/documents/runScrapers.sh
+++ b/documents/runScrapers.sh
@@ -1,3 +1,4 @@
+#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
echo $DIR
cd $DIR
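
The shebang is load-bearing here: `${BASH_SOURCE[0]}` is a bashism that plain sh does not support, so running the script as `sh runScrapers.sh` (or from a cron job defaulting to sh) fails before `cd` ever runs. Declaring bash pins the interpreter that the existing first line already assumes.
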
--- a/documents/scrapers/5716ce0aacfe98f7d638b7a66b7f1040.py
+++ b/documents/scrapers/5716ce0aacfe98f7d638b7a66b7f1040.py
@@ -6,6 +6,11 @@
 
 #http://www.doughellmann.com/PyMOTW/abc/
 class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+    def getDocHash(self, id, date, url):
+        ''' url changes on every request so ignore it for the hash '''
+        return scrape.mkhash(
+            self.remove_control_chars(
+                ''.join(id.stripped_strings)))
     def getColumnCount(self):
         return 4
     def getColumns(self,columns):
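
Why the override matters: the base hash mixes in the document URL, so a disclosure log that embeds a session token in every link would mint a fresh hash on each run and re-save rows it has already seen. A sketch of that failure mode, with the URLs and the `mkhash` stand-in purely illustrative:

    import hashlib

    def mkhash(s):
        # stand-in for scrape.mkhash, assumed to be a stable string hash
        return hashlib.md5(s).hexdigest()

    docid = 'FOI 12/34'
    run1 = 'http://example.gov.au/foi?session=aaa'  # hypothetical
    run2 = 'http://example.gov.au/foi?session=bbb'  # same page, new token

    # base getDocHash: url + id, so the key churns with the session token
    assert mkhash(run1 + docid) != mkhash(run2 + docid)
    # overridden getDocHash: id alone, so the key is stable across runs
    assert mkhash(docid) == mkhash(docid)
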