Add more scrapers, fix column order (title before description), and extract titles from lengthy descriptions
Former-commit-id: 765a9aab684bd300c60e5c698b8df43404db1d32
--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -88,8 +88,12 @@
return
def getTable(self, soup):
return soup.table
+ def getRows(self, table):
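+        # default row selection: every <tr> in the table; subclasses override this when headers or nested rows must be skipped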
+ return table.find_all('tr')
def getDate(self, content, entry, doc):
- edate = parse(''.join(content.stripped_strings).strip(), dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+ date = ''.join(content.stripped_strings).strip()
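+        # some disclosure logs misspell "October" as "Octber"; normalise before parsing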
+        date = date.replace("Octber", "October")
+ edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
print edate
doc.update({'date': edate})
return
@@ -110,10 +114,10 @@
# http://www.crummy.com/software/BeautifulSoup/documentation.html
soup = BeautifulSoup(content)
table = self.getTable(soup)
- for row in table.find_all('tr'):
+ for row in self.getRows(table):
columns = row.find_all('td')
if len(columns) == self.getColumnCount():
- (id, date, description, title, notes) = self.getColumns(columns)
+ (id, date, title, description, notes) = self.getColumns(columns)
print ''.join(id.stripped_strings)
if id.string == None:
hash = scrape.mkhash(self.remove_control_chars(url+(''.join(date.stripped_strings))))
--- a/documents/scrapers/1097fa8afdcf5db89d212d0979226667.py
+++ b/documents/scrapers/1097fa8afdcf5db89d212d0979226667.py
@@ -8,7 +8,7 @@
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- a/documents/scrapers/157cbe654bdaa0a48e6650152ae34489.py
+++ b/documents/scrapers/157cbe654bdaa0a48e6650152ae34489.py
@@ -10,7 +10,7 @@
return 5
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
def getTable(self,soup):
return soup.find_all('table')[4]
--- /dev/null
+++ b/documents/scrapers/38ca99d2790975a40dde3fae41dbdc3d.py
@@ -1,1 +1,32 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+import dateutil
+from dateutil.parser import *
+from datetime import *
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ def getColumnCount(self):
+ return 3
+ def getColumns(self,columns):
+ (date, title, description) = columns
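+        # this log has no id column, so the date cell doubles as the row id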
+ return (date, date, title, description, None)
+ def getTitle(self, content, entry, doc):
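+        # keep only the first two text fragments as the title; the rest of the cell is a lengthy description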
+ i = 0
+ title = ""
+ for string in content.stripped_strings:
+ if i < 2:
+ title = title + string
+ i = i+1
+ doc.update({'title': title})
+ print title
+ return
+
+if __name__ == '__main__':
+ print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+ print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+ ScraperImplementation().doScrape()
+
--- a/documents/scrapers/3cd40b1240e987cbcd3f0e67054ce259.py
+++ b/documents/scrapers/3cd40b1240e987cbcd3f0e67054ce259.py
@@ -7,7 +7,7 @@
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getColumns(self,columns):
(id, date, description, title, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- /dev/null
+++ b/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py
@@ -1,1 +1,47 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+import dateutil
+from dateutil.parser import *
+from datetime import *
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ def getTable(self,soup):
+ return soup.find(class_ = "inner-column").table
+ def getRows(self,table):
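+        # only direct child rows of the tbody, ignoring rows of any nested tables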
+ return table.tbody.find_all('tr',recursive=False)
+ def getColumnCount(self):
+ return 3
+ def getColumns(self,columns):
+ (date, title, description) = columns
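+        # no id column here either; the date cell doubles as the row id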
+ return (date, date, title, description, None)
+ def getDate(self, content, entry, doc):
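+        # the date is the second text fragment in the cell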
+ i = 0
+ date = ""
+ for string in content.stripped_strings:
+ if i ==1:
+ date = string
+ i = i+1
+ edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
+ print edate
+ doc.update({'date': edate})
+ return
+ def getTitle(self, content, entry, doc):
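+        # keep only the first two text fragments as the title; the remainder is the lengthy description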
+ i = 0
+ title = ""
+ for string in content.stripped_strings:
+ if i < 2:
+ title = title + string
+ i = i+1
+ doc.update({'title': title})
+ #print title
+ return
+
+if __name__ == '__main__':
+ print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+ print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+ ScraperImplementation().doScrape()
+
--- /dev/null
+++ b/documents/scrapers/53b14397c8f27c29ff07b6319f7a0ec5.py
@@ -1,1 +1,21 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ #def getTable(self,soup):
+ # return soup.find(id = "ctl00_PlaceHolderMain_intro2__ControlWrapper_CerRichHtmlField").table
+ def getColumnCount(self):
+ return 5
+ def getColumns(self,columns):
+ (id, date, title, description,notes) = columns
+ return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+ print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+ print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+ ScraperImplementation().doScrape()
+
--- a/documents/scrapers/53d2884f8afd026096a27bd5051ec50e.py
+++ b/documents/scrapers/53d2884f8afd026096a27bd5051ec50e.py
@@ -10,7 +10,7 @@
return soup.find(class_ = "ms-rtestate-field").table
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
def getLinks(self, content, entry, doc):
link = None
--- /dev/null
+++ b/documents/scrapers/55b69726fde4b4898ecf6d7217d1d1d2.py
@@ -1,1 +1,21 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ #def getTable(self,soup):
+ # return soup.find(id = "ctl00_PlaceHolderMain_intro2__ControlWrapper_CerRichHtmlField").table
+ def getColumnCount(self):
+ return 4
+ def getColumns(self,columns):
+ (id, date, title, description) = columns
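+        # four-column log with no notes column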
+ return (id, date, title, description, None)
+
+if __name__ == '__main__':
+ print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+ print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+ ScraperImplementation().doScrape()
+
--- /dev/null
+++ b/documents/scrapers/627f116dfe42c9f27ad6747be0aa44e2.txt
@@ -1,1 +1,2 @@
+see parent scraper (DHS)
--- a/documents/scrapers/6fe3c812a99d486963133459b2768cf6.py
+++ b/documents/scrapers/6fe3c812a99d486963133459b2768cf6.py
@@ -8,7 +8,7 @@
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- a/documents/scrapers/795c33ed030293dbdb155c909ea47e37.py
+++ b/documents/scrapers/795c33ed030293dbdb155c909ea47e37.py
@@ -10,7 +10,7 @@
return 7
def getColumns(self,columns):
(id, date, title, description, notes, deletedate, otherinfo) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
#def getTable(self,soup):
# return soup.find(class_ = "box").table
--- a/documents/scrapers/8c9421f852c441910bf1d93a57b31d64.py
+++ b/documents/scrapers/8c9421f852c441910bf1d93a57b31d64.py
@@ -7,7 +7,7 @@
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- a/documents/scrapers/8ef0e5802f99800f514b3a148e013b75.py
+++ b/documents/scrapers/8ef0e5802f99800f514b3a148e013b75.py
@@ -12,7 +12,7 @@
return soup.find(class_ = "content").table
def getColumns(self,columns):
(id, date, title, description) = columns
- return (id, date, description, title, None)
+ return (id, date, title, description, None)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- a/documents/scrapers/c43ca6780764f4e61918e8836be74420.py
+++ b/documents/scrapers/c43ca6780764f4e61918e8836be74420.py
@@ -7,7 +7,7 @@
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getColumns(self,columns):
(id, date, title,description,notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
--- /dev/null
+++ b/documents/scrapers/dae7e934f1c341ccc9547a89a8af917e.py
@@ -1,1 +1,21 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+#http://www.doughellmann.com/PyMOTW/abc/
+class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
+ #def getTable(self,soup):
+ # return soup.find(id = "ctl00_PlaceHolderMain_intro2__ControlWrapper_CerRichHtmlField").table
+ def getColumnCount(self):
+ return 5
+ def getColumns(self,columns):
+ (id, date, title, description,notes) = columns
+ return (id, date, title, description, notes)
+
+if __name__ == '__main__':
+ print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
+ print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
+ ScraperImplementation().doScrape()
+
--- a/documents/scrapers/dfd7414bb0c21a0076ab559901ae0588.py
+++ b/documents/scrapers/dfd7414bb0c21a0076ab559901ae0588.py
@@ -8,7 +8,7 @@
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
def getTable(self,soup):
return soup.find(class_ = "content")
--- a/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
+++ b/documents/scrapers/e2a845e55bc9986e6c75c5ad2c508b8d.py
@@ -7,7 +7,7 @@
class ScraperImplementation(genericScrapers.GenericRSSDisclogScraper):
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericRSSDisclogScraper)
--- a/documents/scrapers/f2ab2908d8ee56ed8d995ef4187e75e6.py
+++ b/documents/scrapers/f2ab2908d8ee56ed8d995ef4187e75e6.py
@@ -8,7 +8,7 @@
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
def getTable(self,soup):
return soup.find(id = "content").table
--- a/documents/scrapers/rtk.py
+++ b/documents/scrapers/rtk.py
@@ -7,7 +7,7 @@
class ScraperImplementation(genericScrapers.GenericRSSDisclogScraper):
def getColumns(self,columns):
(id, date, title, description, notes) = columns
- return (id, date, description, title, notes)
+ return (id, date, title, description, notes)
if __name__ == '__main__':
print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericRSSDisclogScraper)