Add more scrapers, fix the column order, and extract the title from the lengthy description
[disclosr.git] / documents / scrapers / 3d5871a44abbbc81ef5b3a420070755d.py
blob:a/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py -> blob:b/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py
--- a/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py
+++ b/documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py
@@ -1,1 +1,47 @@
+import sys,os
+sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))
+import genericScrapers
+import scrape
+from bs4 import BeautifulSoup
+import dateutil
+from dateutil.parser import *
+from datetime import *
 
# Abstract-base-class pattern: http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
    """Scrape an agency OAIC disclosure log rendered as a 3-column table
    (date, title, description) inside the page's "inner-column" element.

    Fixes over the previous revision: consistent 4-space indentation (the
    original mixed hard tabs with 8-space indents, a TabError under
    Python 3), parenthesized single-argument print (identical behaviour
    on Python 2), and enumerate() instead of manual counters.
    """

    def getTable(self, soup):
        # The disclosure log table lives inside the "inner-column" element.
        return soup.find(class_="inner-column").table

    def getRows(self, table):
        # Only direct child rows of tbody; rows of nested tables are ignored.
        return table.tbody.find_all('tr', recursive=False)

    def getColumnCount(self):
        return 3

    def getColumns(self, columns):
        # Map the 3 scraped cells onto the generic 5-tuple expected by the
        # framework: (id, date, title, description, notes). The date cell
        # doubles as the record id; there is no notes column on this site.
        (date, title, description) = columns
        return (date, date, title, description, None)

    def getDate(self, content, entry, doc):
        """Store a normalised 'date' (YYYY-MM-DD) on doc.

        The date is taken from the SECOND stripped string of the cell
        (index 1); earlier/later strings are layout noise.
        """
        date = ""
        for i, text in enumerate(content.stripped_strings):
            if i == 1:
                date = text
        # dayfirst: Australian d/m/y dates; fuzzy tolerates surrounding text.
        edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
        print(edate)
        doc.update({'date': edate})
        return

    def getTitle(self, content, entry, doc):
        """Store 'title' on doc, built from only the first two stripped
        strings of the cell — the remainder is lengthy description text."""
        title = ""
        for i, text in enumerate(content.stripped_strings):
            if i < 2:
                title = title + text
        doc.update({'title': title})
        return
+
if __name__ == '__main__':
    # Sanity checks that the scraper plugs into the generic framework, then
    # run the scrape. %-formatted print() produces output byte-identical to
    # the old py2 `print 'x:', v` statements while also being valid Python 3.
    print('Subclass: %s' % issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper))
    print('Instance: %s' % isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper))
    ScraperImplementation().doScrape()