beginnings of RSS scraper
--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -8,17 +8,40 @@
 import feedparser
 import abc
+import os
+import sys
 
-class GenericRSSDisclogScraper(object):
+class GenericDisclogScraper(object):
         __metaclass__ = abc.ABCMeta
-        @abc.abstractmethod
+        agencyID = None
+        disclogURL = None
         def getAgencyID(self):
                 """ disclosr agency id """
-                return
+                # default the agency id to this script's filename, e.g. "abc.py" -> "abc"
+                if self.agencyID is None:
+                        self.agencyID = os.path.basename(sys.argv[0]).replace(".py", "")
+                return self.agencyID
 
-        @abc.abstractmethod
         def getURL(self):
                 """ disclog URL """
-                return
+                # fetch the disclosure log URL from the agency's record in the disclosr db
+                if self.disclogURL is None:
+                        agency = scrape.agencydb.get(self.getAgencyID())
+                        self.disclogURL = agency['FOIDocumentsURL']
+                return self.disclogURL
+
+        @abc.abstractmethod
+        def doScrape(self):
+                """ do the scraping """
+                return
+
+
+
+class GenericRSSDisclogScraper(GenericDisclogScraper):
+        def getDescription(self, entry, doc):
+                """ get description from rss entry """
+                doc['description'] = entry.summary
+                return
+
         def doScrape(self):
                 foidocsdb = scrape.couch['disclosr-foidocuments']
                 (url, mime_type, content) = scrape.fetchURL(scrape.docsdb, self.getURL(), "foidocuments", self.getAgencyID())
@@ -26,29 +49,24 @@
                 for entry in feed.entries:
                         #print entry
                         print entry.id
-                        hash = scrape.mkhash(entry.link)
+                        # hash the entry id (not the link) to identify the document
+                        hash = scrape.mkhash(entry.id)
+                        #print hash
                         doc = foidocsdb.get(hash)
+                        #print doc
                         if doc is None:
                                 print "saving"
                                 edate = datetime.fromtimestamp(mktime(entry.published_parsed)).strftime("%Y-%m-%d")
-                                doc = {'id': hash, 'agencyID': self.getAgencyID(), 'url': entry.link, 'docID': entry.id,
-                                "date": edate, "description": entry.summary, "title": entry.title}
+                                # _id is CouchDB's document key, so the foidocsdb.get(hash) above finds it on re-runs
+                                doc = {'_id': hash, 'agencyID': self.getAgencyID(), 'url': entry.link, 'docID': entry.id,
+                                "date": edate, "title": entry.title}
+                                self.getDescription(entry, doc)
                                 foidocsdb.save(doc)
                         else:
                                 print "already saved"
 
-class GenericOAICDisclogScraper(object):
-        __metaclass__ = abc.ABCMeta
-        @abc.abstractmethod
-        def getAgencyID(self):
-                """ disclosr agency id """
-                return
-
-        @abc.abstractmethod
-        def getURL(self):
-                """ disclog URL """
-                return
-
+class GenericOAICDisclogScraper(GenericDisclogScraper):
+        __metaclass__ = abc.ABCMeta
         @abc.abstractmethod
         def getColumns(self, columns):
                 """ rearranges columns if required """
@@ -86,7 +104,7 @@
                                                                 edate = str(dtdate[0]) + '-' + str(dtdate[1]) + '-' + str(dtdate[2])
                                                         else:
                                                                 edate = ""
-                                                        doc = {'id': hash, 'agencyID': self.getAgencyID(), 'url': self.getURL(), "links": links, 'docID': id.string,
+                                                        doc = {'_id': hash, 'agencyID': self.getAgencyID(), 'url': self.getURL(), "links": links, 'docID': id.string,
                                                         "date": edate, "description": descriptiontxt, "title": title.string, "notes": notes.string}
                                                         foidocsdb.save(doc)
                                                 else:
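
With this refactor a per-agency RSS scraper shrinks to a small subclass: getAgencyID() now defaults to the script's filename and getURL() pulls the agency's FOIDocumentsURL from the disclosr agency db. A minimal sketch of such a scraper follows; the ScraperImplementation name, the exampleagency.py filename and the summary-tidying override are illustrative assumptions, not part of this commit:

import genericScrapers

class ScraperImplementation(genericScrapers.GenericRSSDisclogScraper):
        def getDescription(self, entry, doc):
                """ hypothetical override of the new hook to tidy the feed summary """
                doc['description'] = entry.summary.strip()

if __name__ == '__main__':
        # save as e.g. exampleagency.py so getAgencyID() resolves to "exampleagency"
        ScraperImplementation().doScrape()

Because getDescription() already has a sensible default, the override can be dropped entirely and the subclass body reduced to pass.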
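A GenericOAICDisclogScraper subclass likewise only supplies getColumns(), mapping an agency's disclosure-log table cells onto the fields doScrape() saves. The five-field order (id, date, title, description, notes) below is an assumption inferred from the doc dict in the last hunk; this diff does not show how doScrape() unpacks the return value:

import genericScrapers

class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
        def getColumns(self, columns):
                """ rearranges columns if required """
                # hypothetical agency whose table is already in the expected order
                (id, date, title, description, notes) = columns
                return (id, date, title, description, notes)

if __name__ == '__main__':
        ScraperImplementation().doScrape()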