more scrapers
[disclosr.git] / documents / genericScrapers.py
--- a/documents/genericScrapers.py
+++ b/documents/genericScrapers.py
@@ -9,6 +9,7 @@
 import dateutil
 from dateutil.parser import *
 from datetime import *
+import codecs
 
 class GenericDisclogScraper(object):
         __metaclass__ = abc.ABCMeta
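The remove_control_chars helper is used throughout the rest of this diff, but its definition sits outside the changed hunks; the new import codecs likewise presumably supports encoding-aware handling elsewhere in the module. As a rough sketch only (an assumption, not the repository's actual implementation), such a cleaner typically keeps printable ASCII so printing and hashing of scraped cell text cannot trip on stray control bytes:

    # in the scraper class this is a method, called as self.remove_control_chars;
    # shown here as a plain function for brevity
    def remove_control_chars(text):
        # assumption: keep printable ASCII only, dropping control characters
        return "".join(ch for ch in text if 32 <= ord(ch) < 127)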
@@ -93,7 +94,7 @@
 	def getDate(self, content, entry, doc):
 		date = ''.join(content.stripped_strings).strip()
 		(a,b,c) = date.partition("(")
-		date = a.replace("Octber","October")
+		date = self.remove_control_chars(a.replace("Octber","October"))
 		print date
 		edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
 		print edate
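getDate now scrubs control characters as well as the recurring "Octber" typo before handing the text to dateutil's fuzzy parser. Reusing the helper sketched above on an invented sample string, the flow is roughly:

    from dateutil.parser import parse

    raw = "3 Octber 2012\x0b(see amended request)"   # invented example cell text
    cleaned = remove_control_chars(raw.partition("(")[0].replace("Octber", "October")).strip()
    print(parse(cleaned, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d"))   # 2012-10-03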
@@ -120,7 +121,7 @@
 					columns = row.find_all('td')
 					if len(columns) == self.getColumnCount():
 						(id, date, title, description, notes) = self.getColumns(columns)
-						print ''.join(id.stripped_strings)
+						print self.remove_control_chars(''.join(id.stripped_strings))
 						if id.string == None:
 							hash = scrape.mkhash(self.remove_control_chars(url+(''.join(date.stripped_strings))))
 						else:
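The printed row id gets the same cleaning; the existing fallback already routes cleaned text into scrape.mkhash, keying on URL plus date text when the id cell has no plain string. scrape.mkhash is defined in scrape.py and not shown here; assuming it is a simple digest helper, the dedup key amounts to something like:

    import hashlib

    def mkhash(text):
        # assumed stand-in for scrape.mkhash: stable hex digest of the cleaned string
        return hashlib.md5(text.encode("utf-8")).hexdigest()

    url = "http://www.example.gov.au/foi/log"             # invented example values
    date_text = "3 October 2012"
    key = mkhash(remove_control_chars(url + date_text))   # used when the id cell is empty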
@@ -136,7 +137,12 @@
 							self.getDescription(description,row, doc)
 							if notes != None:
                                         			doc.update({ 'notes': (''.join(notes.stripped_strings))})
-							foidocsdb.save(doc)
+							badtitles = ['-','Summary of FOI Request','FOI request(in summary form)','Summary of FOI request received by the ASC',
+								'Summary of FOI request received by agency/minister','Description of Documents Requested','FOI request','Description of FOI Request','Summary of request','Description','Summary',
+								'Summary of FOIrequest received by agency/minister','Summary of FOI request received','Description of    FOI Request','Results 1 to 67 of 67']
+							if doc['title'] not in badtitles and doc['description'] != '':
+								print "saving"
+								foidocsdb.save(doc)
 						else:
 							print "already saved "+hash