--- a/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
+++ b/documents/scrapers/7c6adc1d41cf029bf1a0959e5156477a.py
@@ -16,7 +16,7 @@
 		foidocsdb = scrape.couch['disclosr-foidocuments']
 		(url, mime_type, content) = scrape.fetchURL(scrape.docsdb,
 		     self.getURL(), "foidocuments", self.getAgencyID())
-		
+
 		d = pq(content.read())
 		d.make_links_absolute(base_url = self.getURL())
 		for table in d('table').items():
@@ -35,7 +35,7 @@
 						doc = {'_id': dochash, 'agencyID': self.getAgencyID()
 						, 'url': self.getURL(), 'docID': dochash,
 						"links": links,
-						"date": edate, "notes": notes, "title": "Disclosure Log Updated", "description": description}
+						"date": edate, "notes": notes, "title": title, "description": description}
						#print doc
						foidocsdb.save(doc)
					else: