--- a/scrape.py
+++ b/scrape.py
@@ -3,15 +3,7 @@
 import urllib2
 from BeautifulSoup import BeautifulSoup
 import re
-
-couch = couchdb.Server('http://192.168.1.148:5984/')
-
-# select database
-agencydb = couch['disclosr-agencies']
-
-for row in agencydb.view('app/getScrapeRequired'): #not recently scraped agencies view?
-    agency = agencydb.get(row.id)
-    print agency['agencyName']
+import hashlib
 
 #http://diveintopython.org/http_web_services/etags.html
 class NotModifiedHandler(urllib2.BaseHandler):
@@ -20,26 +12,30 @@
         addinfourl.code = code
         return addinfourl
 
-def scrapeAndStore(URL, depth, agency):
-    URL = "http://www.hole.fi/jajvirta/weblog/"
-    req = urllib2.Request(URL)
-
-    #if there is a previous version sotred in couchdb, load caching helper tags
-    if etag:
-        req.add_header("If-None-Match", etag)
-    if last_modified:
-        req.add_header("If-Modified-Since", last_modified)
+def scrapeAndStore(docsdb, url, depth, agencyID):
+    hash = hashlib.md5(url).hexdigest()
+    req = urllib2.Request(url)
+    print "Fetching %s" % url
+    doc = docsdb.get(hash)
+    if doc is None:
+        # no record for this url yet; start a new document keyed by the url hash
+        doc = {'_id': hash, 'url': url, 'agencyID': agencyID}
+    #if there is a previous version stored in couchdb, load caching helper tags
+    if doc.has_key('etag'):
+        req.add_header("If-None-Match", doc['etag'])
+    if doc.has_key('last_modified'):
+        req.add_header("If-Modified-Since", doc['last_modified'])
 
     opener = urllib2.build_opener(NotModifiedHandler())
     url_handle = opener.open(req)
     headers = url_handle.info() # the addinfourls have the .info() too
-    etag = headers.getheader("ETag")
-    last_modified = headers.getheader("Last-Modified")
-    web_server = headers.getheader("Server")
-    file_size = headers.getheader("Content-Length")
-    mime_type = headers.getheader("Content-Type")
+    doc['etag'] = headers.getheader("ETag")
+    doc['last_modified'] = headers.getheader("Last-Modified")
+    doc['web_server'] = headers.getheader("Server")
+    doc['file_size'] = headers.getheader("Content-Length")
+    doc['mime_type'] = headers.getheader("Content-Type")
 
-    if hasattr(url_handle, 'code')
+    if hasattr(url_handle, 'code'):
         if url_handle.code == 304:
             print "the web page has not been modified"
         else:
@@ -49,13 +45,14 @@
             soup = BeautifulSoup(html)
             links = soup.findAll('a') # soup.findAll('a', id=re.compile("^p-"))
             for link in links:
-                print link['href']
-                #for each unique link
-                #if html mimetype
-                # go down X levels,
-                # diff with last stored attachment, store in document
-                #if not
-                # remember to save parentURL and title (link text that lead to document)
+                if link.has_key("href"):
+                    print link['href']
+                    #for each unique link
+                    #if html mimetype
+                    # go down X levels,
+                    # diff with last stored attachment, store in document
+                    #if not
+                    # remember to save parentURL and title (link text that lead to document)
                 #store as attachment epoch-filename
 
     else:
@@ -63,3 +60,15 @@
         #record/alert error to error database
 
 
+
+
+couch = couchdb.Server('http://192.168.1.148:5984/')
+
+# select databases
+agencydb = couch['disclosr-agencies']
+docsdb = couch['disclosr-documents']
+
+for row in agencydb.view('app/getScrapeRequired'): #not recently scraped agencies view?
+    agency = agencydb.get(row.id)
+    print agency['name']
+    scrapeAndStore(docsdb, agency['website'], 1, agency['_id'])