scraper fixes
disclosr.git: documents/scrapers/3d5871a44abbbc81ef5b3a420070755d.py
import sys
import os

sys.path.insert(0, os.path.join(os.path.dirname(__file__) or '.', '../'))

import genericScrapers
from dateutil.parser import parse

# http://www.doughellmann.com/PyMOTW/abc/
class ScraperImplementation(genericScrapers.GenericOAICDisclogScraper):
    def getTable(self, soup):
        # The disclosure log table lives inside the page's "inner-column" div.
        return soup.find(class_="inner-column").table

    def getRows(self, table):
        # Only the direct child rows of the tbody are data rows.
        return table.tbody.find_all('tr', recursive=False)

    def getColumnCount(self):
        return 3

    def getColumns(self, columns):
        # Map the three table columns onto the five fields the generic
        # scraper expects; the date column doubles as the record id.
        (date, title, description) = columns
        return (date, date, title, description, None)

    def getDate(self, content, entry, doc):
        # The date is the second stripped string in the cell.
        date = ""
        for i, string in enumerate(content.stripped_strings):
            if i == 1:
                date = string
        edate = parse(date, dayfirst=True, fuzzy=True).strftime("%Y-%m-%d")
        print edate
        doc.update({'date': edate})

    def getTitle(self, content, entry, doc):
        # The title is the first two stripped strings in the cell, joined.
        title = ""
        for i, string in enumerate(content.stripped_strings):
            if i < 2:
                title = title + string
        doc.update({'title': title})
 
if __name__ == '__main__':
    # Scrapes the CSIRO FOI disclosure logs:
    # http://www.csiro.au/Portals/About-CSIRO/How-we-work/Governance/FOI-Request-Disclosure-Log-2012-13.aspx
    # http://www.csiro.au/Portals/About-CSIRO/How-we-work/Governance/FOI-Request-Disclosure-Log-2011-12.aspx
    print 'Subclass:', issubclass(ScraperImplementation, genericScrapers.GenericOAICDisclogScraper)
    print 'Instance:', isinstance(ScraperImplementation(), genericScrapers.GenericOAICDisclogScraper)
    ScraperImplementation().doScrape()