rdf refactor

# Apache backend vhost for data.gov.au CKAN, fronted by Varnish on port 80.
<VirtualHost 127.0.0.1:8080>
    ServerAdmin webmaster@localhost

    ServerName data.gov.au
    # ServerAlias entries are space-separated; a comma would become part of
    # the hostname itself and never match.
    ServerAlias www.data.gov.au opendata.linkdigital.com.au

    # Allow access to local static content, e.g. logos.
    Alias /index.html /var/www/index.html
    Alias /logos /var/www/logos
    Alias /tools /var/www/tools.shtml
    Alias /opendata /var/www/opendata.shtml
    Alias /opengov /var/www/opengov.shtml

    DocumentRoot /var/www
    <IfModule mod_expires.c>
        <FilesMatch "\.(jpe?g|png|gif|js|css)$">
            ExpiresActive On
            ExpiresDefault "access plus 1 week"
        </FilesMatch>
    </IfModule>

    # Web fonts: serve .woff as a download type and allow CORS from
    # data.gov.au subdomains (requires mod_headers and mod_setenvif).
    AddType application/octet-stream .woff
    <FilesMatch "\.(eot|otf|woff|ttf)$">
        SetEnvIf Origin "^http(s)?://(.+\.)?(data.gov.au)$" origin_is=$0
        Header set Access-Control-Allow-Origin %{origin_is}e env=origin_is
    </FilesMatch>

    <Directory />
        Options FollowSymLinks
        AllowOverride None
    </Directory>

    <Directory /var/www/>
        Options -Indexes -FollowSymLinks -MultiViews +Includes
        AllowOverride None

        Order allow,deny
        allow from all

        # Server-side includes for the static .shtml pages aliased above.
        AddType text/html .shtml
        AddOutputFilter INCLUDES .shtml
    </Directory>

    ErrorLog ${APACHE_LOG_DIR}/error.log

    # Possible values include: debug, info, notice, warn, error, crit, alert, emerg.
    LogLevel warn

    CustomLog ${APACHE_LOG_DIR}/access.log varnishcombined
    RewriteEngine On

    RewriteMap lc int:tolower

    # Legacy-URL redirects carried over from the previous (WordPress) site.
    # RewriteRule ^/apps/antenna-mate/?$ http://antennamate.com/ [PT]
    # RewriteRule ^/apps/postcode-finder/?$ http://www.aus-emaps.com/postcode_finder.php [PT]
    # RewriteRule ^/apps/the-australian-cost-of-living-heatmap/?$ http://www.creditcardcompare.com.au/tools/cost-of-living-heatmap/ [PT]
    RewriteRule ^/catalogues/?$ /organization [PT]
    RewriteRule ^/dataset/“cycling-in-new-south-wales-what-the-data-tells-us”-and-related-data/? /dataset/2809cycling-in-new-south-wales-what-the-data-tells-us2809-and-related-data [PT]
    RewriteRule ^/dataset/apvma-pubcris-dataset-for-registered-agricultural-and-veterinary-chemical-products-and-approved-actives/? /dataset/apvma-pubcris-dataset-for-registered-agricultural-and-veterinary-chemical-products-and-approved-acti [PT]
    RewriteRule ^/dataset_category/business/?$ /group/business [PT]
    RewriteRule ^/dataset_category/community/?$ /group/community [PT]
    RewriteRule ^/dataset_category/geography/?$ /group/geography [PT]
    RewriteRule ^/dataset_category/government/?$ /group/government [PT]
    RewriteRule ^/dataset_category/health/?$ /group/health [PT]
    RewriteRule ^/dataset_category/?$ /group [PT]
    RewriteRule ^/feed/?$ /feeds/dataset.atom [PT]
    RewriteRule ^/jurisdiction/australian-capital-territory/?$ /dataset?q=act [PT]
    RewriteRule ^/jurisdiction/new-south-wales/?$ /dataset?q=new+south+wales [PT]
    RewriteRule ^/jurisdiction/queensland/?$ /dataset?q=queensland [PT]
    RewriteRule ^/jurisdiction/south-australia/?$ /dataset?q=south+australia [PT]
    RewriteRule ^/jurisdiction/victoria/?$ /dataset?q=victoria [PT]
    RewriteRule ^/jurisdiction/western-australia/?$ /dataset?q=western+australia [PT]
    RewriteRule ^/jurisdiction/?$ /dataset [PT]

    # RewriteCond %{QUERY_STRING} ^category=([a-z]+)
    # RewriteRule ^/data/?$ /group/${lc:%1}? [PT]

    # RewriteCond %{QUERY_STRING} ^format=Shapefile
    # RewriteRule ^/data/?$ /dataset?res_format=shp [PT]

    RewriteRule ^/data/?$ /dataset? [PT]

    # Map old "?s=<term>" searches onto CKAN's dataset search.
    RewriteCond %{QUERY_STRING} ^s=(.*)
    RewriteRule ^/ /dataset?q=%1 [PT]

    # Rewrite any opendata.linkdigital.com.au references inside text-like
    # response bodies to the canonical hostname (mod_filter + mod_substitute).
    FilterDeclare OPENDATA
    FilterProvider OPENDATA SUBSTITUTE resp=Content-Type $text/
    FilterProvider OPENDATA SUBSTITUTE resp=Content-Type $/javascript
    FilterProvider OPENDATA SUBSTITUTE resp=Content-Type $/json
    FilterProvider OPENDATA SUBSTITUTE resp=Content-Type $/xml
    FilterChain OPENDATA
    Substitute "s#//opendata.linkdigital.com.au#//data.gov.au#"

    WSGIScriptAlias / /var/lib/ckan/dga/pyenv/bin/dga.py
    WSGIPassAuthorization On

    # NOTE(review): this overrides the earlier ${APACHE_LOG_DIR}/error.log
    # directive above (last ErrorLog wins) — confirm which log is intended.
    ErrorLog /var/log/apache2/ckandga.error.log
    CustomLog /var/log/apache2/ckandga.custom.log combined
</VirtualHost>
   
# This is a basic VCL configuration file for varnish. See the vcl(7)
# man page for details on VCL syntax and semantics.
#
# Default backend definition. Set this to point to your content
# server.
#
# Points at the Apache backend vhost bound to 127.0.0.1:8080.
backend default {
    .host = "127.0.0.1";
    .port = "8080";
}
   
sub vcl_fetch {
    # Serve stale objects for up to one hour while a fresh copy is fetched.
    set beresp.grace = 1h;
    # Gzip text-like responses at the cache edge.
    if (beresp.http.content-type ~ "(text|application)") {
        set beresp.do_gzip = true;
    }
    # Static assets: drop backend Set-Cookie so the object stays cacheable.
    if (req.url ~ "\.(png|gif|jpg|jpeg|swf|css|js|woff|eot)$") {
        unset beresp.http.set-cookie;
    }
}
sub vcl_recv {
    # CKAN page-view tracking must always hit the backend.
    if (req.url ~ "^/_tracking") {
        return (pass);
    }
    # Static assets: look up in cache even when the client sends cookies.
    if (req.url ~ "\.(png|gif|jpg|jpeg|swf|css|js|woff|eot)$") {
        return (lookup);
    }
    # Strip a two-letter ("/en/") or locale ("/en_GB/") path segment,
    # except "/js/" which is a real asset path — presumably to collapse
    # language-prefixed URLs onto one cached object; TODO confirm intent.
    if (req.url ~ "/(..|.._..)/") {
        set req.url = regsub(req.url, "/((?!js)..|.._..)/", "/");
    }
    if (req.http.Cookie) {
        # Remove all cookies named __utm? (utma, utmb, ...) - Google Analytics tracking.
        set req.http.Cookie = regsuball(req.http.Cookie, "(^|; ) *__utm.=[^;]+;? *", "\1");

        # If nothing remains, drop the header so the request can be cached.
        if (req.http.Cookie == "") {
            remove req.http.Cookie;
        }
    }
}
# Custom hash: normalise the URL before hashing so language-prefixed
# variants share one cache entry, then hash URL + host (default logic).
sub vcl_hash {
# http://serverfault.com/questions/112531/ignoring-get-parameters-in-varnish-vcl
# NOTE(review): the "(?:(.com|.au))" prefix means the matched ".com"/".au"
# text is ALSO replaced by "/", and the dots are unescaped (match any
# char). Looks like it intends to strip a locale segment after the domain
# — confirm the pattern against real request URLs.
set req.url = regsub(req.url, "(?:(.com|.au))/((?!js)..|.._..)/", "/");
hash_data(req.url);
if (req.http.host) {
hash_data(req.http.host);
} else {
hash_data(server.ip);
}
return (hash);
}
sub vcl_deliver {
    # Ensure downstream caches vary on Accept-Encoding (we gzip in vcl_fetch).
    if (!resp.http.Vary) {
        set resp.http.Vary = "Accept-Encoding";
    } else if (resp.http.Vary !~ "(?i)Accept-Encoding") {
        set resp.http.Vary = resp.http.Vary + ",Accept-Encoding";
    }
    # Expose cache hit/miss to clients for debugging.
    if (obj.hits > 0) {
        set resp.http.X-Cache = "HIT";
    } else {
        set resp.http.X-Cache = "MISS";
    }
}
   
#
# Below is a commented-out copy of the default VCL logic. If you
# redefine any of these subroutines, the built-in logic will be
# appended to your code.
# sub vcl_recv {
#     if (req.restarts == 0) {
#         if (req.http.x-forwarded-for) {
#             set req.http.X-Forwarded-For =
#                 req.http.X-Forwarded-For + ", " + client.ip;
#         } else {
#             set req.http.X-Forwarded-For = client.ip;
#         }
#     }
#     if (req.request != "GET" &&
#         req.request != "HEAD" &&
#         req.request != "PUT" &&
#         req.request != "POST" &&
#         req.request != "TRACE" &&
#         req.request != "OPTIONS" &&
#         req.request != "DELETE") {
#         /* Non-RFC2616 or CONNECT which is weird. */
#         return (pipe);
#     }
#     if (req.request != "GET" && req.request != "HEAD") {
#         /* We only deal with GET and HEAD by default */
#         return (pass);
#     }
#     if (req.http.Authorization || req.http.Cookie) {
#         /* Not cacheable by default */
#         return (pass);
#     }
#     return (lookup);
# }
#
# sub vcl_pipe {
#     # Note that only the first request to the backend will have
#     # X-Forwarded-For set. If you use X-Forwarded-For and want to
#     # have it set for all requests, make sure to have:
#     # set bereq.http.connection = "close";
#     # here. It is not set by default as it might break some broken web
#     # applications, like IIS with NTLM authentication.
#     return (pipe);
# }
#
# sub vcl_pass {
#     return (pass);
# }
#
# sub vcl_hash {
#     hash_data(req.url);
#     if (req.http.host) {
#         hash_data(req.http.host);
#     } else {
#         hash_data(server.ip);
#     }
#     return (hash);
# }
#
# sub vcl_hit {
#     return (deliver);
# }
#
# sub vcl_miss {
#     return (fetch);
# }
#
# sub vcl_fetch {
#     if (beresp.ttl <= 0s ||
#         beresp.http.Set-Cookie ||
#         beresp.http.Vary == "*") {
#         /*
#          * Mark as "Hit-For-Pass" for the next 2 minutes
#          */
#         set beresp.ttl = 120 s;
#         return (hit_for_pass);
#     }
#     return (deliver);
# }
#
# sub vcl_deliver {
#     return (deliver);
# }
#
# sub vcl_error {
#     set obj.http.Content-Type = "text/html; charset=utf-8";
#     set obj.http.Retry-After = "5";
#     synthetic {"
# <?xml version="1.0" encoding="utf-8"?>
# <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
#  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
# <html>
#   <head>
#     <title>"} + obj.status + " " + obj.response + {"</title>
#   </head>
#   <body>
#     <h1>Error "} + obj.status + " " + obj.response + {"</h1>
#     <p>"} + obj.response + {"</p>
#     <h3>Guru Meditation:</h3>
#     <p>XID: "} + req.xid + {"</p>
#     <hr>
#     <p>Varnish cache server</p>
#   </body>
# </html>
# "};
#     return (deliver);
# }
#
# sub vcl_init {
#     return (ok);
# }
#
# sub vcl_fini {
#     return (ok);
# }
   
  #
  # ckan - Pylons configuration
  #
  # The %(here)s variable will be replaced with the parent directory of this file
  #
  [DEFAULT]
 
  # Change debug to true when doing CKAN development, it enables Pylons'
  # interactive debugging tool, makes Fanstatic serve unminified JS and CSS
  # files, and enables CKAN templates' debugging features.
  #
  # WARNING: *THIS SETTING MUST BE SET TO FALSE ON A PRODUCTION ENVIRONMENT*
  # Debug mode will enable the interactive debugging tool, allowing ANYONE to
  # execute malicious code after an exception is raised.
  debug = False
 
  email_to = ddg@alert.links.com.au
  error_email_from = data.gov@finance.gov.au
 
  # The SMTP server to connect to when sending emails to users, with optional
  # port (default: 25). For example: lavabit.com, or smtp.gmail.com:587. To
  # connect to a local sendmail process enter 'localhost'.
  #smtp.server = localhost
  smtp.server = smtp.gmail.com:587
 
  # Whether or not to use STARTTLS when connecting to the SMTP server.
  #smtp.starttls = False
  smtp.starttls = True
 
  # The username and password to use to authenticate with the SMTP server.
  # Optional, if these are left commented-out then CKAN will try to send email
  # to the SMTP server without logging in first.
  #smtp.user = your_username@gmail.com
  #smtp.password = your_password
  smtp.user = datagovau@gmail.com
  smtp.password = ***
  googleanalytics.id = ***
  # The email address that emails sent by CKAN to users should appear to come
  # from. Optional (default: None, note that the SMTP server may insert its own
  # from address).
  smtp.mail_from = data.gov@finance.gov.au
 
  [server:main]
  use = egg:Paste#http
  host = 0.0.0.0
  port = 5000
 
  [app:main]
  use = egg:ckan
  ckan.tracking_enabled = true
  full_stack = true
  cache_dir = %(here)s/data
  beaker.session.key = ckan
  beaker.session.secret = ***
  app_instance_uuid = ***
  # Auth
 
  ckan.auth.anon_create_dataset = false
  ckan.auth.create_unowned_dataset = false
  ckan.auth.create_dataset_if_not_in_organization = false
  ckan.auth.user_create_organizations = false
  ckan.auth.user_delete_organizations = true
  ckan.auth.user_create_groups = false
  ckan.auth.user_delete_groups = true
  ckan.auth.create_user_via_api = true
 
  # List the names of CKAN extensions to activate.
  # Note: This line is required to be here for packaging, even if it is empty.
  # Note: Add ``pdf_preview`` to enable the resource preview for PDFs
  # Add the ``resource_proxy`` plugin to enable resource proxying and get around the same origin policy
  # Add ``datastore`` to enable the CKAN DataStore extension
  ckan.plugins = qa disqus stats json_preview recline_preview datastore datastorer googleanalytics resource_proxy wms_preview kml_preview geojson_preview datagovau
 
  disqus.name = datagovau
 
  # If you'd like to fine-tune the individual locations of the cache data dirs
  # for the Cache data, or the Session saves, un-comment the desired settings
  # here:
  #beaker.cache.data_dir = %(here)s/data/cache
  #beaker.session.data_dir = %(here)s/data/sessions
 
  # Specify the database for SQLAlchemy to use:
  # * Postgres is currently required for a production CKAN deployment
  # * Sqlite (memory or file) can be used as a quick alternative for testing
  #sqlalchemy.url = postgresql://ckanuser:pass@localhost/ckantest
  sqlalchemy.url = ***
  #sqlalchemy.url = sqlite:///
  #sqlalchemy.url = sqlite:///%(here)s/somedb.db
 
  # Un-comment and specify the URLs for the DataStore database.
  # * Postgres is required
  #ckan.datastore.write_url = postgresql://ckanuser:pass@localhost/datastore
  #ckan.datastore.read_url = postgresql://readonlyuser:pass@localhost/datastore
  ckan.datastore.write_url = ***
  ckan.datastore.read_url = ***
 
  # repoze.who config
  who.config_file = %(here)s/who.ini
  who.log_level = warning
  who.log_file = %(cache_dir)s/who_log.ini
 
  # Location of RDF versions of datasets
  #rdf_packages = http://semantic.ckan.net/record/
 
  # Location of licenses group (defaults to cached local version of ckan group)
  #licenses_group_url = http://licenses.opendefinition.org/licenses/groups/ckan.json
  licenses_group_url = file://%(here)s/licenses.json
 
  # Dataset form to use
  package_form = standard
 
  # Hide certain extras fields from dataset read form:
  # package_hide_extras = for_search_index_only
 
  # API configuration
  #apikey_header_name = X-CKAN-API-Key
 
  ## extra places to look for templates and public files (comma separated lists)
  ## any templates/files found will override correspondingly named ones in
  ## ckan/templates/ and ckan/public
  ## (e.g. to override main layout template layout.html or add extra css files)
  # extra_template_paths = %(here)s/my-templates
  # extra_public_paths = %(here)s/my-public
 
  # Dataset form integration
  #package_edit_return_url = http://another.frontend/dataset/<NAME>
  #package_new_return_url = http://another.frontend/dataset/<NAME>
 
 
  # Turn on messaging with carrot, default to false
  #ckan.async_notifier = true
  # Messaging module used by carrot:
  # * pyamqplib - AMQP (e.g. for RabbitMQ)
  # * queue - native Python Queue (debugging and tests only)
  #carrot_messaging_library = pyamqplib
 
  ## Perform search just using database (rather than use e.g. solr).
  ## In this setup search is crude and limited, e.g. no full-text search, no faceting ...
  ## However, very useful for getting up and running quickly with CKAN
  # ckan.simple_search = 1
 
  ## Title of site (used in several places including templates and the <title> tag)
  ckan.site_title = data.gov.au
 
  ## Logo image to use on the home page
  ckan.site_logo = /base/images/ckan-logo.png
 
  ## Site tagline / description (used on front page)
  ckan.site_description = Australian Government Open Data Catalogue
 
  ## Used in creating some absolute urls (such as rss feeds, css files) and
  ## dump filenames
  ckan.site_url = http://data.gov.au
 
  ## Favicon (default is the CKAN software favicon)
  ckan.favicon = /images/icons/ckan.ico
 
  ## The gravatar default to use. This can be any of the pre-defined strings
  ## as defined on http://en.gravatar.com/site/implement/images/ (e.g. "identicon"
  ## or "mm"). Or it can be a url, e.g. "http://example.com/images/avatar.jpg"
  ckan.gravatar_default = identicon
 
  ## Solr support
  solr_url = http://solr.data.gov.au:8983/solr
 
  ## Automatic indexing. Make all changes immediately available via the search
  ## after editing or creating a dataset. Default is true. If for some reason
  ## you need the indexing to occur asynchronously, set this option to 0.
  # ckan.search.automatic_indexing = 1
 
  ## An 'id' for the site (used, for example, when creating entries in a common search index)
  ## If not specified derived from the site_url
  ckan.site_id = data.gov.au
 
  ## API url to use (e.g. in AJAX callbacks)
  ## Enable if the API is at a different domain
  # ckan.api_url = http://www.ckan.net
 
  ## html content to be inserted just before </head> tag (e.g. extra stylesheet)
  ## NB: can use html e.g. <strong>blah</strong>
  ## NB: can have multiline strings just indent following lines
  # ckan.template_head_end = <link rel="stylesheet" href="http://mysite.org/css/custom.css" type="text/css">
 
  ## html content to be inserted just before </body> tag (e.g. google analytics code)
  ## NB: can use html e.g. <strong>blah</strong>
  ## NB: can have multiline strings just indent following lines
  # ckan.template_footer_end =
 
  # These three settings (ckan.log_dir, ckan.dump_dir and ckan.backup_dir) are
  # all used in cron jobs, not in CKAN itself. CKAN logging is configured
  # in the logging configuration below
  # Directory for logs (produced by cron scripts associated with ckan)
  ckan.log_dir = %(here)s/log
  # Directory for JSON/CSV dumps (must match setting in apache config)
  ckan.dump_dir = %(here)s/dump
  # Directory for SQL database backups
  ckan.backup_dir = %(here)s/backup
 
  # Default authorizations for new domain objects
  #ckan.default_roles.Package = {"visitor": ["reader"], "logged_in": ["reader"]}
  #ckan.default_roles.Group = {"visitor": ["reader"], "logged_in": ["reader"]}
  #ckan.default_roles.System = {"visitor": ["reader"], "logged_in": ["editor"]}
  #ckan.default_roles.AuthorizationGroup = {"visitor": ["reader"], "logged_in": ["reader"]}
 
  ## Ckan public and private recaptcha keys [localhost]
  #ckan.recaptcha.publickey = 6LcL-94SAAAAAKCHKLIgCYNJczalehF4a3GHlPyR
  #ckan.recaptcha.privatekey = 6LcL-94SAAAAADCsq995yBsNl7lHEgJ3Ukqf4Ykq
 
  # Locale/languages
  ckan.locale_default = en_GB
  ckan.locales_offered = en_GB
  # Languages are grouped by percentage of strings in CKAN 1.8 translated
  # (those with 100% first, then those with >=80%, then >=50%, then <50%) and
  # within these groups roughly sorted by number of worldwide native speakers
  # according to Wikipedia.
  ckan.locale_order = en_GB pt_BR ja it cs_CZ ca es fr el sv sr sr@latin no sk fi ru de pl nl bg ko_KR hu sa sl lv
  ckan.locales_filtered_out = en
 
  ## Atom Feeds
  #
  # Settings for customising the metadata provided in
  # atom feeds.
  #
  # These settings are used to generate the <id> tags for both feeds
  # and entries. The unique <id>s are created following the method
  # outlined in http://www.taguri.org/ ie - they generate tagURIs, as specified
  # in http://tools.ietf.org/html/rfc4151#section-2.1 :
  #
  # <id>tag:thedatahub.org,2012:/feeds/group/933f3857-79fd-4beb-a835-c0349e31ce76</id>
  #
  # Each component has the corresponding settings:
  #
  # "thedatahub.org" is ckan.feeds.authority_name
  # "2012" is ckan.feeds.date
  #
 
  # Leave blank to use the ckan.site_url config value, otherwise set to a
  # domain or email address that you own. e.g. thedatahub.org or
  # admin@thedatahub.org
  ckan.feeds.authority_name =
 
  # Pick a date of the form "yyyy[-mm[-dd]]" during which the above domain was
  # owned by you.
  ckan.feeds.date = 2012
 
  # If not set, then the value in `ckan.site_id` is used.
  ckan.feeds.author_name =
 
  # If not set, then the value in `ckan.site_url` is used.
  ckan.feeds.author_link =
 
  ## File Store
  #
  # CKAN allows users to upload files directly to file storage either on the local
  # file system or to online ‘cloud’ storage like Amazon S3 or Google Storage.
  #
  # If you are using local file storage, remember to set ckan.site_url.
  #
  # To enable cloud storage (Google or S3), first run: pip install boto
  #
  # @see http://docs.ckan.org/en/latest/filestore.html
 
  # 'Bucket' to use for file storage
  ckan.storage.bucket = ckandga
 
  # To enable local file storage:
  ofs.impl = pairtree
  ofs.storage_dir = /srv/ckan/dga
 
  # To enable Google cloud storage:
  #ofs.impl = google
  #ofs.gs_access_key_id =
  #ofs.gs_secret_access_key =
 
  # To enable S3 cloud storage:
  #ofs.impl = s3
  #ofs.aws_access_key_id = ....
  #ofs.aws_secret_access_key = ....
 
 
  ## Previews
  #
  # Set the file types that should be previewed inline (e.g. images) or directly in an iframe.
 
  ckan.preview.direct = png jpg gif
  ckan.preview.loadable = html htm rdf+xml owl+xml xml n3 n-triples turtle plain atom csv tsv rss txt json
 
 
  # Activity Streams
  #
  # Default maximum number of activities to show in an activity stream.
  # ckan.activity_list_limit = 31
 
 
  # Activity Streams Email Notifications
  #
  # Uncomment this line to enable activity streams email notifications.
  # You also need to setup a cron job to send the emails, see the documentation.
  #ckan.activity_streams_email_notifications = True
 
  # Email notifications for events older than this time delta will not be sent.
  # Accepted formats: '2 days', '14 days', '4:35:00' (hours, minutes, seconds),
  # '7 days, 3:23:34', etc.
  # ckan.email_notifications_since = 2 days
 
  ckan.cache_expires = 2592000
  #ckan.page_cache_enabled = True
  ckan.static_max_age = 2592000
  ckan.cache_enabled = True
  # DEBUGGING
 
  # ckan.debug_supress_header This option can be set to suppress the debug
  # information showing the controller and action receiving the request being
  # shown in the header. Note: This info only shows if debug is set to true.
  ckan.debug_supress_header = false
 
  ## ===================================
  ## Extensions
 
  ## Config option to enable the (1 day) cache for stats
  ## Default (if not defined) is True as stats computations are intensive
  # ckanext.stats.cache_enabled = True
 
  # Logging configuration
  [loggers]
  keys = root, ckan, ckanext
 
  [handlers]
  keys = console, file
 
  [formatters]
  keys = generic
 
  [logger_root]
  level = WARNING
  handlers = console, file
 
  [logger_ckan]
  level = ERROR
  handlers = console, file
  qualname = ckan
  propagate = 0
 
  [logger_ckanext]
  level = WARNING
  handlers = console, file
  qualname = ckanext
  propagate = 0
 
  [handler_console]
  class = StreamHandler
  args = (sys.stderr,)
  level = NOTSET
  formatter = generic
 
  [handler_file]
  class = logging.handlers.RotatingFileHandler
  formatter = generic
  level = NOTSET
  #args = ("ckan.log", "a", 20000000, 9)
  args = ("/var/log/ckan/dga/ckan.log", "a", 20000000, 9)
 
  [formatter_generic]
  format = %(asctime)s %(levelname)-5.5s [%(name)s] %(message)s
 
# Standard library
import logging

# Third-party
from pylons import config

# CKAN
import ckan.plugins as plugins
import ckan.lib as lib
import ckan.lib.dictization.model_dictize as model_dictize
import ckan.plugins.toolkit as tk
import ckan.model as model
   
# Parse the activity feed for the last active non-system user.
def get_last_active_user(id):
    """Return the user dict of the most recent non-system editor of a package.

    :param id: package id or name passed to ``package_activity_list``.
    :returns: the ``user_show`` dict of the latest non-system actor, or the
        site (system) user dict when no other user appears in the stream.
    """
    system_user = lib.helpers.get_action(
        'user_show', {'id': config.get('ckan.site_id', 'ckan_site_user')})
    # Activity entries not attributed to the site user, newest first.
    user_list = [
        x for x in lib.helpers.get_action('package_activity_list', {'id': id})
        if x['user_id'] != system_user['id']]
    user = None
    if len(user_list) > 0:
        user = user_list[0].get('user_id', None)
    if user is None:
        return system_user
    else:
        return lib.helpers.get_action('user_show', {'id': user})
   
# Get user created datasets and those they have edited.
def get_user_datasets(user_dict):
    """Return datasets the user created plus packages from their activity stream.

    :param user_dict: a ``user_show`` dict; must contain ``datasets`` and ``id``.
    :returns: list of package dicts (created first, then edited; may contain
        duplicates if the user edited their own datasets).
    """
    created_datasets_list = user_dict['datasets']
    active_datasets_list = [
        x['data']['package']
        for x in lib.helpers.get_action('user_activity_list',
                                        {'id': user_dict['id']})
        if x['data'].get('package')]
    return created_datasets_list + active_datasets_list
   
def datastore_search(context, data_dict):
    """Auth function override: authorize every datastore_search call.

    Both arguments are accepted for the CKAN auth-function signature but
    are intentionally ignored.
    """
    return {'success': True}  # allow all datastore search
   
class DataGovAuPlugin(plugins.SingletonPlugin,
                      tk.DefaultDatasetForm):
    '''An example IDatasetForm CKAN plugin.

    Uses a tag vocabulary to add a custom metadata field to datasets.

    '''
    plugins.implements(plugins.IConfigurer, inherit=False)
    plugins.implements(plugins.IDatasetForm, inherit=False)
    plugins.implements(plugins.ITemplateHelpers, inherit=False)
    plugins.implements(plugins.IAuthFunctions)

    def get_auth_functions(self):
        # IAuthFunctions: route datastore_search authorization through the
        # module-level override that allows every caller.
        return {'datastore_search': datastore_search}

    def update_config(self, config):
        # IConfigurer: add this plugin's templates dir to CKAN's
        # extra_template_paths, so that CKAN will use this plugin's
        # custom templates.
        # here = os.path.dirname(__file__)
        # rootdir = os.path.dirname(os.path.dirname(here))

        tk.add_template_directory(config, 'templates')
        tk.add_public_directory(config, 'theme/public')
        tk.add_resource('theme/public', 'ckanext-datagovau')
        # config['licenses_group_url'] = 'http://%(ckan.site_url)/licenses.json'

    def get_helpers(self):
        # ITemplateHelpers: expose the module-level template helpers.
        return {'get_last_active_user': get_last_active_user,
                'get_user_datasets': get_user_datasets}

    def is_fallback(self):
        # Return True to register this plugin as the default handler for
        # package types not handled by any other IDatasetForm plugin.
        return True

    def package_types(self):
        # This plugin doesn't handle any special package types, it just
        # registers itself as the default (above).
        return []
   
   
def create_package_schema(self): def create_package_schema(self):
schema = super(DataGovAuPlugin, self).create_package_schema() schema = super(DataGovAuPlugin, self).create_package_schema()
schema = self._modify_package_schema(schema) schema = self._modify_package_schema(schema)
return schema return schema
   
def update_package_schema(self): def update_package_schema(self):
schema = super(DataGovAuPlugin, self).update_package_schema() schema = super(DataGovAuPlugin, self).update_package_schema()
schema = self._modify_package_schema(schema) schema = self._modify_package_schema(schema)
return schema return schema
   
def show_package_schema(self): def show_package_schema(self):
schema = super(DataGovAuPlugin, self).show_package_schema() schema = super(DataGovAuPlugin, self).show_package_schema()
   
# Don't show vocab tags mixed in with normal 'free' tags # Don't show vocab tags mixed in with normal 'free' tags
# (e.g. on dataset pages, or on the search page) # (e.g. on dataset pages, or on the search page)
schema['tags']['__extras'].append(tk.get_converter('free_tags_only')) schema['tags']['__extras'].append(tk.get_converter('free_tags_only'))
   
# Add our custom_text field to the dataset schema. # Add our custom_text field to the dataset schema.
# ignore_missing == optional # ignore_missing == optional
# ignore_empty == mandatory but not for viewing # ignore_empty == mandatory but not for viewing
# !!! always convert_from_extras first # !!! always convert_from_extras first
schema.update({ schema.update({
'agency_program': [tk.get_converter('convert_from_extras'), 'agency_program': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_missing')], tk.get_validator('ignore_missing')],
'contact_point': [tk.get_converter('convert_from_extras'), 'contact_point': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_empty')], tk.get_validator('ignore_empty')],
'spatial_coverage': [tk.get_converter('convert_from_extras'), 'spatial_coverage': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_empty')], tk.get_validator('ignore_empty')],
'granularity': [tk.get_converter('convert_from_extras'), 'granularity': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_empty')], tk.get_validator('ignore_empty')],
'jurisdiction': [tk.get_converter('convert_from_extras'), 'jurisdiction': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_empty')], tk.get_validator('ignore_empty')],
'temporal_coverage': [tk.get_converter('convert_from_extras'), 'temporal_coverage': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_empty')], tk.get_validator('ignore_empty')],
'data_state': [tk.get_converter('convert_from_extras'), 'data_state': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_empty')], tk.get_validator('ignore_empty')],
'update_freq': [tk.get_converter('convert_from_extras'), 'update_freq': [tk.get_converter('convert_from_extras'),
tk.get_validator('ignore_empty')] tk.get_validator('ignore_empty')]
}) })
return schema return schema
   
def _modify_package_schema(self, schema): def _modify_package_schema(self, schema):
# Add our custom_test metadata field to the schema, this one will use # Add our custom_test metadata field to the schema, this one will use
# convert_to_extras instead of convert_to_tags. # convert_to_extras instead of convert_to_tags.
# ignore_missing == optional # ignore_missing == optional
# not_empty == mandatory, enforced here while modifying # not_empty == mandatory, enforced here while modifying
   
schema.update({ schema.update({
'agency_program': [tk.get_validator('ignore_missing'), 'agency_program': [tk.get_validator('ignore_missing'),
tk.get_converter('convert_to_extras')], tk.get_converter('convert_to_extras')],
'contact_point': [tk.get_converter('convert_to_extras'), 'contact_point': [tk.get_converter('convert_to_extras'),
tk.get_validator('not_empty')], tk.get_validator('not_empty')],
'spatial_coverage': [tk.get_converter('convert_to_extras'), 'spatial_coverage': [tk.get_converter('convert_to_extras'),
tk.get_validator('not_empty')], tk.get_validator('not_empty')],
'granularity': [tk.get_converter('convert_to_extras'), 'granularity': [tk.get_converter('convert_to_extras'),
tk.get_validator('not_empty')], tk.get_validator('not_empty')],
'jurisdiction': [tk.get_converter('convert_to_extras'), 'jurisdiction': [tk.get_converter('convert_to_extras'),
tk.get_validator('not_empty')], tk.get_validator('not_empty')],
'temporal_coverage': [tk.get_converter('convert_to_extras'), 'temporal_coverage': [tk.get_converter('convert_to_extras'),
tk.get_validator('not_empty')], tk.get_validator('not_empty')],
'data_state': [tk.get_converter('convert_to_extras'), 'data_state': [tk.get_converter('convert_to_extras'),
tk.get_validator('not_empty')], tk.get_validator('not_empty')],
'update_freq': [tk.get_converter('convert_to_extras'), 'update_freq': [tk.get_converter('convert_to_extras'),
tk.get_validator('not_empty')] tk.get_validator('not_empty')]
}) })
return schema return schema
   
# These methods just record how many times they're called, for testing # These methods just record how many times they're called, for testing
# purposes. # purposes.
# TODO: It might be better to test that custom templates returned by # TODO: It might be better to test that custom templates returned by
# these methods are actually used, not just that the methods get # these methods are actually used, not just that the methods get
# called. # called.
   
def setup_template_variables(self, context, data_dict): def setup_template_variables(self, context, data_dict):
return super(DataGovAuPlugin, self).setup_template_variables( return super(DataGovAuPlugin, self).setup_template_variables(
context, data_dict) context, data_dict)
   
def new_template(self): def new_template(self):
return super(DataGovAuPlugin, self).new_template() return super(DataGovAuPlugin, self).new_template()
   
def read_template(self): def read_template(self):
return super(DataGovAuPlugin, self).read_template() return super(DataGovAuPlugin, self).read_template()
   
def edit_template(self): def edit_template(self):
return super(DataGovAuPlugin, self).edit_template() return super(DataGovAuPlugin, self).edit_template()
   
def search_template(self): def search_template(self):
return super(DataGovAuPlugin, self).search_template() return super(DataGovAuPlugin, self).search_template()
   
def history_template(self): def history_template(self):
return super(DataGovAuPlugin, self).history_template() return super(DataGovAuPlugin, self).history_template()
   
def package_form(self): def package_form(self):
return super(DataGovAuPlugin, self).package_form() return super(DataGovAuPlugin, self).package_form()
   
   
<?xml version="1.0" encoding="utf-8"?>
<!-- Genshi template: renders a CKAN dataset (c.pkg_dict) as DCAT RDF/XML. -->
<rdf:RDF
    xmlns:py="http://genshi.edgewall.org/"
    xmlns:foaf="http://xmlns.com/foaf/0.1/"
    xmlns:owl="http://www.w3.org/2002/07/owl#"
    xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
    xmlns:dcat="http://www.w3.org/ns/dcat#"
    xmlns:dcam="http://purl.org/dc/dcam/"
    xmlns:aglsterms="http://www.agls.gov.au/agls/terms/"
    xmlns:agentterms="http://www.agls.gov.au/agls/agentterms/"
    xmlns:availterms="http://www.agls.gov.au/agls/availterms/"
    xmlns:adminterms="http://www.agls.gov.au/agls/adminterms/"
    xmlns:dct="http://purl.org/dc/terms/"
    >
  <dcat:Dataset rdf:about="${ h.url_for(controller='package',action='read',id=c.pkg_dict['name'], qualified=True)}">
    <dct:identifier>${c.pkg_dict['name']}</dct:identifier>
    <dct:title>${c.pkg_dict['title']}</dct:title>
    <dct:landingPage
        rdf:resource="${ h.url_for(controller='package',action='read',id=c.pkg_dict['name'], qualified=True) }"/>
    <owl:sameAs rdf:resource="urn:uuid:${c.pkg_dict['id']}"/>
    <dct:type>Dataset</dct:type>
    <dct:description>${c.pkg_dict['notes']}</dct:description>
    <dct:issued>${c.pkg_dict['metadata_created']}</dct:issued>
    <dct:modified>${c.pkg_dict['metadata_modified']}</dct:modified>
    <dct:language>en</dct:language>

    <py:choose>
      <py:when test="c.pkg_dict.get('license_url',None)">
        <!-- license_url is a URI: reference it rather than embedding it as literal text -->
        <dct:license rdf:resource="${c.pkg_dict['license_url']}"/>
        <dct:rights rdf:resource="${c.pkg_dict['license_url']}"/>
      </py:when>
      <py:otherwise>
        <!-- license_id is an identifier (e.g. "cc-by"), not a URI, so emit a literal -->
        <dct:license>${c.pkg_dict['license_id']}</dct:license>
        <dct:rights>${c.pkg_dict['license_id']}</dct:rights>
      </py:otherwise>
    </py:choose>

    <py:for each="tag_dict in c.pkg_dict.get('tags',[])">
      <dcat:keyword>${ tag_dict["name"] }</dcat:keyword>
    </py:for>
    <py:for each="group_dict in c.pkg_dict.get('groups',[])">
      <dcat:theme>${ group_dict["title"] }</dcat:theme>
    </py:for>

    <foaf:homepage
        rdf:resource="${ h.url_for(controller='package',action='read',id=c.pkg_dict['name'], qualified=True)}"/>
    <rdfs:label>${c.pkg_dict['name']}</rdfs:label>

    <py:for each="rsc_dict in c.pkg_dict['resources']">
      <dcat:distribution>
        <dcat:Distribution>
          <dct:title>${rsc_dict.get('name')}</dct:title>
          <owl:sameAs rdf:resource="urn:uuid:${rsc_dict.get('id')}"/>
          <dct:description>${rsc_dict.get('description')}</dct:description>
          <dcat:accessURL rdf:resource="${ rsc_dict.get('url') }"/>
          <dct:created>${rsc_dict.get('created')}</dct:created>
          <dct:modified>${rsc_dict.get('revision_timestamp')}</dct:modified>
          <dcat:byteSize py:if="rsc_dict.get('size')">${rsc_dict.get('size')}</dcat:byteSize>
          <dcat:mediaType py:if="rsc_dict.get('mimetype')">${rsc_dict.get('mimetype')}</dcat:mediaType>
          <dct:extent py:if="rsc_dict.get('size')">${rsc_dict.get('size')} bytes</dct:extent>
          <dct:format py:if="rsc_dict.get('format')">
            <dct:IMT>
              <rdf:value>${rsc_dict.get('format')}</rdf:value>
              <rdfs:label>${rsc_dict.get('format')}</rdfs:label>
            </dct:IMT>
          </dct:format>
          <dct:title py:if="rsc_dict.get('name')">${rsc_dict.get('name')}</dct:title>
        </dcat:Distribution>
      </dcat:distribution>
    </py:for>

    <!-- data.gov.au specific metadata below this line -->
    <dct:publisher py:if="c.pkg_dict.get('organization', None)">
      <rdf:Description>
        <foaf:name>${ c.pkg_dict['organization']['title'] }</foaf:name>
      </rdf:Description>
    </dct:publisher>
    <dct:creator py:if="c.pkg_dict.get('organization', None)">
      <rdf:Description>
        <foaf:name>${ c.pkg_dict['organization']['title'] }</foaf:name>
      </rdf:Description>
    </dct:creator>
    <dct:contributor>
      <rdf:Description>
        <foaf:name>${h.get_last_active_user(c.pkg_dict['id'])["display_name"]}</foaf:name>
        <foaf:mbox py:if="h.get_last_active_user(c.pkg_dict['id']).get('email', None)"
                   rdf:resource="mailto:${h.get_last_active_user(c.pkg_dict['id'])['email']}"/>
      </rdf:Description>
    </dct:contributor>

    <!-- foaf:homepage is an object property: reference the URL, don't embed it as text -->
    <foaf:homepage py:if="c.pkg_dict.get('url')" rdf:resource="${c.pkg_dict.get('url')}"/>
    <dcat:contactPoint py:if="c.pkg_dict.get('contact_point')">${c.pkg_dict.contact_point }</dcat:contactPoint>
    <dct:spatial py:if="c.pkg_dict.get('spatial_coverage')">${ c.pkg_dict.spatial_coverage }</dct:spatial>
    <aglsterms:AglsJuri py:if="c.pkg_dict.get('jurisdiction')">${ c.pkg_dict.jurisdiction }</aglsterms:AglsJuri>
    <dct:temporal py:if="c.pkg_dict.get('temporal_coverage')">${ c.pkg_dict.get('temporal_coverage') }</dct:temporal>
    <dct:relation py:if="c.pkg_dict.get('data_state')">
      <rdf:Description>
        <rdfs:label>Data State</rdfs:label>
        <rdf:value>${ c.pkg_dict.get('data_state') }</rdf:value>
      </rdf:Description>
    </dct:relation>
    <dct:relation py:if="c.pkg_dict.get('update_freq')">
      <rdf:Description>
        <rdfs:label>Update Frequency</rdfs:label>
        <rdf:value>${ c.pkg_dict.get('update_freq') }</rdf:value>
      </rdf:Description>
    </dct:relation>
    <dct:relation py:if="c.pkg_dict.get('agency_program')">
      <rdf:Description>
        <rdfs:label>Agency Program</rdfs:label>
        <rdf:value>${ c.pkg_dict.get('agency_program') }</rdf:value>
      </rdf:Description>
    </dct:relation>
    <dct:relation py:if="c.pkg_dict.get('granularity')">
      <rdf:Description>
        <rdfs:label>Data Granularity</rdfs:label>
        <rdf:value>${ c.pkg_dict.get('granularity') }</rdf:value>
      </rdf:Description>
    </dct:relation>
    <!-- Expose remaining free-form extras as generic relations -->
    <py:for each="extra_dict in c.pkg_dict.get('extras',[])">
      <dct:relation>
        <rdf:Description>
          <rdfs:label>${extra_dict.get('key','')}</rdfs:label>
          <rdf:value>${extra_dict.get('value','')}</rdf:value>
        </rdf:Description>
      </dct:relation>
    </py:for>
  </dcat:Dataset>
</rdf:RDF>