From af77eb7fe1359ff21fc2ba40e6c879fd355b4c8a Mon Sep 17 00:00:00 2001
From: Anusha Ranganathan
Date: Tue, 20 Sep 2011 11:52:36 +0100
Subject: [PATCH] Databank now supports 3 embargo states - Dark, Embargoed and
 Open. Added a profiler to the Pylons middleware
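
As far as the changes below show, the three states correspond to the embargo
fields sent on dataset creation and update: embargoed=false gives an open
dataset (oxds:isEmbargoed 'False', no oxds:embargoedUntil); embargoed=true with
an embargoed_until date or embargo_days_from_now gives a time-limited embargo
(oxds:isEmbargoed 'True' plus oxds:embargoedUntil); embargoed=true with no end
date leaves the dataset embargoed indefinitely, which appears to be the "dark"
state. A rough sketch of the form-field combinations (illustrative only; the
dataset name is made up and the authoritative handling is in
rdfdatabank/lib/utils.py and rdfdatabank/controllers/datasets.py):

    # Open
    fields = [("id", "MyDataset"), ("embargoed", "false")]
    # Embargoed until a given date (any format dateutil can parse)
    fields = [("id", "MyDataset"), ("embargoed", "true"), ("embargoed_until", "2012-08-12")]
    # Dark / indefinite embargo (no end date supplied)
    fields = [("id", "MyDataset"), ("embargoed", "true")]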
---
development-jenkins.ini | 2 +-
development.ini | 24 +-
production.ini | 4 +-
rdfdatabank/config/middleware.py | 10 +-
rdfdatabank/controllers/datasets.py | 54 +-
rdfdatabank/controllers/users.py | 1 +
rdfdatabank/lib/file_unpack.py | 31 +-
rdfdatabank/lib/utils.py | 39 +-
rdfdatabank/templates/datasetview.html | 2 +-
rdfdatabank/templates/part_list.html | 12 +-
rdfdatabank/templates/part_list_display.html | 11 +-
rdfdatabank/templates/rdf_manifest_form.html | 2 +-
rdfdatabank/tests/TestSubmission.py | 1713 ++++++++++++-----
rdfdatabank/tests/TestSubmission_load.py | 319 +++
rdfdatabank/tests/TestSubmission_submitter.py | 227 +--
rdfdatabank/tests/testdata/rdfdatabank.zip | Bin 0 -> 376392 bytes
requirements-dev.txt | 13 +
requirements.txt | 2 +
18 files changed, 1777 insertions(+), 689 deletions(-)
create mode 100644 rdfdatabank/tests/TestSubmission_load.py
create mode 100644 rdfdatabank/tests/testdata/rdfdatabank.zip
create mode 100644 requirements-dev.txt
diff --git a/development-jenkins.ini b/development-jenkins.ini
index 71e005b..dd0421b 100644
--- a/development-jenkins.ini
+++ b/development-jenkins.ini
@@ -8,7 +8,7 @@ debug = false
# Uncomment and replace with the address which should receive any error reports
#email_to = you@yourdomain.com
smtp_server = localhost
-error_email_from = paste@localhost
+error_email_from = paste@jenkins
[server:main]
use = egg:Paste#http
diff --git a/development.ini b/development.ini
index 516bbdf..6bad250 100644
--- a/development.ini
+++ b/development.ini
@@ -4,7 +4,7 @@
# The %(here)s variable will be replaced with the parent directory of this file
#
[DEFAULT]
-debug = false
+debug = true
# Uncomment and replace with the address which should receive any error reports
#email_to = you@yourdomain.com
smtp_server = localhost
@@ -13,11 +13,11 @@ error_email_from = paste@localhost
[server:main]
use = egg:Paste#http
#Use these settings to run pylons using mod_wsgi and apache
-host = 127.0.0.1
-port = 5000
+#host = 127.0.0.1
+#port = 5000
#Use these settings to run pylons from the commandline
-#host = 0.0.0.0
-#port = 80
+host = 0.0.0.0
+port = 80
[app:main]
use = egg:rdfdatabank
@@ -30,16 +30,18 @@ beaker.session.secret = somesecret
who.config_file = %(here)s/who.ini
who.log_level = info
-who.log_file = /var/log/databank/who.log
-#who.log_file = stdout
+#who.log_file = /var/log/databank/who.log
+who.log_file = stdout
#who.log_file = %(here)s/logs/who.log
redis.host = localhost
granary.store = %(here)s/silos
-#granary.uri_root = http://databank.bodleian.ox.ac.uk/datasets/
granary.uri_root = http://192.168.23.133/
+profile.log_filename = %(here)s/logs/profile.log
+profile.path = /__profile__
+
auth.file = %(here)s/passwd
auth.info = %(here)s/rdfdatabank/config/users.py
@@ -80,17 +82,17 @@ keys = generic
[logger_root]
level = INFO
-handlers = logfile
+handlers = console
[logger_routes]
level = INFO
-handlers = logfile
+handlers = console
qualname = routes.middleware
# "level = DEBUG" logs the route matched and routing variables.
[logger_rdfdatabank]
level = DEBUG
-handlers = logfile
+handlers = console
qualname = rdfdatabank
[handler_console]
diff --git a/production.ini b/production.ini
index 930caa7..02aad1a 100644
--- a/production.ini
+++ b/production.ini
@@ -8,7 +8,7 @@ debug = false
# Uncomment and replace with the address which should receive any error reports
email_to = anusha.ranganathan@bodleian.ox.ac.uk
smtp_server = localhost
-error_email_from = paste@localhost
+error_email_from = paste@databank
[server:main]
use = egg:Paste#http
@@ -35,10 +35,8 @@ who.log_file = /var/log/databank/who.log
redis.host = localhost
-#granary.store = %(here)s/silos
granary.store = /silos
granary.uri_root = http://databank.ora.ox.ac.uk/
-#granary.uri_root = http://163.1.127.173/
auth.file = %(here)s/passwd
auth.info = %(here)s/rdfdatabank/config/users.py
diff --git a/rdfdatabank/config/middleware.py b/rdfdatabank/config/middleware.py
index a0f7371..d65d79e 100644
--- a/rdfdatabank/config/middleware.py
+++ b/rdfdatabank/config/middleware.py
@@ -46,13 +46,21 @@ def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
app = PylonsApp()
#app = httpexceptions.make_middleware(app, global_conf)
+ if asbool(config['debug']):
+ from repoze.profile.profiler import AccumulatingProfileMiddleware
+ app = AccumulatingProfileMiddleware(
+ app,
+ log_filename=app_conf['profile.log_filename'],
+ discard_first_request=True,
+ flush_at_shutdown=True,
+ path=app_conf['profile.path']
+ )
# Routing/Session/Cache Middleware
app = RoutesMiddleware(app, config['routes.map'])
app = SessionMiddleware(app, config)
app = CacheMiddleware(app, config)
-
# CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
if asbool(full_stack):
# Handle Python exceptions
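
A minimal sketch of what the profiler block above does, outside of Pylons
(assumed setup: repoze.profile installed; the keyword arguments are the same
ones read from profile.log_filename and profile.path in development.ini).
Browsing to the configured path then serves the accumulated profile data:

    from repoze.profile.profiler import AccumulatingProfileMiddleware

    def simple_app(environ, start_response):
        # stand-in for PylonsApp(); any WSGI callable works here
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['hello']

    app = AccumulatingProfileMiddleware(
        simple_app,
        log_filename='profile.log',
        discard_first_request=True,
        flush_at_shutdown=True,
        path='/__profile__')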
diff --git a/rdfdatabank/controllers/datasets.py b/rdfdatabank/controllers/datasets.py
index 31e367b..f2e51ef 100644
--- a/rdfdatabank/controllers/datasets.py
+++ b/rdfdatabank/controllers/datasets.py
@@ -3,6 +3,8 @@
import re, os, shutil, codecs
import simplejson
from datetime import datetime, timedelta
+from dateutil.relativedelta import relativedelta
+from dateutil.parser import parse
import time
from uuid import uuid4
from pylons import request, response, session, tmpl_context as c, url, app_globals as ag
@@ -10,7 +12,7 @@
from pylons.decorators import rest
from paste.fileapp import FileApp
from rdfdatabank.lib.base import BaseController, render
-from rdfdatabank.lib.utils import create_new, is_embargoed, get_readme_text, test_rdf, munge_manifest, serialisable_stat, allowable_id2
+from rdfdatabank.lib.utils import create_new, is_embargoed, get_readme_text, test_rdf, munge_manifest, serialisable_stat, allowable_id2, get_rdf_template
from rdfdatabank.lib.file_unpack import get_zipfiles_in_dataset
from rdfdatabank.lib.conneg import MimeType as MT, parse as conneg_parse
@@ -218,14 +220,16 @@ def datasetview(self, silo, id):
if ident['repoze.who.userid'] == creator or ident.get('role') in ["admin", "manager"]:
c.editor = True
- if c.version and not c.version == currentversion:
- c.editor = False
-
+
c.show_files = True
#Only the administrator, manager and creator can view embargoed files.
if embargoed and not c.editor:
c.show_files = False
+ #Display but do not edit previous versions of files, since previous versions are read-only.
+ if c.version and not c.version == currentversion:
+ c.editor = False
+
# View options
if "view" in options and c.editor:
c.view = options['view']
@@ -240,7 +244,8 @@ def datasetview(self, silo, id):
c.embargos[id] = is_embargoed(c_silo, id)
c.parts = item.list_parts(detailed=True)
c.manifest_pretty = item.rdf_to_string(format="pretty-xml")
- c.manifest = item.rdf_to_string()
+ #c.manifest = item.rdf_to_string()
+ c.manifest = get_rdf_template(item.uri, id)
c.zipfiles = get_zipfiles_in_dataset(item)
c.readme_text = None
#if item.isfile("README"):
@@ -366,20 +371,28 @@ def datasetview(self, silo, id):
abort(403)
item.increment_version_delta(clone_previous_version=True, copy_filenames=['manifest.rdf'])
#if params.has_key('embargoed'):
- if (params.has_key('embargo_change') and params.has_key('embargoed')) or \
- (params.has_key('embargoed') and params['embargoed'].lower() == 'true'):
+ if (params.has_key('embargo_change') and params.has_key('embargoed') and \
+ params['embargoed'].lower() in ['true', '1'] and params['embargo_change'].lower() in ['true', '1']) or \
+ (params.has_key('embargoed') and params['embargoed'].lower() in ['true', '1']):
+ embargoed_until_date = None
if params.has_key('embargoed_until') and params['embargoed_until']:
- embargoed_until_date = params['embargoed_until']
- elif params.has_key('embargo_days_from_now') and params['embargo_days_from_now']:
- embargoed_until_date = (datetime.now() + timedelta(days=params['embargo_days_from_now'])).isoformat()
- else:
- embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
+ try:
+ embargoed_until_date = parse(params['embargoed_until']).isoformat()
+ except:
+ embargoed_until_date = (datetime.now() + relativedelta(years=+70)).isoformat()
+ elif params.has_key('embargo_days_from_now') and params['embargo_days_from_now'].isdigit():
+ embargoed_until_date = (datetime.now() + timedelta(days=int(params['embargo_days_from_now']))).isoformat()
+ #It is embargoed indefinitely by default
+ #else:
+ # embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
item.metadata['embargoed'] = True
- item.metadata['embargoed_until'] = embargoed_until_date
+ item.metadata['embargoed_until'] = ''
item.del_triple(item.uri, u"oxds:isEmbargoed")
item.del_triple(item.uri, u"oxds:embargoedUntil")
item.add_triple(item.uri, u"oxds:isEmbargoed", 'True')
- item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
+ if embargoed_until_date:
+ item.metadata['embargoed_until'] = embargoed_until_date
+ item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
else:
#if is_embargoed(c_silo, id)[0] == True:
item.metadata['embargoed'] = False
@@ -822,13 +835,15 @@ def itemview(self, silo, id, path):
if ident['repoze.who.userid'] == creator or ident.get('role') in ["admin", "manager"]:
c.editor = True
- if c.version and not c.version == currentversion:
- c.editor = False
-
c.show_files = True
+ #Only the administrator, manager and creator can view embargoed files.
if embargoed and not c.editor:
c.show_files = False
+ #Display but do not edit previous versions of files, since previous versions are read-only.
+ if c.version and not c.version == currentversion:
+ c.editor = False
+
# View options
if "view" in options and c.editor:
c.view = options['view']
@@ -1063,6 +1078,11 @@ def itemview(self, silo, id, path):
response.status_int = 403
response.status = "403 Forbidden"
return "Forbidden - Cannot delete the manifest"
+ if '3=' in path or '4=' in path:
+ response.content_type = "text/plain"
+ response.status_int = 403
+ response.status = "403 Forbidden"
+ return "Forbidden - These files are generated by the system and connot be deleted"
item.increment_version_delta(clone_previous_version=True, copy_filenames=['manifest.rdf'])
item.del_stream(path)
item.del_triple(item.uri, u"dcterms:modified")
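
The embargo-date handling added above (and mirrored in create_new below) can be
summarised by the following sketch; the helper name is made up for illustration
and it assumes python-dateutil is installed. Free-form dates are parsed with
dateutil; an unparseable date falls back to 70 years from now; if no end date is
supplied at all, the dataset stays embargoed indefinitely and no
oxds:embargoedUntil triple is written:

    from datetime import datetime, timedelta
    from dateutil.parser import parse
    from dateutil.relativedelta import relativedelta

    def compute_embargoed_until(embargoed_until=None, embargo_days_from_now=None):
        if embargoed_until:
            try:
                return parse(embargoed_until).isoformat()
            except (ValueError, TypeError):
                return (datetime.now() + relativedelta(years=+70)).isoformat()
        if embargo_days_from_now and str(embargo_days_from_now).isdigit():
            return (datetime.now() + timedelta(days=int(embargo_days_from_now))).isoformat()
        return None  # indefinite embargo: oxds:isEmbargoed 'True', no end date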
diff --git a/rdfdatabank/controllers/users.py b/rdfdatabank/controllers/users.py
index 7fb88ef..dd399b0 100644
--- a/rdfdatabank/controllers/users.py
+++ b/rdfdatabank/controllers/users.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import logging
import simplejson
+import codecs
from pylons import request, response, session, config, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from pylons.decorators import rest
diff --git a/rdfdatabank/lib/file_unpack.py b/rdfdatabank/lib/file_unpack.py
index fc18a80..3719b46 100644
--- a/rdfdatabank/lib/file_unpack.py
+++ b/rdfdatabank/lib/file_unpack.py
@@ -19,8 +19,12 @@ class BadZipfile(Exception):
"""Cannot open zipfile using commandline tool 'unzip' to target directory"""
def check_file_mimetype(real_filepath, mimetype):
+ if os.path.isdir(real_filepath):
+ return False
if os.path.islink(real_filepath):
real_filepath = os.readlink(real_filepath)
+ if not os.path.isfile(real_filepath):
+ return False
p = subprocess.Popen("file -ib '%s'" %(real_filepath), shell=True, stdout=subprocess.PIPE)
output_file = p.stdout
output_str = output_file.read()
@@ -29,11 +33,15 @@ def check_file_mimetype(real_filepath, mimetype):
else:
return False
-def get_zipfiles_in_dataset_old(dataset):
+def get_zipfiles_in_dataset(dataset):
derivative = dataset.list_rdf_objects("*", "ore:aggregates")
zipfiles = {}
- if derivative and derivative.values() and derivative.values()[0]:
- for file_uri in derivative.values()[0]:
+ #if derivative and derivative.values() and derivative.values()[0]:
+ if derivative:
+ #for file_uri in derivative.values()[0]:
+ for file_uri in derivative:
+ if not file_uri.lower().endswith('.zip'):
+ continue
filepath = file_uri[len(dataset.uri)+1:]
real_filepath = dataset.to_dirpath(filepath)
if os.path.islink(real_filepath):
@@ -43,7 +51,7 @@ def get_zipfiles_in_dataset_old(dataset):
zipfiles[filepath]="%s-%s"%(dataset.item_id, fn)
return zipfiles
-def get_zipfiles_in_dataset(dataset):
+def get_zipfiles_in_dataset_new(dataset):
p = subprocess.Popen("""file -iL `find %s -name '*.zip'` | grep "application/zip" | awk -F":" '{print $1}'""" %dataset.to_dirpath(), shell=True, stdout=subprocess.PIPE)
stdout_value = p.communicate()[0]
zipfiles = {}
@@ -148,6 +156,9 @@ def unpack_zip_item(target_dataset, current_dataset, zip_item, silo, ident):
if os.path.islink(filepath):
filepath = os.readlink(filepath)
+ emb = target_dataset.metadata.get('embargoed')
+ emb_until = target_dataset.metadata.get('embargoed_until')
+
# -- Step 1 -----------------------------
unpacked_dir = unzip_file(filepath)
@@ -181,9 +192,15 @@ def unpack_zip_item(target_dataset, current_dataset, zip_item, silo, ident):
target_dataset.add_triple(target_dataset.uri, u"rdf:type", "oxds:Grouping")
target_dataset.add_triple(target_dataset.uri, "dcterms:isVersionOf", file_uri)
#TODO: Adding the following metadata again as moving directory deletes all this information. Need to find a better way
- embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
- target_dataset.add_triple(target_dataset.uri, u"oxds:isEmbargoed", 'True')
- target_dataset.add_triple(target_dataset.uri, u"oxds:embargoedUntil", embargoed_until_date)
+ if emb:
+ target_dataset.add_triple(target_dataset.uri, u"oxds:isEmbargoed", 'True')
+ if emb_until:
+ target_dataset.add_triple(target_dataset.uri, u"oxds:embargoedUntil", emb_until)
+ else:
+ target_dataset.add_triple(target_dataset.uri, u"oxds:isEmbargoed", 'False')
+ #The embargo is now indefinite by default; the fixed 70-year end date below is no longer added
+ #embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
+ #target_dataset.add_triple(target_dataset.uri, u"oxds:embargoedUntil", embargoed_until_date)
target_dataset.add_triple(target_dataset.uri, u"dcterms:identifier", target_dataset.item_id)
target_dataset.add_triple(target_dataset.uri, u"dcterms:mediator", ident)
target_dataset.add_triple(target_dataset.uri, u"dcterms:publisher", ag.publisher)
diff --git a/rdfdatabank/lib/utils.py b/rdfdatabank/lib/utils.py
index 0c40d8d..7d8de48 100644
--- a/rdfdatabank/lib/utils.py
+++ b/rdfdatabank/lib/utils.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
+from dateutil.relativedelta import relativedelta
+from dateutil.parser import parse
from time import sleep
from redis import Redis
from redis.exceptions import ConnectionError
@@ -14,6 +16,7 @@
#from rdflib.parser import StringInputSource
from rdflib import Namespace, RDF, RDFS, URIRef, Literal, BNode
+
from uuid import uuid4
import re
@@ -146,23 +149,31 @@ def is_embargoed_no_redis(silo, id, refresh=False):
def create_new(silo, id, creator, title=None, embargoed=True, embargoed_until=None, embargo_days_from_now=None, **kw):
item = silo.get_item(id, startversion="0")
item.metadata['createdby'] = creator
- item.metadata['embargoed'] = embargoed
+ item.metadata['embargoed_until'] = ''
item.metadata['uuid'] = uuid4().hex
item.add_namespace('oxds', "http://vocab.ox.ac.uk/dataset/schema#")
item.add_triple(item.uri, u"rdf:type", "oxds:DataSet")
- if embargoed:
- if embargoed_until:
- embargoed_until_date = embargoed_until
- elif embargo_days_from_now:
- embargoed_until_date = (datetime.now() + timedelta(days=embargo_days_from_now)).isoformat()
- else:
- embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
- item.metadata['embargoed_until'] = embargoed_until_date
+ if embargoed == True or str(embargoed).lower() in ['true', '1']:
+ item.metadata['embargoed'] = True
item.add_triple(item.uri, u"oxds:isEmbargoed", 'True')
- item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
+ embargoed_until_date = None
+ if embargoed_until:
+ try:
+ embargoed_until_date = parse(embargoed_until).isoformat()
+ except:
+ embargoed_until_date = (datetime.now() + relativedelta(years=+70)).isoformat()
+ elif embargo_days_from_now and str(embargo_days_from_now).isdigit():
+ embargoed_until_date = (datetime.now() + timedelta(days=int(embargo_days_from_now))).isoformat()
+ #TODO: Should the default embargoed_until be 70 years or indefinite? Going with indefinite for now
+ #else:
+ # embargoed_until_date = (datetime.now() + relativedelta(years=+70)).isoformat()
+ if embargoed_until_date:
+ item.metadata['embargoed_until'] = embargoed_until_date
+ item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
else:
item.add_triple(item.uri, u"oxds:isEmbargoed", 'False')
+ item.metadata['embargoed'] = False
item.add_triple(item.uri, u"dcterms:identifier", id)
item.add_triple(item.uri, u"dcterms:mediator", creator)
item.add_triple(item.uri, u"dcterms:publisher", ag.publisher)
@@ -188,6 +199,14 @@ def get_readme_text(item, filename="README"):
text = fn.read().decode("utf-8")
return u"%s\n\n%s" % (filename, text)
+def get_rdf_template(item_uri, item_id):
+ g = ConjunctiveGraph(identifier=item_uri)
+ g.bind('rdf', 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
+ g.bind('dcterms', 'http://purl.org/dc/terms/')
+ g.add((URIRef(item_uri), URIRef('http://purl.org/dc/terms/identifier'), Literal(item_id)))
+ data2 = g.serialize(format='xml', encoding="utf-8") + '\n'
+ return data2
+
#def test_rdf(text):
def test_rdf(mfile):
g = ConjunctiveGraph()
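
The new get_rdf_template above is what datasetview.html now puts into the
manifest edit form instead of the full manifest: a minimal RDF/XML document
containing only the dcterms:identifier statement for the dataset. A standalone
version for illustration (assumes the project's Python 2 / rdflib 3.x setup,
where ConjunctiveGraph, URIRef and Literal are importable from the rdflib
package; the example URI is made up):

    from rdflib import ConjunctiveGraph, URIRef, Literal

    def get_rdf_template(item_uri, item_id):
        g = ConjunctiveGraph(identifier=item_uri)
        g.bind('rdf', 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
        g.bind('dcterms', 'http://purl.org/dc/terms/')
        g.add((URIRef(item_uri), URIRef('http://purl.org/dc/terms/identifier'), Literal(item_id)))
        return g.serialize(format='xml', encoding='utf-8') + '\n'

    print(get_rdf_template('http://databank.example.org/datasets/TestSubmission', 'TestSubmission'))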
diff --git a/rdfdatabank/templates/datasetview.html b/rdfdatabank/templates/datasetview.html
index e1952a9..3e40b18 100644
--- a/rdfdatabank/templates/datasetview.html
+++ b/rdfdatabank/templates/datasetview.html
@@ -90,7 +90,7 @@ Information for version ${c.version} of the dataset
Embargo date: Aim is for ISO8601 dates to provide embargo trigger events. Currently unused, unvalidated and unparsed.
- Change RDF Manifest:
+ Add metadata to the RDF Manifest:
<%include file="/rdf_manifest_form.html"/>
diff --git a/rdfdatabank/templates/part_list.html b/rdfdatabank/templates/part_list.html
index a68c356..57bcebc 100644
--- a/rdfdatabank/templates/part_list.html
+++ b/rdfdatabank/templates/part_list.html
@@ -9,18 +9,22 @@
%>
% if c.version:
% for part in c.parts:
-% if type(c.parts).__name__ == 'dict' and c.parts[part]:
+% if not part.startswith('3=') and not part.startswith('4='):
+% if type(c.parts).__name__ == 'dict' and c.parts[part]:
${part} ${h.bytes_to_english(c.parts[part].st_size)}
-% else:
+% else:
${part}
+% endif
% endif
% endfor
% else:
% for part in c.parts:
-% if type(c.parts).__name__ == 'dict' and c.parts[part]:
+% if not part.startswith('3=') and not part.startswith('4='):
+% if type(c.parts).__name__ == 'dict' and c.parts[part]:
${part} ${h.bytes_to_english(c.parts[part].st_size)} - (remove)
-% else:
+% else:
${part} - (remove)
+% endif
% endif
% endfor
% endif
diff --git a/rdfdatabank/templates/part_list_display.html b/rdfdatabank/templates/part_list_display.html
index 8630da2..64fd93f 100644
--- a/rdfdatabank/templates/part_list_display.html
+++ b/rdfdatabank/templates/part_list_display.html
@@ -11,11 +11,14 @@
else:
ver = ""
%>
+
% for part in c.parts:
- % if type(c.parts).__name__ == 'dict' and c.parts[part]:
- ${part} ${h.bytes_to_english(c.parts[part].st_size)}
- % else:
- ${part}
+ % if not part.startswith('3=') and not part.startswith('4='):
+ % if type(c.parts).__name__ == 'dict' and c.parts[part]:
+ ${part} ${h.bytes_to_english(c.parts[part].st_size)}
+ % else:
+ ${part}
+ % endif
% endif
% endfor
diff --git a/rdfdatabank/templates/rdf_manifest_form.html b/rdfdatabank/templates/rdf_manifest_form.html
index bc63ad6..dff68af 100644
--- a/rdfdatabank/templates/rdf_manifest_form.html
+++ b/rdfdatabank/templates/rdf_manifest_form.html
@@ -5,6 +5,6 @@
${c.manifest}
-
+
diff --git a/rdfdatabank/tests/TestSubmission.py b/rdfdatabank/tests/TestSubmission.py
index 0258ceb..1908813 100644
--- a/rdfdatabank/tests/TestSubmission.py
+++ b/rdfdatabank/tests/TestSubmission.py
@@ -9,6 +9,7 @@
"""
import os, os.path
from datetime import datetime, timedelta
+from dateutil.relativedelta import relativedelta
import sys
import unittest
import logging
@@ -78,11 +79,23 @@ def tearDown(self):
return
# Create empty test submission dataset
- def createSubmissionDataset(self):
+ def createSubmissionDataset(self, embargoed=None, embargoed_until=None):
# Create a new dataset, check response
fields = \
[ ("id", "TestSubmission")
]
+ if embargoed != None:
+ if embargoed:
+ fields.append(('embargoed', 'True'))
+ else:
+ fields.append(('embargoed', 'False'))
+ if embargoed_until != None:
+ if embargoed_until == True:
+ fields.append(('embargoed_until', 'True'))
+ elif embargoed_until == False:
+ fields.append(('embargoed_until', 'False'))
+ else:
+ fields.append(('embargoed_until', embargoed_until))
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
(resp,respdata) = self.doHTTP_POST(
@@ -287,7 +300,7 @@ def testDatasetCreation(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -298,8 +311,7 @@ def testDatasetCreation(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
def testDatasetCreation2(self):
@@ -322,7 +334,7 @@ def testDatasetCreation2(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -334,7 +346,6 @@ def testDatasetCreation2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
def testDatasetRecreation(self):
@@ -348,7 +359,7 @@ def testDatasetRecreation(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
#Recreate the dataset, check response
fields = \
[ ("id", "TestSubmission")
@@ -374,7 +385,7 @@ def testDatasetRecreation(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
def testDeleteDataset(self):
"""Delete dataset - DELETE /silo_name/dataset_name"""
@@ -430,7 +441,7 @@ def testDatasetNaming(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
elif status == 403:
(resp, respdata) = self.doHTTP_GET(
resource="datasets/%s"%name,
@@ -471,254 +482,185 @@ def testDatasetStateInformation(self):
self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- def testFileUpload(self):
- """Upload file to dataset - POST file to /silo_name/datasets/dataset_name"""
- # Create a new dataset, check response
+ def testEmbargoOnCreation(self):
+ """Create dataset - POST id to /silo_name"""
+ #---------------------------------------------------------------
+ # Create a new dataset, check response. No embargo information is passed.
self.createSubmissionDataset()
- #Access state information
- (resp, respdata) = self.doHTTP_GET(
- resource="states/TestSubmission",
- expect_status=200, expect_reason="OK", expect_type="application/json")
- # Upload zip file, check response
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- base = self.getRequestUri("datasets/TestSubmission/")
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- #Access state information and check
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 2, "Two versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['currentversion'], '1', "Current version == 1")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
- self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
- self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
- self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
- self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
- self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
-
- def testFileDelete(self):
- """Delete file in dataset - DELETE /silo_name/datasets/dataset_name/file_name"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
- # Upload zip file, check response
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=None, embargo_until=True.
+ self.createSubmissionDataset(embargoed_until=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
- rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
- # Access and check zip file content and version
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- # Delete file, check response
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertTrue((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ d = (datetime.now() + relativedelta(years=+70)).isoformat()
+ d = d.split('T')[0]
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #---------------------------------------------------------------
+ # Delete dataset, check response
resp = self.doHTTP_DELETE(
- resource="datasets/TestSubmission/testdir.zip",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK")
- # Access and check zip file does not exist
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
expect_status=404, expect_reason="Not Found")
- # Access and check list of contents
+ # Create a new dataset, check response. embargoed=None, embargo_until=2012-08-12
+ d = '2012-08-12'
+ self.createSubmissionDataset(embargoed_until=d)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
- #Access state information and check
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertTrue((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
- self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['files']['2']), 1, "List should contain just manifest.rdf")
- self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
- self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
- self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
- self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
- self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
- self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
- self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(len(parts.keys()), 3, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
-
- def testFileUpdate(self):
- """Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2)"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
- # Upload zip file, check response (uploads the file testdir.zip)
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=True, embargo_until=None.
+ self.createSubmissionDataset(embargoed=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
- rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
- # Access and check zip file content and version
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- # Upload zip file again, check response
- zipdata = self.updateSubmissionZipfile(file_to_upload="testdir2.zip", filename="testdir.zip")
- # Access and check list of contents
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=True, embargo_until=True
+ self.createSubmissionDataset(embargoed=True, embargoed_until=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- base = self.getRequestUri("datasets/TestSubmission/")
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- #Access state information and check
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
- self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['files']['2']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
- self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
- self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
- self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
- self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
- self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
- self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ d = (datetime.now() + relativedelta(years=+70)).isoformat()
+ d = d.split('T')[0]
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
-
- def testGetDatasetByVersion(self):
- """Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. Access each of the versions and the files in that version"""
- #Definitions
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- base = self.getRequestUri("datasets/TestSubmission/")
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
- oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- stype = URIRef(oxds+"DataSet")
- #---------Version 0
- # Create a new dataset, check response
- self.createSubmissionDataset()
- # Access and check list of contents
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=True, embargo_until=09-08-2012
+ d = '09-08-2012'
+ self.createSubmissionDataset(embargoed=True, embargoed_until=d)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
@@ -726,79 +668,135 @@ def testGetDatasetByVersion(self):
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
- #Access state information and check
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 3, "Parts")
- #---------Version 1
- # Upload zip file, check response
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2012-09-08' in state['metadata']['embargoed_until'], "embargoed_until 2012-09-08?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed = False.
+ self.createSubmissionDataset(embargoed=False)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- #Access state information and check
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- # Access and check list of contents of version 0
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed = False, embargoed_until = True
+ self.createSubmissionDataset(embargoed=False, embargoed_until=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version0",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
- #---------Version 2
- # Upload zip file, check response
- zipdata2 = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
- # Access and check list of contents
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed = False, embargoed_until = 12 sep 2013
+ d = '12 Sep 2013'
+ self.createSubmissionDataset(embargoed=False, embargoed_until=d)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - testdir.zip!")
- (resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
- #Access state information and check
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 5, "Parts")
- #---------Version 3
- # Delete file, check response
- resp = self.doHTTP_DELETE(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK")
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+
+ def testFileUpload(self):
+ """Upload file to dataset - POST file to /silo_name/datasets/dataset_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ #Access state information
+ (resp, respdata) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -806,15 +804,28 @@ def testGetDatasetByVersion(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ base = self.getRequestUri("datasets/TestSubmission/")
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
- expect_status=404, expect_reason="Not Found")
- (resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
@@ -822,43 +833,68 @@ def testGetDatasetByVersion(self):
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 2, "Two versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['currentversion'], '1', "Current version == 1")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 4, "Parts")
- #---------Version 4
- # Update zip file, check response
- zipdata3 = self.updateSubmissionZipfile(file_to_upload="testrdf4.zip", filename="testdir2.zip")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+
+ def testFileDelete(self):
+ """Delete file in dataset - DELETE /silo_name/datasets/dataset_name/file_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
- rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- # Access and check zip file content
+ rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
+ # Access and check zip file content and version
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
- expect_status=404, expect_reason="Not Found")
- (resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata3, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
- #Access state information and check
- (resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission",
- expect_status=200, expect_reason="OK", expect_type="application/json")
- state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- #=========Access each of the versions
- #---------Version 0
- # Access and check list of contents of version 0
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ # Delete file, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK")
+ # Access and check zip file does not exist
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=404, expect_reason="Not Found")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version0",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
@@ -867,27 +903,76 @@ def testGetDatasetByVersion(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version0",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 3, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- #---------Version 1
- # Access and check list of contents of version 1
- (resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version1",
- expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 3, "Three versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['versions'][2], '2', "Version 2")
+ self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['files']['2']), 1, "List should contain just manifest.rdf")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(len(parts.keys()), 3, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+
+ def testFileUpdate(self):
+ """Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2)"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ # Upload zip file, check response (uploads the file testdir.zip)
+ zipdata = self.uploadSubmissionZipfile()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ # Access and check zip file content and version
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ # Upload zip file again, check response
+ zipdata = self.updateSubmissionZipfile(file_to_upload="testdir2.zip", filename="testdir.zip")
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ base = self.getRequestUri("datasets/TestSubmission/")
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -896,149 +981,372 @@ def testGetDatasetByVersion(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version1",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 1!")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version1",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 3, "Three versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['versions'][2], '2', "Version 2")
+ self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['files']['2']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 4, "Parts")
self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+
+ def testGetDatasetByVersion(self):
+ """Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. Access each of the versions and the files in that version"""
+ #Definitions
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ base = self.getRequestUri("datasets/TestSubmission/")
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ #---------Version 0
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 3, "Parts")
+ #---------Version 1
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ # Access and check list of contents of version 0
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version0",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
#---------Version 2
- # Access and check list of contents of version 2
+ # Upload zip file, check response
+ zipdata2 = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version2",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version2",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 2!")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - testdir.zip!")
(resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip/version2",
+ resource="datasets/TestSubmission/testdir2.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - Version 2!")
+ self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version2",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 5, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
- self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
#---------Version 3
- # Access and check list of contents of version 3
+ # Delete file, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version3",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip/version3",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile, "Difference between local and remote zipfile - Version 3!")
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version3",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=404, expect_reason="Not Found")
+ (resp, zipfile2) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version3",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
#---------Version 4
- # Access and check list of contents of version 4
+ # Update zip file, check response
+ zipdata3 = self.updateSubmissionZipfile(file_to_upload="testrdf4.zip", filename="testdir2.zip")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version4",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip/version4",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata3, zipfile, "Difference between local and remote zipfile - Version 4!")
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version4",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=404, expect_reason="Not Found")
+ (resp, zipfile2) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata3, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version4",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 5, "Five versions")
- self.assertEqual(state['versions'],['0', '1', '2', '3', '4'], "Versions")
- self.assertEqual(state['currentversion'], '4', "Current version == 4")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
+ #=========Access each of the versions
+ #---------Version 0
+ # Access and check list of contents of version 0
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version0",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version0",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 3, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ #---------Version 1
+ # Access and check list of contents of version 1
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version1",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version1",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 1!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version1",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+ #---------Version 2
+ # Access and check list of contents of version 2
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 2!")
+ (resp, zipfile2) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - Version 2!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 5, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+ self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
+ #---------Version 3
+ # Access and check list of contents of version 3
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version3",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip/version3",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata2, zipfile, "Difference between local and remote zipfile - Version 3!")
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version3",
+ expect_status=404, expect_reason="Not Found")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version3",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
+ #---------Version 4
+ # Access and check list of contents of version 4
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version4",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip/version4",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata3, zipfile, "Difference between local and remote zipfile - Version 4!")
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version4",
+ expect_status=404, expect_reason="Not Found")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version4",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 5, "Five versions")
+ self.assertEqual(state['versions'],['0', '1', '2', '3', '4'], "Versions")
+ self.assertEqual(state['currentversion'], '4', "Current version == 4")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
self.assertEqual(len(state['files']['2']), 3, "List should contain manifest.rdf, testdir.zip and testdir2.zip")
self.assertEqual(len(state['files']['3']), 2, "List should contain manifest.rdf and testdir2.zip")
@@ -1091,7 +1399,7 @@ def testPostMetadataFile(self):
address = "http://schemas.talis.com/2005/address/schema#"
stype = URIRef(oxds+"DataSet")
stype2 = URIRef(bibo+"DocumentPart")
- self.assertEqual(len(rdfgraph),42,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),41,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1099,7 +1407,6 @@ def testPostMetadataFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:piblisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,RDF.type,stype2) in rdfgraph, 'Testing submission type: '+subj+", "+stype2)
@@ -1183,7 +1490,7 @@ def testMetadataFileUpdate(self):
foaf = "http://xmlns.com/foaf/0.1/"
address = "http://schemas.talis.com/2005/address/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1191,7 +1498,6 @@ def testMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:piblisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
@@ -1219,7 +1525,7 @@ def testMetadataFileUpdate(self):
Was worth a million such; and yet
She lived long, till God gave her rest.
"""
- self.assertEqual(len(rdfgraph),32,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),31,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1227,7 +1533,6 @@ def testMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),'A Cat') in rdfgraph, 'dcterms:title')
@@ -1239,8 +1544,6 @@ def testMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"type"),"Poem") in rdfgraph, 'dcterms:type')
self.failUnless((subj,URIRef(dcterms+"type"),URIRef("http://purl.org/dc/dcmitype/Text")) in rdfgraph, 'dcterms:type')
self.failUnless((subj,URIRef(dcterms+"rightsHolder"),"Copyright Edward Thomas, 1979, reproduced under licence from Faber and Faber Ltd.") in rdfgraph, 'dcterms:rightsHolder')
- #for s, p, o in rdfgraph.triples((None, RDF.value, None)):
- # print s, p, o, type(o)
self.failUnless((subj,RDF.value,Literal(doctext)) in rdfgraph, 'rdf:value')
self.failUnless((subj,URIRef(dcterms+"source"),"Edward Thomas Collected Poems") in rdfgraph, 'dcterms:source')
#self.failUnless((subj,URIRef(dcterms+"created"),"1979-01-01/1979-12-31") in rdfgraph, 'dcterms:created')
@@ -1302,7 +1605,7 @@ def testMetadataFileDelete(self):
dcterms = "http://purl.org/dc/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1310,7 +1613,6 @@ def testMetadataFileDelete(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
def testPutCreateFile(self):
@@ -1337,7 +1639,7 @@ def testPutCreateFile(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -1346,7 +1648,6 @@ def testPutCreateFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -1407,7 +1708,7 @@ def testPutUpdateFile(self):
rdfgraph.parse(rdfstream)
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
# Access and check zip file content and version
(resp, zipfile) = self.doHTTP_GET(
@@ -1435,7 +1736,7 @@ def testPutUpdateFile(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -1445,7 +1746,6 @@ def testPutUpdateFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -1520,7 +1820,7 @@ def testPutMetadataFile(self):
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1528,7 +1828,6 @@ def testPutMetadataFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
@@ -1544,7 +1843,7 @@ def testPutMetadataFile(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1552,7 +1851,6 @@ def testPutMetadataFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
@@ -1614,7 +1912,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode01.txt', 'r', 'utf-8')
doctext1 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1622,7 +1920,6 @@ def testUnicodeMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:piblisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),"General punctuation") in rdfgraph, 'dcterms:title')
@@ -1651,7 +1948,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode02.txt', 'r', 'utf-8')
doctext2 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),15,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"A table of (some) accents") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext2)) in rdfgraph, 'rdf:value')
@@ -1673,7 +1970,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode03.txt', 'r', 'utf-8')
doctext3 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),15,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Combining diacritics") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext3)) in rdfgraph, 'rdf:value')
@@ -1693,7 +1990,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode04.txt', 'r', 'utf-8')
doctext4 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Various symbols") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext4)) in rdfgraph, 'rdf:value')
@@ -1713,7 +2010,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode05.txt', 'r', 'utf-8')
doctext5 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'5') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some verses in Russian") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext5)) in rdfgraph, 'rdf:value')
@@ -1735,7 +2032,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode06.txt', 'r', 'utf-8')
doctext6 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),19,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'6') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some verses in ancient Greek") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext6)) in rdfgraph, 'rdf:value')
@@ -1755,7 +2052,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode07.txt', 'r', 'utf-8')
doctext7 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),19,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'7') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some verses in Sanskrit") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext7)) in rdfgraph, 'rdf:value')
@@ -1777,7 +2074,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode08.txt', 'r', 'utf-8')
doctext8= f.read()
f.close()
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'8') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some Chinese") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext8)) in rdfgraph, 'rdf:value')
@@ -1797,7 +2094,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode09.txt', 'r', 'utf-8')
doctext9 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'9') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"A Tamil name") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext9)) in rdfgraph, 'rdf:value')
@@ -1809,26 +2106,333 @@ def testUnicodeMetadataFileUpdate(self):
#Access state information and check
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
- expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
- fr = open('response.xml', 'w')
- fr.write(rdfdata)
- fr.close()
- rdfgraph = Graph()
- rdfgraph.parse('response.xml', format='xml')
- doctext10 = None
- f = codecs.open('testdata/unicodedata/unicode10.txt', 'r', 'utf-8')
- doctext10= f.read()
- f.close()
- self.assertEqual(len(rdfgraph),23,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'10') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"title"),"Some Arabic") in rdfgraph, 'dcterms:title')
- self.failUnless((subj,RDF.value,Literal(doctext10)) in rdfgraph, 'rdf:value')
- os.remove('response.xml')
-
- def testDeleteEmbargo(self):
- """Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ fr = open('response.xml', 'w')
+ fr.write(rdfdata)
+ fr.close()
+ rdfgraph = Graph()
+ rdfgraph.parse('response.xml', format='xml')
+ doctext10 = None
+ f = codecs.open('testdata/unicodedata/unicode10.txt', 'r', 'utf-8')
+ doctext10= f.read()
+ f.close()
+ self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'10') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"title"),"Some Arabic") in rdfgraph, 'dcterms:title')
+ self.failUnless((subj,RDF.value,Literal(doctext10)) in rdfgraph, 'rdf:value')
+ os.remove('response.xml')
+
+ def testDeleteEmbargo(self):
+ """Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ # Delete embargo, check response
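+        # Posting embargo_change='true' with no embargo fields should remove the embargo entirely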
+ fields = \
+ [ ("embargo_change", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], "", "Should have no date for embargoed_until")
+
+ def testChangeEmbargo(self):
+ """Modify embargo information - POST embargo_change, embargo, embargo_until to /silo_name/datasets/dataset_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "embargoed_until?")
+ #-------------------------------------------------
+ # Change embargo without embargo_until date - embargoed = True, check response
+ fields = \
+ [ ("embargoed", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "embargoed_until?")
+ #-------------------------------------------------
+ #Change embargo - embargoed = true, embargoed_until = True
+ d = datetime.now().isoformat()
+ fields = \
+ [ ("embargoed", 'true')
+ ,("embargoed_until", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
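+        # Posting embargoed_until='true' is expected to default the embargo end date to 70 years from now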
+ d = (datetime.now() + relativedelta(years=+70)).isoformat()
+ d = d.split('T')[0]
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #-------------------------------------------------
+ #Change embargo - embargoed = true, embargoed_until = datetime
+ d = datetime.now()
+ delta = timedelta(days=365*4)
+ d2 = d + delta
+ d2 = d2.isoformat()
+ fields = \
+ [ ("embargoed", 'true')
+ ,("embargoed_until", d2)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
+ #-------------------------------------------------
+        #Change embargo - embargoed = true, embargoed_until = date string, embargo_change = true
+ d2 = '09 August 2013'
+ fields = \
+ [ ("embargo_change", 'true')
+ ,("embargoed", 'true')
+ ,("embargoed_until", d2)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2013-08-09' in state['metadata']['embargoed_until'], "embargoed_until 2013-08-09?")
+ #-------------------------------------------------
+ # Change embargo - embargoed_until = true and check response
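+        # Posting embargoed_until on its own (without embargoed) should be rejected with 400 and leave the embargo metadata unchanged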
+ fields = \
+ [ ("embargoed_until", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=400, expect_reason="Bad request")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2013-08-09' in state['metadata']['embargoed_until'], "embargoed_until 2013-08-09?")
+ #-------------------------------------------------
+ # Change embargo - embargoed_until = date and check response
+ d5 = datetime.now()
+ delta = timedelta(days=3)
+ d5 = d5 + delta
+ d5 = d5.isoformat()
+ fields = \
+ [ ("embargoed_until", d5)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=400, expect_reason="Bad request")
#Access dataset and check content
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -1836,7 +2440,7 @@ def testDeleteEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -1844,9 +2448,24 @@ def testDeleteEmbargo(self):
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- # Delete embargo, check response
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2013-08-09' in state['metadata']['embargoed_until'], "embargoed_until 2013-08-09?")
+ #-------------------------------------------------
+ #Delete embargo
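+        # Posting embargoed='false' should lift the embargo and clear any embargoedUntil value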
fields = \
- [ ("embargo_change", 'true')
+ [ ("embargoed", 'false')
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -1868,12 +2487,13 @@ def testDeleteEmbargo(self):
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'5') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
(resp, data) = self.doHTTP_GET(
@@ -1881,12 +2501,19 @@ def testDeleteEmbargo(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
- self.assertEqual(state['metadata']['embargoed_until'], "", "Should have no date for embargoed_until")
-
- def testChangeEmbargo(self):
- """Modify embargo information - POST embargo_change, embargo, embargo_until to /silo_name/datasets/dataset_name"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #-------------------------------------------------
+ #Delete embargo
+ fields = \
+ [ ("embargoed", 'false')
+ ,("embargoed_until", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
#Access dataset and check content
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -1900,17 +2527,31 @@ def testChangeEmbargo(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- # Delete embargo, check response
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'6') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #-------------------------------------------------
+ #Delete embargo
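+        # With embargoed='false', any embargoed_until value supplied should be ignored and the dataset left open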
d = datetime.now()
- delta = timedelta(days=365*3)
- d2 = d + delta
- d2 = d2.isoformat()
+ delta = timedelta(days=4)
+ d3 = d + delta
+ d3 = d3.isoformat()
fields = \
- [ ("embargo_change", 'true')
- ,("embargoed", 'true')
- ,("embargoed_until", d2)
+ [ ("embargoed", 'false')
+ ,("embargoed_until", d3)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -1925,28 +2566,28 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'7') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
 
     def testFileUnpack(self):
"""Unpack zip file to a new dataset - POST zip filename to /silo_name/items/dataset_name"""
@@ -1984,7 +2625,7 @@ def testFileUnpack(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -1994,7 +2635,6 @@ def testFileUnpack(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access new dataset, check response
@@ -2007,14 +2647,13 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2066,7 +2705,7 @@ def testFileUnpack(self):
def testSymlinkFileUnpack(self):
"""Unpack zip file uploaded in a previous version to a new dataset - POST zip filename to /silo_name/items/dataset_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ self.createSubmissionDataset(embargoed=False)
# Upload zip file testdir.zip, check response
zipdata = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
# Upload zip file test, check response
@@ -2101,7 +2740,7 @@ def testSymlinkFileUnpack(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -2111,8 +2750,7 @@ def testSymlinkFileUnpack(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access new dataset, check response
@@ -2125,14 +2763,13 @@ def testSymlinkFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2206,7 +2843,7 @@ def testFileUploadToUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
# Access new dataset TestSubmission-testdir, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission-testdir",
@@ -2220,14 +2857,13 @@ def testFileUploadToUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2263,14 +2899,13 @@ def testFileUploadToUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2330,7 +2965,7 @@ def testUpdateUnpackedDataset(self):
zipdata = self.uploadSubmissionZipfile()
# Upload second zip file, check response
zipdata = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
- # Unpack ZIP file into a new dataset, check response
+ # Unpack ZIP file into a new dataset, check response - version 1
fields = \
[ ("filename", "testdir.zip")
]
@@ -2350,7 +2985,7 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
# Access and check list of contents in TestSubmission-testdir
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission-testdir",
@@ -2364,14 +2999,13 @@ def testUpdateUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2381,6 +3015,58 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file2.a")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
+ #-------------------------------------------------
+ #Change embargo - embargoed = true, embargoed_until = datetime
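+        # Embargoing the unpacked dataset should create a new version (2) and be reflected in both the manifest and the state information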
+ d = datetime.now()
+ delta = timedelta(days=365*4)
+ d2 = d + delta
+ d2 = d2.isoformat()
+ fields = \
+ [ ("embargoed", 'true')
+ ,("embargoed_until", d2)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission-testdir",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission-testdir",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
+ stype = URIRef(oxds+"Grouping")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.a")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.b")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file2.a")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission-testdir",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
+ #-------------------------------------------------
# Unpack second ZIP file into dataset TestSubmission-testdir, check response
fields = \
[ ("filename", "testdir2.zip"),
@@ -2405,7 +3091,7 @@ def testUpdateUnpackedDataset(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),15,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -2416,8 +3102,7 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access dataset TestSubmission-testdir, check response
@@ -2438,8 +3123,8 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -2453,31 +3138,33 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.a")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.b")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
- self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),"3") in rdfgraph, 'oxds:currentVersion')
#Access state information of TestSubmission-testdir
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission-testdir",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(state['item_id'], "TestSubmission-testdir", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(len(state['versions']), 4, "Four versions")
+        self.assertEqual(state['currentversion'], '3', "Current version == 3")
self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
self.assertEqual(len(state['files']['1']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
- self.assertEqual(len(state['files']['2']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
+ self.assertEqual(len(state['files']['2']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
+ self.assertEqual(len(state['files']['3']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['metadata_files']['3']), 0, "metadata_files of version 3")
self.assertEqual(state['subdir']['0'], [], "Subdirectory count for version 0")
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
- self.assertEqual(len(state['subdir']['2']), 2, "Subdirectory for version 2 should be directory1 and directory2")
+ self.assertEqual(state['subdir']['2'], ['directory'], "Subdirectory for version 2")
+        self.assertEqual(len(state['subdir']['3']), 2, "Subdirectory for version 3 should be directory1 and directory2")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 6, "Parts")
@@ -2493,14 +3180,13 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2516,6 +3202,8 @@ def testUpdateUnpackedDataset(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 5, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
@@ -2529,14 +3217,13 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype1) in rdfgraph, 'Testing submission type: '+subj+", "+stype1)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"0") in rdfgraph, 'oxds:currentVersion')
#Access state information of TestSubmission-testdir version 0
@@ -2545,6 +3232,8 @@ def testUpdateUnpackedDataset(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 3, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
@@ -2556,52 +3245,53 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
- self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testdir2/")) in rdfgraph, 'owl:sameAs')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1/file1.a")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1/file1.b")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1/file1.c")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.a")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.b")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.a")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.b")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file2.a")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
- #Access state information of TestSubmission-testdir version 2
+ #Access state information of TestSubmission-testdir version 3
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission-testdir/version2",
+ resource="states/TestSubmission-testdir/version3",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(state['item_id'], "TestSubmission-testdir", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
+ self.assertEqual(len(state['versions']), 4, "Four versions")
self.assertEqual(state['versions'][0], '0', "Version 0")
self.assertEqual(state['versions'][1], '1', "Version 1")
self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(state['versions'][3], '3', "Version 3")
+ self.assertEqual(state['currentversion'], '3', "Current version == 3")
self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
self.assertEqual(len(state['files']['1']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
- self.assertEqual(len(state['files']['2']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
+ self.assertEqual(len(state['files']['2']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
+ self.assertEqual(len(state['files']['3']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['metadata_files']['3']), 0, "metadata_files of version 3")
self.assertEqual(state['subdir']['0'], [], "Subdirectory count for version 0")
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
- self.assertEqual(len(state['subdir']['2']), 2, "Subdirectory for version 2 should be directory1 and directory2")
+ self.assertEqual(state['subdir']['2'], ['directory'], "Subdirectory for version 2")
+        self.assertEqual(len(state['subdir']['3']), 2, "Subdirectory for version 3 should be directory1 and directory2")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 6, "Parts")
@@ -2668,7 +3358,7 @@ def testMetadataMerging(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf.zip")) in rdfgraph)
@@ -2678,7 +3368,6 @@ def testMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf
@@ -2694,7 +3383,7 @@ def testMetadataMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -2711,7 +3400,6 @@ def testMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
#Get the file arabic.txt
@@ -2787,7 +3475,7 @@ def testMetadataInDirectoryMerging(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf2.zip")) in rdfgraph)
@@ -2796,8 +3484,7 @@ def testMetadataInDirectoryMerging(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf
@@ -2811,7 +3498,7 @@ def testMetadataInDirectoryMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf2/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),19,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -2828,7 +3515,6 @@ def testMetadataInDirectoryMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
# Delete the dataset TestSubmission-testrdf2
@@ -2890,7 +3576,7 @@ def testReferencedMetadataMerging(self):
owl = "http://www.w3.org/2002/07/owl#"
dc = "http://purl.org/dc/elements/1.1/"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf3.zip")) in rdfgraph)
@@ -2899,8 +3585,7 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf3
@@ -2913,11 +3598,11 @@ def testReferencedMetadataMerging(self):
rdfgraph = Graph()
rdfgraph.parse('response.xml', format='xml')
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf3"))
- subj2 = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf3/directory/hebrew.txt"))
+ subj2 = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf3/testrdf3/directory/hebrew.txt"))
base = self.getRequestUri("datasets/TestSubmission-testrdf3/")
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),31,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2938,7 +3623,6 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/2aFiles/")) in rdfgraph, 'owl:sameAs')
@@ -2946,9 +3630,9 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dc+"description"),"file1.b is another file") in rdfgraph, 'dc:description')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
- #self.failUnless((subj2,RDF.type,stype2) in rdfgraph, 'Testing submission type: '+subj2+", "+stype2)
- #self.failUnless((subj2,URIRef(dcterms+"title"),"Hebrew text") in rdfgraph, 'dcterms:title')
- #self.failUnless((subj2,URIRef(dcterms+"source"),"http://genizah.bodleian.ox.ac.uk/") in rdfgraph, 'dcterms:source')
+ self.failUnless((subj2,RDF.type,stype2) in rdfgraph, 'Testing submission type: %s, %s'%(subj2, stype2))
+ self.failUnless((subj2,URIRef(dcterms+"title"),"Hebrew text") in rdfgraph, 'dcterms:title')
+ self.failUnless((subj2,URIRef(dcterms+"source"),"http://genizah.bodleian.ox.ac.uk/") in rdfgraph, 'dcterms:source')
#Get the file hebrew.txt
(resp, hebrew_data) = self.doHTTP_GET(
resource="datasets/TestSubmission-testrdf3/testrdf3/directory/hebrew.txt",
@@ -3011,7 +3695,7 @@ def testReferencedMetadataMerging2(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf4.zip")) in rdfgraph)
@@ -3021,7 +3705,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf3
@@ -3039,7 +3722,7 @@ def testReferencedMetadataMerging2(self):
dc = "http://purl.org/dc/elements/1.1/"
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),29,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -3057,7 +3740,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
@@ -3065,8 +3747,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"title"),"Test item 2a") in rdfgraph, 'dcterms:title')
#self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("2aFiles")) in rdfgraph, 'dcterms:title')
- for s, p, o in rdfgraph.triples((None, RDF.type, None)):
- print s, p, o, type(o)
self.failUnless((subj2,RDF.type,stype2) in rdfgraph, 'Testing submission type: %s, %s'%(str(subj2), str(stype2)))
self.failUnless((subj2,URIRef(dc+"description"),"This is a archived test item 1a ") in rdfgraph, 'dc:description')
self.failUnless((subj2,URIRef(dcterms+"title"),"Test item 1a") in rdfgraph, 'dcterms:title')
@@ -3129,6 +3809,7 @@ def getTestSuite(select="unit"):
, "testDeleteDataset"
, "testDatasetNaming"
, "testDatasetStateInformation"
+ , "testEmbargoOnCreation"
, "testFileUpload"
, "testFileDelete"
, "testFileUpdate"
diff --git a/rdfdatabank/tests/TestSubmission_load.py b/rdfdatabank/tests/TestSubmission_load.py
new file mode 100644
index 0000000..ed95c56
--- /dev/null
+++ b/rdfdatabank/tests/TestSubmission_load.py
@@ -0,0 +1,319 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# $Id: $
+"""
+Databank submission test cases
+
+$Rev: $
+"""
+import os, os.path
+from datetime import datetime, timedelta
+import sys
+import unittest
+import logging
+import httplib
+import urllib
+import codecs
+try:
+ # Running Python 2.5 with simplejson?
+ import simplejson as json
+except ImportError:
+ import json
+
+# My system is running rdflib version 2.4.2, so add rdflib v3.0 to the sys path
+#rdflib_path = os.path.join(os.getcwd(), 'rdflib')
+#sys.path.insert(0, rdflib_path)
+#import rdflib
+#from rdflib.namespace import RDF
+#from rdflib.graph import Graph
+#from rdflib.plugins.memory import Memory
+#from rdflib import URIRef
+#from rdflib import Literal
+#rdflib.plugin.register('sparql',rdflib.query.Processor,'rdfextras.sparql.processor','Processor')
+#rdflib.plugin.register('sparql', rdflib.query.Result,
+# 'rdfextras.sparql.query', 'SPARQLQueryResult')
+
+from StringIO import StringIO
+
+from rdflib import RDF, URIRef, Literal
+from rdflib.Graph import ConjunctiveGraph as Graph
+
+#from time import sleep
+#import subprocess
+
+if __name__ == "__main__":
+ # For testing:
+ # add main library directory to python path if running stand-alone
+ sys.path.append("..")
+
+#from MiscLib import TestUtils
+from testlib import TestUtils
+from testlib import SparqlQueryTestCase
+
+#from RDFDatabankConfigProd import RDFDatabankConfig as RC
+from RDFDatabankConfig import RDFDatabankConfig as RC
+
+RDFDatabankConfig = RC()
+logger = logging.getLogger('TestSubmission')
+
+class TestSubmission(SparqlQueryTestCase.SparqlQueryTestCase):
+ """
+ Test simple dataset submissions to RDFDatabank
+ """
+ def setUp(self):
+ self.setRequestEndPoint(
+ endpointhost=RDFDatabankConfig.endpointhost, # Via SSH tunnel
+ endpointpath=RDFDatabankConfig.endpointpath)
+ self.setRequestUserPass(
+ endpointuser=RDFDatabankConfig.endpointuser,
+ endpointpass=RDFDatabankConfig.endpointpass)
+ self.setRequestUriRoot(
+ manifesturiroot=RDFDatabankConfig.granary_uri_root)
+ self.setRequestUserPass(
+ endpointuser=RDFDatabankConfig.endpointuser,
+ endpointpass=RDFDatabankConfig.endpointpass)
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status="*", expect_reason="*")
+ return
+
+ def tearDown(self):
+ return
+
+ # Create empty test submission dataset
+ def createSubmissionDataset(self, dataset_id='TestSubmission'):
+ # Create a new dataset, check response
+ fields = \
+ [ ("id", dataset_id)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/",
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "%sdatasets/%s"%(self._endpointpath, dataset_id)
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
+ return
+
+ def uploadSubmissionZipfile(self, dataset_id='TestSubmission', file_to_upload="testdir.zip", filename=None):
+ # Submit ZIP file, check response
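+ # The optional 'filename' field stores the uploaded file under a different
+ # name on the server; otherwise the local name (file_to_upload) is kept.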
+ fields = []
+ if filename:
+ fields = \
+ [ ("filename", filename)
+ ]
+ else:
+ filename = file_to_upload
+ zipdata = open("testdata/%s"%file_to_upload).read()
+ files = \
+ [ ("file", file_to_upload, zipdata, "application/zip")
+ ]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/%s/"%dataset_id,
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "%sdatasets/%s/%s"%(self._endpointpath, dataset_id, filename)
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
+ return zipdata
+
+ def updateSubmissionZipfile(self, dataset_id='TestSubmission', file_to_upload="testdir.zip", filename=None):
+ # Submit ZIP file, check response
+ fields = []
+ if filename:
+ fields = \
+ [ ("filename", filename)
+ ]
+ zipdata = open("testdata/%s"%file_to_upload).read()
+ files = \
+ [ ("file", file_to_upload, zipdata, "application/zip")
+ ]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata)= self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/%s/"%dataset_id,
+ expect_status=204, expect_reason="No Content")
+ return zipdata
+
+ # Actual tests follow
+ def test01CreateSilo(self):
+ """List all silos your account has access to - GET /admin. If the silo 'sandbox' does not exist, create it"""
+ self.setRequestUserPass(
+ endpointuser=RDFDatabankConfig.endpointadminuser,
+ endpointpass=RDFDatabankConfig.endpointadminpass)
+ # Access list silos, check response
+ (resp, data) = self.doHTTP_GET(
+ endpointpath="/",
+ resource="admin/",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ silo_name = RDFDatabankConfig.endpointpath.strip('/')
+ silolist = data
+ if not silo_name in silolist:
+ #Create new silo
+ owner_list = [RDFDatabankConfig.endpointadminuser]
+ if not RDFDatabankConfig.endpointuser in owner_list:
+ owner_list.append(RDFDatabankConfig.endpointuser)
+ owner_list = ",".join(owner_list)
+ fields = \
+ [ ("silo", silo_name),
+ ("title", "Sandbox silo"),
+ ("description", "Sandbox silo for testing"),
+ ("notes", "Created by test"),
+ ("owners", owner_list),
+ ("disk_allocation", "100000")
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype, resource="admin/", endpointpath="/",
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "/%s"%silo_name
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
+ # Access list silos, check response
+ (resp, data) = self.doHTTP_GET(
+ endpointpath="/",
+ resource="admin/",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ newsilolist = data
+ self.failUnless(len(newsilolist)>0, "No silos returned")
+ self.assertEquals(len(newsilolist), len(silolist)+1, "One additional silo should have been returned")
+ for s in silolist: self.failUnless(s in newsilolist, "Silo "+s+" in original list, not in new list")
+ self.failUnless(silo_name in newsilolist, "Silo '%s' not in new list"%silo_name)
+ return
+
+ def testFileUploadBulk(self):
+ """Upload file to dataset - POST file to /silo_name/datasets/dataset_name"""
+ for i in range(0, 10000):
+ # Create a new dataset, check response
+ start = datetime.now()
+ dataset_id='TestSubmission%d'%i
+ f = open('test_times.log', 'a')
+ f.write('%s: Creating and uploading file to dataset %s \n'%(start.isoformat(), dataset_id))
+ f.close()
+ self.createSubmissionDataset(dataset_id=dataset_id)
+ #Access state information
+ (resp, respdata) = self.doHTTP_GET(
+ resource="states/%s"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile(dataset_id=dataset_id, file_to_upload='rdfdatabank.zip', filename='testdir.zip')
+ end = datetime.now()
+ delta = end - start
+ time_used = delta.days * 86400 + delta.seconds
+ f = open('test_times.log', 'a')
+ f.write(' Time taken: %s \n\n'%str(time_used))
+ f.close()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/%s"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/%s"%dataset_id))
+ base = self.getRequestUri("datasets/%s/"%dataset_id)
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/%s/testdir.zip"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/%s"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(state['item_id'], dataset_id, "Submission item identifier")
+ self.assertEqual(len(state['versions']), 2, "Two versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['currentversion'], '1', "Current version == 1")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(len(parts['4=%s'%dataset_id].keys()), 13, "File stats for 4=%s"%dataset_id)
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+
+ # Sentinel/placeholder tests
+
+ def testUnits(self):
+ assert (True)
+
+ def testComponents(self):
+ assert (True)
+
+ def testIntegration(self):
+ assert (True)
+
+ def testPending(self):
+ #Need to have performance tests and analyse performance
+ #Need to set the permission of file being uploaded
+ #assert (False), "Pending tests follow"
+ assert (True)
+
+# Assemble test suite
+
+def getTestSuite(select="unit"):
+ """
+ Get test suite
+
+ select is one of the following:
+ "unit" return suite of unit tests only
+ "component" return suite of unit and component tests
+ "all" return suite of unit, component and integration tests
+ "pending" return suite of pending tests
+ name a single named test to be run
+ """
+ testdict = {
+ "unit":
+ [ "testUnits"
+ , "test01CreateSilo"
+ , "testFileUploadBulk"
+ ],
+ "component":
+ [ "testComponents"
+ ],
+ "integration":
+ [ "testIntegration"
+ ],
+ "pending":
+ [ "testPending"
+ ]
+ }
+ return TestUtils.getTestSuite(TestSubmission, testdict, select=select)
+
+if __name__ == "__main__":
+ TestUtils.runTests("TestSubmission.log", getTestSuite, sys.argv)
+
+# End.
diff --git a/rdfdatabank/tests/TestSubmission_submitter.py b/rdfdatabank/tests/TestSubmission_submitter.py
index 0ab35cd..8f55124 100644
--- a/rdfdatabank/tests/TestSubmission_submitter.py
+++ b/rdfdatabank/tests/TestSubmission_submitter.py
@@ -10,6 +10,7 @@
"""
import os, os.path
import datetime
+from dateutil.relativedelta import *
import sys
import unittest
import logging
@@ -42,6 +43,7 @@
from testlib import SparqlQueryTestCase
from RDFDatabankConfig import RDFDatabankConfig
+#from RDFDatabankConfigProd import RDFDatabankConfig
logger = logging.getLogger('TestSubmission')
@@ -71,9 +73,12 @@ def tearDown(self):
# Create empty test submission dataset
def createSubmissionDataset(self):
+ d = (datetime.datetime.now() + datetime.timedelta(days=365*4)).isoformat()
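+ # Embargo end date roughly four years from now; returned to callers so
+ # they can assert the stored oxds:embargoedUntil value matches.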
# Create a new dataset, check response
fields = \
- [ ("id", "TestSubmission")
+ [ ("id", "TestSubmission"),
+ ('embargoed', 'True'),
+ ('embargoed_until', d)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -84,7 +89,7 @@ def createSubmissionDataset(self):
LHobtained = resp.getheader('Content-Location', None)
LHexpected = "%sdatasets/TestSubmission"%self._endpointpath
self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
- return
+ return d
def uploadSubmissionZipfile(self, file_to_upload="testdir.zip"):
# Submit ZIP file, check response
@@ -680,7 +685,7 @@ def testDatasetNotPresent(self):
def testDatasetCreation(self):
"""Create dataset - POST id to /silo_name"""
# Create a new dataset as submitter, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -700,8 +705,8 @@ def testDatasetCreation(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
#Admin user of this silo - Create a new dataset, check response
@@ -731,7 +736,7 @@ def testDatasetCreation(self):
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
subj = URIRef(self.getRequestUri("datasets/TestSubmission2"))
- self.assertEqual(len(rdfgraph), 10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph), 9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(dcterms+"mediator"),RDFDatabankConfig.endpointadminuser) in rdfgraph, 'dcterms:mediator')
#manager user of this silo - Create a new dataset, check response
@@ -761,7 +766,7 @@ def testDatasetCreation(self):
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
subj = URIRef(self.getRequestUri("datasets/TestSubmission3"))
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(dcterms+"mediator"),RDFDatabankConfig.endpointmanageruser) in rdfgraph, 'dcterms:mediator')
#General user - Create a new dataset, check response
@@ -882,7 +887,7 @@ def testDatasetCreation2(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -894,16 +899,17 @@ def testDatasetCreation2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
-
#Admin user of this silo - Create a new dataset, check response
self.setRequestUserPass(
endpointuser=RDFDatabankConfig.endpointadminuser,
endpointpass=RDFDatabankConfig.endpointadminpass)
+ d = (datetime.datetime.now() + datetime.timedelta(days=365*4)).isoformat()
fields = \
- [ ("id", "TestSubmission2")
+ [ ("id", "TestSubmission2"),
+ ('embargoed', 'True'),
+ ('embargoed_until', d)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -933,7 +939,9 @@ def testDatasetCreation2(self):
endpointuser=RDFDatabankConfig.endpointmanageruser,
endpointpass=RDFDatabankConfig.endpointmanagerpass)
fields = \
- [ ("id", "TestSubmission3")
+ [ ("id", "TestSubmission3"),
+ ('embargoed', 'True'),
+ ('embargoed_until', d)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -1421,7 +1429,7 @@ def testDatasetNaming(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
elif status == 403 or status == 400:
(resp, respdata) = self.doHTTP_GET(
resource="datasets/%s"%name,
@@ -1439,7 +1447,7 @@ def testDatasetNaming(self):
def testDatasetStateInformation(self):
"""Get state information of dataset - GET /silo_name/states/dataset_name."""
# Create a new dataset by submitter, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
@@ -1458,6 +1466,7 @@ def testDatasetStateInformation(self):
self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d, "Embargoed until?")
# date
# version_dates
self.assertEqual(len(parts.keys()), 3, "Parts")
@@ -1529,7 +1538,7 @@ def testDatasetStateInformation(self):
def testFileUpload(self):
"""Upload file to dataset - POST file to /silo_name/datasets/dataset_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
#Access state information
(resp, respdata) = self.doHTTP_GET(
resource="states/TestSubmission",
@@ -1558,8 +1567,8 @@ def testFileUpload(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -1741,7 +1750,7 @@ def testFileUpload(self):
def testFileDelete(self):
"""Delete file in dataset - DELETE /silo_name/datasets/dataset_name/file_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Upload zip file, check response
zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
@@ -1788,8 +1797,8 @@ def testFileDelete(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
@@ -1954,7 +1963,7 @@ def testFileDelete(self):
def testFileUpdate(self):
"""Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2)"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Upload zip file, check response (uploads the file testdir.zip)
zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
@@ -1997,8 +2006,8 @@ def testFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -2191,7 +2200,7 @@ def testGetDatasetByVersionByURI(self):
stype = URIRef(oxds+"DataSet")
#---------Version 0
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -2341,8 +2350,8 @@ def testGetDatasetByVersionByURI(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
#Access state information and check
(resp, data) = self.doHTTP_GET(
@@ -2371,8 +2380,8 @@ def testGetDatasetByVersionByURI(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -2409,8 +2418,8 @@ def testGetDatasetByVersionByURI(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -4893,7 +4902,19 @@ def testUnicodeMetadataFileUpdate(self):
def testChangeEmbargo(self):
"""Change embargo information - POST embargo_change to /silo_name/datasets/dataset_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ #self.createSubmissionDataset()
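+ # Create the dataset inline, without embargo fields, so the server-side
+ # default embargo (embargoed, with no end date) is what gets exercised here.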
+ fields = \
+ [ ("id", "TestSubmission")
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/",
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "%sdatasets/TestSubmission"%self._endpointpath
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
#Access dataset and check content
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -4901,7 +4922,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -4910,15 +4931,13 @@ def testChangeEmbargo(self):
base = self.getRequestUri("datasets/TestSubmission/")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
#Access state information and check
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
- embargoed_until_date = (datetime.date.today() + datetime.timedelta(days=365*70)).isoformat()
state = data['state']
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.failUnless(embargoed_until_date in state['metadata']['embargoed_until'], "Default 70 year embargo failed")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "embargoed_until indefinite - default embargo failed")
# Upload zip file, check response
zipdata = self.uploadSubmissionZipfile(file_to_upload="testdir.zip")
#Access dataset and check content
@@ -4928,10 +4947,9 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -4956,7 +4974,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -4971,7 +4989,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -4986,7 +5004,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5000,7 +5018,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=302, expect_reason="Found", expect_type="application/zip")
@@ -5016,7 +5034,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5030,7 +5048,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5044,12 +5062,13 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
# Delete embargo, check response
+ embargoed_until_date = datetime.datetime.now().isoformat()
self.setRequestUserPass(
endpointuser=RDFDatabankConfig.endpointsubmitteruser,
endpointpass=RDFDatabankConfig.endpointsubmitterpass)
@@ -5209,10 +5228,10 @@ def testChangeEmbargo(self):
self.setRequestUserPass(
endpointuser=RDFDatabankConfig.endpointadminuser,
endpointpass=RDFDatabankConfig.endpointadminpass)
- d = (datetime.datetime.now() + datetime.timedelta(days=365*10)).isoformat()
+ d1 = (datetime.datetime.now() + datetime.timedelta(days=365*10)).isoformat()
fields = \
[ ("embargoed", 'true')
- ,("embargoed_until", d)
+ ,("embargoed_until", d1)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -5241,7 +5260,7 @@ def testChangeEmbargo(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.failUnless(d in state['metadata']['embargoed_until'], "embargoed_until date?")
+ self.failUnless(d1 in state['metadata']['embargoed_until'], "embargoed_until date?")
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
@@ -5859,14 +5878,13 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -5882,7 +5900,7 @@ def testChangeEmbargo(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.failUnless(embargoed_until_date in state['metadata']['embargoed_until'], "Updating embargoed_until date")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
# Access and check zip file content by submitter
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
@@ -5898,7 +5916,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -5913,7 +5931,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -5928,7 +5946,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5942,7 +5960,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=302, expect_reason="Found", expect_type="application/zip")
@@ -5958,7 +5976,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5972,7 +5990,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5986,7 +6004,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -6045,8 +6063,8 @@ def testFileUnpack(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"4") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((URIRef(base+"testdir.zip"),URIRef(dcterms+"hasVersion"),subj2) in rdfgraph, 'ore:aggregates testrdf.zip')
@@ -6060,15 +6078,14 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6106,6 +6123,7 @@ def testFileUnpack(self):
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
self.assertEqual(len(parts.keys()), 5, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -6153,15 +6171,14 @@ def testFileUnpack(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir2/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6210,15 +6227,14 @@ def testFileUnpack(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir2/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6278,15 +6294,14 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6337,15 +6352,14 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6616,15 +6630,14 @@ def testSymlinkFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6662,6 +6675,7 @@ def testSymlinkFileUnpack(self):
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
self.assertEqual(len(parts.keys()), 5, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -6714,15 +6728,14 @@ def testFileUploadToUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6758,15 +6771,14 @@ def testFileUploadToUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6803,6 +6815,7 @@ def testFileUploadToUnpackedDataset(self):
self.assertEqual(state['subdir']['2'], ['directory'], "Subdirectory for version 2")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
self.assertEqual(len(parts.keys()), 6, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -7016,15 +7029,14 @@ def testUpdateUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7093,7 +7105,7 @@ def testUpdateUnpackedDataset(self):
base = self.getRequestUri("datasets/TestSubmission-testdir/")
owl = "http://www.w3.org/2002/07/owl#"
dc = "http://purl.org/dc/elements/1.1/"
- self.assertEqual(len(rdfgraph),29,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7111,8 +7123,7 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
#self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset 4 with updated and merged metadata") in rdfgraph, 'dcterms:title')
@@ -7152,6 +7163,7 @@ def testUpdateUnpackedDataset(self):
self.assertEqual(state['subdir']['2'], ['testrdf4'], "Subdirectory for version 2 should be directory")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
self.assertEqual(len(parts.keys()), 4, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -7215,7 +7227,7 @@ def testUpdateUnpackedDataset(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7223,7 +7235,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7285,7 +7296,7 @@ def testUpdateUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7293,7 +7304,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7502,7 +7512,7 @@ def testUpdateUnpackedDataset(self):
stype2 = URIRef(oxds+"Grouping")
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7510,7 +7520,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"0") in rdfgraph, 'oxds:currentVersion')
#Access state information of TestSubmission-testdir version 0
@@ -7531,7 +7540,7 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype2) in rdfgraph, 'Testing submission type: '+subj+", "+stype2)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7539,7 +7548,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7590,7 +7598,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
self.failUnless((subj,URIRef(dcterms+"title"),"Test item 2a") in rdfgraph, 'dcterms:title')
@@ -7640,7 +7647,7 @@ def testUpdateUnpackedDataset(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7648,7 +7655,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7712,7 +7718,7 @@ def testUpdateUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7720,7 +7726,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7872,7 +7877,7 @@ def testMetadataMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -7889,8 +7894,7 @@ def testMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
#Get the file arabic.txt
@@ -7981,7 +7985,7 @@ def testMetadataInDirectoryMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf2/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),20, 'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),19, 'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -7999,7 +8003,6 @@ def testMetadataInDirectoryMerging(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
# Delete the dataset TestSubmission-testrdf2
@@ -8091,7 +8094,7 @@ def testReferencedMetadataMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf3/")
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),32,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),31,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -8112,8 +8115,7 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/2aFiles/")) in rdfgraph, 'owl:sameAs')
@@ -8216,7 +8218,7 @@ def testReferencedMetadataMerging2(self):
dc = "http://purl.org/dc/elements/1.1/"
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),29,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -8235,7 +8237,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
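The hunks above adjust the manifest assertions for the new embargo handling: the oxds:embargoedUntil triple is no longer expected by default, and in several hunks oxds:isEmbargoed is now matched against the value 'True' rather than a wildcard. A minimal sketch of how such rdflib triple-membership checks behave; the subject URI and inline manifest below are invented for illustration, and an explicit Literal is used because matching a plain Python string against a stored literal depends on the rdflib version:

    from rdflib import Graph, URIRef, Literal

    oxds = "http://vocab.ox.ac.uk/dataset/schema#"
    subj = URIRef("http://databank.example.org/datasets/TestSubmission")

    # Hypothetical manifest fragment describing an embargoed dataset.
    manifest = """<?xml version="1.0" encoding="utf-8"?>
    <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
             xmlns:oxds="http://vocab.ox.ac.uk/dataset/schema#">
      <rdf:Description rdf:about="http://databank.example.org/datasets/TestSubmission">
        <oxds:isEmbargoed>True</oxds:isEmbargoed>
      </rdf:Description>
    </rdf:RDF>"""

    rdfgraph = Graph()
    rdfgraph.parse(data=manifest, format="xml")

    # None acts as a wildcard for the object position.
    assert (subj, URIRef(oxds + "isEmbargoed"), None) in rdfgraph
    # A concrete object must equal the stored plain literal exactly.
    assert (subj, URIRef(oxds + "isEmbargoed"), Literal("True")) in rdfgraph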
diff --git a/rdfdatabank/tests/testdata/rdfdatabank.zip b/rdfdatabank/tests/testdata/rdfdatabank.zip
new file mode 100644
index 0000000000000000000000000000000000000000..5486da4dceeb5d82660fdd49104fc97ca00df66d
GIT binary patch
literal 376392
zzoi(WuwaWEDao6Varm^elp7@>jz$saE((h$M?c4MU{$X$R1Mjf+scHDW6s53X)-R0r5Ua3XGZ(p11Mb;Xh
z*}mGOqO&N7div28N)+@4*ReXdNRF_B*`Y9NcNZEclzKDezd_Z+UjX
z!_tIbT=JM6yS1cgeo)S-mnGe|AWepg=A>6I1IETKcItY*7%`bTek5<948VL17S!yW
zk#4}&jJk%{nXA(-s9xo1f#bKuJg0mI-esKUBu-48+lgLOm)U2LzK_fbSf1mTg7R`?
zrt~@d>~6|(7vZcSb&vNWE%?Z#wjwsAoGL=p^OdSbX@%E>2iRPibGK?i0DgFSc@VcOm4?nmP%BP05fRw{9T})Ia|Pm(pv&Vx8#Uegl`u4-
zL2TCI9k91hI9MC))>anN9t4W(i>$Po$Y@Nzs?=JxoaXq`Zl$I*(6N~nOPE)I3{=@k
zml^Y-d^ZgIdD|cdWupiknD{(Cc{s%x1eO|MH>1`DvT9?R;0_9FBDcu!K7xRHE5ew8
zBExcK@H1fdv0D7*%M%>=9vz1>za|e5LPl7`z1okv4MxVSTrtjt)(0Ud_tOed+W>mJ
zZ%!^o=Ai}PwGmLSZ>be}R)XGmv)1uOF%nEl0rf6MgUE939>hUB@LCb#jv3(9f&mXI
zWYX|YGIO24222Y5lF{L3@1B7XNjN8Et~NVXQQ4|kP6>!t3-^!{uw!lZwk|2NG!fKJ
z71>7;nf