diff --git a/development-jenkins.ini b/development-jenkins.ini
index 71e005b..dd0421b 100644
--- a/development-jenkins.ini
+++ b/development-jenkins.ini
@@ -8,7 +8,7 @@ debug = false
# Uncomment and replace with the address which should receive any error reports
#email_to = you@yourdomain.com
smtp_server = localhost
-error_email_from = paste@localhost
+error_email_from = paste@jenkins
[server:main]
use = egg:Paste#http
diff --git a/development.ini b/development.ini
index 516bbdf..6bad250 100644
--- a/development.ini
+++ b/development.ini
@@ -4,7 +4,7 @@
# The %(here)s variable will be replaced with the parent directory of this file
#
[DEFAULT]
-debug = false
+debug = true
# Uncomment and replace with the address which should receive any error reports
#email_to = you@yourdomain.com
smtp_server = localhost
@@ -13,11 +13,11 @@ error_email_from = paste@localhost
[server:main]
use = egg:Paste#http
#Use these settings to run pylons using mod_wsgi and apache
-host = 127.0.0.1
-port = 5000
+#host = 127.0.0.1
+#port = 5000
#Use these settings to run pylons from the commandline
-#host = 0.0.0.0
-#port = 80
+host = 0.0.0.0
+port = 80
[app:main]
use = egg:rdfdatabank
@@ -30,16 +30,18 @@ beaker.session.secret = somesecret
who.config_file = %(here)s/who.ini
who.log_level = info
-who.log_file = /var/log/databank/who.log
-#who.log_file = stdout
+#who.log_file = /var/log/databank/who.log
+who.log_file = stdout
#who.log_file = %(here)s/logs/who.log
redis.host = localhost
granary.store = %(here)s/silos
-#granary.uri_root = http://databank.bodleian.ox.ac.uk/datasets/
granary.uri_root = http://192.168.23.133/
+profile.log_filename = %(here)s/logs/profile.log
+profile.path = /__profile__
+
auth.file = %(here)s/passwd
auth.info = %(here)s/rdfdatabank/config/users.py
@@ -80,17 +82,17 @@ keys = generic
[logger_root]
level = INFO
-handlers = logfile
+handlers = console
[logger_routes]
level = INFO
-handlers = logfile
+handlers = console
qualname = routes.middleware
# "level = DEBUG" logs the route matched and routing variables.
[logger_rdfdatabank]
level = DEBUG
-handlers = logfile
+handlers = console
qualname = rdfdatabank
[handler_console]
diff --git a/production.ini b/production.ini
index 930caa7..02aad1a 100644
--- a/production.ini
+++ b/production.ini
@@ -8,7 +8,7 @@ debug = false
# Uncomment and replace with the address which should receive any error reports
email_to = anusha.ranganathan@bodleian.ox.ac.uk
smtp_server = localhost
-error_email_from = paste@localhost
+error_email_from = paste@databank
[server:main]
use = egg:Paste#http
@@ -35,10 +35,8 @@ who.log_file = /var/log/databank/who.log
redis.host = localhost
-#granary.store = %(here)s/silos
granary.store = /silos
granary.uri_root = http://databank.ora.ox.ac.uk/
-#granary.uri_root = http://163.1.127.173/
auth.file = %(here)s/passwd
auth.info = %(here)s/rdfdatabank/config/users.py
diff --git a/rdfdatabank/config/middleware.py b/rdfdatabank/config/middleware.py
index a0f7371..d65d79e 100644
--- a/rdfdatabank/config/middleware.py
+++ b/rdfdatabank/config/middleware.py
@@ -46,13 +46,21 @@ def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
app = PylonsApp()
#app = httpexceptions.make_middleware(app, global_conf)
+ if asbool(config['debug']):
+ from repoze.profile.profiler import AccumulatingProfileMiddleware
+ app = AccumulatingProfileMiddleware(
+ app,
+ log_filename=app_conf['profile.log_filename'],
+ discard_first_request=True,
+ flush_at_shutdown=True,
+ path=app_conf['profile.path']
+ )
# Routing/Session/Cache Middleware
app = RoutesMiddleware(app, config['routes.map'])
app = SessionMiddleware(app, config)
app = CacheMiddleware(app, config)
-
# CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
if asbool(full_stack):
# Handle Python exceptions
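
Note on the profiling middleware added above: it is only wired in when debug = true and it reads the two new profile.* keys from development.ini. A minimal standalone sketch of the same wiring, assuming the repoze.profile package is installed (the demo WSGI app, log path and port below are illustrative, not part of the codebase):

    # Sketch only: wrap a bare WSGI app with the accumulating profiler used in the diff.
    from wsgiref.simple_server import make_server
    from repoze.profile.profiler import AccumulatingProfileMiddleware

    def demo_app(environ, start_response):
        # Stand-in for the Pylons application.
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'hello from databank\n']

    app = AccumulatingProfileMiddleware(
        demo_app,
        log_filename='logs/profile.log',   # profile.log_filename in development.ini
        discard_first_request=True,
        flush_at_shutdown=True,
        path='/__profile__'                # profile.path in development.ini
    )

    if __name__ == '__main__':
        # Accumulated timings are then browsable at http://localhost:5000/__profile__
        make_server('', 5000, app).serve_forever()
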
diff --git a/rdfdatabank/controllers/datasets.py b/rdfdatabank/controllers/datasets.py
index 31e367b..f2e51ef 100644
--- a/rdfdatabank/controllers/datasets.py
+++ b/rdfdatabank/controllers/datasets.py
@@ -3,6 +3,8 @@
import re, os, shutil, codecs
import simplejson
from datetime import datetime, timedelta
+from dateutil.relativedelta import *
+from dateutil.parser import parse
import time
from uuid import uuid4
from pylons import request, response, session, tmpl_context as c, url, app_globals as ag
@@ -10,7 +12,7 @@
from pylons.decorators import rest
from paste.fileapp import FileApp
from rdfdatabank.lib.base import BaseController, render
-from rdfdatabank.lib.utils import create_new, is_embargoed, get_readme_text, test_rdf, munge_manifest, serialisable_stat, allowable_id2
+from rdfdatabank.lib.utils import create_new, is_embargoed, get_readme_text, test_rdf, munge_manifest, serialisable_stat, allowable_id2, get_rdf_template
from rdfdatabank.lib.file_unpack import get_zipfiles_in_dataset
from rdfdatabank.lib.conneg import MimeType as MT, parse as conneg_parse
@@ -218,14 +220,16 @@ def datasetview(self, silo, id):
if ident['repoze.who.userid'] == creator or ident.get('role') in ["admin", "manager"]:
c.editor = True
- if c.version and not c.version == currentversion:
- c.editor = False
-
+
c.show_files = True
#Only the administrator, manager and creator can view embargoed files.
if embargoed and not c.editor:
c.show_files = False
+            #Display but do not edit previous versions of files, since previous versions are read-only.
+ if c.version and not c.version == currentversion:
+ c.editor = False
+
# View options
if "view" in options and c.editor:
c.view = options['view']
@@ -240,7 +244,8 @@ def datasetview(self, silo, id):
c.embargos[id] = is_embargoed(c_silo, id)
c.parts = item.list_parts(detailed=True)
c.manifest_pretty = item.rdf_to_string(format="pretty-xml")
- c.manifest = item.rdf_to_string()
+ #c.manifest = item.rdf_to_string()
+ c.manifest = get_rdf_template(item.uri, id)
c.zipfiles = get_zipfiles_in_dataset(item)
c.readme_text = None
#if item.isfile("README"):
@@ -366,20 +371,28 @@ def datasetview(self, silo, id):
abort(403)
item.increment_version_delta(clone_previous_version=True, copy_filenames=['manifest.rdf'])
#if params.has_key('embargoed'):
- if (params.has_key('embargo_change') and params.has_key('embargoed')) or \
- (params.has_key('embargoed') and params['embargoed'].lower() == 'true'):
+ if (params.has_key('embargo_change') and params.has_key('embargoed') and \
+ params['embargoed'].lower() in ['true', '1'] and params['embargo_change'].lower() in ['true', '1']) or \
+ (params.has_key('embargoed') and params['embargoed'].lower() in ['true', '1']):
+ embargoed_until_date = None
if params.has_key('embargoed_until') and params['embargoed_until']:
- embargoed_until_date = params['embargoed_until']
- elif params.has_key('embargo_days_from_now') and params['embargo_days_from_now']:
- embargoed_until_date = (datetime.now() + timedelta(days=params['embargo_days_from_now'])).isoformat()
- else:
- embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
+ try:
+ embargoed_until_date = parse(params['embargoed_until']).isoformat()
+ except:
+ embargoed_until_date = (datetime.now() + relativedelta(years=+70)).isoformat()
+ elif params.has_key('embargo_days_from_now') and params['embargo_days_from_now'].isdigit():
+ embargoed_until_date = (datetime.now() + timedelta(days=int(params['embargo_days_from_now']))).isoformat()
+ #It is embargoed indefinitely by default
+ #else:
+ # embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
item.metadata['embargoed'] = True
- item.metadata['embargoed_until'] = embargoed_until_date
+ item.metadata['embargoed_until'] = ''
item.del_triple(item.uri, u"oxds:isEmbargoed")
item.del_triple(item.uri, u"oxds:embargoedUntil")
item.add_triple(item.uri, u"oxds:isEmbargoed", 'True')
- item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
+ if embargoed_until_date:
+ item.metadata['embargoed_until'] = embargoed_until_date
+ item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
else:
#if is_embargoed(c_silo, id)[0] == True:
item.metadata['embargoed'] = False
@@ -822,13 +835,15 @@ def itemview(self, silo, id, path):
if ident['repoze.who.userid'] == creator or ident.get('role') in ["admin", "manager"]:
c.editor = True
- if c.version and not c.version == currentversion:
- c.editor = False
-
c.show_files = True
+ #Only the administrator, manager and creator can view embargoed files.
if embargoed and not c.editor:
c.show_files = False
+            #Display but do not edit previous versions of files, since previous versions are read-only.
+ if c.version and not c.version == currentversion:
+ c.editor = False
+
# View options
if "view" in options and c.editor:
c.view = options['view']
@@ -1063,6 +1078,11 @@ def itemview(self, silo, id, path):
response.status_int = 403
response.status = "403 Forbidden"
return "Forbidden - Cannot delete the manifest"
+ if '3=' in path or '4=' in path:
+ response.content_type = "text/plain"
+ response.status_int = 403
+ response.status = "403 Forbidden"
+                return "Forbidden - These files are generated by the system and cannot be deleted"
item.increment_version_delta(clone_previous_version=True, copy_filenames=['manifest.rdf'])
item.del_stream(path)
item.del_triple(item.uri, u"dcterms:modified")
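
The embargo handling above now derives embargoed_until with python-dateutil instead of storing raw form values: an explicit date is parsed (falling back to now + 70 years if it cannot be parsed), embargo_days_from_now must be numeric, and omitting both leaves the embargo indefinite with no oxds:embargoedUntil triple. A condensed sketch of that decision logic; resolve_embargo_until is a hypothetical helper, not a function in the codebase:

    from datetime import datetime, timedelta
    from dateutil.relativedelta import relativedelta
    from dateutil.parser import parse

    def resolve_embargo_until(embargoed_until=None, embargo_days_from_now=None):
        """Return an ISO8601 embargo end date, or None for an indefinite embargo."""
        if embargoed_until:
            try:
                return parse(embargoed_until).isoformat()
            except (ValueError, TypeError):
                # Unparseable dates fall back to a 70-year embargo, as in the diff.
                return (datetime.now() + relativedelta(years=+70)).isoformat()
        if embargo_days_from_now and str(embargo_days_from_now).isdigit():
            return (datetime.now() + timedelta(days=int(embargo_days_from_now))).isoformat()
        # Neither supplied: the dataset stays embargoed indefinitely.
        return None

This is why the updated tests below expect oxds:isEmbargoed without a matching oxds:embargoedUntil triple when no date is supplied.
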
diff --git a/rdfdatabank/controllers/users.py b/rdfdatabank/controllers/users.py
index 7fb88ef..dd399b0 100644
--- a/rdfdatabank/controllers/users.py
+++ b/rdfdatabank/controllers/users.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import logging
import simplejson
+import codecs
from pylons import request, response, session, config, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from pylons.decorators import rest
diff --git a/rdfdatabank/lib/file_unpack.py b/rdfdatabank/lib/file_unpack.py
index fc18a80..3719b46 100644
--- a/rdfdatabank/lib/file_unpack.py
+++ b/rdfdatabank/lib/file_unpack.py
@@ -19,8 +19,12 @@ class BadZipfile(Exception):
"""Cannot open zipfile using commandline tool 'unzip' to target directory"""
def check_file_mimetype(real_filepath, mimetype):
+ if os.path.isdir(real_filepath):
+ return False
if os.path.islink(real_filepath):
real_filepath = os.readlink(real_filepath)
+ if not os.path.isfile(real_filepath):
+ return False
p = subprocess.Popen("file -ib '%s'" %(real_filepath), shell=True, stdout=subprocess.PIPE)
output_file = p.stdout
output_str = output_file.read()
@@ -29,11 +33,15 @@ def check_file_mimetype(real_filepath, mimetype):
else:
return False
-def get_zipfiles_in_dataset_old(dataset):
+def get_zipfiles_in_dataset(dataset):
derivative = dataset.list_rdf_objects("*", "ore:aggregates")
zipfiles = {}
- if derivative and derivative.values() and derivative.values()[0]:
- for file_uri in derivative.values()[0]:
+ #if derivative and derivative.values() and derivative.values()[0]:
+ if derivative:
+ #for file_uri in derivative.values()[0]:
+ for file_uri in derivative:
+ if not file_uri.lower().endswith('.zip'):
+ continue
filepath = file_uri[len(dataset.uri)+1:]
real_filepath = dataset.to_dirpath(filepath)
if os.path.islink(real_filepath):
@@ -43,7 +51,7 @@ def get_zipfiles_in_dataset_old(dataset):
zipfiles[filepath]="%s-%s"%(dataset.item_id, fn)
return zipfiles
-def get_zipfiles_in_dataset(dataset):
+def get_zipfiles_in_dataset_new(dataset):
p = subprocess.Popen("""file -iL `find %s -name '*.zip'` | grep "application/zip" | awk -F":" '{print $1}'""" %dataset.to_dirpath(), shell=True, stdout=subprocess.PIPE)
stdout_value = p.communicate()[0]
zipfiles = {}
@@ -148,6 +156,9 @@ def unpack_zip_item(target_dataset, current_dataset, zip_item, silo, ident):
if os.path.islink(filepath):
filepath = os.readlink(filepath)
+ emb = target_dataset.metadata.get('embargoed')
+ emb_until = target_dataset.metadata.get('embargoed_until')
+
# -- Step 1 -----------------------------
unpacked_dir = unzip_file(filepath)
@@ -181,9 +192,15 @@ def unpack_zip_item(target_dataset, current_dataset, zip_item, silo, ident):
target_dataset.add_triple(target_dataset.uri, u"rdf:type", "oxds:Grouping")
target_dataset.add_triple(target_dataset.uri, "dcterms:isVersionOf", file_uri)
#TODO: Adding the following metadata again as moving directory deletes all this information. Need to find a better way
- embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
- target_dataset.add_triple(target_dataset.uri, u"oxds:isEmbargoed", 'True')
- target_dataset.add_triple(target_dataset.uri, u"oxds:embargoedUntil", embargoed_until_date)
+ if emb:
+ target_dataset.add_triple(target_dataset.uri, u"oxds:isEmbargoed", 'True')
+ if emb_until:
+ target_dataset.add_triple(target_dataset.uri, u"oxds:embargoedUntil", emb_until)
+ else:
+ target_dataset.add_triple(target_dataset.uri, u"oxds:isEmbargoed", 'False')
+    #The embargo is indefinite by default; uncomment the two lines below to default to 70 years instead.
+ #embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
+ #target_dataset.add_triple(target_dataset.uri, u"oxds:embargoedUntil", embargoed_until_date)
target_dataset.add_triple(target_dataset.uri, u"dcterms:identifier", target_dataset.item_id)
target_dataset.add_triple(target_dataset.uri, u"dcterms:mediator", ident)
target_dataset.add_triple(target_dataset.uri, u"dcterms:publisher", ag.publisher)
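
check_file_mimetype above now refuses directories and dangling symlinks before shelling out to file(1). A self-contained sketch of the hardened check; it mirrors the guards in the diff but passes the path as an argument list rather than interpolating it into a shell string:

    import os
    import subprocess

    def check_file_mimetype(real_filepath, mimetype):
        # Directories and anything that is not (or no longer) a regular file are rejected outright.
        if os.path.isdir(real_filepath):
            return False
        if os.path.islink(real_filepath):
            real_filepath = os.readlink(real_filepath)
        if not os.path.isfile(real_filepath):
            return False
        # 'file -ib' prints the MIME type, e.g. 'application/zip; charset=binary'.
        p = subprocess.Popen(["file", "-ib", real_filepath], stdout=subprocess.PIPE)
        output = p.communicate()[0]
        return mimetype in output
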
diff --git a/rdfdatabank/lib/utils.py b/rdfdatabank/lib/utils.py
index 0c40d8d..7d8de48 100644
--- a/rdfdatabank/lib/utils.py
+++ b/rdfdatabank/lib/utils.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
+from dateutil.relativedelta import *
+from dateutil.parser import parse
from time import sleep
from redis import Redis
from redis.exceptions import ConnectionError
@@ -14,6 +16,7 @@
#from rdflib.parser import StringInputSource
from rdflib import Namespace, RDF, RDFS, URIRef, Literal, BNode
+
from uuid import uuid4
import re
@@ -146,23 +149,31 @@ def is_embargoed_no_redis(silo, id, refresh=False):
def create_new(silo, id, creator, title=None, embargoed=True, embargoed_until=None, embargo_days_from_now=None, **kw):
item = silo.get_item(id, startversion="0")
item.metadata['createdby'] = creator
- item.metadata['embargoed'] = embargoed
+ item.metadata['embargoed_until'] = ''
item.metadata['uuid'] = uuid4().hex
item.add_namespace('oxds', "http://vocab.ox.ac.uk/dataset/schema#")
item.add_triple(item.uri, u"rdf:type", "oxds:DataSet")
- if embargoed:
- if embargoed_until:
- embargoed_until_date = embargoed_until
- elif embargo_days_from_now:
- embargoed_until_date = (datetime.now() + timedelta(days=embargo_days_from_now)).isoformat()
- else:
- embargoed_until_date = (datetime.now() + timedelta(days=365*70)).isoformat()
- item.metadata['embargoed_until'] = embargoed_until_date
+ if embargoed==True or embargoed.lower() in ['true', '1'] :
+ item.metadata['embargoed'] = True
item.add_triple(item.uri, u"oxds:isEmbargoed", 'True')
- item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
+ embargoed_until_date = None
+ if embargoed_until:
+ try:
+ embargoed_until_date = parse(embargoed_until).isoformat()
+ except:
+ embargoed_until_date = (datetime.now() + relativedelta(years=+70)).isoformat()
+ elif embargo_days_from_now and embargo_days_from_now.isdigit():
+ embargoed_until_date = (datetime.now() + timedelta(days=int(embargo_days_from_now))).isoformat()
+        #TODO: Do we want the default embargo_until to be 70 years or indefinite? Going with indefinite.
+ #else:
+ # embargoed_until_date = (datetime.now() + relativedelta(years=+70)).isoformat()
+ if embargoed_until_date:
+ item.metadata['embargoed_until'] = embargoed_until_date
+ item.add_triple(item.uri, u"oxds:embargoedUntil", embargoed_until_date)
else:
item.add_triple(item.uri, u"oxds:isEmbargoed", 'False')
+ item.metadata['embargoed'] = False
item.add_triple(item.uri, u"dcterms:identifier", id)
item.add_triple(item.uri, u"dcterms:mediator", creator)
item.add_triple(item.uri, u"dcterms:publisher", ag.publisher)
@@ -188,6 +199,14 @@ def get_readme_text(item, filename="README"):
text = fn.read().decode("utf-8")
return u"%s\n\n%s" % (filename, text)
+def get_rdf_template(item_uri, item_id):
+ g = ConjunctiveGraph(identifier=item_uri)
+ g.bind('rdf', 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
+ g.bind('dcterms', 'http://purl.org/dc/terms/')
+ g.add((URIRef(item_uri), URIRef('http://purl.org/dc/terms/identifier'), Literal(item_id)))
+ data2 = g.serialize(format='xml', encoding="utf-8") + '\n'
+ return data2
+
#def test_rdf(text):
def test_rdf(mfile):
g = ConjunctiveGraph()
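
get_rdf_template() above means the manifest edit form is now seeded with a stub graph holding only the dcterms:identifier triple, rather than the full serialized manifest. A rough illustration of the call and its output (the URI is made up; the exact RDF/XML layout is up to rdflib):

    from rdfdatabank.lib.utils import get_rdf_template

    stub = get_rdf_template("http://databank.example.org/sandbox/datasets/TestSubmission",
                            "TestSubmission")
    # stub is RDF/XML along the lines of:
    #   <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
    #            xmlns:dcterms="http://purl.org/dc/terms/">
    #     <rdf:Description rdf:about="http://databank.example.org/sandbox/datasets/TestSubmission">
    #       <dcterms:identifier>TestSubmission</dcterms:identifier>
    #     </rdf:Description>
    #   </rdf:RDF>
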
diff --git a/rdfdatabank/templates/datasetview.html b/rdfdatabank/templates/datasetview.html
index e1952a9..3e40b18 100644
--- a/rdfdatabank/templates/datasetview.html
+++ b/rdfdatabank/templates/datasetview.html
@@ -90,7 +90,7 @@
Information for version ${c.version} of the dataset
Embargo date: Aim is for ISO8601 dates to provide embargo trigger events. Currently unused, unvalidated and unparsed.
- Change RDF Manifest:
+ Add metadata to the RDF Manifest:
<%include file="/rdf_manifest_form.html"/>
diff --git a/rdfdatabank/templates/part_list.html b/rdfdatabank/templates/part_list.html
index a68c356..57bcebc 100644
--- a/rdfdatabank/templates/part_list.html
+++ b/rdfdatabank/templates/part_list.html
@@ -9,18 +9,22 @@
%>
% if c.version:
% for part in c.parts:
-% if type(c.parts).__name__ == 'dict' and c.parts[part]:
+% if not part.startswith('3=') and not part.startswith('4='):
+% if type(c.parts).__name__ == 'dict' and c.parts[part]:
${part} ${h.bytes_to_english(c.parts[part].st_size)}
-% else:
+% else:
${part}
+% endif
% endif
% endfor
% else:
% for part in c.parts:
-% if type(c.parts).__name__ == 'dict' and c.parts[part]:
+% if not part.startswith('3=') and not part.startswith('4='):
+% if type(c.parts).__name__ == 'dict' and c.parts[part]:
${part} ${h.bytes_to_english(c.parts[part].st_size)} - (remove)
-% else:
+% else:
${part} - (remove)
+% endif
% endif
% endfor
% endif
diff --git a/rdfdatabank/templates/part_list_display.html b/rdfdatabank/templates/part_list_display.html
index 8630da2..64fd93f 100644
--- a/rdfdatabank/templates/part_list_display.html
+++ b/rdfdatabank/templates/part_list_display.html
@@ -11,11 +11,14 @@
else:
ver = ""
%>
+
% for part in c.parts:
- % if type(c.parts).__name__ == 'dict' and c.parts[part]:
- ${part} ${h.bytes_to_english(c.parts[part].st_size)}
- % else:
- ${part}
+ % if not part.startswith('3=') and not part.startswith('4='):
+ % if type(c.parts).__name__ == 'dict' and c.parts[part]:
+ ${part} ${h.bytes_to_english(c.parts[part].st_size)}
+ % else:
+ ${part}
+ % endif
% endif
% endfor
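
Both part-listing templates and the delete handler in datasets.py now special-case names beginning with '3=' or '4=': these are system-generated files, so the templates omit them from the listing and the controller refuses to delete them. The shared test amounts to the following (hypothetical helper, shown only to make the repeated condition explicit):

    def is_system_generated(part_name):
        # System-generated parts are never listed for editing or deletion.
        return part_name.startswith('3=') or part_name.startswith('4=')
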
diff --git a/rdfdatabank/templates/rdf_manifest_form.html b/rdfdatabank/templates/rdf_manifest_form.html
index bc63ad6..dff68af 100644
--- a/rdfdatabank/templates/rdf_manifest_form.html
+++ b/rdfdatabank/templates/rdf_manifest_form.html
@@ -5,6 +5,6 @@
${c.manifest}
-
+
diff --git a/rdfdatabank/tests/TestSubmission.py b/rdfdatabank/tests/TestSubmission.py
index 0258ceb..1908813 100644
--- a/rdfdatabank/tests/TestSubmission.py
+++ b/rdfdatabank/tests/TestSubmission.py
@@ -9,6 +9,7 @@
"""
import os, os.path
from datetime import datetime, timedelta
+from dateutil.relativedelta import *
import sys
import unittest
import logging
@@ -78,11 +79,23 @@ def tearDown(self):
return
# Create empty test submission dataset
- def createSubmissionDataset(self):
+ def createSubmissionDataset(self, embargoed=None, embargoed_until=None):
# Create a new dataset, check response
fields = \
[ ("id", "TestSubmission")
]
+ if embargoed != None:
+ if embargoed:
+ fields.append(('embargoed', 'True'))
+ else:
+ fields.append(('embargoed', 'False'))
+ if embargoed_until != None:
+ if embargoed_until == True:
+ fields.append(('embargoed_until', 'True'))
+ elif embargoed_until == False:
+ fields.append(('embargoed_until', 'False'))
+ else:
+ fields.append(('embargoed_until', embargoed_until))
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
(resp,respdata) = self.doHTTP_POST(
@@ -287,7 +300,7 @@ def testDatasetCreation(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -298,8 +311,7 @@ def testDatasetCreation(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
def testDatasetCreation2(self):
@@ -322,7 +334,7 @@ def testDatasetCreation2(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -334,7 +346,6 @@ def testDatasetCreation2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
def testDatasetRecreation(self):
@@ -348,7 +359,7 @@ def testDatasetRecreation(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
#Recreate the dataset, check response
fields = \
[ ("id", "TestSubmission")
@@ -374,7 +385,7 @@ def testDatasetRecreation(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
def testDeleteDataset(self):
"""Delete dataset - DELETE /silo_name/dataset_name"""
@@ -430,7 +441,7 @@ def testDatasetNaming(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
elif status == 403:
(resp, respdata) = self.doHTTP_GET(
resource="datasets/%s"%name,
@@ -471,254 +482,185 @@ def testDatasetStateInformation(self):
self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- def testFileUpload(self):
- """Upload file to dataset - POST file to /silo_name/datasets/dataset_name"""
- # Create a new dataset, check response
+ def testEmbargoOnCreation(self):
+ """Create dataset - POST id to /silo_name"""
+ #---------------------------------------------------------------
+ # Create a new dataset, check response. No embargo information is passed.
self.createSubmissionDataset()
- #Access state information
- (resp, respdata) = self.doHTTP_GET(
- resource="states/TestSubmission",
- expect_status=200, expect_reason="OK", expect_type="application/json")
- # Upload zip file, check response
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- base = self.getRequestUri("datasets/TestSubmission/")
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- #Access state information and check
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 2, "Two versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['currentversion'], '1', "Current version == 1")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
- self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
- self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
- self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
- self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
- self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
-
- def testFileDelete(self):
- """Delete file in dataset - DELETE /silo_name/datasets/dataset_name/file_name"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
- # Upload zip file, check response
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+        self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=None, embargo_until=True.
+ self.createSubmissionDataset(embargoed_until=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
- rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
- # Access and check zip file content and version
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- # Delete file, check response
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertTrue((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ d = (datetime.now() + relativedelta(years=+70)).isoformat()
+ d = d.split('T')[0]
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #---------------------------------------------------------------
+ # Delete dataset, check response
resp = self.doHTTP_DELETE(
- resource="datasets/TestSubmission/testdir.zip",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK")
- # Access and check zip file does not exist
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
expect_status=404, expect_reason="Not Found")
- # Access and check list of contents
+ # Create a new dataset, check response. embargoed=None, embargo_until=2012-08-12
+ d = '2012-08-12'
+ self.createSubmissionDataset(embargoed_until=d)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
- #Access state information and check
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertTrue((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
- self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['files']['2']), 1, "List should contain just manifest.rdf")
- self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
- self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
- self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
- self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
- self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
- self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
- self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(len(parts.keys()), 3, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
-
- def testFileUpdate(self):
- """Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2)"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
- # Upload zip file, check response (uploads the file testdir.zip)
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=True, embargo_until=None.
+ self.createSubmissionDataset(embargoed=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
- rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
- # Access and check zip file content and version
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- # Upload zip file again, check response
- zipdata = self.updateSubmissionZipfile(file_to_upload="testdir2.zip", filename="testdir.zip")
- # Access and check list of contents
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+        self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=True, embargo_until=True
+ self.createSubmissionDataset(embargoed=True, embargoed_until=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- base = self.getRequestUri("datasets/TestSubmission/")
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- #Access state information and check
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
- self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['files']['2']), 2, "List should contain manifest.rdf and testdir.zip")
- self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
- self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
- self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
- self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
- self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
- self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
- self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ d = (datetime.now() + relativedelta(years=+70)).isoformat()
+ d = d.split('T')[0]
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
-
- def testGetDatasetByVersion(self):
- """Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. Access each of the versions and the files in that version"""
- #Definitions
- subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
- base = self.getRequestUri("datasets/TestSubmission/")
- dcterms = "http://purl.org/dc/terms/"
- ore = "http://www.openarchives.org/ore/terms/"
- oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- stype = URIRef(oxds+"DataSet")
- #---------Version 0
- # Create a new dataset, check response
- self.createSubmissionDataset()
- # Access and check list of contents
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed=True, embargo_until=09-08-2012
+ d = '09-08-2012'
+ self.createSubmissionDataset(embargoed=True, embargoed_until=d)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
@@ -726,79 +668,135 @@ def testGetDatasetByVersion(self):
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
- #Access state information and check
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 3, "Parts")
- #---------Version 1
- # Upload zip file, check response
- zipdata = self.uploadSubmissionZipfile()
- # Access and check list of contents
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2012-09-08' in state['metadata']['embargoed_until'], "embargoed_until 2012-09-08?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed = False.
+ self.createSubmissionDataset(embargoed=False)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
- #Access state information and check
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- # Access and check list of contents of version 0
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+        self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed = False, embargoed_until = True
+ self.createSubmissionDataset(embargoed=False, embargoed_until=True)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version0",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
- #---------Version 2
- # Upload zip file, check response
- zipdata2 = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
- # Access and check list of contents
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+        self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #---------------------------------------------------------------
+ # Delete dataset, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK")
+ # Access dataset, test response indicating non-existent
+ (resp, respdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=404, expect_reason="Not Found")
+ # Create a new dataset, check response. embargoed = False, embargoed_until = 12 sep 2013
+ d = '12 Sep 2013'
+ self.createSubmissionDataset(embargoed=False, embargoed_until=d)
+ # Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
- # Access and check zip file content
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - testdir.zip!")
- (resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
- #Access state information and check
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ # Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 5, "Parts")
- #---------Version 3
- # Delete file, check response
- resp = self.doHTTP_DELETE(
- resource="datasets/TestSubmission/testdir.zip",
- expect_status=200, expect_reason="OK")
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+        self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+
+ def testFileUpload(self):
+ """Upload file to dataset - POST file to /silo_name/datasets/dataset_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ #Access state information
+ (resp, respdata) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -806,15 +804,28 @@ def testGetDatasetByVersion(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ base = self.getRequestUri("datasets/TestSubmission/")
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
- expect_status=404, expect_reason="Not Found")
- (resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
@@ -822,43 +833,68 @@ def testGetDatasetByVersion(self):
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 2, "Two versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['currentversion'], '1', "Current version == 1")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 4, "Parts")
- #---------Version 4
- # Update zip file, check response
- zipdata3 = self.updateSubmissionZipfile(file_to_upload="testrdf4.zip", filename="testdir2.zip")
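+ # 'parts' gives per-file stats; '4=TestSubmission' is not uploaded by the test and appears to be a record kept by the underlying store, alongside manifest.rdf and the uploaded zip.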
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+
+ def testFileDelete(self):
+ """Delete file in dataset - DELETE /silo_name/datasets/dataset_name/file_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
- rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- # Access and check zip file content
+ rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
+ # Access and check zip file content and version
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
- expect_status=404, expect_reason="Not Found")
- (resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata3, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
- #Access state information and check
- (resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission",
- expect_status=200, expect_reason="OK", expect_type="application/json")
- state = data['state']
- parts = data['parts']
- self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 4, "Parts")
- #=========Access each of the versions
- #---------Version 0
- # Access and check list of contents of version 0
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ # Delete file, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK")
+ # Access and check zip file does not exist
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=404, expect_reason="Not Found")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version0",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
@@ -867,27 +903,76 @@ def testGetDatasetByVersion(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version0",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
- self.assertEqual(len(parts.keys()), 3, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- #---------Version 1
- # Access and check list of contents of version 1
- (resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version1",
- expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 3, "Three versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['versions'][2], '2', "Version 2")
+ self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['files']['2']), 1, "List should contain just manifest.rdf")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(len(parts.keys()), 3, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+
+ def testFileUpdate(self):
+ """Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2)"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ # Upload zip file, check response (uploads the file testdir.zip)
+ zipdata = self.uploadSubmissionZipfile()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ # Access and check zip file content and version
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ # Upload zip file again, check response
+ zipdata = self.updateSubmissionZipfile(file_to_upload="testdir2.zip", filename="testdir.zip")
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ base = self.getRequestUri("datasets/TestSubmission/")
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -896,149 +981,372 @@ def testGetDatasetByVersion(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version1",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 1!")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version1",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 3, "Three versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['versions'][2], '2', "Version 2")
+ self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['files']['2']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(len(state['subdir']['2']), 0, "Subdirectory count for version 2")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 4, "Parts")
self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+
+ def testGetDatasetByVersion(self):
+ """Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. Access each of the versions and the files in that version"""
+ #Definitions
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ base = self.getRequestUri("datasets/TestSubmission/")
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ #---------Version 0
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 3, "Parts")
+ #---------Version 1
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ # Access and check list of contents of version 0
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version0",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
#---------Version 2
- # Access and check list of contents of version 2
+ # Upload zip file, check response
+ zipdata2 = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version2",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version2",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 2!")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - testdir.zip!")
(resp, zipfile2) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip/version2",
+ resource="datasets/TestSubmission/testdir2.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - Version 2!")
+ self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version2",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 5, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
- self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
#---------Version 3
- # Access and check list of contents of version 3
+ # Delete file, check response
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission/testdir.zip",
+ expect_status=200, expect_reason="OK")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version3",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip/version3",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata2, zipfile, "Difference between local and remote zipfile - Version 3!")
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version3",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=404, expect_reason="Not Found")
+ (resp, zipfile2) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version3",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
- self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
- self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
#---------Version 4
- # Access and check list of contents of version 4
+ # Update zip file, check response
+ zipdata3 = self.updateSubmissionZipfile(file_to_upload="testrdf4.zip", filename="testdir2.zip")
+ # Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
- resource="datasets/TestSubmission/version4",
+ resource="datasets/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
- self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
- self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
- self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
- self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir2.zip/version4",
- expect_status=200, expect_reason="OK", expect_type="application/zip")
- self.assertEqual(zipdata3, zipfile, "Difference between local and remote zipfile - Version 4!")
- (resp, zipfile) = self.doHTTP_GET(
- resource="datasets/TestSubmission/testdir.zip/version4",
+ resource="datasets/TestSubmission/testdir.zip",
expect_status=404, expect_reason="Not Found")
+ (resp, zipfile2) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata3, zipfile2, "Difference between local and remote zipfile - testdir2.zip!")
#Access state information and check
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission/version4",
+ resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 4, "Parts")
- self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
- self.assertEqual(len(state['versions']), 5, "Five versions")
- self.assertEqual(state['versions'],['0', '1', '2', '3', '4'], "Versions")
- self.assertEqual(state['currentversion'], '4', "Current version == 4")
- self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
- self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
- self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
+ #=========Access each of the versions
+ #---------Version 0
+ # Access and check list of contents of version 0
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version0",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version0",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 3, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ #---------Version 1
+ # Access and check list of contents of version 1
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version1",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version1",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 1!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version1",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+ #---------Version 2
+ # Access and check list of contents of version 2
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile - Version 2!")
+ (resp, zipfile2) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata2, zipfile2, "Difference between local and remote zipfile - Version 2!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version2",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 5, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
+ self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
+ #---------Version 3
+ # Access and check list of contents of version 3
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version3",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip/version3",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata2, zipfile, "Difference between local and remote zipfile - Version 3!")
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version3",
+ expect_status=404, expect_reason="Not Found")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version3",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(len(parts['4=TestSubmission'].keys()), 13, "File stats for 4=TestSubmission")
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir2.zip'].keys()), 13, "File stats for testdir2.zip")
+ #---------Version 4
+ # Access and check list of contents of version 4
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/version4",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir2.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir2.zip/version4",
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata3, zipfile, "Difference between local and remote zipfile - Version 4!")
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/TestSubmission/testdir.zip/version4",
+ expect_status=404, expect_reason="Not Found")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission/version4",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(state['item_id'], "TestSubmission", "Submission item identifier")
+ self.assertEqual(len(state['versions']), 5, "Five versions")
+ self.assertEqual(state['versions'],['0', '1', '2', '3', '4'], "Versions")
+ self.assertEqual(state['currentversion'], '4', "Current version == 4")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
self.assertEqual(len(state['files']['2']), 3, "List should contain manifest.rdf, testdir.zip and testdir2.zip")
self.assertEqual(len(state['files']['3']), 2, "List should contain manifest.rdf and testdir2.zip")
@@ -1091,7 +1399,7 @@ def testPostMetadataFile(self):
address = "http://schemas.talis.com/2005/address/schema#"
stype = URIRef(oxds+"DataSet")
stype2 = URIRef(bibo+"DocumentPart")
- self.assertEqual(len(rdfgraph),42,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),41,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1099,7 +1407,6 @@ def testPostMetadataFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:piblisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,RDF.type,stype2) in rdfgraph, 'Testing submission type: '+subj+", "+stype2)
@@ -1183,7 +1490,7 @@ def testMetadataFileUpdate(self):
foaf = "http://xmlns.com/foaf/0.1/"
address = "http://schemas.talis.com/2005/address/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1191,7 +1498,6 @@ def testMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:piblisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
@@ -1219,7 +1525,7 @@ def testMetadataFileUpdate(self):
Was worth a million such; and yet
She lived long, till God gave her rest.
"""
- self.assertEqual(len(rdfgraph),32,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),31,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1227,7 +1533,6 @@ def testMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),'A Cat') in rdfgraph, 'dcterms:title')
@@ -1239,8 +1544,6 @@ def testMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"type"),"Poem") in rdfgraph, 'dcterms:type')
self.failUnless((subj,URIRef(dcterms+"type"),URIRef("http://purl.org/dc/dcmitype/Text")) in rdfgraph, 'dcterms:type')
self.failUnless((subj,URIRef(dcterms+"rightsHolder"),"Copyright Edward Thomas, 1979, reproduced under licence from Faber and Faber Ltd.") in rdfgraph, 'dcterms:rightsHolder')
- #for s, p, o in rdfgraph.triples((None, RDF.value, None)):
- # print s, p, o, type(o)
self.failUnless((subj,RDF.value,Literal(doctext)) in rdfgraph, 'rdf:value')
self.failUnless((subj,URIRef(dcterms+"source"),"Edward Thomas Collected Poems") in rdfgraph, 'dcterms:source')
#self.failUnless((subj,URIRef(dcterms+"created"),"1979-01-01/1979-12-31") in rdfgraph, 'dcterms:created')
@@ -1302,7 +1605,7 @@ def testMetadataFileDelete(self):
dcterms = "http://purl.org/dc/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1310,7 +1613,6 @@ def testMetadataFileDelete(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
def testPutCreateFile(self):
@@ -1337,7 +1639,7 @@ def testPutCreateFile(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -1346,7 +1648,6 @@ def testPutCreateFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -1407,7 +1708,7 @@ def testPutUpdateFile(self):
rdfgraph.parse(rdfstream)
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
# Access and check zip file content and version
(resp, zipfile) = self.doHTTP_GET(
@@ -1435,7 +1736,7 @@ def testPutUpdateFile(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -1445,7 +1746,6 @@ def testPutUpdateFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -1520,7 +1820,7 @@ def testPutMetadataFile(self):
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1528,7 +1828,6 @@ def testPutMetadataFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
@@ -1544,7 +1843,7 @@ def testPutMetadataFile(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1552,7 +1851,6 @@ def testPutMetadataFile(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
@@ -1614,7 +1912,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode01.txt', 'r', 'utf-8')
doctext1 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
@@ -1622,7 +1920,6 @@ def testUnicodeMetadataFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:piblisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"title"),"General punctuation") in rdfgraph, 'dcterms:title')
@@ -1651,7 +1948,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode02.txt', 'r', 'utf-8')
doctext2 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),15,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"A table of (some) accents") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext2)) in rdfgraph, 'rdf:value')
@@ -1673,7 +1970,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode03.txt', 'r', 'utf-8')
doctext3 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),15,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Combining diacritics") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext3)) in rdfgraph, 'rdf:value')
@@ -1693,7 +1990,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode04.txt', 'r', 'utf-8')
doctext4 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Various symbols") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext4)) in rdfgraph, 'rdf:value')
@@ -1713,7 +2010,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode05.txt', 'r', 'utf-8')
doctext5 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'5') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some verses in Russian") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext5)) in rdfgraph, 'rdf:value')
@@ -1735,7 +2032,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode06.txt', 'r', 'utf-8')
doctext6 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),19,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'6') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some verses in ancient Greek") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext6)) in rdfgraph, 'rdf:value')
@@ -1755,7 +2052,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode07.txt', 'r', 'utf-8')
doctext7 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),19,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'7') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some verses in Sanskrit") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext7)) in rdfgraph, 'rdf:value')
@@ -1777,7 +2074,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode08.txt', 'r', 'utf-8')
doctext8= f.read()
f.close()
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'8') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"Some Chinese") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext8)) in rdfgraph, 'rdf:value')
@@ -1797,7 +2094,7 @@ def testUnicodeMetadataFileUpdate(self):
f = codecs.open('testdata/unicodedata/unicode09.txt', 'r', 'utf-8')
doctext9 = f.read()
f.close()
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(oxds+"currentVersion"),'9') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"title"),"A Tamil name") in rdfgraph, 'dcterms:title')
self.failUnless((subj,RDF.value,Literal(doctext9)) in rdfgraph, 'rdf:value')
@@ -1809,26 +2106,333 @@ def testUnicodeMetadataFileUpdate(self):
#Access state information and check
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
- expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
- fr = open('response.xml', 'w')
- fr.write(rdfdata)
- fr.close()
- rdfgraph = Graph()
- rdfgraph.parse('response.xml', format='xml')
- doctext10 = None
- f = codecs.open('testdata/unicodedata/unicode10.txt', 'r', 'utf-8')
- doctext10= f.read()
- f.close()
- self.assertEqual(len(rdfgraph),23,'Graph length %i' %len(rdfgraph))
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'10') in rdfgraph, 'oxds:currentVersion')
- self.failUnless((subj,URIRef(dcterms+"title"),"Some Arabic") in rdfgraph, 'dcterms:title')
- self.failUnless((subj,RDF.value,Literal(doctext10)) in rdfgraph, 'rdf:value')
- os.remove('response.xml')
-
- def testDeleteEmbargo(self):
- """Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ fr = open('response.xml', 'w')
+ fr.write(rdfdata)
+ fr.close()
+ rdfgraph = Graph()
+ rdfgraph.parse('response.xml', format='xml')
+ doctext10 = None
+ f = codecs.open('testdata/unicodedata/unicode10.txt', 'r', 'utf-8')
+ doctext10 = f.read()
+ f.close()
+ self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'10') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"title"),"Some Arabic") in rdfgraph, 'dcterms:title')
+ self.failUnless((subj,RDF.value,Literal(doctext10)) in rdfgraph, 'rdf:value')
+ os.remove('response.xml')
+
+ def testDeleteEmbargo(self):
+ """Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ # Delete embargo, check response
+ fields = \
+ [ ("embargo_change", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], "", "Should have no date for embargoed_until")
+
+ def testChangeEmbargo(self):
+ """Modify embargo information - POST embargo_change, embargo, embargo_until to /silo_name/datasets/dataset_name"""
+ # Create a new dataset, check response
+ self.createSubmissionDataset()
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "embargoed_until?")
+ #-------------------------------------------------
+ # Change embargo without an embargoed_until date - embargoed = true, check response
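+ # Re-asserting embargoed=true with no date: the checks below expect embargoed_until
+ # to remain empty and oxds:currentVersion to read '1'.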
+ fields = \
+ [ ("embargoed", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "embargoed_until?")
+ #-------------------------------------------------
+ #Change embargo - embargoed = true, embargoed_until = true
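+ # Sending the literal string 'true' as embargoed_until should apply the default
+ # embargo period: the state check below expects a date roughly 70 years from now
+ # and oxds:currentVersion '2'.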
+ d = datetime.now().isoformat()
+ fields = \
+ [ ("embargoed", 'true')
+ ,("embargoed_until", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ d = (datetime.now() + relativedelta(years=+70)).isoformat()
+ d = d.split('T')[0]
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue(d in state['metadata']['embargoed_until'], "embargoed_until %s?"%d)
+ #-------------------------------------------------
+ #Change embargo - embargoed = true, embargoed_until = datetime
+ d = datetime.now()
+ delta = timedelta(days=365*4)
+ d2 = d + delta
+ d2 = d2.isoformat()
+ fields = \
+ [ ("embargoed", 'true')
+ ,("embargoed_until", d2)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'3') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
+ #-------------------------------------------------
+ #Change embargo - embargoed = true, embargoed_until = datetime, embargo_change = true
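+ # embargo_change=true with a free-text date ('09 August 2013'): the checks below
+ # expect it to be normalised to 2013-08-09 and the version to move to '4'.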
+ d2 = '09 August 2013'
+ fields = \
+ [ ("embargo_change", 'true')
+ ,("embargoed", 'true')
+ ,("embargoed_until", d2)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2013-08-09' in state['metadata']['embargoed_until'], "embargoed_until 2013-08-09?")
+ #-------------------------------------------------
+ # Change embargo - embargoed_until = true and check response
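+ # embargoed_until on its own (no embargoed field) should be rejected with 400,
+ # leaving version '4' and the 2013-08-09 embargo unchanged.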
+ fields = \
+ [ ("embargoed_until", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=400, expect_reason="Bad request")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
+ stype = URIRef(oxds+"DataSet")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2013-08-09' in state['metadata']['embargoed_until'], "embargoed_until 2013-08-09?")
+ #-------------------------------------------------
+ # Change embargo - embargoed_until = date and check response
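+ # Same again with a valid ISO date: still a 400, and the stored embargo is expected
+ # to be unchanged.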
+ d5 = datetime.now()
+ delta = timedelta(days=3)
+ d5 = d5 + delta
+ d5 = d5.isoformat()
+ fields = \
+ [ ("embargoed_until", d5)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=400, expect_reason="Bad request")
#Access dataset and check content
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -1836,7 +2440,7 @@ def testDeleteEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -1844,9 +2448,24 @@ def testDeleteEmbargo(self):
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- # Delete embargo, check response
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'4') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertTrue('2013-08-09' in state['metadata']['embargoed_until'], "embargoed_until 2013-08-09?")
+ #-------------------------------------------------
+ #Delete embargo
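+ # embargoed=false alone: the checks below expect oxds:embargoedUntil to be cleared
+ # and currentVersion to reach '5'.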
fields = \
- [ ("embargo_change", 'true')
+ [ ("embargoed", 'false')
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -1868,12 +2487,13 @@ def testDeleteEmbargo(self):
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'5') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
(resp, data) = self.doHTTP_GET(
@@ -1881,12 +2501,19 @@ def testDeleteEmbargo(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
- self.assertEqual(state['metadata']['embargoed_until'], "", "Should have no date for embargoed_until")
-
- def testChangeEmbargo(self):
- """Modify embargo information - POST embargo_change, embargo, embargo_until to /silo_name/datasets/dataset_name"""
- # Create a new dataset, check response
- self.createSubmissionDataset()
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #-------------------------------------------------
+ #Delete embargo
+ fields = \
+ [ ("embargoed", 'false')
+ ,("embargoed_until", 'true')
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission",
+ expect_status=204, expect_reason="Updated")
#Access dataset and check content
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -1900,17 +2527,31 @@ def testChangeEmbargo(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
- # Delete embargo, check response
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'6') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ #-------------------------------------------------
+ #Delete embargo
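+ # embargoed=false together with a future embargoed_until: the embargo should still
+ # be lifted; the checks below expect no embargoedUntil and currentVersion '7'.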
d = datetime.now()
- delta = timedelta(days=365*3)
- d2 = d + delta
- d2 = d2.isoformat()
+ delta = timedelta(days=4)
+ d3 = d + delta
+ d3 = d3.isoformat()
fields = \
- [ ("embargo_change", 'true')
- ,("embargoed", 'true')
- ,("embargoed_until", d2)
+ [ ("embargoed", 'false')
+ ,("embargoed_until", d3)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -1925,28 +2566,28 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
+ self.assertFalse((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'7') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
- self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
+ self.assertEqual(state['metadata']['embargoed'], False, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
def testFileUnpack(self):
"""Unpack zip file to a new dataset - POST zip filename to /silo_name/items/dataset_name"""
@@ -1984,7 +2625,7 @@ def testFileUnpack(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -1994,7 +2635,6 @@ def testFileUnpack(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access new dataset, check response
@@ -2007,14 +2647,13 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2066,7 +2705,7 @@ def testFileUnpack(self):
def testSymlinkFileUnpack(self):
"""Unpack zip file uploaded in a previous version to a new dataset - POST zip filename to /silo_name/items/dataset_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ self.createSubmissionDataset(embargoed=False)
# Upload zip file testdir.zip, check response
zipdata = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
# Upload zip file test, check response
@@ -2101,7 +2740,7 @@ def testSymlinkFileUnpack(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -2111,8 +2750,7 @@ def testSymlinkFileUnpack(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'False') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access new dataset, check response
@@ -2125,14 +2763,13 @@ def testSymlinkFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2206,7 +2843,7 @@ def testFileUploadToUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
# Access new dataset TestSubmission-testdir, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission-testdir",
@@ -2220,14 +2857,13 @@ def testFileUploadToUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2263,14 +2899,13 @@ def testFileUploadToUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2330,7 +2965,7 @@ def testUpdateUnpackedDataset(self):
zipdata = self.uploadSubmissionZipfile()
# Upload second zip file, check response
zipdata = self.uploadSubmissionZipfile(file_to_upload="testdir2.zip")
- # Unpack ZIP file into a new dataset, check response
+ # Unpack ZIP file into a new dataset, check response - version 1
fields = \
[ ("filename", "testdir.zip")
]
@@ -2350,7 +2985,7 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
# Access and check list of contents in TestSubmission-testdir
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission-testdir",
@@ -2364,14 +2999,13 @@ def testUpdateUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2381,6 +3015,58 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file2.a")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
self.failUnless((subj,URIRef(oxds+"currentVersion"),"1") in rdfgraph, 'oxds:currentVersion')
+ #-------------------------------------------------
+ #Change embargo - embargoed = true, embargoed_until = datetime
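+ # Embargoing the unpacked dataset here creates version '2', which is why the later
+ # unpack of testdir2.zip is now expected as version '3' and the version list grows to four.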
+ d = datetime.now()
+ delta = timedelta(days=365*4)
+ d2 = d + delta
+ d2 = d2.isoformat()
+ fields = \
+ [ ("embargoed", 'true')
+ ,("embargoed_until", d2)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/TestSubmission-testdir",
+ expect_status=204, expect_reason="Updated")
+ #Access dataset and check content
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/TestSubmission-testdir",
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ dcterms = "http://purl.org/dc/terms/"
+ subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
+ stype = URIRef(oxds+"Grouping")
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.a")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.b")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file2.a")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/TestSubmission-testdir",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
+ #-------------------------------------------------
# Unpack second ZIP file into dataset TestSubmission-testdir, check response
fields = \
[ ("filename", "testdir2.zip"),
@@ -2405,7 +3091,7 @@ def testUpdateUnpackedDataset(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),15,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),14,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
@@ -2416,8 +3102,7 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access dataset TestSubmission-testdir, check response
@@ -2438,8 +3123,8 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -2453,31 +3138,33 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.a")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.b")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
- self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),"3") in rdfgraph, 'oxds:currentVersion')
#Access state information of TestSubmission-testdir
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission-testdir",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(state['item_id'], "TestSubmission-testdir", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
- self.assertEqual(state['versions'][0], '0', "Version 0")
- self.assertEqual(state['versions'][1], '1', "Version 1")
- self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(len(state['versions']), 4, "Four versions")
+ self.assertEqual(state['currentversion'], '3', "Current version == 3")
self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
self.assertEqual(len(state['files']['1']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
- self.assertEqual(len(state['files']['2']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
+ self.assertEqual(len(state['files']['2']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
+ self.assertEqual(len(state['files']['3']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['metadata_files']['3']), 0, "metadata_files of version 3")
self.assertEqual(state['subdir']['0'], [], "Subdirectory count for version 0")
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
- self.assertEqual(len(state['subdir']['2']), 2, "Subdirectory for version 2 should be directory1 and directory2")
+ self.assertEqual(state['subdir']['2'], ['directory'], "Subdirectory for version 2")
+ self.assertEqual(len(state['subdir']['3']), 2, "Subdirectory for version 3 should be directory1 and directory2")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 6, "Parts")
@@ -2493,14 +3180,13 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2516,6 +3202,8 @@ def testUpdateUnpackedDataset(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 5, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
@@ -2529,14 +3217,13 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype1) in rdfgraph, 'Testing submission type: '+subj+", "+stype1)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"0") in rdfgraph, 'oxds:currentVersion')
#Access state information of TestSubmission-testdir version 0
@@ -2545,6 +3232,8 @@ def testUpdateUnpackedDataset(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(len(parts.keys()), 3, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
@@ -2556,52 +3245,53 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d2) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
- self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset with merged metadata") in rdfgraph, 'dcterms:title')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
- self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testdir2/")) in rdfgraph, 'owl:sameAs')
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1/file1.a")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1/file1.b")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory1/file1.c")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.a")) in rdfgraph)
- self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory2/file2.b")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.a")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file1.b")) in rdfgraph)
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"directory/file2.a")) in rdfgraph)
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"test-csv.csv")) in rdfgraph)
self.failUnless((subj,URIRef(oxds+"currentVersion"),"2") in rdfgraph, 'oxds:currentVersion')
- #Access state information of TestSubmission-testdir version 2
+ #Access state information of TestSubmission-testdir version 3
(resp, data) = self.doHTTP_GET(
- resource="states/TestSubmission-testdir/version2",
+ resource="states/TestSubmission-testdir/version3",
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
parts = data['parts']
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d2, "embargoed_until?")
self.assertEqual(len(state.keys()), 11, "States")
self.assertEqual(state['item_id'], "TestSubmission-testdir", "Submission item identifier")
- self.assertEqual(len(state['versions']), 3, "Three versions")
+ self.assertEqual(len(state['versions']), 4, "Four versions")
self.assertEqual(state['versions'][0], '0', "Version 0")
self.assertEqual(state['versions'][1], '1', "Version 1")
self.assertEqual(state['versions'][2], '2', "Version 2")
- self.assertEqual(state['currentversion'], '2', "Current version == 2")
+ self.assertEqual(state['versions'][3], '3', "Version 3")
+ self.assertEqual(state['currentversion'], '3', "Current version == 3")
self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
self.assertEqual(len(state['files']['0']), 1, "List should contain just manifest.rdf")
self.assertEqual(len(state['files']['1']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
- self.assertEqual(len(state['files']['2']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
+ self.assertEqual(len(state['files']['2']), 3, "List should contain manifest.rdf, directory and test-csv.csv")
+ self.assertEqual(len(state['files']['3']), 4, "List should contain manifest.rdf, directory1, directory2 and test-csv.csv")
self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
self.assertEqual(len(state['metadata_files']['2']), 0, "metadata_files of version 2")
+ self.assertEqual(len(state['metadata_files']['3']), 0, "metadata_files of version 3")
self.assertEqual(state['subdir']['0'], [], "Subdirectory count for version 0")
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
- self.assertEqual(len(state['subdir']['2']), 2, "Subdirectory for version 2 should be directory1 and directory2")
+ self.assertEqual(state['subdir']['2'], ['directory'], "Subdirectory for version 2")
+ self.assertEqual(len(state['subdir']['3']), 2, "Subdirectory for version 3 should be directory1 and directory2")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
self.assertEqual(len(parts.keys()), 6, "Parts")
@@ -2668,7 +3358,7 @@ def testMetadataMerging(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf.zip")) in rdfgraph)
@@ -2678,7 +3368,6 @@ def testMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf
@@ -2694,7 +3383,7 @@ def testMetadataMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -2711,7 +3400,6 @@ def testMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
#Get the file arabic.txt
@@ -2787,7 +3475,7 @@ def testMetadataInDirectoryMerging(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf2.zip")) in rdfgraph)
@@ -2796,8 +3484,7 @@ def testMetadataInDirectoryMerging(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf
@@ -2811,7 +3498,7 @@ def testMetadataInDirectoryMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf2/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),19,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -2828,7 +3515,6 @@ def testMetadataInDirectoryMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
# Delete the dataset TestSubmission-testrdf2
@@ -2890,7 +3576,7 @@ def testReferencedMetadataMerging(self):
owl = "http://www.w3.org/2002/07/owl#"
dc = "http://purl.org/dc/elements/1.1/"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf3.zip")) in rdfgraph)
@@ -2899,8 +3585,7 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf3
@@ -2913,11 +3598,11 @@ def testReferencedMetadataMerging(self):
rdfgraph = Graph()
rdfgraph.parse('response.xml', format='xml')
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf3"))
- subj2 = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf3/directory/hebrew.txt"))
+ subj2 = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf3/testrdf3/directory/hebrew.txt"))
base = self.getRequestUri("datasets/TestSubmission-testrdf3/")
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),31,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -2938,7 +3623,6 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/2aFiles/")) in rdfgraph, 'owl:sameAs')
@@ -2946,9 +3630,9 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dc+"description"),"file1.b is another file") in rdfgraph, 'dc:description')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
- #self.failUnless((subj2,RDF.type,stype2) in rdfgraph, 'Testing submission type: '+subj2+", "+stype2)
- #self.failUnless((subj2,URIRef(dcterms+"title"),"Hebrew text") in rdfgraph, 'dcterms:title')
- #self.failUnless((subj2,URIRef(dcterms+"source"),"http://genizah.bodleian.ox.ac.uk/") in rdfgraph, 'dcterms:source')
+ self.failUnless((subj2,RDF.type,stype2) in rdfgraph, 'Testing submission type: %s, %s'%(subj2, stype2))
+ self.failUnless((subj2,URIRef(dcterms+"title"),"Hebrew text") in rdfgraph, 'dcterms:title')
+ self.failUnless((subj2,URIRef(dcterms+"source"),"http://genizah.bodleian.ox.ac.uk/") in rdfgraph, 'dcterms:source')
#Get the file hebrew.txt
(resp, hebrew_data) = self.doHTTP_GET(
resource="datasets/TestSubmission-testrdf3/testrdf3/directory/hebrew.txt",
@@ -3011,7 +3695,7 @@ def testReferencedMetadataMerging2(self):
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
stype = URIRef(oxds+"DataSet")
- self.assertEqual(len(rdfgraph),13,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testrdf4.zip")) in rdfgraph)
@@ -3021,7 +3705,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check list of contents in child dataset - TestSubmission-testrdf3
@@ -3039,7 +3722,7 @@ def testReferencedMetadataMerging2(self):
dc = "http://purl.org/dc/elements/1.1/"
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),29,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -3057,7 +3740,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
@@ -3065,8 +3747,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"title"),"Test item 2a") in rdfgraph, 'dcterms:title')
#self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("2aFiles")) in rdfgraph, 'dcterms:title')
- for s, p, o in rdfgraph.triples((None, RDF.type, None)):
- print s, p, o, type(o)
self.failUnless((subj2,RDF.type,stype2) in rdfgraph, 'Testing submission type: %s, %s'%(str(subj2), str(stype2)))
self.failUnless((subj2,URIRef(dc+"description"),"This is a archived test item 1a ") in rdfgraph, 'dc:description')
self.failUnless((subj2,URIRef(dcterms+"title"),"Test item 1a") in rdfgraph, 'dcterms:title')
@@ -3129,6 +3809,7 @@ def getTestSuite(select="unit"):
, "testDeleteDataset"
, "testDatasetNaming"
, "testDatasetStateInformation"
+ , "testEmbargoOnCreation"
, "testFileUpload"
, "testFileDelete"
, "testFileUpdate"
diff --git a/rdfdatabank/tests/TestSubmission_load.py b/rdfdatabank/tests/TestSubmission_load.py
new file mode 100644
index 0000000..ed95c56
--- /dev/null
+++ b/rdfdatabank/tests/TestSubmission_load.py
@@ -0,0 +1,319 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# $Id: $
+"""
+Databank submission test cases
+
+$Rev: $
+"""
+import os, os.path
+from datetime import datetime, timedelta
+import sys
+import unittest
+import logging
+import httplib
+import urllib
+import codecs
+try:
+ # Running Python 2.5 with simplejson?
+ import simplejson as json
+except ImportError:
+ import json
+
+#My system is running rdflib version 2.4.2. So adding rdflib v3.0 to sys path
+#rdflib_path = os.path.join(os.getcwd(), 'rdflib')
+#sys.path.insert(0, rdflib_path)
+#import rdflib
+#from rdflib.namespace import RDF
+#from rdflib.graph import Graph
+#from rdflib.plugins.memory import Memory
+#from rdflib import URIRef
+#from rdflib import Literal
+#rdflib.plugin.register('sparql',rdflib.query.Processor,'rdfextras.sparql.processor','Processor')
+#rdflib.plugin.register('sparql', rdflib.query.Result,
+# 'rdfextras.sparql.query', 'SPARQLQueryResult')
+
+from StringIO import StringIO
+
+from rdflib import RDF, URIRef, Literal
+from rdflib.Graph import ConjunctiveGraph as Graph
+
+#from time import sleep
+#import subprocess
+
+if __name__ == "__main__":
+ # For testing:
+ # add main library directory to python path if running stand-alone
+ sys.path.append("..")
+
+#from MiscLib import TestUtils
+from testlib import TestUtils
+from testlib import SparqlQueryTestCase
+
+#from RDFDatabankConfigProd import RDFDatabankConfig as RC
+from RDFDatabankConfig import RDFDatabankConfig as RC
+
+RDFDatabankConfig = RC()
+logger = logging.getLogger('TestSubmission')
+
+class TestSubmission(SparqlQueryTestCase.SparqlQueryTestCase):
+ """
+ Test simple dataset submissions to RDFDatabank
+ """
+ def setUp(self):
+ self.setRequestEndPoint(
+ endpointhost=RDFDatabankConfig.endpointhost, # Via SSH tunnel
+ endpointpath=RDFDatabankConfig.endpointpath)
+ self.setRequestUserPass(
+ endpointuser=RDFDatabankConfig.endpointuser,
+ endpointpass=RDFDatabankConfig.endpointpass)
+ self.setRequestUriRoot(
+ manifesturiroot=RDFDatabankConfig.granary_uri_root)
+ resp = self.doHTTP_DELETE(
+ resource="datasets/TestSubmission",
+ expect_status="*", expect_reason="*")
+ return
+
+ def tearDown(self):
+ return
+
+ # Create empty test submission dataset
+ def createSubmissionDataset(self, dataset_id='TestSubmission'):
+ # Create a new dataset, check response
+ fields = \
+ [ ("id", dataset_id)
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/",
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "%sdatasets/%s"%(self._endpointpath, dataset_id)
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
+ return
+
+ def uploadSubmissionZipfile(self, dataset_id='TestSubmission', file_to_upload="testdir.zip", filename=None):
+ # Submit ZIP file, check response
+ fields = []
+ if filename:
+ fields = \
+ [ ("filename", filename)
+ ]
+ else:
+ filename = file_to_upload
+ zipdata = open("testdata/%s"%file_to_upload).read()
+ files = \
+ [ ("file", file_to_upload, zipdata, "application/zip")
+ ]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/%s/"%dataset_id,
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "%sdatasets/%s/%s"%(self._endpointpath, dataset_id, filename)
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
+ return zipdata
+
+ def updateSubmissionZipfile(self, dataset_id='TestSubmission', file_to_upload="testdir.zip", filename=None):
+ # Submit ZIP file, check response
+ fields = []
+ if filename:
+ fields = \
+ [ ("filename", filename)
+ ]
+ zipdata = open("testdata/%s"%file_to_upload).read()
+ files = \
+ [ ("file", file_to_upload, zipdata, "application/zip")
+ ]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata)= self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/%s/"%dataset_id,
+ expect_status=204, expect_reason="No Content")
+ return zipdata
+
+ # Actual tests follow
+ def test01CreateSilo(self):
+ """List all silos your account has access to - GET /admin. If the silo 'sandbox' does not exist, create it"""
+ self.setRequestUserPass(
+ endpointuser=RDFDatabankConfig.endpointadminuser,
+ endpointpass=RDFDatabankConfig.endpointadminpass)
+ # Access list silos, check response
+ (resp, data) = self.doHTTP_GET(
+ endpointpath="/",
+ resource="admin/",
+ expect_status=200, expect_reason="OK", expect_type="application/JSON")
+ silo_name = RDFDatabankConfig.endpointpath.strip('/')
+ silolist = data
+ if not silo_name in silolist:
+ #Create new silo
+ owner_list = [RDFDatabankConfig.endpointadminuser]
+ if not RDFDatabankConfig.endpointuser in owner_list:
+ owner_list.append(RDFDatabankConfig.endpointuser)
+ owner_list = ",".join(owner_list)
+ fields = \
+ [ ("silo", silo_name),
+ ("title", "Sandbox silo"),
+ ("description", "Sandbox silo for testing"),
+ ("notes", "Created by test"),
+ ("owners", owner_list),
+ ("disk_allocation", "100000")
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype, resource="admin/", endpointpath="/",
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "/%s"%silo_name
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
+ # Access list silos, check response
+ (resp, data) = self.doHTTP_GET(
+ endpointpath="/",
+ resource="admin/",
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ newsilolist = data
+ self.failUnless(len(newsilolist)>0, "No silos returned")
+ self.assertEquals(len(newsilolist), len(silolist)+1, "One additional silo should have been returned")
+ for s in silolist: self.failUnless(s in newsilolist, "Silo "+s+" in original list, not in new list")
+ self.failUnless(silo_name in newsilolist, "Silo '%s' not in new list"%silo_name)
+ return
+
+ def testFileUploadBulk(self):
+ """Bulk load test - create a dataset and upload a file to it, repeated 10000 times - POST file to /silo_name/datasets/dataset_name"""
+ for i in range(0, 10000):
+ # Create a new dataset, check response
+ start = datetime.now()
+ dataset_id='TestSubmission%d'%i
+ f = open('test_times.log', 'a')
+ f.write('%s: Creating and uploading file to dataset %s \n'%(start.isoformat(), dataset_id))
+ f.close()
+ self.createSubmissionDataset(dataset_id=dataset_id)
+ #Access state information
+ (resp, respdata) = self.doHTTP_GET(
+ resource="states/%s"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ # Upload zip file, check response
+ zipdata = self.uploadSubmissionZipfile(dataset_id=dataset_id, file_to_upload='rdfdatabank.zip', filename='testdir.zip')
+ end = datetime.now()
+ delta = end - start
+ time_used = delta.days * 86400 + delta.seconds
+ f = open('test_times.log', 'a')
+ f.write(' Time taken: %s \n\n'%str(time_used))
+ f.close()
+ # Access and check list of contents
+ (resp, rdfdata) = self.doHTTP_GET(
+ resource="datasets/%s"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/rdf+xml")
+ rdfgraph = Graph()
+ rdfstream = StringIO(rdfdata)
+ rdfgraph.parse(rdfstream)
+ subj = URIRef(self.getRequestUri("datasets/%s"%dataset_id))
+ base = self.getRequestUri("datasets/%s/"%dataset_id)
+ dcterms = "http://purl.org/dc/terms/"
+ ore = "http://www.openarchives.org/ore/terms/"
+ oxds = "http://vocab.ox.ac.uk/dataset/schema#"
+ stype = URIRef(oxds+"DataSet")
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
+ self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
+ self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
+ self.failUnless((subj,URIRef(ore+"aggregates"),URIRef(base+"testdir.zip")) in rdfgraph)
+ self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
+ self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
+ self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
+ self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
+ self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
+ # Access and check zip file content
+ (resp, zipfile) = self.doHTTP_GET(
+ resource="datasets/%s/testdir.zip"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/zip")
+ self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!")
+ #Access state information and check
+ (resp, data) = self.doHTTP_GET(
+ resource="states/%s"%dataset_id,
+ expect_status=200, expect_reason="OK", expect_type="application/json")
+ state = data['state']
+ parts = data['parts']
+ self.assertEqual(len(state.keys()), 11, "States")
+ self.assertEqual(state['item_id'], dataset_id, "Submission item identifier")
+ self.assertEqual(len(state['versions']), 2, "Two versions")
+ self.assertEqual(state['versions'][0], '0', "Version 0")
+ self.assertEqual(state['versions'][1], '1', "Version 1")
+ self.assertEqual(state['currentversion'], '1', "Current version == 1")
+ self.assertEqual(state['rdffileformat'], 'xml', "RDF file type")
+ self.assertEqual(state['rdffilename'], 'manifest.rdf', "RDF file name")
+ self.assertEqual(state['files']['0'], ['manifest.rdf'], "List should contain just manifest.rdf")
+ self.assertEqual(len(state['files']['1']), 2, "List should contain manifest.rdf and testdir.zip")
+ self.assertEqual(len(state['metadata_files']['0']), 0, "metadata_files of version 0")
+ self.assertEqual(len(state['metadata_files']['1']), 0, "metadata_files of version 1")
+ self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
+ self.assertEqual(len(state['subdir']['1']), 0, "Subdirectory count for version 1")
+ self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointuser, "Created by")
+ self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
+ self.assertEqual(len(parts.keys()), 4, "Parts")
+ self.assertEqual(len(parts['4=%s'%dataset_id].keys()), 13, "File stats for 4=%s"%dataset_id)
+ self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
+ self.assertEqual(len(parts['testdir.zip'].keys()), 13, "File stats for testdir.zip")
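+
+ # A minimal, commented-out sketch for summarising the timings written to
+ # test_times.log above (assumes the " Time taken: <seconds>" lines emitted
+ # by testFileUploadBulk); not executed as part of the test run.
+ #def summariseUploadTimes(logfile='test_times.log'):
+ # times = []
+ # for line in open(logfile):
+ # if 'Time taken:' in line:
+ # times.append(int(line.split('Time taken:')[1].strip()))
+ # if times:
+ # print 'uploads: %d, total: %ds, mean: %.1fs'%(len(times), sum(times), float(sum(times))/len(times))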
+
+ # Sentinel/placeholder tests
+
+ def testUnits(self):
+ assert (True)
+
+ def testComponents(self):
+ assert (True)
+
+ def testIntegration(self):
+ assert (True)
+
+ def testPending(self):
+ #Need to have performance tests and analyse performance
+ #Need to set the permission of file being uploaded
+ #assert (False), "Pending tests follow"
+ assert (True)
+
+# Assemble test suite
+
+def getTestSuite(select="unit"):
+ """
+ Get test suite
+
+ select is one of the following:
+ "unit" return suite of unit tests only
+ "component" return suite of unit and component tests
+ "all" return suite of unit, component and integration tests
+ "pending" return suite of pending tests
+ name a single named test to be run
+ """
+ testdict = {
+ "unit":
+ [ "testUnits"
+ , "test01CreateSilo"
+ , "testFileUploadBulk"
+ ],
+ "component":
+ [ "testComponents"
+ ],
+ "integration":
+ [ "testIntegration"
+ ],
+ "pending":
+ [ "testPending"
+ ]
+ }
+ return TestUtils.getTestSuite(TestSubmission, testdict, select=select)
+
+if __name__ == "__main__":
+ TestUtils.runTests("TestSubmission.log", getTestSuite, sys.argv)
+
+# End.
diff --git a/rdfdatabank/tests/TestSubmission_submitter.py b/rdfdatabank/tests/TestSubmission_submitter.py
index 0ab35cd..8f55124 100644
--- a/rdfdatabank/tests/TestSubmission_submitter.py
+++ b/rdfdatabank/tests/TestSubmission_submitter.py
@@ -10,6 +10,7 @@
"""
import os, os.path
import datetime
+from dateutil.relativedelta import *
import sys
import unittest
import logging
@@ -42,6 +43,7 @@
from testlib import SparqlQueryTestCase
from RDFDatabankConfig import RDFDatabankConfig
+#from RDFDatabankConfigProd import RDFDatabankConfig
logger = logging.getLogger('TestSubmission')
@@ -71,9 +73,12 @@ def tearDown(self):
# Create empty test submission dataset
def createSubmissionDataset(self):
+ d = (datetime.datetime.now() + datetime.timedelta(days=365*4)).isoformat()
# Create a new dataset, check response
fields = \
- [ ("id", "TestSubmission")
+ [ ("id", "TestSubmission"),
+ ('embargoed', 'True'),
+ ('embargoed_until', d)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -84,7 +89,7 @@ def createSubmissionDataset(self):
LHobtained = resp.getheader('Content-Location', None)
LHexpected = "%sdatasets/TestSubmission"%self._endpointpath
self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
- return
+ return d
def uploadSubmissionZipfile(self, file_to_upload="testdir.zip"):
# Submit ZIP file, check response
@@ -680,7 +685,7 @@ def testDatasetNotPresent(self):
def testDatasetCreation(self):
"""Create dataset - POST id to /silo_name"""
# Create a new dataset as submitter, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Access dataset, check response
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -700,8 +705,8 @@ def testDatasetCreation(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
#Admin user of this silo - Create a new dataset, check response
@@ -731,7 +736,7 @@ def testDatasetCreation(self):
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
subj = URIRef(self.getRequestUri("datasets/TestSubmission2"))
- self.assertEqual(len(rdfgraph), 10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph), 9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(dcterms+"mediator"),RDFDatabankConfig.endpointadminuser) in rdfgraph, 'dcterms:mediator')
#manager user of this silo - Create a new dataset, check response
@@ -761,7 +766,7 @@ def testDatasetCreation(self):
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
subj = URIRef(self.getRequestUri("datasets/TestSubmission3"))
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,URIRef(dcterms+"mediator"),RDFDatabankConfig.endpointmanageruser) in rdfgraph, 'dcterms:mediator')
#General user - Create a new dataset, check response
@@ -882,7 +887,7 @@ def testDatasetCreation2(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -894,16 +899,17 @@ def testDatasetCreation2(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
-
#Admin user of this silo - Create a new dataset, check response
self.setRequestUserPass(
endpointuser=RDFDatabankConfig.endpointadminuser,
endpointpass=RDFDatabankConfig.endpointadminpass)
+ d = (datetime.datetime.now() + datetime.timedelta(days=365*4)).isoformat()
fields = \
- [ ("id", "TestSubmission2")
+ [ ("id", "TestSubmission2"),
+ ('embargoed', 'True'),
+ ('embargoed_until', d)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -933,7 +939,9 @@ def testDatasetCreation2(self):
endpointuser=RDFDatabankConfig.endpointmanageruser,
endpointpass=RDFDatabankConfig.endpointmanagerpass)
fields = \
- [ ("id", "TestSubmission3")
+ [ ("id", "TestSubmission3"),
+ ('embargoed', 'True'),
+ ('embargoed_until', d)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -1421,7 +1429,7 @@ def testDatasetNaming(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
elif status == 403 or status == 400:
(resp, respdata) = self.doHTTP_GET(
resource="datasets/%s"%name,
@@ -1439,7 +1447,7 @@ def testDatasetNaming(self):
def testDatasetStateInformation(self):
"""Get state information of dataset - GET /silo_name/states/dataset_name."""
# Create a new dataset by submitter, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Access state info
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
@@ -1458,6 +1466,7 @@ def testDatasetStateInformation(self):
self.assertEqual(len(state['subdir']['0']), 0, "Subdirectory count for version 0")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], d, "Embargoed until?")
# date
# version_dates
self.assertEqual(len(parts.keys()), 3, "Parts")
@@ -1529,7 +1538,7 @@ def testDatasetStateInformation(self):
def testFileUpload(self):
"""Upload file to dataset - POST file to /silo_name/datasets/dataset_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
#Access state information
(resp, respdata) = self.doHTTP_GET(
resource="states/TestSubmission",
@@ -1558,8 +1567,8 @@ def testFileUpload(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -1741,7 +1750,7 @@ def testFileUpload(self):
def testFileDelete(self):
"""Delete file in dataset - DELETE /silo_name/datasets/dataset_name/file_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Upload zip file, check response
zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
@@ -1788,8 +1797,8 @@ def testFileDelete(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
#Access state information and check
@@ -1954,7 +1963,7 @@ def testFileDelete(self):
def testFileUpdate(self):
"""Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2)"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Upload zip file, check response (uploads the file testdir.zip)
zipdata = self.uploadSubmissionZipfile()
# Access and check list of contents
@@ -1997,8 +2006,8 @@ def testFileUpdate(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -2191,7 +2200,7 @@ def testGetDatasetByVersionByURI(self):
stype = URIRef(oxds+"DataSet")
#---------Version 0
# Create a new dataset, check response
- self.createSubmissionDataset()
+ d = self.createSubmissionDataset()
# Access and check list of contents
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -2341,8 +2350,8 @@ def testGetDatasetByVersionByURI(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'0') in rdfgraph, 'oxds:currentVersion')
#Access state information and check
(resp, data) = self.doHTTP_GET(
@@ -2371,8 +2380,8 @@ def testGetDatasetByVersionByURI(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -2409,8 +2418,8 @@ def testGetDatasetByVersionByURI(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),d) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'2') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
# Access and check zip file content
@@ -4893,7 +4902,19 @@ def testUnicodeMetadataFileUpdate(self):
def testChangeEmbargo(self):
"""Change embargo information - POST embargo_change to /silo_name/datasets/dataset_name"""
# Create a new dataset, check response
- self.createSubmissionDataset()
+ #self.createSubmissionDataset()
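+ # Create the dataset inline with only an "id" field (rather than via
+ # createSubmissionDataset, which now also posts embargoed/embargoed_until)
+ # so that the server-side embargo defaults are what is asserted below.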
+ fields = \
+ [ ("id", "TestSubmission")
+ ]
+ files =[]
+ (reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
+ (resp,respdata) = self.doHTTP_POST(
+ reqdata, reqtype,
+ resource="datasets/",
+ expect_status=201, expect_reason="Created")
+ LHobtained = resp.getheader('Content-Location', None)
+ LHexpected = "%sdatasets/TestSubmission"%self._endpointpath
+ self.assertEquals(LHobtained, LHexpected, 'Content-Location not correct')
#Access dataset and check content
(resp, rdfdata) = self.doHTTP_GET(
resource="datasets/TestSubmission",
@@ -4901,7 +4922,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
@@ -4910,15 +4931,13 @@ def testChangeEmbargo(self):
base = self.getRequestUri("datasets/TestSubmission/")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
#Access state information and check
(resp, data) = self.doHTTP_GET(
resource="states/TestSubmission",
expect_status=200, expect_reason="OK", expect_type="application/json")
- embargoed_until_date = (datetime.date.today() + datetime.timedelta(days=365*70)).isoformat()
state = data['state']
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.failUnless(embargoed_until_date in state['metadata']['embargoed_until'], "Default 70 year embargo failed")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "embargoed_until indefinite - default embargo failed")
# Upload zip file, check response
zipdata = self.uploadSubmissionZipfile(file_to_upload="testdir.zip")
#Access dataset and check content
@@ -4928,10 +4947,9 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -4956,7 +4974,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -4971,7 +4989,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -4986,7 +5004,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5000,7 +5018,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=302, expect_reason="Found", expect_type="application/zip")
@@ -5016,7 +5034,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5030,7 +5048,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5044,12 +5062,13 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
# Delete embargo, check response
+ embargoed_until_date = datetime.datetime.now().isoformat()
self.setRequestUserPass(
endpointuser=RDFDatabankConfig.endpointsubmitteruser,
endpointpass=RDFDatabankConfig.endpointsubmitterpass)
@@ -5209,10 +5228,10 @@ def testChangeEmbargo(self):
self.setRequestUserPass(
endpointuser=RDFDatabankConfig.endpointadminuser,
endpointpass=RDFDatabankConfig.endpointadminpass)
- d = (datetime.datetime.now() + datetime.timedelta(days=365*10)).isoformat()
+ d1 = (datetime.datetime.now() + datetime.timedelta(days=365*10)).isoformat()
fields = \
[ ("embargoed", 'true')
- ,("embargoed_until", d)
+ ,("embargoed_until", d1)
]
files =[]
(reqtype, reqdata) = SparqlQueryTestCase.encode_multipart_formdata(fields, files)
@@ -5241,7 +5260,7 @@ def testChangeEmbargo(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.failUnless(d in state['metadata']['embargoed_until'], "embargoed_until date?")
+ self.failUnless(d1 in state['metadata']['embargoed_until'], "embargoed_until date?")
# Access and check zip file content
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
@@ -5859,14 +5878,13 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
dcterms = "http://purl.org/dc/terms/"
subj = URIRef(self.getRequestUri("datasets/TestSubmission"))
stype = URIRef(oxds+"DataSet")
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -5882,7 +5900,7 @@ def testChangeEmbargo(self):
expect_status=200, expect_reason="OK", expect_type="application/json")
state = data['state']
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
- self.failUnless(embargoed_until_date in state['metadata']['embargoed_until'], "Updating embargoed_until date")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed?")
# Access and check zip file content by submitter
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
@@ -5898,7 +5916,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -5913,7 +5931,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=200, expect_reason="OK", expect_type="application/zip")
@@ -5928,7 +5946,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5942,7 +5960,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=302, expect_reason="Found", expect_type="application/zip")
@@ -5958,7 +5976,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5972,7 +5990,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -5986,7 +6004,7 @@ def testChangeEmbargo(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),12,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),11,'Graph length %i' %len(rdfgraph))
(resp, zipfile) = self.doHTTP_GET(
resource="datasets/TestSubmission/testdir.zip",
expect_status=403, expect_reason="Forbidden", expect_type="application/zip")
@@ -6045,8 +6063,8 @@ def testFileUnpack(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
+ self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"4") in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((URIRef(base+"testdir.zip"),URIRef(dcterms+"hasVersion"),subj2) in rdfgraph, 'ore:aggregates testrdf.zip')
@@ -6060,15 +6078,14 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6106,6 +6123,7 @@ def testFileUnpack(self):
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed?")
self.assertEqual(len(parts.keys()), 5, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -6153,15 +6171,14 @@ def testFileUnpack(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir2/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6210,15 +6227,14 @@ def testFileUnpack(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir2/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6278,15 +6294,14 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6337,15 +6352,14 @@ def testFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testrdf"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6616,15 +6630,14 @@ def testSymlinkFileUnpack(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6662,6 +6675,7 @@ def testSymlinkFileUnpack(self):
self.assertEqual(state['subdir']['1'], ['directory'], "Subdirectory for version 1")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
self.assertEqual(len(parts.keys()), 5, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -6714,15 +6728,14 @@ def testFileUploadToUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6758,15 +6771,14 @@ def testFileUploadToUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),18,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -6803,6 +6815,7 @@ def testFileUploadToUnpackedDataset(self):
self.assertEqual(state['subdir']['2'], ['directory'], "Subdirectory for version 2")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
self.assertEqual(len(parts.keys()), 6, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -7016,15 +7029,14 @@ def testUpdateUnpackedDataset(self):
dcterms = "http://purl.org/dc/terms/"
ore = "http://www.openarchives.org/ore/terms/"
oxds = "http://vocab.ox.ac.uk/dataset/schema#"
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7093,7 +7105,7 @@ def testUpdateUnpackedDataset(self):
base = self.getRequestUri("datasets/TestSubmission-testdir/")
owl = "http://www.w3.org/2002/07/owl#"
dc = "http://purl.org/dc/elements/1.1/"
- self.assertEqual(len(rdfgraph),29,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7111,8 +7123,7 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
#self.failUnless((subj,URIRef(dcterms+"title"),"Test dataset 4 with updated and merged metadata") in rdfgraph, 'dcterms:title')
@@ -7152,6 +7163,7 @@ def testUpdateUnpackedDataset(self):
self.assertEqual(state['subdir']['2'], ['testrdf4'], "Subdirectory for version 2 should be directory")
self.assertEqual(state['metadata']['createdby'], RDFDatabankConfig.endpointsubmitteruser, "Created by")
self.assertEqual(state['metadata']['embargoed'], True, "Embargoed?")
+ self.assertEqual(state['metadata']['embargoed_until'], '', "Embargoed until?")
self.assertEqual(len(parts.keys()), 4, "Parts")
self.assertEqual(len(parts['4=TestSubmission-testdir'].keys()), 13, "File stats for 4=TestSubmission-testdir")
self.assertEqual(len(parts['manifest.rdf'].keys()), 13, "File stats for manifest.rdf")
@@ -7215,7 +7227,7 @@ def testUpdateUnpackedDataset(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7223,7 +7235,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7285,7 +7296,7 @@ def testUpdateUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7293,7 +7304,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7502,7 +7512,7 @@ def testUpdateUnpackedDataset(self):
stype2 = URIRef(oxds+"Grouping")
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),10,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),9,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7510,7 +7520,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),"0") in rdfgraph, 'oxds:currentVersion')
#Access state information of TestSubmission-testdir version 0
@@ -7531,7 +7540,7 @@ def testUpdateUnpackedDataset(self):
rdfgraph = Graph()
rdfstream = StringIO(rdfdata)
rdfgraph.parse(rdfstream)
- self.assertEqual(len(rdfgraph),17,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),16,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype2) in rdfgraph, 'Testing submission type: '+subj+", "+stype2)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7539,7 +7548,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7590,7 +7598,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
self.failUnless((subj,URIRef(dcterms+"title"),"Test item 2a") in rdfgraph, 'dcterms:title')
@@ -7640,7 +7647,7 @@ def testUpdateUnpackedDataset(self):
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
owl = "http://www.w3.org/2002/07/owl#"
- self.assertEqual(len(rdfgraph),22,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7648,7 +7655,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7712,7 +7718,7 @@ def testUpdateUnpackedDataset(self):
subj = URIRef(self.getRequestUri("datasets/TestSubmission-testdir"))
stype = URIRef("http://vocab.ox.ac.uk/dataset/schema#Grouping")
base = self.getRequestUri("datasets/TestSubmission-testdir/")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"identifier"),None) in rdfgraph, 'dcterms:identifier')
self.failUnless((subj,URIRef(dcterms+"mediator"),None) in rdfgraph, 'dcterms:mediator')
@@ -7720,7 +7726,6 @@ def testUpdateUnpackedDataset(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -7872,7 +7877,7 @@ def testMetadataMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),21,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),20,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -7889,8 +7894,7 @@ def testMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
#Get the file arabic.txt
@@ -7981,7 +7985,7 @@ def testMetadataInDirectoryMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf2/")
owl = "http://www.w3.org/2002/07/owl#"
stype = URIRef(oxds+"Grouping")
- self.assertEqual(len(rdfgraph),20, 'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),19, 'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/testrdf/")) in rdfgraph, 'owl:sameAs')
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
@@ -7999,7 +8003,6 @@ def testMetadataInDirectoryMerging(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
# Delete the dataset TestSubmission-testrdf2
@@ -8091,7 +8094,7 @@ def testReferencedMetadataMerging(self):
base = self.getRequestUri("datasets/TestSubmission-testrdf3/")
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),32,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),31,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -8112,8 +8115,7 @@ def testReferencedMetadataMerging(self):
self.failUnless((subj,URIRef(dcterms+"rights"),None) in rdfgraph, 'dcterms:rights')
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
- self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
+ self.failUnless((subj,URIRef(oxds+"isEmbargoed"),'True') in rdfgraph, 'oxds:isEmbargoed')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(owl+"sameAs"),URIRef("http://example.org/2aFiles/")) in rdfgraph, 'owl:sameAs')
@@ -8216,7 +8218,7 @@ def testReferencedMetadataMerging2(self):
dc = "http://purl.org/dc/elements/1.1/"
stype = URIRef(oxds+"Grouping")
stype2 = URIRef(oxds+"item")
- self.assertEqual(len(rdfgraph),29,'Graph length %i' %len(rdfgraph))
+ self.assertEqual(len(rdfgraph),28,'Graph length %i' %len(rdfgraph))
self.failUnless((subj,RDF.type,stype) in rdfgraph, 'Testing submission type: '+subj+", "+stype)
self.failUnless((subj,URIRef(dcterms+"modified"),None) in rdfgraph, 'dcterms:modified')
self.failUnless((subj,URIRef(dcterms+"isVersionOf"),None) in rdfgraph, 'dcterms:isVersionOf')
@@ -8235,7 +8237,6 @@ def testReferencedMetadataMerging2(self):
self.failUnless((subj,URIRef(dcterms+"license"),None) in rdfgraph, 'dcterms:license')
self.failUnless((subj,URIRef(dcterms+"publisher"),None) in rdfgraph, 'dcterms:publisher')
self.failUnless((subj,URIRef(oxds+"isEmbargoed"),None) in rdfgraph, 'oxds:isEmbargoed')
- self.failUnless((subj,URIRef(oxds+"embargoedUntil"),None) in rdfgraph, 'oxds:embargoedUntil')
self.failUnless((subj,URIRef(dcterms+"created"),None) in rdfgraph, 'dcterms:created')
self.failUnless((subj,URIRef(oxds+"currentVersion"),'1') in rdfgraph, 'oxds:currentVersion')
self.failUnless((subj,URIRef(dc+"description"),"This is a archived test item 2a ") in rdfgraph, 'dc:description')
diff --git a/rdfdatabank/tests/testdata/rdfdatabank.zip b/rdfdatabank/tests/testdata/rdfdatabank.zip
new file mode 100644
index 0000000..5486da4
Binary files /dev/null and b/rdfdatabank/tests/testdata/rdfdatabank.zip differ
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 0000000..1359fc3
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,13 @@
+nose
+pairtree==0.5.6-T
+pylons==0.9.7
+repoze.who==2.0a4
+repoze.who-friendlyform
+repoze.profile
+rdflib==2.4.2
+rdfobject
+solrpy
+uuid
+redis==1.34.1
+python-dateutil==1.5
+
diff --git a/requirements.txt b/requirements.txt
index 5b2d1ff..a1eae4d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,8 +2,10 @@ nose
pairtree==0.5.6-T
pylons==0.9.7
repoze.who==2.0a4
+repoze.who-friendlyform
rdflib==2.4.2
rdfobject
solrpy
uuid
redis==1.34.1
+python-dateutil==1.5