diff --git a/README.txt b/README.txt index 3db4301..f0f52bc 100644 --- a/README.txt +++ b/README.txt @@ -8,7 +8,7 @@ RDF-enhanced storage API, pairtree-backed. Installation and dependancies ============================= -The dependancies are listed in the file docs/Dependancies.pdf +The dependencies are listed in the file docs/Dependencies.txt The installation instructions are available at docs/Databank_VM_Installation.txt For further infromation visit http://www.dataflow.ox.ac.uk/ diff --git a/debian/README b/debian/README index 433bddc..96a2f8c 100644 --- a/debian/README +++ b/debian/README @@ -3,4 +3,4 @@ The Debian Package databank Comments regarding the Package - -- Anusha Ranganathan Fri, 09 Mar 2012 17:08:50 +0000 + -- Anusha Ranganathan Mon, 09 Apr 2012 17:08:50 +0000 diff --git a/debian/changelog b/debian/changelog index 8e38f94..e8d7e6d 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,14 @@ +databank (0.3rc1) unstable; urgency=low + + * Basic support for sword deposit + * Databank search interface enhanced + * Added function to PUT a zipfile and unpack it + * content negotiation also on error messages + * Added message workers for data indexing + + -- Anusha Ranganathan Mon, 09 Apr 2012 17:08:50 +0000 + + databank (0.2) unstable; urgency=low * Added message workers diff --git a/debian/config b/debian/config index 0bdeff2..f97b176 100644 --- a/debian/config +++ b/debian/config @@ -28,10 +28,10 @@ db_go #db_go # Password? 
-#db_input critical databank/password || true -#db_go +db_input critical databank/password || true +db_go # Note on Password -db_input critical databank/passwdnote || true -db_go +#db_input critical databank/passwdnote || true +#db_go diff --git a/debian/control b/debian/control index 44ee2a6..35ca1c6 100644 --- a/debian/control +++ b/debian/control @@ -29,8 +29,8 @@ Depends: python, libapache2-mod-wsgi, redis-server, supervisor, - debconf, - openjdk-6-jre + openjdk-6-jre, + debconf Recommends: solr-tomcat Description: RDF-enhanced, pairtree-backed storage API Databank is a simple, RDF-enhanced storage API which is backed by pairtree, diff --git a/debian/copyright b/debian/copyright index b0264d8..896670a 100644 --- a/debian/copyright +++ b/debian/copyright @@ -1,6 +1,6 @@ This work was packaged for Debian by: - Anusha Ranganathan on Fri, 09 Mar 2012 17:08:50 +0000 + Anusha Ranganathan on Mon, 09 Apr 2012 17:08:50 +0000 It was downloaded from: diff --git a/debian/databank.install b/debian/databank.install index 7096838..0187a8e 100644 --- a/debian/databank.install +++ b/debian/databank.install @@ -10,3 +10,6 @@ rdfdatabank/ /var/lib/databank/ rdfdatabank.egg-info/ /var/lib/databank test.ini /var/lib/databank/ who.ini /var/lib/databank/ +sss.conf.json /var/lib/databank/ +add_user.py /var/lib/databank/ +persisted_state.json /var/lib/databank/ \ No newline at end of file diff --git a/debian/docs b/debian/docs index eb96eff..6bfb954 100644 --- a/debian/docs +++ b/debian/docs @@ -3,5 +3,5 @@ README.txt requirements.txt docs/assigning_dois docs/Databank_VM_Installation.txt -docs/Dependencies.pdf +docs/Dependencies.txt docs/using_databank_api diff --git a/debian/postinst b/debian/postinst index 187b6b0..9e89a35 100644 --- a/debian/postinst +++ b/debian/postinst @@ -22,8 +22,8 @@ local dbstore="$RET" db_get databank/uri local dburi="$RET" -#db_get databank/password -#local dbpasswd="$RET" +db_get databank/password +local dbpasswd="$RET" #Modify the ini files 
CONFIG_FILE1=/etc/default/databank/production.ini @@ -51,9 +51,15 @@ TARGET_KEY=granary.uri_root sed -i "s,\($TARGET_KEY *= *\).*,\1$dburi," $CONFIG_FILE1 sed -i "s,\($TARGET_KEY *= *\).*,\1$dburi," $CONFIG_FILE2 -#Copy htpasswd file +#Add the user password #htpasswd -b -c /var/lib/databank/passwd admin $dbpasswd -cp /var/lib/databank/passwd-default /var/lib/databank/passwd +#cp /var/lib/databank/passwd-default /var/lib/databank/passwd +cd /var/lib/databank +if ! [ -f /var/lib/databank/passwd ] +then +touch /var/lib/databank/passwd +fi +python add_user.py admin $dbpasswd #Link config files ln -sf /etc/default/databank/production.ini /var/lib/databank/production.ini @@ -132,4 +138,17 @@ a2ensite databank_wsgi invoke-rc.d apache2 reload invoke-rc.d apache2 start +echo =========================================================== +echo Databank has been successfully installed +echo +echo Your Databank instance is available at http://localhost +echo Databank\'s home directory is /var/lib/databank +echo Data is stored under $dbstore +echo +echo To get started, visit the Databank homepage at http://localhost +echo +echo This package is brought to you by the Dataflow Team +echo http://dataflow.ox.ac.uk +echo =========================================================== + db_stop diff --git a/development.ini b/development.ini index c8ad8bb..7dcf5d3 100644 --- a/development.ini +++ b/development.ini @@ -1,3 +1,23 @@ +# Copyright (c) 2012 University of Oxford +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# 
included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # rdfdatabank - Pylons development environment configuration # @@ -8,7 +28,7 @@ debug = true # Uncomment and replace with the address which should receive any error reports #email_to = you@yourdomain.com smtp_server = localhost -error_email_from = paste@localhost +error_email_from = paste@databank [server:main] use = egg:Paste#http @@ -28,14 +48,13 @@ sqlalchemy.url = mysql://databanksqladmin:d6sqL4dm;n@localhost:3306/databankauth sqlalchemy.pool_recycle = 3600 cache_dir = /var/cache/databank -beaker.session.key = localdatabank +beaker.session.key = rdfdatabank beaker.session.secret = somesecret -who.config_file = %(here)s/who.ini +who.config_file = /var/lib/databank/who.ini who.log_level = info -#who.log_file = /var/log/databank/who.log who.log_file = stdout -#who.log_file = %(here)s/logs/who.log +#who.log_file = /var/log/databank/who.log redis.host = localhost @@ -54,14 +73,19 @@ doi.count = /var/lib/databank/rdfdatabank/config/doi_count broadcast.to = redis broadcast.queue = silochanges +metadata.embargoed = False + solr.host = http://localhost:8080/solr naming_rule = [^0-9a-zA-Z_\-\:] +naming_rule_humanized = Numbers, alphabets and -: formats_served = text/html,text/xhtml,text/plain,application/json,application/rdf+xml,text/xml,text/rdf+n3,application/x-turtle,text/rdf+ntriples,text/rdf+nt publisher = Bodleian Libraries, University of Oxford rights = 
http://ora.ouls.ox.ac.uk/objects/uuid%3A1d00eebb-8fed-46ad-8e38-45dbdb4b224c license = CC0 1.0 Universal (CC0 1.0). See http://creativecommons.org/publicdomain/zero/1.0/legalcode #license = Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License. See http://creativecommons.org/licenses/by-nc-sa/3.0/ +api.version = 0.3 + # If you'd like to fine-tune the individual locations of the cache data dirs # for the Cache data, or the Session saves, un-comment the desired settings # here: @@ -109,8 +133,6 @@ class = FileHandler level = INFO formatter = generic args = ('/var/log/databank/databank.log', 'w') -#args = ('/opt/RDFDatabank/logs/databank.log', 'w') -#args = ('%(here)s/logs/databank.log', 'w') [formatter_generic] format = %(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s diff --git a/docs/Dependencies.pdf b/docs/Dependencies.pdf deleted file mode 100644 index e8cabba..0000000 Binary files a/docs/Dependencies.pdf and /dev/null differ diff --git a/docs/Dependencies.txt b/docs/Dependencies.txt new file mode 100644 index 0000000..3aec7c5 --- /dev/null +++ b/docs/Dependencies.txt @@ -0,0 +1,49 @@ +Databank is a web-based application for storing, curating and publishing data-packages, and is written using python and the pylons web framework. Its default deployment includes a message queue providing databank notifications to listener services. This message queue is handled using `Redis `_ and `Supervisor `_ to maintain the listener services. The search interface in Databank is powered by `Apache SOLR `_. 
+ ++--------------------------------+-----------+ +| Package name | Version | ++================================+===========+ +| python | >=2.6 | ++--------------------------------+-----------+ +| python-pylons | >=0.9.7 | ++--------------------------------+-----------+ +| python-repoze.who | =2.0a4 | ++--------------------------------+-----------+ +| python-repoze.who-friendlyform | =1.0.8 | ++--------------------------------+-----------+ +| python-rdflib | =2.4.2 | ++--------------------------------+-----------+ +| python-dateutil | >=1.4.1-4 | ++--------------------------------+-----------+ +| python-libxml2 | >=2.7.8 | ++--------------------------------+-----------+ +| python-libxslt1 | >=1.1.26-7| ++--------------------------------+-----------+ +| python-pairtree | >=0.7.1 | ++--------------------------------+-----------+ +| python-recordsilo | >=0.4.14 | ++--------------------------------+-----------+ +| python-solrpy | >=0.9.5 | ++--------------------------------+-----------+ +| python-redis | >=2.4.5-1 | ++--------------------------------+-----------+ +| unzip | >=6.0 | ++--------------------------------+-----------+ +| apache2 | >=2.2.20 | ++--------------------------------+-----------+ +| apache2-utils | >=2.2.20 | ++--------------------------------+-----------+ +| libapache2-mod-wsgi | >=3.3 | ++--------------------------------+-----------+ +| redis-server | >=2.2.11 | ++--------------------------------+-----------+ +| supervisor | >=3.0 | ++--------------------------------+-----------+ +| openjdk-6-jre | >=6b23 | ++--------------------------------+-----------+ +| solr-tomcat | >=1.4.1 | ++--------------------------------+-----------+ +| Simple Sword Server * | 2.0 | ++--------------------------------+-----------+ + +\* Available from http://sword-app.svn.sourceforge.net/viewvc/sword-app/sss/branches/sss-2/ diff --git a/docs/accessLogWithheaderInfo_2011_03_16.csv b/docs/accessLogWithheaderInfo_2011_03_16.csv deleted file mode 100644 index 
aea9df5..0000000 Binary files a/docs/accessLogWithheaderInfo_2011_03_16.csv and /dev/null differ diff --git a/docs/accessLogWithheaderInfo_2011_03_16.pdf b/docs/accessLogWithheaderInfo_2011_03_16.pdf deleted file mode 100644 index 962790f..0000000 Binary files a/docs/accessLogWithheaderInfo_2011_03_16.pdf and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run1_5Fail_2Error_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run1_5Fail_2Error_pyhonTestMessage.txt deleted file mode 100644 index 49b3f07..0000000 --- a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run1_5Fail_2Error_pyhonTestMessage.txt +++ /dev/null @@ -1,91 +0,0 @@ - -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already impo rted from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.py, but /usr/local/lib/python2.6/dist- packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -.............FFF..FFEE........ 
-====================================================================== -ERROR: Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1394, in testDeleteEmbargo - expect_status=200, expect_reason="OK", expect_type="application/json") - File "../TestLib/SparqlQueryTestCase.py", line 182, in doHTTP_GET - if (expect_type.lower() == "application/json"): responsedata = simplejson.loads(responsedata) - File "/usr/lib/pymodules/python2.6/simplejson/__init__.py", line 307, in loads - return _default_decoder.decode(s) - File "/usr/lib/pymodules/python2.6/simplejson/decoder.py", line 338, in decode - raise ValueError(errmsg("Extra data", s, end, len(s))) -ValueError: Extra data: line 1 column 4 - line 11 column 1372 (char 4 - 1664) - -====================================================================== -ERROR: Modify embargo information - POST embargo_change, embargo, embargo_until to /silo_name/datasets/dataset_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1457, in testChangeEmbargo - expect_status=200, expect_reason="OK", expect_type="application/json") - File "../TestLib/SparqlQueryTestCase.py", line 182, in doHTTP_GET - if (expect_type.lower() == "application/json"): responsedata = simplejson.loads(responsedata) - File "/usr/lib/pymodules/python2.6/simplejson/__init__.py", line 307, in loads - return _default_decoder.decode(s) - File "/usr/lib/pymodules/python2.6/simplejson/decoder.py", line 338, in decode - raise ValueError(errmsg("Extra data", s, end, len(s))) -ValueError: Extra data: line 1 column 4 - line 11 column 1394 (char 4 - 1686) - -====================================================================== -FAIL: Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2) 
----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 592, in testFileUpdate - self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!") -AssertionError: Difference between local and remote zipfile! - -====================================================================== -FAIL: Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. Access each of the versions and the files in that version ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 755, in testGetDatasetByVersion - expect_status=404, expect_reason="Not Found") - File "../TestLib/SparqlQueryTestCase.py", line 180, in doHTTP_GET - expect_status=expect_status, expect_reason=expect_reason) - File "../TestLib/SparqlQueryTestCase.py", line 164, in doRequest - if expect_status != "*": self.assertEqual(response.status, expect_status) -AssertionError: 200 != 404 - -====================================================================== -FAIL: POST manifest to dataset - POST manifest.rdf to /silo_name/datasets/dataset_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1006, in testMetadataFileUpdate - zipdata = self.updateTestSubmissionZipfile(file_to_upload="manifest2.rdf", filename="manifest.rdf") - File "TestSubmission.py", line 111, in updateTestSubmissionZipfile - expect_status=204, expect_reason="No Content") - File "../TestLib/SparqlQueryTestCase.py", line 207, in doHTTP_POST - expect_status=expect_status, expect_reason=expect_reason) - File "../TestLib/SparqlQueryTestCase.py", line 164, in doRequest - if expect_status != "*": self.assertEqual(response.status, expect_status) -AssertionError: 200 != 204 - -====================================================================== -FAIL: PUT file contents 
to existing filename - PUT file contents to /silo_name/datasets/dataset_name/file_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1212, in testPutUpdateFile - self.assertEqual(zipdata3, zipfile, "Difference between local and remote zipfile!") -AssertionError: Difference between local and remote zipfile! - -====================================================================== -FAIL: Add metadata to manifest - PUT metadata to /silo_name/datasets/dataset_name/manifest.rdf ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1294, in testPutMetadataFile - expect_status=204, expect_reason="No Content", expect_type="text/plain") - File "../TestLib/SparqlQueryTestCase.py", line 237, in doHTTP_PUT - expect_status=expect_status, expect_reason=expect_reason) - File "../TestLib/SparqlQueryTestCase.py", line 164, in doRequest - if expect_status != "*": self.assertEqual(response.status, expect_status) -AssertionError: 200 != 204 - ----------------------------------------------------------------------- -Ran 30 tests in 91.021s - -FAILED (failures=5, errors=2) diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run1_5Fail_2Error_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run1_5Fail_2Error_wiresharkMessage.pcap deleted file mode 100644 index 4e7e68d..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run1_5Fail_2Error_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run2_testDeleteEmbargo_pyhonTestMessage.txt 
b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run2_testDeleteEmbargo_pyhonTestMessage.txt deleted file mode 100644 index 4295489..0000000 --- a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run2_testDeleteEmbargo_pyhonTestMessage.txt +++ /dev/null @@ -1,39 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testDeleteEmbargo -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 204 Updated -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission 
-DEBUG:SparqlQueryTestCase:Status: 200 OK -ERROR - -====================================================================== -ERROR: Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1394, in testDeleteEmbargo - expect_status=200, expect_reason="OK", expect_type="application/json") - File "../TestLib/SparqlQueryTestCase.py", line 182, in doHTTP_GET - if (expect_type.lower() == "application/json"): responsedata = simplejson.loads(responsedata) - File "/usr/lib/pymodules/python2.6/simplejson/__init__.py", line 307, in loads - return _default_decoder.decode(s) - File "/usr/lib/pymodules/python2.6/simplejson/decoder.py", line 338, in decode - raise ValueError(errmsg("Extra data", s, end, len(s))) -ValueError: Extra data: line 1 column 4 - line 11 column 1372 (char 4 - 1664) - ----------------------------------------------------------------------- -Ran 1 test in 19.271s - -FAILED (errors=1) diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run2_testDeleteEmbargo_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run2_testDeleteEmbargo_wiresharkMessage.pcap deleted file mode 100644 index 4402859..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run2_testDeleteEmbargo_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run3_testChangeEmbargo_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run3_testChangeEmbargo_pyhonTestMessage.txt deleted file mode 100644 index 04b281c..0000000 --- 
a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run3_testChangeEmbargo_pyhonTestMessage.txt +++ /dev/null @@ -1,39 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testChangeEmbargo -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Modify embargo information - POST embargo_change, embargo, embargo_until to /silo_name/datasets/dataset_name ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 204 Updated -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -ERROR - -====================================================================== -ERROR: Modify embargo information - POST embargo_change, embargo, embargo_until 
to /silo_name/datasets/dataset_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1457, in testChangeEmbargo - expect_status=200, expect_reason="OK", expect_type="application/json") - File "../TestLib/SparqlQueryTestCase.py", line 182, in doHTTP_GET - if (expect_type.lower() == "application/json"): responsedata = simplejson.loads(responsedata) - File "/usr/lib/pymodules/python2.6/simplejson/__init__.py", line 307, in loads - return _default_decoder.decode(s) - File "/usr/lib/pymodules/python2.6/simplejson/decoder.py", line 338, in decode - raise ValueError(errmsg("Extra data", s, end, len(s))) -ValueError: Extra data: line 1 column 4 - line 11 column 1391 (char 4 - 1683) - ----------------------------------------------------------------------- -Ran 1 test in 24.111s - -FAILED (errors=1) diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run3_testChangeEmbargo_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run3_testChangeEmbargo_wiresharkMessage.pcap deleted file mode 100644 index ca89a28..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run3_testChangeEmbargo_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run4_testFileUpdate_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run4_testFileUpdate_pyhonTestMessage.txt deleted file mode 100644 index 8911a45..0000000 --- a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run4_testFileUpdate_pyhonTestMessage.txt +++ /dev/null @@ -1,37 +0,0 @@ 
-databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testFileUpdate -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2) ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -FAIL - 
-====================================================================== -FAIL: Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 592, in testFileUpdate - self.assertEqual(zipdata, zipfile, "Difference between local and remote zipfile!") -AssertionError: Difference between local and remote zipfile! - ----------------------------------------------------------------------- -Ran 1 test in 22.510s - -FAILED (failures=1) diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run4_testFileUpdate_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run4_testFileUpdate_wiresharkMessage.pcap deleted file mode 100644 index 2196a96..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run4_testFileUpdate_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run5_testGetDatasetByVersion_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run5_testGetDatasetByVersion_pyhonTestMessage.txt deleted file mode 100644 index b8c9c1c..0000000 --- a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run5_testGetDatasetByVersion_pyhonTestMessage.txt +++ /dev/null @@ -1,69 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testGetDatasetByVersion -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but 
/usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. Access each of the versions and the files in that version ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version0 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET 
http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -FAIL - -====================================================================== -FAIL: Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. 
Access each of the versions and the files in that version ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 755, in testGetDatasetByVersion - expect_status=404, expect_reason="Not Found") - File "../TestLib/SparqlQueryTestCase.py", line 180, in doHTTP_GET - expect_status=expect_status, expect_reason=expect_reason) - File "../TestLib/SparqlQueryTestCase.py", line 164, in doRequest - if expect_status != "*": self.assertEqual(response.status, expect_status) -AssertionError: 200 != 404 - ----------------------------------------------------------------------- -Ran 1 test in 16.518s - -FAILED (failures=1) diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run5_testGetDatasetByVersion_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run5_testGetDatasetByVersion_wiresharkMessage.pcap deleted file mode 100644 index 77c064f..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run5_testGetDatasetByVersion_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run6_testMetadataFileUpdate_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run6_testMetadataFileUpdate_pyhonTestMessage.txt deleted file mode 100644 index 3b5d7d6..0000000 --- a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run6_testMetadataFileUpdate_pyhonTestMessage.txt +++ /dev/null @@ -1,37 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testMetadataFileUpdate -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: 
UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -POST manifest to dataset - POST manifest.rdf to /silo_name/datasets/dataset_name ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 200 OK -FAIL - -====================================================================== -FAIL: POST manifest to dataset - POST manifest.rdf to /silo_name/datasets/dataset_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1006, in testMetadataFileUpdate - zipdata = self.updateTestSubmissionZipfile(file_to_upload="manifest2.rdf", filename="manifest.rdf") - File "TestSubmission.py", line 111, in updateTestSubmissionZipfile - expect_status=204, expect_reason="No Content") - File "../TestLib/SparqlQueryTestCase.py", line 207, in doHTTP_POST - expect_status=expect_status, 
expect_reason=expect_reason) - File "../TestLib/SparqlQueryTestCase.py", line 164, in doRequest - if expect_status != "*": self.assertEqual(response.status, expect_status) -AssertionError: 200 != 204 - ----------------------------------------------------------------------- -Ran 1 test in 0.437s - -FAILED (failures=1) diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run6_testMetadataFileUpdate_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run6_testMetadataFileUpdate_wiresharkMessage.pcap deleted file mode 100644 index 9c8178e..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run6_testMetadataFileUpdate_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run7_testPutUpdateFile_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run7_testPutUpdateFile_pyhonTestMessage.txt deleted file mode 100644 index 348d072..0000000 --- a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run7_testPutUpdateFile_pyhonTestMessage.txt +++ /dev/null @@ -1,43 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testPutUpdateFile -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -PUT file contents to existing filename - PUT file contents to /silo_name/datasets/dataset_name/file_name ... 
DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testrdf3.zip -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testrdf3.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -FAIL - -====================================================================== -FAIL: PUT file contents to existing filename - PUT file contents to 
/silo_name/datasets/dataset_name/file_name ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1212, in testPutUpdateFile - self.assertEqual(zipdata3, zipfile, "Difference between local and remote zipfile!") -AssertionError: Difference between local and remote zipfile! - ----------------------------------------------------------------------- -Ran 1 test in 22.677s - -FAILED (failures=1) diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run7_testPutUpdateFile_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run7_testPutUpdateFile_wiresharkMessage.pcap deleted file mode 100644 index 3d2866c..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run7_testPutUpdateFile_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run8_testPutMetadataFile_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run8_testPutMetadataFile_pyhonTestMessage.txt deleted file mode 100644 index 5028fe5..0000000 --- a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run8_testPutMetadataFile_pyhonTestMessage.txt +++ /dev/null @@ -1,35 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testPutMetadataFile -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import 
iter_entry_points -Add metadata to manifest - PUT metadata to /silo_name/datasets/dataset_name/manifest.rdf ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/manifest.rdf -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/manifest.rdf -DEBUG:SparqlQueryTestCase:Status: 200 OK -FAIL - -====================================================================== -FAIL: Add metadata to manifest - PUT metadata to /silo_name/datasets/dataset_name/manifest.rdf ----------------------------------------------------------------------- -Traceback (most recent call last): - File "TestSubmission.py", line 1294, in testPutMetadataFile - expect_status=204, expect_reason="No Content", expect_type="text/plain") - File "../TestLib/SparqlQueryTestCase.py", line 237, in doHTTP_PUT - expect_status=expect_status, expect_reason=expect_reason) - File "../TestLib/SparqlQueryTestCase.py", line 164, in doRequest - if expect_status != "*": self.assertEqual(response.status, expect_status) -AssertionError: 200 != 204 - ----------------------------------------------------------------------- -Ran 1 test in 3.759s - -FAILED (failures=1) diff --git 
a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run8_testPutMetadataFile_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run8_testPutMetadataFile_wiresharkMessage.pcap deleted file mode 100644 index 6c936a6..0000000 Binary files a/docs/databank_test_run_for_204_response/test1-204_ppdated_returns_content-5_fail_2_error_in_test_run/run8_testPutMetadataFile_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run1_all_pass_pyhonTestMessage.txt b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run1_all_pass_pyhonTestMessage.txt deleted file mode 100644 index 244de1a..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run1_all_pass_pyhonTestMessage.txt +++ /dev/null @@ -1,8 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -.............................. 
----------------------------------------------------------------------- -Ran 30 tests in 21.088s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run1_all_pass_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run1_all_pass_wiresharkMessage.pcap deleted file mode 100644 index f8f2aec..0000000 Binary files a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run1_all_pass_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run2_testDeleteEmbargo_pythonTestMessage.txt b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run2_testDeleteEmbargo_pythonTestMessage.txt deleted file mode 100644 index 3c7429f..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run2_testDeleteEmbargo_pythonTestMessage.txt +++ /dev/null @@ -1,25 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testDeleteEmbargo -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Delete embargo information - POST embargo_change to /silo_name/datasets/dataset_name ... 
DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 204 Updated -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -ok - ----------------------------------------------------------------------- -Ran 1 test in 0.369s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run2_testDeleteEmbargo_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run2_testDeleteEmbargo_wiresharkMessage.pcap deleted file mode 100644 index ff1f1d8..0000000 Binary files a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run2_testDeleteEmbargo_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run3_testChangeEmbargo_pythonTestMessage.txt 
b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run3_testChangeEmbargo_pythonTestMessage.txt deleted file mode 100644 index 8bfb78b..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run3_testChangeEmbargo_pythonTestMessage.txt +++ /dev/null @@ -1,25 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testChangeEmbargo -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Modify embargo information - POST embargo_change, embargo, embargo_until to /silo_name/datasets/dataset_name ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 204 Updated -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission 
-DEBUG:SparqlQueryTestCase:Status: 200 OK -ok - ----------------------------------------------------------------------- -Ran 1 test in 0.431s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run3_testChangeEmbargo_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run3_testChangeEmbargo_wiresharkMessage.pcap deleted file mode 100644 index 88ae69e..0000000 Binary files a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run3_testChangeEmbargo_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run4_testFileUpdate_pythonTestMessage.txt b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run4_testFileUpdate_pythonTestMessage.txt deleted file mode 100644 index 44cacca..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run4_testFileUpdate_pythonTestMessage.txt +++ /dev/null @@ -1,31 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testFileUpdate -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Update file in dataset - POST file to /silo_name/datasets/dataset_name (x 2) ... 
DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -ok - ----------------------------------------------------------------------- -Ran 1 test in 0.481s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run4_testFileUpdate_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run4_testFileUpdate_wiresharkMessage.pcap deleted file mode 100644 index dbe2151..0000000 Binary files 
a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run4_testFileUpdate_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run5_testGetDatasetByVersion_pythonTestMessage.txt b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run5_testGetDatasetByVersion_pythonTestMessage.txt deleted file mode 100644 index c9c0879..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run5_testGetDatasetByVersion_pythonTestMessage.txt +++ /dev/null @@ -1,99 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testGetDatasetByVersion -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Upload files to a dataset - POST file to /silo_name/datasets/dataset_name. Access each of the versions and the files in that version ... 
DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version0 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET 
http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version0 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission/version0 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET 
http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version1 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip/version1 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission/version1 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version2 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip/version2 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip/version2 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission/version2 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version3 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip/version3 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip/version3 -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission/version3 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version4 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip/version4 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET 
http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip/version4 -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission/version4 -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/version5 -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir2.zip/version5 -DEBUG:SparqlQueryTestCase:Status: 404 Not Found -ok - ----------------------------------------------------------------------- -Ran 1 test in 1.915s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run5_testGetDatasetByVersion_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run5_testGetDatasetByVersion_wiresharkMessage.pcap deleted file mode 100644 index f9dabb0..0000000 Binary files a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run5_testGetDatasetByVersion_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run6_testMetadataFileUpdate_pythonTestMessage.txt b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run6_testMetadataFileUpdate_pythonTestMessage.txt deleted file mode 100644 index 44d3546..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run6_testMetadataFileUpdate_pythonTestMessage.txt +++ /dev/null @@ -1,27 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testMetadataFileUpdate -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib 
was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -POST manifest to dataset - POST manifest.rdf to /silo_name/datasets/dataset_name ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -ok - ----------------------------------------------------------------------- -Ran 1 test in 0.490s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run6_testMetadataFileUpdate_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run6_testMetadataFileUpdate_wiresharkMessage.pcap deleted file 
mode 100644 index 3e18a68..0000000 Binary files a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run6_testMetadataFileUpdate_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run7_testPutUpdateFile_pythonTestMessage.txt b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run7_testPutUpdateFile_pythonTestMessage.txt deleted file mode 100644 index 3cbc0d9..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run7_testPutUpdateFile_pythonTestMessage.txt +++ /dev/null @@ -1,41 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testPutUpdateFile -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -PUT file contents to existing filename - PUT file contents to /silo_name/datasets/dataset_name/file_name ... 
DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testrdf3.zip -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testrdf3.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testrdf3.zip -DEBUG:SparqlQueryTestCase:Status: 200 OK 
-DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/testdir.zip/version1 -DEBUG:SparqlQueryTestCase:Status: 200 OK -ok - ----------------------------------------------------------------------- -Ran 1 test in 0.819s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run7_testPutUpdateFile_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run7_testPutUpdateFile_wiresharkMessage.pcap deleted file mode 100644 index 3bc3d31..0000000 Binary files a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run7_testPutUpdateFile_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run8_testPutMetadataFile_pythonTestMessage.txt b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run8_testPutMetadataFile_pythonTestMessage.txt deleted file mode 100644 index 7636f96..0000000 --- a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run8_testPutMetadataFile_pythonTestMessage.txt +++ /dev/null @@ -1,27 +0,0 @@ -databankadmin@databank:/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank$ python TestSubmission.py testPutMetadataFile -/opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/plugin.py:94: UserWarning: Module rdflib was already imported from /opt/RDFDatabank/rdfdatabank/tests/RDFDatabank/rdflib/__init__.pyc, but /usr/local/lib/python2.6/dist-packages/rdflib-2.4.2-py2.6-linux-i686.egg is being added to sys.path - from pkg_resources import iter_entry_points -Add metadata to manifest - PUT metadata to 
/silo_name/datasets/dataset_name/manifest.rdf ... DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointhost databank.ora.ox.ac.uk: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpath /sandbox/: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointuser admin: -DEBUG:SparqlQueryTestCase:setRequestEndPoint: endpointpass test: -DEBUG:SparqlQueryTestCase:DELETE http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:POST http://databank.ora.ox.ac.uk/sandbox/datasets/ -DEBUG:SparqlQueryTestCase:Status: 201 Created -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/manifest.rdf -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:PUT http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission/manifest.rdf -DEBUG:SparqlQueryTestCase:Status: 204 No Content -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/datasets/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -DEBUG:SparqlQueryTestCase:GET http://databank.ora.ox.ac.uk/sandbox/states/TestSubmission -DEBUG:SparqlQueryTestCase:Status: 200 OK -ok - ----------------------------------------------------------------------- -Ran 1 test in 0.538s - -OK diff --git a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run8_testPutMetadataFile_wiresharkMessage.pcap b/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run8_testPutMetadataFile_wiresharkMessage.pcap deleted file mode 100644 index f69a215..0000000 Binary files a/docs/databank_test_run_for_204_response/test2-204_updated_returns_no_content-all_pass_in_test_run/run8_testPutMetadataFile_wiresharkMessage.pcap and /dev/null differ diff --git a/docs/index.txt b/docs/index.txt 
deleted file mode 100644 index b3a9628..0000000 --- a/docs/index.txt +++ /dev/null @@ -1,19 +0,0 @@ -rdfdatabank -+++++++++++ - -This is the main index page of your documentation. It should be written in -`reStructuredText format `_. - -You can generate your documentation in HTML format by running this command:: - - setup.py pudge - -For this to work you will need to download and install `buildutils`_, -`pudge`_, and `pygments`_. The ``pudge`` command is disabled by -default; to ativate it in your project, run:: - - setup.py addcommand -p buildutils.pudge_command - -.. _buildutils: http://pypi.python.org/pypi/buildutils -.. _pudge: http://pudge.lesscode.org/ -.. _pygments: http://pygments.org/ diff --git a/docs/indexingSolrRecordsUsingSupervisord/LogConfigParser.py b/docs/indexingSolrRecordsUsingSupervisord/LogConfigParser.py deleted file mode 100644 index ef9aec1..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/LogConfigParser.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python - -import ConfigParser, os - -class Config(ConfigParser.ConfigParser): - DEFAULT_CONFIG_FILE = "loglines.cfg" - def __init__(self, config_file=DEFAULT_CONFIG_FILE): - ConfigParser.ConfigParser.__init__(self) - if os.path.exists(config_file) and os.path.isfile(config_file): - self.read(config_file) - self.validate() - - def validate(self): - pass diff --git a/docs/indexingSolrRecordsUsingSupervisord/broker.py b/docs/indexingSolrRecordsUsingSupervisord/broker.py deleted file mode 100644 index 20a5f6c..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/broker.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -from redisqueue import RedisQueue - -from LogConfigParser import Config - -import sys - -from time import sleep - -if __name__ == "__main__": - c = Config() - redis_section = "redis" - worker_section = "worker_broker" - worker_number = sys.argv[1] - if len(sys.argv) == 3: - if "redis_%s" % sys.argv[2] in c.sections(): - redis_section = "redis_%s" % sys.argv[2] - - 
rq = RedisQueue(c.get(worker_section, "listento"), "broker_%s" % worker_number, - db=c.get(redis_section, "db"), - host=c.get(redis_section, "host"), - port=c.get(redis_section, "port") - ) - if c.has_option(worker_section, "fanout_status_queue"): - # keep a queue of messages to deliver for a given push'd item - # better resumeability at the cost of more redis operations - topushq = RedisQueue(c.get(worker_section, "fanout_status_queue"), "fanout_broker_%s" % worker_number, - db=c.get(redis_section, "db"), - host=c.get(redis_section, "host"), - port=c.get(redis_section, "port") - ) - fanout_queues = [x.strip() for x in c.get(worker_section, "fanout").split(",") if x] - - if c.has_option(worker_section, "idletime"): - try: - idletime = float(c.get(worker_section, "idletime")) - except ValueError: - idletime = 10 - - while(True): - line = rq.pop() - if line: - fanout_success = True - if topushq: - # if there are residual messages to send, restart with those: - if len(topushq) == 0: - # if the queue is empty, and this is a clean start - for q in fanout_queues: - topushq.push(q) - # Distribution: - while len(topushq) != 0: - q = topushq.pop() - rq.push(line, to_queue=q) - topushq.task_complete() - rq.task_complete() - else: - for q in fanout_queues: - rq.push(line, to_queue=q) - rq.task_complete() - else: - # ratelimit to stop it chewing through CPU cycles - sleep(idletime) diff --git a/docs/indexingSolrRecordsUsingSupervisord/getObjects.py b/docs/indexingSolrRecordsUsingSupervisord/getObjects.py deleted file mode 100644 index 9836564..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/getObjects.py +++ /dev/null @@ -1,86 +0,0 @@ -import os -from rdfdatabank.lib.broadcast import BroadcastToRedis -from pylons import config - -def get_objs_in_dir(items_list, dirname, fnames): - for fname in fnames: - a = os.path.join(dirname,fname) - if fname == 'obj': - item = a.split('pairtree_root')[1].strip('/').split('obj')[0].replace('/', '') - silo = 
a.split('pairtree_root')[0].strip('/').split('/')[-1] - if not (silo, item) in items_list: - items_list.append((silo, item)) - return - -def broadcast_links(src_dir): - links_list = [] - os.path.walk(src_dir,get_objs_in_dir,links_list) - b = BroadcastToRedis(config['redis.host'], config['broadcast.queue']) - - for silo, item in links_list: - b.creation(silo, item) - return - -src_dirs = [ -'/silos/admiral/pairtree_root', -'/silos/digitaltest/pairtree_root', -'/silos/eidcsr/pairtree_root', -'/silos/general/pairtree_root', -'/silos/ww1archives/pairtree_root', -'/silos/digitalbooks/pairtree_root/30', -'/silos/digitalbooks/pairtree_root/og/-4/00', -'/silos/digitalbooks/pairtree_root/og/-4/01', -'/silos/digitalbooks/pairtree_root/og/-3/00', -'/silos/digitalbooks/pairtree_root/og/-3/01', -'/silos/digitalbooks/pairtree_root/og/-3/02', -'/silos/digitalbooks/pairtree_root/og/-3/03', -'/silos/digitalbooks/pairtree_root/og/-3/04', -'/silos/digitalbooks/pairtree_root/og/-3/05', -'/silos/digitalbooks/pairtree_root/og/-3/06', -'/silos/digitalbooks/pairtree_root/og/-3/15', -'/silos/digitalbooks/pairtree_root/og/-3/16', -'/silos/digitalbooks/pairtree_root/og/-3/18', -'/silos/digitalbooks/pairtree_root/og/-3/20', -'/silos/digitalbooks/pairtree_root/og/-3/61', -'/silos/digitalbooks/pairtree_root/og/-3/90', -'/silos/digitalbooks/pairtree_root/og/-3/93', -'/silos/digitalbooks/pairtree_root/og/-5/00', -'/silos/digitalbooks/pairtree_root/og/-5/01', -'/silos/digitalbooks/pairtree_root/og/-5/02', -'/silos/digitalbooks/pairtree_root/og/-5/03', -'/silos/digitalbooks/pairtree_root/og/-5/04', -'/silos/digitalbooks/pairtree_root/og/-5/09', -'/silos/digitalbooks/pairtree_root/og/-5/31', -'/silos/digitalbooks/pairtree_root/og/-5/32', -'/silos/digitalbooks/pairtree_root/og/-5/33', -'/silos/digitalbooks/pairtree_root/og/-5/50', -'/silos/digitalbooks/pairtree_root/og/-5/55', -'/silos/digitalbooks/pairtree_root/og/-5/56', -'/silos/digitalbooks/pairtree_root/og/-5/90', 
-'/silos/digitalbooks/pairtree_root/og/-5/91', -'/silos/digitalbooks/pairtree_root/og/-5/96', -'/silos/digitalbooks/pairtree_root/og/-5/97', -'/silos/digitalbooks/pairtree_root/og/-6/00', -'/silos/digitalbooks/pairtree_root/og/-6/50', -'/silos/digitalbooks/pairtree_root/og/-6/81', -'/silos/digitalbooks/pairtree_root/og/-6/90', -'/silos/digitalbooks/pairtree_root/og/-N/08', -'/silos/digitalbooks/pairtree_root/og/-N/10', -'/silos/digitalbooks/pairtree_root/og/-N/11', -'/silos/digitalbooks/pairtree_root/og/-N/12', -'/silos/digitalbooks/pairtree_root/og/-N/13', -'/silos/digitalbooks/pairtree_root/og/-N/14', -'/silos/digitalbooks/pairtree_root/og/-N/15', -'/silos/digitalbooks/pairtree_root/og/-N/16', -'/silos/digitalbooks/pairtree_root/og/-N/17', -'/silos/digitalbooks/pairtree_root/og/-N/32', -'/silos/digitalbooks/pairtree_root/og/-N/50' -] - -for src_dir in src_dirs: - print "starting", src_dir - links_list = [] - os.path.walk(src_dir,get_objs_in_dir,links_list) - b = BroadcastToRedis(config['redis.host'], config['broadcast.queue']) - for silo, item in links_list: - b.creation(silo, item) \ No newline at end of file diff --git a/docs/indexingSolrRecordsUsingSupervisord/loglines.cfg b/docs/indexingSolrRecordsUsingSupervisord/loglines.cfg deleted file mode 100644 index 7c2812a..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/loglines.cfg +++ /dev/null @@ -1,55 +0,0 @@ -# CONFIGURE THE FOLLOWING: -[supervisor] -port = 127.0.0.1:9001 -username = guest -password = guest - -# Configure the following to let supervisor control the redis instance -# or comment it out otherwise. -[process_redis] -# add the correct path to the redis instance here -command = redis-1.2.6/redis-server - -# Configure the following to let supervisor control the redis instance -# or comment it out otherwise. 
-[process_solr] -# add the correct path to the redis instance here -command = /opt/solr/start_solr.sh - -[worker_solr] -listento = solrindex -command = ./solr_worker.py -# Time in seconds to sleep if there is nothing on the queue -solrurl = http://localhost:8983/solr -idletime = 1 -stdout_logfile = workerlogs/solr_worker.log -numprocs = 2 - -[worker_broker] -listento = silochanges -command = ./broker.py -fanout = auditlog, solrindex -fanout_status_queue = broker_temp -# Time in seconds to sleep if there is nothing on the queue -idletime = 1 -stdout_logfile = workerlogs/broker.log -numprocs = 2 - -[logger_auditlogger] -listento = auditlog -command = ./logfromqueue.py -logfile = logs/audit.log -stdout_logfile = workerlogs/auditlogger.log - -# DEFAULT VALUES FOLLOW -############################## -[redis] -host = localhost -port = 6379 -db = 0 - -[redis_test] -host = localhost -port = 6379 -db = 1 - diff --git a/docs/indexingSolrRecordsUsingSupervisord/redisqueue.py b/docs/indexingSolrRecordsUsingSupervisord/redisqueue.py deleted file mode 100644 index d3e422b..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/redisqueue.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python - -from redis import Redis - -WORKERPREFIX = "temp" -HOST = "localhost" -PORT = 6379 -DB = 0 - -import logging - -logger = logging.getLogger("redisqueue") -logger.setLevel(logging.INFO) -# create console handler and set level to debug -ch = logging.StreamHandler() -# create formatter -formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") -# add formatter to ch -ch.setFormatter(formatter) -# add ch to logger -logger.addHandler(ch) - - -"""Simple wrapper around a redis queue that gives methods in line with the other Queue-style classes""" - -class RedisQueue(object): - def __init__(self, queuename, workername, db=DB, host=HOST, port=PORT, workerprefix=WORKERPREFIX): - self.host = host - if isinstance(port, str): - try: - self.port = int(port) - except 
ValueError: - self.port = PORT - else: - self.port = port - self.queuename = queuename - self.workername = workername - self.workeritem = ":".join([workerprefix, workername]) - self.db = db - self._initclient() - - def _initclient(self): - logger.info("Initialising the redis queue %s for %s" % (self.queuename, self.workername)) - logger.info("Host:%s port:%s DB:%s" % (self.host, self.port, self.db)) - logger.debug("Debug messages detailing worker queue activity") - self._r = Redis(host=self.host, db=self.db, port=self.port) - - def __len__(self): - if self.inprogress(): - return self._r.llen(self.queuename) + 1 - else: - return self._r.llen(self.queuename) - - def __getitem__(self, index): - return self._r.lrange(self.queuename, index, index) - - def inprogress(self): - ip = self._r.lrange(self.workeritem, 0, 0) - if ip: - return ip.pop() - else: - return None - - def task_complete(self): - logger.debug("Task completed by worker %s" % self.workername) - return self._r.rpop(self.workeritem) - - def task_failed(self): - logger.error("Task FAILED by worker %s" % self.workername) - logger.debug(self.inprogress()) - return self._r.rpoplpush(self.workeritem, self.queuename) - - def push(self, item, to_queue=None): - if to_queue: - logger.debug("{%s} put onto queue %s by worker %s" % (item, to_queue,self.workername)) - return self._r.lpush(to_queue, item) - else: - logger.debug("{%s} put onto queue %s by worker %s" % (item, self.queuename,self.workername)) - return self._r.lpush(self.queuename, item) - - def pop(self): - if self._r.llen(self.workeritem) == 0: - self._r.rpoplpush(self.queuename, self.workeritem) - logger.debug("{%s} pulled from queue %s by worker %s" % (self.inprogress(), self.queuename,self.workername)) - else: - logger.debug("{%s} pulled from temporary worker queue by worker %s" % (self.inprogress(), self.workername)) - return self.inprogress() diff --git a/docs/indexingSolrRecordsUsingSupervisord/solrFields.py 
b/docs/indexingSolrRecordsUsingSupervisord/solrFields.py deleted file mode 100644 index 555a3bc..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/solrFields.py +++ /dev/null @@ -1,102 +0,0 @@ -solr_fields_mapping = { - "silo":"silo", - "id":"id", - "uuid":"uuid", - "http://www.w3.org/1999/02/22-rdf-syntax-ns#type":"type", - "http://vocab.ox.ac.uk/dataset/schema#isEmbargoed":"embargoStatus", - "http://purl.org/spar/pso/Status":"embargoStatus", - "http://vocab.ox.ac.uk/dataset/schema#embargoedUntil":"embargoedUntilDate", - "http://purl.org/spar/fabio/hasEmbargoDate":"embargoedUntilDate", - "http://vocab.ox.ac.uk/dataset/schema#currentVersion":"currentVersion", - "http://purl.org/ontology/bibo/doi":"doi", - "http://www.openarchives.org/ore/terms/aggregates":"aggregatedResource" - "http://purl.org/spar/fabio/publicationDate":"publicationDate", - "http://dublincore.org/documents/dcmi-terms/abstract":"abstract", - "http://dublincore.org/documents/dcmi-terms/accessRights":"accessRights", - "http://dublincore.org/documents/dcmi-terms/accrualMethod":"accrualMethod", - "http://dublincore.org/documents/dcmi-terms/accrualPeriodicity":"accrualPeriodicity", - "http://dublincore.org/documents/dcmi-terms/accrualPolicy":"accrualPolicy", - "http://dublincore.org/documents/dcmi-terms/alternative":"alternative", - "http://dublincore.org/documents/dcmi-terms/audience":"audience", - "http://dublincore.org/documents/dcmi-terms/available":"available", - "http://dublincore.org/documents/dcmi-terms/bibliographicCitation":"bibliographicCitation", - "http://dublincore.org/documents/dcmi-terms/conformsTo":"conformsTo", - "http://dublincore.org/documents/dcmi-terms/contributor":"contributor", - "http://dublincore.org/documents/dcmi-terms/coverage":"coverage", - "http://dublincore.org/documents/dcmi-terms/created":"created", - "http://dublincore.org/documents/dcmi-terms/creator":"creator", - "http://dublincore.org/documents/dcmi-terms/date":"date", - 
"http://dublincore.org/documents/dcmi-terms/dateAccepted":"dateAccepted", - "http://dublincore.org/documents/dcmi-terms/dateCopyrighted":"dateCopyrighted", - "http://dublincore.org/documents/dcmi-terms/dateSubmitted":"dateSubmitted", - "http://dublincore.org/documents/dcmi-terms/description":"description", - "http://dublincore.org/documents/dcmi-terms/educationLevel":"educationLevel", - "http://dublincore.org/documents/dcmi-terms/extent":"extent", - "http://dublincore.org/documents/dcmi-terms/format":"format", - "http://dublincore.org/documents/dcmi-terms/hasFormat":"hasFormat", - "http://dublincore.org/documents/dcmi-terms/hasPart":"hasPart", - "http://dublincore.org/documents/dcmi-terms/hasVersion":"hasVersion", - "http://dublincore.org/documents/dcmi-terms/identifier":"identifier", - "http://dublincore.org/documents/dcmi-terms/instructionalMethod":"instructionalMethod", - "http://dublincore.org/documents/dcmi-terms/isFormatOf":"isFormatOf", - "http://dublincore.org/documents/dcmi-terms/isPartOf":"isPartOf", - "http://dublincore.org/documents/dcmi-terms/isReferencedBy":"isReferencedBy", - "http://dublincore.org/documents/dcmi-terms/isReplacedBy":"isReplacedBy", - "http://dublincore.org/documents/dcmi-terms/isRequiredBy":"isRequiredBy", - "http://dublincore.org/documents/dcmi-terms/issued":"issued", - "http://dublincore.org/documents/dcmi-terms/isVersionOf":"isVersionOf", - "http://dublincore.org/documents/dcmi-terms/language":"language", - "http://dublincore.org/documents/dcmi-terms/license":"license", - "http://dublincore.org/documents/dcmi-terms/mediator":"mediator", - "http://dublincore.org/documents/dcmi-terms/medium":"medium", - "http://dublincore.org/documents/dcmi-terms/modified":"modified", - "http://dublincore.org/documents/dcmi-terms/provenance":"provenance", - "http://dublincore.org/documents/dcmi-terms/publisher":"publisher", - "http://dublincore.org/documents/dcmi-terms/references":"references", - 
"http://dublincore.org/documents/dcmi-terms/relation":"relation", - "http://dublincore.org/documents/dcmi-terms/replaces":"replaces", - "http://dublincore.org/documents/dcmi-terms/requires":"requires", - "http://dublincore.org/documents/dcmi-terms/rights":"rights", - "http://dublincore.org/documents/dcmi-terms/rightsHolder":"rightsHolder", - "http://dublincore.org/documents/dcmi-terms/source":"source", - "http://dublincore.org/documents/dcmi-terms/spatial":"spatial", - "http://dublincore.org/documents/dcmi-terms/subject":"subject", - "http://dublincore.org/documents/dcmi-terms/tableOfContents":"tableOfContents", - "http://dublincore.org/documents/dcmi-terms/temporal":"temporal", - "http://dublincore.org/documents/dcmi-terms/title":"title", - "http://dublincore.org/documents/dcmi-terms/type":"type", - "http://dublincore.org/documents/dcmi-terms/valid":"valid", - "http://purl.org/dc/elements/1.1/contributor":"contributor", - "http://purl.org/dc/elements/1.1/coverage":"coverage", - "http://purl.org/dc/elements/1.1/creator":"creator", - "http://purl.org/dc/elements/1.1/date":"date", - "http://purl.org/dc/elements/1.1/description":"description", - "http://purl.org/dc/elements/1.1/format":"format", - "http://purl.org/dc/elements/1.1/identifier":"identifier", - "http://purl.org/dc/elements/1.1/language":"language", - "http://purl.org/dc/elements/1.1/publisher":"publisher", - "http://purl.org/dc/elements/1.1/relation":"relation", - "http://purl.org/dc/elements/1.1/rights":"rights", - "http://purl.org/dc/elements/1.1/source":"source", - "http://purl.org/dc/elements/1.1/subject":"subject", - "http://purl.org/dc/elements/1.1/title":"title", - "http://purl.org/dc/elements/1.1/type":"type" -} - -facets = [ - 'f_creator', - 'f_mediator', - 'f_embargoedUntilDate', - 'f_license', - 'f_rights', - 'f_type', - 'f_publisher', - 'f_isPartOf', - 'f_hasVersion', - 'f_publicationDate', - 'f_contributor', - 'f_language', - 'f_rightsHolder', - 'f_source', - 'f_subject' -] \ No newline at 
end of file diff --git a/docs/indexingSolrRecordsUsingSupervisord/solr_worker.py b/docs/indexingSolrRecordsUsingSupervisord/solr_worker.py deleted file mode 100644 index 32bebab..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/solr_worker.py +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python - -from redisqueue import RedisQueue -from LogConfigParser import Config -from solrFields import solr_fields_mapping - -import sys -from time import sleep -from rdflib import URIRef -import simplejson - -from recordsilo import Granary -from solr import SolrConnection - -DB_ROOT = "/opt/RDFDatabank" - -class NoSuchSilo(Exception): - pass - -def gather_document(silo_name, item): - graph = item.get_graph() - document = {'uuid':item.metadata['uuid'], 'id':item.item_id, 'silo':silo_name} - for (_,p,o) in graph.triples((URIRef(item.uri), None, None)): - #print p,o - #if p.startswith("http://purl.org/dc/terms/"): - if p in solr_fields_mapping: - #field = p[len("http://purl.org/dc/terms/"):].encode("utf-8") - field = solr_fields_mapping[p] - if not document.has_key(field): - document[field] = [] - #print "Adding %s to %s" % (o, field) - document[field].append(unicode(o).encode("utf-8")) - else: - if not document.has_key('text'): - document['text'] = [] - #print "Adding %s to %s - (from %s)" % (o, "text", p) - document['text'].append(unicode(o).encode("utf-8")) - #print document - return document - -if __name__ == "__main__": - c = Config() - redis_section = "redis" - worker_section = "worker_solr" - worker_number = sys.argv[1] - if len(sys.argv) == 3: - if "redis_%s" % sys.argv[2] in c.sections(): - redis_section = "redis_%s" % sys.argv[2] - - rq = RedisQueue(c.get(worker_section, "listento"), "solr_%s" % worker_number, - db=c.get(redis_section, "db"), - host=c.get(redis_section, "host"), - port=c.get(redis_section, "port") - ) - rdfdb_config = Config("%s/production.ini" % DB_ROOT) - granary_root = rdfdb_config.get("app:main", "granary.store", 0, {'here':DB_ROOT}) - - g = 
Granary(granary_root) - - solr = SolrConnection(c.get(worker_section, "solrurl")) - - idletime = 2 - - while(True): - sleep(idletime) - line = rq.pop() - if not line: - continue - msg = simplejson.loads(line) - # solr switch - silo_name = msg['silo'] - if silo_name not in g.silos: - raise NoSuchSilo - s = g.get_rdf_silo(silo_name) - if msg['type'] == "c" or msg['type'] == "u" or msg['type'] == "embargo": - # Creation - itemid = msg.get('id') - print "Got creation message on id:%s in silo:%s" % (itemid, silo_name) - if itemid and s.exists(itemid): - item = s.get_item(itemid) - solr_doc = gather_document(silo_name, item) - try: - solr.add(_commit=True, **solr_doc) - except: - f = open('/opt/logs/solr_error.log', 'a') - f.write("Error adding record (creating) id:%s in silo:%s" % (itemid, silo_name)) - f.close() - rq.task_complete() - elif msg['type'] == "d": - # Deletion - itemid = msg.get('id') - if itemid and s.exists(itemid): - solr.delete(itemid) - solr.commit() - rq.task_complete() - """ - elif msg['type'] == "embargo": - itemid = msg.get('id') - if itemid and s.exists(itemid): - if msg['embargoed'] in ['false', 'False', 0, False]: - # Embargo removed: update solr - item = s.get_item(itemid) - solr_doc = gather_document(silo_name, item) - try: - solr.add(_commit=True, **solr_doc) - except: - f = open('/opt/logs/solr_error.log', 'a') - f.write("Error adding record (embargo change) id:%s in silo:%s" % (itemid, silo_name)) - f.close() - else: - solr.delete(itemid) - solr.commit() - rq.task_complete() - except NoSuchSilo: - print "ERROR: Silo doesn't exist %s" % silo_name - print line - rq.task_complete() - """ diff --git a/docs/indexingSolrRecordsUsingSupervisord/supervisor_startup.sh b/docs/indexingSolrRecordsUsingSupervisord/supervisor_startup.sh deleted file mode 100644 index 667f8fb..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/supervisor_startup.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -su databankadmin --command "exec /usr/local/bin/supervisord 
-c /opt/workers/supervisord.conf" - diff --git a/docs/indexingSolrRecordsUsingSupervisord/supervisord.conf b/docs/indexingSolrRecordsUsingSupervisord/supervisord.conf deleted file mode 100644 index 098e111..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/supervisord.conf +++ /dev/null @@ -1,29 +0,0 @@ -[unix_http_server] -username = guest -password = guest -file = /tmp/supervisor.sock - -[supervisord] -minfds = 1024 -minprocs = 200 -loglevel = info -logfile = /tmp/supervisord.log -logfile_maxbytes = 50MB -nodaemon = false -pidfile = /tmp/supervisord.pid -logfile_backups = 10 - -[supervisorctl] -serverurl = unix:///tmp/supervisor.sock - -[rpcinterface:supervisor] -supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface - -[include] -files = workers_enabled/*.conf - -[inet_http_server] -username = guest -password = guest -port = 127.0.0.1:9001 - diff --git a/docs/indexingSolrRecordsUsingSupervisord/workers_available/logger_articlelogger.conf b/docs/indexingSolrRecordsUsingSupervisord/workers_available/logger_articlelogger.conf deleted file mode 100644 index 30b3481..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/workers_available/logger_articlelogger.conf +++ /dev/null @@ -1,13 +0,0 @@ -[program:logger_articlelogger] -autorestart = true -numprocs = 1 -startretries = 3 -redirect_stderr = True -stopwaitsecs = 10 -process_name = logger_articlelogger_%(process_num)s -priority = 999 -command = ./logfromqueue.py %(process_num)s logger_articlelogger -autostart = true -startsecs = 10 -stdout_logfile = workerlogs/articlelogger.log - diff --git a/docs/indexingSolrRecordsUsingSupervisord/workers_available/logger_auditlogger.conf b/docs/indexingSolrRecordsUsingSupervisord/workers_available/logger_auditlogger.conf deleted file mode 100644 index 65c219d..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/workers_available/logger_auditlogger.conf +++ /dev/null @@ -1,13 +0,0 @@ -[program:logger_auditlogger] -autorestart = true -numprocs = 
1 -startretries = 3 -redirect_stderr = True -stopwaitsecs = 10 -process_name = logger_auditlogger_%(process_num)s -priority = 666 -directory = /opt/workers/ -command = /opt/workers/logfromqueue.py %(process_num)s logger_auditlogger -autostart = true -startsecs = 10 -stdout_logfile = /opt/workers/workerlogs/auditlogger.log diff --git a/docs/indexingSolrRecordsUsingSupervisord/workers_available/process_redis.conf b/docs/indexingSolrRecordsUsingSupervisord/workers_available/process_redis.conf deleted file mode 100644 index b024366..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/workers_available/process_redis.conf +++ /dev/null @@ -1,14 +0,0 @@ -[program:process_redis] -autorestart = true -numprocs = 1 -startretries = 3 -redirect_stderr = True -stopwaitsecs = 10 -process_name = process_redis_%(process_num)s -priority = 555 -directory = /opt/workers/ -command = /opt/workers/redis-1.2.6/redis-server -autostart = true -startsecs = 10 -stdout_logfile = /opt/workers/workerlogs/process_redis.log - diff --git a/docs/indexingSolrRecordsUsingSupervisord/workers_available/process_solr.conf b/docs/indexingSolrRecordsUsingSupervisord/workers_available/process_solr.conf deleted file mode 100644 index c866f22..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/workers_available/process_solr.conf +++ /dev/null @@ -1,14 +0,0 @@ -[program:process_solr] -autorestart = true -numprocs = 1 -startretries = 3 -redirect_stderr = True -stopwaitsecs = 10 -process_name = process_solr_%(process_num)s -priority = 999 -directory = /opt/solr/ -command = /opt/solr/start_solr.sh -autostart = true -startsecs = 10 -stdout_logfile = /opt/workers/workerlogs/process_solr.log - diff --git a/docs/indexingSolrRecordsUsingSupervisord/workers_available/worker_broker.conf b/docs/indexingSolrRecordsUsingSupervisord/workers_available/worker_broker.conf deleted file mode 100644 index d7f8b2b..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/workers_available/worker_broker.conf +++ /dev/null @@ 
-1,14 +0,0 @@ -[program:worker_broker] -autorestart = true -numprocs = 2 -startretries = 3 -redirect_stderr = True -stopwaitsecs = 10 -process_name = worker_broker_%(process_num)s -priority = 777 -directory = /opt/workers/ -command = /opt/workers/broker.py %(process_num)s -autostart = true -startsecs = 10 -stdout_logfile = /opt/workers/workerlogs/broker.log - diff --git a/docs/indexingSolrRecordsUsingSupervisord/workers_available/worker_solr.conf b/docs/indexingSolrRecordsUsingSupervisord/workers_available/worker_solr.conf deleted file mode 100644 index a659549..0000000 --- a/docs/indexingSolrRecordsUsingSupervisord/workers_available/worker_solr.conf +++ /dev/null @@ -1,14 +0,0 @@ -[program:worker_solr] -autorestart = true -numprocs = 2 -startretries = 3 -redirect_stderr = True -stopwaitsecs = 10 -process_name = worker_solr_%(process_num)s -priority = 888 -directory = /opt/workers/ -command = /opt/workers/solr_worker.py %(process_num)s -autostart = true -startsecs = 10 -stdout_logfile = /opt/workers/workerlogs/solr_worker.log - diff --git a/docs/using_databank_api/DatabankDemo.py b/docs/using_databank_api/DatabankDemo.py index 4045c17..4519576 100644 --- a/docs/using_databank_api/DatabankDemo.py +++ b/docs/using_databank_api/DatabankDemo.py @@ -1,33 +1,9 @@ -#-*- coding: utf-8 -*- -""" -Copyright (c) 2012 University of Oxford - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, --INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""" - #Databank API demo import urllib2 import base64 import urllib -from multipartform import MultiPartForm +from lib.multipartform import MultiPartForm #=============================================================================== #Using urllib2 to create a package in Databank @@ -55,7 +31,7 @@ #Add a file form = MultiPartForm() filename = "solrconfig.xml" -filepath = "./solrconfig.xml" +filepath = "data/unicode07.xml" form.add_file('file', filename, fileHandle=open(filepath)) # Build the request @@ -75,4 +51,4 @@ ans2 = urllib2.urlopen(req2) print print 'SERVER RESPONSE:' -ans2.read() \ No newline at end of file +ans2.read() diff --git a/docs/using_databank_api/README b/docs/using_databank_api/README index a54356a..d5ef618 100644 --- a/docs/using_databank_api/README +++ b/docs/using_databank_api/README @@ -1,15 +1,25 @@ Guide to using the RDFDatabank API +The directory contains example python code that services can use for interacting with the Databank API. +There are two sets of examples +1. main.py + This uses the helper class HTTP_request.py which in turn uses httplib to make http calls. + It has code on using GET, POST, PUT and DELETE methods for interacting with the API and works over both http / https. + +2. DatabankDemo.py or postingToDatabank.py + This is exmaple code to POST items to Databank using urllib2. + The form data is constructed is slightly differently in DatabankDemo.py and postingToDatabank.py which can be seen in multipartform.py and multipart.py respectively. 
+ RDFDatabank was developed by Ben O'Steen and Anusha Ranganathan at the Bodleian Libraries, Universiy of Oxford The purpose of DataBank is to provide a robust and efficient system for the safe storage of and open access to research data. The API documentation for using RDFdatabank is at -http://databank.ora.ox.ac.uk/api or -https://github.com/anusharanganathan/RDFDatabank/tree/master/rdfdatabank/public/static/api_files +http://databank-vm1.oerc.ox.ac.uk/api or +https://github.com/databank/RDFDatabank/tree/master/rdfdatabank/public/static/api_files -The source code for databank is available at https://github.com/anusharanganathan/RDFDatabank. -The test code for databank can be found in https://github.com/anusharanganathan/RDFDatabank/tree/master/rdfdatabank/tests/RDFDatabank +The source code for databank is available at https://github.com/dataflow/RDFDatabank. +The test code for databank can be found in https://github.com/dataflow/RDFDatabank/tree/master/rdfdatabank/tests/RDFDatabank Note: DataBank is not intended to store large-scale data sets such as grid data or other vast data sets. 
diff --git a/docs/using_databank_api/HTTP_request.py b/docs/using_databank_api/lib/HTTP_request.py similarity index 93% rename from docs/using_databank_api/HTTP_request.py rename to docs/using_databank_api/lib/HTTP_request.py index bd76624..52d21fd 100644 --- a/docs/using_databank_api/HTTP_request.py +++ b/docs/using_databank_api/lib/HTTP_request.py @@ -1,4 +1,3 @@ -#-*- coding: utf-8 -*- """ Copyright (c) 2012 University of Oxford @@ -116,17 +115,21 @@ def encodeFormData(self, params): return reqtype, reqdata def doRequest(self, command, resource, reqdata=None, reqheaders={}): + #print "User:", self._endpointuser + #print "Host:", self._endpointhost + #print "Resource:", resource if self._endpointuser: auth = base64.encodestring("%s:%s" % (self._endpointuser, self._endpointpass)).strip() reqheaders["Authorization"] = "Basic %s" % auth - hc = httplib.HTTPConnection(self._endpointhost) - path = self.getRequestPath(resource) + hc = httplib.HTTPConnection(self._endpointhost) + #hc = httplib.HTTPSConnection(self._endpointhost) + #resource = self.getRequestPath(resource) response = None responsedata = None repeat = 10 - while path and repeat > 0: + while resource and repeat > 0: repeat -= 1 - hc.request(command, path, reqdata, reqheaders) + hc.request(command, resource, reqdata, reqheaders) response = hc.getresponse() if response.status != 301: break path = response.getheader('Location', None) @@ -138,6 +141,9 @@ def doRequest(self, command, resource, reqdata=None, reqheaders={}): response.read() # Seems to be needed to free up connection for new request logger.debug("Status: %i %s" % (response.status, response.reason)) responsedata = response.read() + #print "Response data", responsedata + #print "Response status", response.status + #print "Response reason", response.reason hc.close() return (response, responsedata) diff --git a/docs/using_databank_api/lib/__init__.py b/docs/using_databank_api/lib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/docs/using_databank_api/multipart.py b/docs/using_databank_api/lib/multipart.py similarity index 100% rename from docs/using_databank_api/multipart.py rename to docs/using_databank_api/lib/multipart.py diff --git a/docs/using_databank_api/multipartform.py b/docs/using_databank_api/lib/multipartform.py similarity index 65% rename from docs/using_databank_api/multipartform.py rename to docs/using_databank_api/lib/multipartform.py index 0bfd916..11a301d 100644 --- a/docs/using_databank_api/multipartform.py +++ b/docs/using_databank_api/lib/multipartform.py @@ -1,27 +1,3 @@ -#-*- coding: utf-8 -*- -""" -Copyright (c) 2012 University of Oxford - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, --INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-""" - import itertools import mimetools import mimetypes diff --git a/docs/using_databank_api/main.py b/docs/using_databank_api/main.py index d9f7748..6a904df 100644 --- a/docs/using_databank_api/main.py +++ b/docs/using_databank_api/main.py @@ -33,49 +33,65 @@ """ import json as simplejson -from HTTP_request import HTTPRequest +from lib.HTTP_request import HTTPRequest #--CONFIG------------------------------------------------------- -host = 'databank.ora.ox.ac.uk' -user_name = '' -password = '' +host = 'databank-vm1.oerc.ox.ac.uk' +user_name = 'admin' +password = 'test' datastore = HTTPRequest(endpointhost=host) datastore.setRequestUserPass(endpointuser=user_name, endpointpass=password) #--HTTP GET------------------------------------------------------- #Get a list of silos accessible to the user (resp, respdata) = datastore.doHTTP_GET(resource="/silos", expect_type="application/JSON") +print "Get list of silos" +print resp.status, resp.reason if resp.status >= 200 and resp.status < 300: -silos_list = simplejson.loads(respdata) + silos_list = simplejson.loads(respdata) + print "number of silos", len(silos_list) +print "-"*40, "\n\n" #--HTTP GET------------------------------------------------------- #Get a list of all the datasets in the silo 'sandbox' (resp, respdata) = datastore.doHTTP_GET(resource="/sandbox", expect_type="application/JSON") +print "Get list of datasets" +print resp.status, resp.reason if resp.status >= 200 and resp.status < 300: -dataset_list = simplejson.loads(respdata) + dataset_list = simplejson.loads(respdata) + print "number of datasets", len(dataset_list.keys()) +else: + print "Error getting list of datasets" +print "-"*40, "\n\n" + +#--HTTP DELETE------------------------------------------------------- +#Delete the dataset 'TestSubmission' in the silo 'sandbox' +(resp, respdata) = datastore.doHTTP_DELETE(resource="/sandbox/datasets/TestSubmission") +print "deleting dataset" +print resp.status, resp.reason +print respdata +print "-"*40, 
"\n\n" #--HTTP POST------------------------------------------------------- #Create a new dataset 'TestSubmission' in the silo 'sandbox' fields = [ -("id", "TestSubmission") + ("id", "TestSubmission") ] files =[] (reqtype, reqdata) = datastore.encode_multipart_formdata(fields, files) -(resp, respdata) = datastore.doHTTP_POST(reqdata, data_type=reqtype, resource="/sandbox/datsets", expect_type="application/JSON") -if resp.status >= 200 and resp.status < 300: - print resp.status, resp.reason - print respdata - -#--HTTP DELETE------------------------------------------------------- -#Delete the dataset 'TestSubmission' in the silo 'sandbox' -(resp, respdata) = datastore.doHTTP_DELETE(resource="/sandbox/datasets/TestSubmission") +(resp, respdata) = datastore.doHTTP_POST(reqdata, data_type=reqtype, resource="/sandbox/datasets", expect_type="application/JSON") +print "Create new dataset" print resp.status, resp.reason -print respdata +if resp.status >= 200 and resp.status < 300: + print respdata +else: + print "Error creating dataset" +print "-"*40, "\n\n" #--HTTP POST------------------------------------------------------- #Upload file to dataset - POST file to dataset 'TestSubmission' in silo 'sandbox' (path is /sandbox/datasets/TestSubmission) -file_name="testdir.zip" -file_path="data/testdir.zip" +file_name="testrdf4.zip" +file_path="data/testrdf4.zip" fields = [] zipdata = open(file_path).read() files = [ @@ -83,8 +99,33 @@ ] (reqtype, reqdata) = datastore.encode_multipart_formdata(fields, files) (resp, respdata) = datastore.doHTTP_POST(reqdata, data_type=reqtype, resource="/sandbox/datasets/TestSubmission", expect_type="application/JSON") +print "Post file testrdf4.zip to dataset" print resp.status, resp.reason -print respdata +if resp.status >= 200 and resp.status < 300: + print respdata +else: + print "Error posting file to dataset" +print "-"*40, "\n\n" + +#--HTTP POST------------------------------------------------------- +#Upload file to dataset and test munging. 
POST file to dataset 'TestSubmission' in silo 'sandbox' (path is /sandbox/datasets/TestSubmission) +#file_name="unicode07.xml" +file_name="manifest.rdf" +file_path="data/unicode07.xml" +fields = [] +zipdata = open(file_path).read() +files = [ + ("file", file_name, zipdata, "application/rdf+xml") +] +(reqtype, reqdata) = datastore.encode_multipart_formdata(fields, files) +(resp, respdata) = datastore.doHTTP_POST(reqdata, data_type=reqtype, resource="/sandbox/datasets/TestSubmission", expect_type="application/JSON") +print "Post file unicode07.xml to dataset" +print resp.status, resp.reason +if resp.status >= 200 and resp.status < 300: + print respdata +else: + print "Error posting file to dataset" +print "-"*40, "\n\n" #--HTTP PUT------------------------------------------------------- #example metadata constructed in rdf. Add this metadata to the manifest (PUT this in manifest.rdf file) @@ -94,15 +135,41 @@ xmlns:ore='http://www.openarchives.org/ore/terms/' xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#' > - + Test dataset Carl Sagan abstract """ -(resp, respdata) = datastore.doHTTP_PUT(metadata_content, resource="sandbox/datasets/TestSubmission/manifest.rdf", expect_type="text/plain") +(resp, respdata) = datastore.doHTTP_PUT(metadata_content, resource="/sandbox/datasets/TestSubmission/manifest.rdf", expect_type="text/plain") +print "Putting manifest data into dataset" print resp.status, resp.reason -print respdata +if resp.status >= 200 and resp.status < 300: + print respdata +else: + print "Error putting manifest data into dataset" +print "-"*40, "\n\n" + +#--HTTP POST------------------------------------------------------- +#Unpack zip file in dataset +file_name="testrdf4.zip" +fields = [] +fields = [ + ("filename", "testrdf4.zip"), + ("id", "TestSubmission") +] +zipdata = open(file_path).read() +files = [] +(reqtype, reqdata) = datastore.encode_multipart_formdata(fields, files) +(resp, respdata) = datastore.doHTTP_POST(reqdata, data_type=reqtype, 
resource="/sandbox/items/TestSubmission", expect_type="application/JSON") +print "Post file testrdf4.zip to dataset for unpacking" +print resp.status, resp.reason +if resp.status >= 200 and resp.status < 300: + print respdata +else: + print "Error unpacking file to dataset" +print "-"*40, "\n\n" #--------------------------------------------------------- + diff --git a/docs/using_databank_api/postingToDatabank.py b/docs/using_databank_api/postingToDatabank.py index 89326b3..095f8b2 100644 --- a/docs/using_databank_api/postingToDatabank.py +++ b/docs/using_databank_api/postingToDatabank.py @@ -1,31 +1,7 @@ -#-*- coding: utf-8 -*- -""" -Copyright (c) 2012 University of Oxford - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, --INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-""" - import urllib2 import base64 import urllib -from multipart import MultiPartFormData +from lib.multipart import MultiPartFormData import os #=============================================================================== @@ -54,7 +30,7 @@ #Add a file filename = "solrconfig.xml" -filepath = "./solrconfig.xml" +filepath = "data/unicode07.xml" f = open(filepath, 'rb') stat_info = os.stat(filepath) @@ -86,4 +62,4 @@ #print print 'SERVER RESPONSE:' ans2.read() -#=============================================================================== \ No newline at end of file +#=============================================================================== diff --git a/production-jenkins.ini b/production-jenkins.ini index 66c8267..c0c6e0a 100644 --- a/production-jenkins.ini +++ b/production-jenkins.ini @@ -44,28 +44,30 @@ use = egg:rdfdatabank full_stack = true static_files = true -cache_dir = %(here)s/data +sqlalchemy.url = mysql://databanksqladmin:d6sqL4dm;n@localhost:3306/databankauth +sqlalchemy.pool_recycle = 3600 + +cache_dir = /var/cache/databank beaker.session.key = rdfdatabank beaker.session.secret = somesecret who.config_file = %(here)s/who.ini who.log_level = info who.log_file = /var/log/databank/who.log -#who.log_file = %(here)s/logs/who.log redis.host = localhost granary.store = %(here)s/silos granary.uri_root = http://dataflow-jenkins.bodleian.ox.ac.uk/ -#profile.log_filename = %(here)s/logs/profile.log +#profile.log_filename = /var/log/databank/profile.log #profile.path = /__profile__ -auth.file = %(here)s/passwd -auth.info = %(here)s/rdfdatabank/config/users.py +#auth.file = /var/lib/databank/passwd +#auth.info = /var/lib/databank/rdfdatabank/config/users.py -doi.config = %(here)s/rdfdatabank/config/doi_config.py -doi.count = %(here)s/rdfdatabank/config/doi_count +doi.config = /var/lib/databank/rdfdatabank/config/doi_config.py +doi.count = /var/lib/databank/rdfdatabank/config/doi_count broadcast.to = redis broadcast.queue = silochanges @@ -74,13 +76,14 @@ 
metadata.embargoed = False solr.host = http://localhost:8080/solr naming_rule = [^0-9a-zA-Z_\-\:] +naming_rule_humanized = Numbers, alphabets and -: formats_served = text/html,text/xhtml,text/plain,application/json,application/rdf+xml,text/xml,text/rdf+n3,application/x-turtle,text/rdf+ntriples,text/rdf+nt publisher = Bodleian Libraries, University of Oxford rights = http://ora.ouls.ox.ac.uk/objects/uuid%3A1d00eebb-8fed-46ad-8e38-45dbdb4b224c license = CC0 1.0 Universal (CC0 1.0). See http://creativecommons.org/publicdomain/zero/1.0/legalcode #license = Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License. See http://creativecommons.org/licenses/by-nc-sa/3.0/ -api.version = 0.2 +api.version = 0.3 # If you'd like to fine-tune the individual locations of the cache data dirs # for the Cache data, or the Session saves, un-comment the desired settings diff --git a/production.ini b/production.ini index 5096080..30185de 100644 --- a/production.ini +++ b/production.ini @@ -60,8 +60,8 @@ redis.host = localhost granary.store = /silos granary.uri_root = http://databank/ -auth.file = /var/lib/databank/passwd -auth.info = /var/lib/databank/rdfdatabank/config/users.py +#auth.file = /var/lib/databank/passwd +#auth.info = /var/lib/databank/rdfdatabank/config/users.py doi.config = /var/lib/databank/rdfdatabank/config/doi_config.py doi.count = /var/lib/databank/rdfdatabank/config/doi_count @@ -73,13 +73,14 @@ metadata.embargoed = False solr.host = http://localhost:8080/solr naming_rule = [^0-9a-zA-Z_\-\:] +naming_rule_humanized = Numbers, alphabets and -: formats_served = text/html,text/xhtml,text/plain,application/json,application/rdf+xml,text/xml,text/rdf+n3,application/x-turtle,text/rdf+ntriples,text/rdf+nt publisher = Bodleian Libraries, University of Oxford rights = http://ora.ouls.ox.ac.uk/objects/uuid%3A1d00eebb-8fed-46ad-8e38-45dbdb4b224c license = CC0 1.0 Universal (CC0 1.0). 
See http://creativecommons.org/publicdomain/zero/1.0/legalcode #license = Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License. See http://creativecommons.org/licenses/by-nc-sa/3.0/ -api.version = 0.2 +api.version = 0.3 # If you'd like to fine-tune the individual locations of the cache data dirs # for the Cache data, or the Session saves, un-comment the desired settings diff --git a/rdfdatabank/config/middleware.py b/rdfdatabank/config/middleware.py index ae99e76..01cb991 100644 --- a/rdfdatabank/config/middleware.py +++ b/rdfdatabank/config/middleware.py @@ -84,6 +84,7 @@ def make_app(global_conf, full_stack=True, static_files=True, **app_conf): app = SessionMiddleware(app, config) app = CacheMiddleware(app, config) + #TODO: Check if the new error controller works with sword server # CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares) if asbool(full_stack): # Handle Python exceptions diff --git a/rdfdatabank/config/routing.py b/rdfdatabank/config/routing.py index 4008cb9..9e4ffc9 100644 --- a/rdfdatabank/config/routing.py +++ b/rdfdatabank/config/routing.py @@ -48,7 +48,7 @@ def make_map(): _redirect_code='301 Moved Permanently') #Special controller to redirect datasets from databank.ouls to databank.ora - map.connect('/objects/{id}', controller='redirect', action='index') + #map.connect('/objects/{id}', controller='redirect', action='index') map.connect("/login", controller='account', action='login') map.connect("/logout", controller='account', action='logout') @@ -74,8 +74,6 @@ def make_map(): map.connect('/{silo}/datasets', controller='datasets', action='siloview') map.connect('/{silo}/datasets/{id}', controller='datasets', action='datasetview') - map.connect('/{silo}/datasets/{id}/version{vnum}', controller='datasets', action='datasetview_vnum') - map.connect('/{silo}/datasets/{id}/{path:.*}/version{vnum}', controller='datasets', action='itemview_vnum') map.connect('/{silo}/datasets/{id}/{path:.*}', controller='datasets', 
action='itemview') map.connect('/{silo}/items', controller='items', action='siloview') @@ -86,10 +84,16 @@ def make_map(): map.connect('/{silo}/states', controller='states', action='siloview') map.connect('/{silo}/states/{id}', controller='states', action='datasetview') - map.connect('/{silo}/states/{id}/version{vnum}', controller='states', action='datasetview_vnum') map.connect('/{silo}/doi/{id}', controller='doi', action='datasetview') + # SWORDv2 Configuration + map.connect('/swordv2/service-document', controller="sword", action="service_document") # From which to retrieve the service document + map.connect('/swordv2/silo/{path:.*?}', controller="sword", action="collection") # Representing a Collection as listed in the service document + map.connect('/swordv2/edit-media/{path:.*?}', controller="sword", action="media_resource") # The URI used in atom:link@rel=edit-media + map.connect('/swordv2/edit/{path:.*?}', controller="sword", action="container") # The URI used in atom:link@rel=edit + map.connect('/swordv2/statement/{path:.*?}', controller="sword", action="statement") # The URI used in atom:link@rel=sword:statement + map.connect('/{controller}') map.connect('/{controller}/{action}') map.connect('/{controller}/{action}/{id}') diff --git a/rdfdatabank/controllers/datasets.py b/rdfdatabank/controllers/datasets.py index bc6a102..02f52d4 100644 --- a/rdfdatabank/controllers/datasets.py +++ b/rdfdatabank/controllers/datasets.py @@ -133,7 +133,7 @@ def siloview(self, silo): response.content_type = "text/plain" response.status_int = 400 response.status = "400 Bad request. 
Dataset name not valid" - return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule + return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule_humanized del params['id'] item = create_new(c_silo, id, ident['repoze.who.userid'], **params) @@ -201,10 +201,12 @@ def datasetview(self, silo, id): abort(403, "Forbidden") silos_admin = ag.authz(granary_list, ident, permission='administrator') silos_manager = ag.authz(granary_list, ident, permission='manager') - elif http_method == "GET": + + if http_method in ["GET", "DELETE"]: if not c_silo.exists(id): abort(404) + if http_method == "GET": embargoed = False item = c_silo.get_item(id) @@ -271,8 +273,6 @@ def datasetview(self, silo, id): else: c.view = 'user' - # Method determination - if http_method == "GET": c.embargos = {} c.embargos[id] = is_embargoed(c_silo, id) c.parts = item.list_parts(detailed=True) @@ -353,55 +353,24 @@ def datasetview(self, silo, id): #Whoops - nothing staisfies - default to text/html return render('/datasetview.html') elif http_method == "POST": - params = request.POST + code = None + #Create new dataset if it does not exist if not c_silo.exists(id): - #Create new data-package. Any authorized user can do this if not allowable_id2(id): response.content_type = "text/plain" response.status_int = 400 response.status = "400 Bad request. 
Dataset name not valid" - return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule - if 'id' in params.keys(): - del params['id'] + return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule_humanized + params = {} item = create_new(c_silo, id, ident['repoze.who.userid'], **params) - - # Broadcast change as message - try: - ag.b.creation(silo, id, ident=ident['repoze.who.userid']) - except: - pass - - # conneg return - accept_list = None - if 'HTTP_ACCEPT' in request.environ: - try: - accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) - except: - accept_list= [MT("text", "html")] - if not accept_list: - accept_list= [MT("text", "html")] - mimetype = accept_list.pop(0) - while(mimetype): - if str(mimetype).lower() in ["text/html", "text/xhtml"]: - # probably a browser - redirect to newly created dataset - redirect(url(controller="datasets", action="datasetview", silo=silo, id=id)) - elif str(mimetype).lower() in ["text/plain", "application/json"]: - response.content_type = "text/plain" - response.status_int = 201 - response.status = "201 Created" - response.headers["Content-Location"] = url(controller="datasets", action="datasetview", silo=silo, id=id) - return "201 Created" - try: - mimetype = accept_list.pop(0) - except IndexError: - mimetype = None - # Whoops - nothing satisfies - return text/plain - response.content_type = "text/plain" - response.status_int = 201 - response.headers["Content-Location"] = url(controller="datasets", action="datasetview", silo=silo, id=id) + code = 201 response.status = "201 Created" - return "201 Created" - elif params.has_key('embargoed') and params['embargoed']: + response.status_int = 201 + response.headers["Content-Location"] = url(controller="datasets", action="datasetview", id=id, silo=silo) + response_message = "201 Created empyt data package" + #Update embargo info + params = request.POST + if 
params.has_key('embargoed') and params['embargoed']: item = c_silo.get_item(id) creator = None if item.manifest and item.manifest.state and 'metadata' in item.manifest.state and item.manifest.state['metadata'] and \ @@ -456,38 +425,15 @@ def datasetview(self, silo, id): item.add_triple(item.uri, u"oxds:currentVersion", item.currentversion) item.sync() - # conneg return - accept_list = None - if 'HTTP_ACCEPT' in request.environ: - try: - accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) - except: - accept_list= [MT("text", "html")] - if not accept_list: - accept_list= [MT("text", "html")] - mimetype = accept_list.pop(0) - while(mimetype): - if str(mimetype).lower() in ["text/html", "text/xhtml"]: - redirect(url(controller="datasets", action="datasetview", id=id, silo=silo)) - elif str(mimetype).lower() in ["text/plain", "application/json"]: - response.content_type = "text/plain" - response.status_int = 204 - response.status = "204 Updated" - return - try: - mimetype = accept_list.pop(0) - except IndexError: - mimetype = None - #Whoops - nothing satisfies - return text / plain - response.content_type = "text/plain" - response.status_int = 204 - response.status = "204 Updated" - return - elif params.has_key('file'): + if not code: + code = 204 + response.content_type = "text/plain" + response.status_int = 204 + response.status = "204 Updated" + response_message = None + if params.has_key('file'): # File upload by a not-too-savvy method - Service-orientated fallback: # Assume file upload to 'filename' - params = request.POST - item = c_silo.get_item(id) creator = None if item.manifest and item.manifest.state and 'metadata' in item.manifest.state and item.manifest.state['metadata'] and \ @@ -565,33 +511,8 @@ def datasetview(self, silo, id): response.status = "204 Updated" response.status_int = 204 response_message = None - - # conneg return - accept_list = None - if 'HTTP_ACCEPT' in request.environ: - try: - accept_list = 
conneg_parse(request.environ['HTTP_ACCEPT']) - except: - accept_list= [MT("text", "html")] - if not accept_list: - accept_list= [MT("text", "html")] - mimetype = accept_list.pop(0) - while(mimetype): - if str(mimetype).lower() in ["text/html", "text/xhtml"]: - redirect(url(controller="datasets", action="datasetview", id=id, silo=silo)) - elif str(mimetype).lower() in ["text/plain"]: - response.content_type = "text/plain" - return response_message - try: - mimetype = accept_list.pop(0) - except IndexError: - mimetype = None - #Whoops - nothing satisfies - return text / plain - response.content_type = "text/plain" - return response_message elif params.has_key('text'): # Text upload convenience service - params = request.POST item = c_silo.get_item(id) filename = params.get('filename') if not filename: @@ -665,38 +586,35 @@ def datasetview(self, silo, id): response.status = "204 Updated" response.status_int = 204 response_message = None - # conneg return - accept_list = None - if 'HTTP_ACCEPT' in request.environ: - try: - accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) - except: - accept_list= [MT("text", "html")] - if not accept_list: - accept_list= [MT("text", "html")] - mimetype = accept_list.pop(0) - while(mimetype): - if str(mimetype).lower() in ["text/html", "text/xhtml"]: - redirect(url(controller="datasets", action="datasetview", id=id, silo=silo)) - elif str(mimetype).lower() in ["text/plain", "application/json"]: - response.content_type = "text/plain" - return response_message - try: - mimetype = accept_list.pop(0) - except IndexError: - mimetype = None - #Whoops - nothing satisfies - return text / plain - response.content_type = "text/plain" - return response_message - else: + if not code: response.content_type = "text/plain" response.status_int = 400 response.status = "400 Bad request" return "400 Bad Request. No valid parameters found." 
+ # conneg return + accept_list = None + if 'HTTP_ACCEPT' in request.environ: + try: + accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) + except: + accept_list= [MT("text", "html")] + if not accept_list: + accept_list= [MT("text", "html")] + mimetype = accept_list.pop(0) + while(mimetype): + if str(mimetype).lower() in ["text/html", "text/xhtml"]: + redirect(url(controller="datasets", action="datasetview", id=id, silo=silo)) + elif str(mimetype).lower() in ["text/plain", "application/json"]: + response.content_type = "text/plain" + return response_message + try: + mimetype = accept_list.pop(0) + except IndexError: + mimetype = None + #Whoops - nothing satisfies - return text / plain + response.content_type = "text/plain" + return response_message elif http_method == "DELETE": - if not c_silo.exists(id): - abort(404) - item = c_silo.get_item(id) creator = None if item.manifest and item.manifest.state and 'metadata' in item.manifest.state and item.manifest.state['metadata'] and \ @@ -719,115 +637,6 @@ def datasetview(self, silo, id): response.status = "200 OK" return "{'ok':'true'}" # required for the JQuery magic delete to succede. 
- @rest.restrict('GET') - def datasetview_vnum(self, silo, id, vnum): - if not ag.granary.issilo(silo): - abort(404) - c.silo_name = silo - c.id = id - c_silo = ag.granary.get_rdf_silo(silo) - - if not c_silo.exists(id): - abort(404) - - item = c_silo.get_item(id) - vnum = str(vnum) - if not vnum in item.manifest['versions']: - abort(404) - - # filename options - used to check if DOI redirects the parameters - c.options = request.GET - - #Set the item's version cursor - item.set_version_cursor(vnum) - c.version = vnum - - embargoed = False - c.editor = False - - if item.metadata.get('embargoed') not in ["false", 0, False]: - embargoed = True - if embargoed: - if not request.environ.get('repoze.who.identity'): - abort(401, "Not Authorized") - ident = request.environ.get('repoze.who.identity') - c.ident = ident - granary_list = ag.granary.silos - silos = ag.authz(granary_list, ident) - if silo not in silos: - abort(403, "Forbidden") - c.editor = silo in silos - elif request.environ.get('repoze.who.identity'): - ident = request.environ.get('repoze.who.identity') - c.ident = ident - granary_list = ag.granary.silos - silos = ag.authz(granary_list, ident) - c.editor = silo in silos - - # Check to see if embargo is on: - c.embargos = {} - c.embargos[id] = is_embargoed(c_silo, id) - c.readme_text = None - c.parts = item.list_parts(detailed=True) - c.manifest_pretty = item.rdf_to_string(format="pretty-xml") - if "README" in c.parts.keys(): - c.readme_text = get_readme_text(item) - - accept_list = None - if 'HTTP_ACCEPT' in request.environ: - try: - accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) - except: - accept_list= [MT("text", "html")] - if not accept_list: - accept_list= [MT("text", "html")] - mimetype = accept_list.pop(0) - - while(mimetype): - if str(mimetype).lower() in ["text/html", "text/xhtml"]: - return render('/datasetview_version.html') - elif str(mimetype).lower() in ["text/plain", "application/json"]: - response.content_type = 'application/json; 
charset="UTF-8"' - response.status_int = 200 - response.status = "200 OK" - returndata = {} - returndata['embargos'] = c.embargos - returndata['view'] = c.view - returndata['editor'] = c.editor - returndata['parts'] = {} - for part in c.parts: - returndata['parts'][part] = serialisable_stat(c.parts[part]) - returndata['readme_text'] = c.readme_text - returndata['manifest_pretty'] = c.manifest_pretty - return simplejson.dumps(returndata) - elif str(mimetype).lower() in ["application/rdf+xml", "text/xml"]: - response.content_type = 'application/rdf+xml; charset="UTF-8"' - response.status_int = 200 - response.status = "200 OK" - return c.manifest_pretty - elif str(mimetype).lower() == "text/rdf+n3": - response.content_type = 'text/rdf+n3; charset="UTF-8"' - response.status_int = 200 - response.status = "200 OK" - return item.rdf_to_string(format="n3") - elif str(mimetype).lower() == "application/x-turtle": - response.content_type = 'application/x-turtle; charset="UTF-8"' - response.status_int = 200 - response.status = "200 OK" - return item.rdf_to_string(format="turtle") - elif str(mimetype).lower() in ["text/rdf+ntriples", "text/rdf+nt"]: - response.content_type = 'text/rdf+ntriples; charset="UTF-8"' - response.status_int = 200 - response.status = "200 OK" - return item.rdf_to_string(format="nt") - # Whoops - nothing satisfies - try: - mimetype = accept_list.pop(0) - except IndexError: - mimetype = None - #Whoops - nothing staisfies - default to text/html - return render('/datasetview_version.html') - @rest.restrict('GET', 'POST', 'PUT', 'DELETE') def itemview(self, silo, id, path): if not ag.granary.issilo(silo): @@ -1212,87 +1021,3 @@ def itemview(self, silo, id, path): else: abort(404) - @rest.restrict('GET') - def itemview_vnum(self, silo, id, path, vnum): - if not ag.granary.issilo(silo): - abort(404) - # Check to see if embargo is on: - c.silo_name = silo - c.id = id - c.version = vnum - c.path = path - c_silo = ag.granary.get_rdf_silo(silo) - - if not 
c_silo.exists(id): - # dataset doesn't exist - abort(404) - - item = c_silo.get_item(id) - vnum = str(vnum) - if not vnum in item.manifest['versions']: - abort(404) - item.set_version_cursor(vnum) - - embargoed = False - if item.metadata.get('embargoed') not in ["false", 0, False]: - embargoed = True - - if embargoed: - #identity management if item - if not request.environ.get('repoze.who.identity'): - abort(401, "Not Authorised") - ident = request.environ.get('repoze.who.identity') - c.ident = ident - granary_list = ag.granary.silos - silos = ag.authz(granary_list, ident) - if silo not in silos: - abort(403, "Forbidden") - silos_admin = ag.authz(granary_list, ident, permission='administrator') - silos_manager = ag.authz(granary_list, ident, permission='manager') - creator = None - if item.manifest and item.manifest.state and 'metadata' in item.manifest.state and item.manifest.state['metadata'] and \ - 'createdby' in item.manifest.state['metadata'] and item.manifest.state['metadata']['createdby']: - creator = item.manifest.state['metadata']['createdby'] - #if not ident['repoze.who.userid'] == creator and not ident.get('role') in ["admin", "manager"]: - if not (ident['repoze.who.userid'] == creator or silo in silos_admin or silo in silos_manager): - abort(403) - - if item.isfile(path): - fileserve_app = FileApp(item.to_dirpath(path)) - return fileserve_app(request.environ, self.start_response) - elif item.isdir(path): - c.parts = item.list_parts(path, detailed=True) - c.readme_text = None - if "README" in c.parts.keys(): - c.readme_text = get_readme_text(item, "%s/README" % path) - - accept_list = None - if 'HTTP_ACCEPT' in request.environ: - try: - accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) - except: - accept_list= [MT("text", "html")] - if not accept_list: - accept_list= [MT("text", "html")] - mimetype = accept_list.pop(0) - while(mimetype): - if str(mimetype).lower() in ["text/html", "text/xhtml"]: - return render("/itemview_version.html") - elif 
str(mimetype).lower() in ["text/plain", "application/json"]: - response.content_type = 'application/json; charset="UTF-8"' - response.status_int = 200 - response.status = "200 OK" - returndata = {} - returndata['parts'] = {} - for part in c.parts: - returndata['parts'][part] = serialisable_stat(c.parts[part]) - returndata['readme_text'] = c.readme_text - return simplejson.dumps(returndata) - try: - mimetype = accept_list.pop(0) - except IndexError: - mimetype = None - #Whoops - nothing satisfies - return text/html - return render("/itemview_version.html") - else: - abort(404) diff --git a/rdfdatabank/controllers/error.py b/rdfdatabank/controllers/error.py index 82188c3..ac76f03 100644 --- a/rdfdatabank/controllers/error.py +++ b/rdfdatabank/controllers/error.py @@ -50,6 +50,7 @@ def document(self): resp = request.environ.get('pylons.original_response') content = literal(resp.body) or cgi.escape(request.GET.get('message', '')) code = cgi.escape(request.GET.get('code', str(resp.status_int))) + accept_list = None if 'HTTP_ACCEPT' in request.environ: try: accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) diff --git a/rdfdatabank/controllers/items.py b/rdfdatabank/controllers/items.py index fddd106..bf658bb 100644 --- a/rdfdatabank/controllers/items.py +++ b/rdfdatabank/controllers/items.py @@ -23,7 +23,7 @@ """ import logging -import os, time +import re, os, time from datetime import datetime, timedelta import simplejson from pylons import request, response, session, tmpl_context as c, url, app_globals as ag @@ -36,6 +36,7 @@ from rdfdatabank.lib.conneg import MimeType as MT, parse as conneg_parse log = logging.getLogger(__name__) +JAILBREAK = re.compile("[\/]*\.\.[\/]*") class ItemsController(BaseController): def siloview(self, silo): @@ -154,7 +155,7 @@ def datasetview(self, silo, id): response.content_type = "text/plain" response.status_int = 400 response.status = "400 Bad request. 
Dataset name not valid" - return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule + return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule_humanized target_dataset = create_new(rdfsilo, target_dataset_name, ident['repoze.who.userid']) response.status_int = 201 response.status = "201 Created" @@ -176,6 +177,17 @@ def datasetview(self, silo, id): target_dataset.sync() target_dataset.sync() + if response.status_int == 201: + try: + ag.b.creation(silo, id, ident=ident['repoze.who.userid']) + except: + pass + else: + try: + ag.b.change(silo, id, ident=ident['repoze.who.userid']) + except: + pass + # conneg return accept_list = None if 'HTTP_ACCEPT' in request.environ: @@ -200,9 +212,12 @@ def datasetview(self, silo, id): response.content_type = "text/plain" return response_message - @rest.restrict('GET', 'POST') + @rest.restrict('GET', 'POST', 'PUT') def itemview(self, silo, id, path): - """API call to read the contents of a zip-file (without having to unpack) and unpack a zip file into a new / existing dataset""" + """API call to + GET - read the contents of a zip-file (without having to unpack) and + POST- unpack a zip file into a new / existing dataset + PUT - Add the zipfile and unpack it onto the existing dataset""" #tmpl_context variables needed: c.silo_name, c.zipfile_contents c.ident, c.id, c.path if not path: abort(400, "You must supply a filename to unpack") @@ -252,12 +267,14 @@ def itemview(self, silo, id, path): item_real_filepath = dataset.to_dirpath() target_filepath = "%s/%s"%(item_real_filepath, path) #c.parts = dataset.list_parts(detailed=False) - if not dataset.isfile(path): - abort(404, "File not found") - if not os.path.isfile(target_filepath): - abort(404, "File not found") - if not check_file_mimetype(target_filepath, 'application/zip'): - abort(415, "File is not of type application/zip") + + if http_method in ["GET", 
"POST"]: + if not dataset.isfile(path): + abort(404, "File not found") + if not os.path.isfile(target_filepath): + abort(404, "File not found") + if not check_file_mimetype(target_filepath, 'application/zip'): + abort(415, "File is not of type application/zip") if http_method == "GET": try: @@ -307,7 +324,7 @@ def itemview(self, silo, id, path): response.content_type = "text/plain" response.status_int = 400 response.status = "400 Bad request. Dataset name not valid" - return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule + return "Dataset name can contain only the following characters - %s and has to be more than 1 character"%ag.naming_rule_humanized target_dataset = create_new(rdfsilo, target_dataset_name, ident['repoze.who.userid']) response.status_int = 201 response.status = "201 Created" @@ -329,6 +346,17 @@ def itemview(self, silo, id, path): target_dataset.sync() target_dataset.sync() + if response.status_int == 201: + try: + ag.b.creation(silo, id, ident=ident['repoze.who.userid']) + except: + pass + else: + try: + ag.b.change(silo, id, ident=ident['repoze.who.userid']) + except: + pass + # conneg return accept_list = None if 'HTTP_ACCEPT' in request.environ: @@ -352,6 +380,83 @@ def itemview(self, silo, id, path): # Whoops - nothing satisfies - return text/plain response.content_type = "text/plain" return response_message + elif http_method == "PUT": + # Pylons loads the request body into request.body... + # This is not going to work for large files... ah well + # POST will handle large files as they are pushed to disc, + # but this won't + content = request.body + + if JAILBREAK.search(path) != None: + abort(400, "'..' 
cannot be used in the path") + + #Step 1: Put zipfile in dataset + if dataset.isdir(path): + response.content_type = "text/plain" + response.status_int = 403 + response.status = "403 Forbidden" + return "Cannot PUT a file on to an existing directory" + + if dataset.isfile(path): + code = 204 + else: + code = 201 + + if code == 204: + dataset.increment_version_delta(clone_previous_version=True, copy_filenames=['manifest.rdf', path]) + else: + dataset.increment_version_delta(clone_previous_version=True, copy_filenames=['manifest.rdf']) + dataset.put_stream(path, content) + dataset.del_triple(dataset.uri, u"dcterms:modified") + dataset.add_triple(dataset.uri, u"dcterms:modified", datetime.now()) + dataset.del_triple(dataset.uri, u"oxds:currentVersion") + dataset.add_triple(dataset.uri, u"oxds:currentVersion", dataset.currentversion) + dataset.sync() + + target_dataset = rdfsilo.get_item(id) + #step 2: Unpack zip item + if not check_file_mimetype(target_filepath, 'application/zip'): + abort(415, "File is not of type application/zip") + try: + unpack_zip_item(target_dataset, dataset, path, rdfsilo, ident['repoze.who.userid']) + except BadZipfile: + abort(400, "Couldn't unpack zipfile") + + target_dataset.sync() + target_dataset.sync() + target_dataset.sync() + + response.status = "204 Updated" + response.status_int = 204 + response_message = None + try: + ag.b.change(silo, id, path, ident=ident['repoze.who.userid']) + except: + pass + + # conneg return + accept_list = None + if 'HTTP_ACCEPT' in request.environ: + try: + accept_list = conneg_parse(request.environ['HTTP_ACCEPT']) + except: + accept_list= [MT("text", "html")] + if not accept_list: + accept_list= [MT("text", "html")] + mimetype = accept_list.pop(0) + while(mimetype): + if str(mimetype).lower() in ["text/html", "text/xhtml"]: + redirect(url(controller="datasets", action="datasetview", silo=silo, id=id)) + elif str(mimetype).lower() in ["text/plain", "application/json"]: + response.content_type = "text/plain" 
+ return response_message + try: + mimetype = accept_list.pop(0) + except IndexError: + mimetype = None + # Whoops - nothing satisfies - return text/plain + response.content_type = "text/plain" + return response_message @rest.restrict('GET') def subitemview(self, silo, id, path, subpath): diff --git a/rdfdatabank/controllers/search.py b/rdfdatabank/controllers/search.py index 455b9ee..1641fe6 100644 --- a/rdfdatabank/controllers/search.py +++ b/rdfdatabank/controllers/search.py @@ -175,8 +175,8 @@ def detailed(self, query=None, additional_fields=[]): start = request.params.get('start', None) rows = request.params.get('rows', None) sort = request.params.get('sort', None) - format = request.params.get('format', None) - if not format: + res_format = request.params.get('format', None) + if not res_format: accept_list = None if 'HTTP_ACCEPT' in request.environ: try: @@ -188,23 +188,24 @@ def detailed(self, query=None, additional_fields=[]): mimetype = accept_list.pop(0) while(mimetype): if str(mimetype).lower() in ["text/html", "text/xhtml"]: - format = 'html' + res_format = 'html' break elif str(mimetype).lower() in ["text/plain", "application/json"]: - format = 'json' + res_format = 'json' break elif str(mimetype).lower() in ["text/xml"]: - format = 'xml' + res_format = 'xml' break elif str(mimetype).lower() in ["text/csv"]: - format = 'csv' + res_format = 'csv' break try: mimetype = accept_list.pop(0) except IndexError: mimetype = None # Whoops - nothing satisfies - return text/plain - format = 'json' + if not res_format: + res_format = 'html' c.sort = 'score desc' # Lock down the sort parameter. 
@@ -309,8 +310,8 @@ def detailed(self, query=None, additional_fields=[]): else: solr_params['q'] = c.q.encode('utf-8')+query_filter - if format in ['json', 'xml', 'python', 'php']: - solr_params['wt'] = format + if res_format in ['json', 'xml', 'python', 'php']: + solr_params['wt'] = res_format else: solr_params['wt'] = 'json' @@ -342,16 +343,16 @@ def detailed(self, query=None, additional_fields=[]): # conneg return response.status_int = 200 response.status = "200 OK" - if format == "html": + if res_format == "html": c.numFound = 0 c.message = 'Sorry, either that search "%s" resulted in no matches, or the search service is not functional.' % c.q return render('/search.html') - elif format == 'xml': + elif res_format == 'xml': response.headers['Content-Type'] = 'application/xml' response.charset = 'utf8' c.atom = {} return render('/atom_results.html') - elif format == 'json': + elif res_format == 'json': response.headers['Content-Type'] = 'application/json' response.charset = 'utf8' return {} @@ -362,16 +363,16 @@ def detailed(self, query=None, additional_fields=[]): response.status_int = 200 response.status = "200 OK" - if format == 'xml': + if res_format == 'xml': response.headers['Content-Type'] = 'application/xml' response.charset = 'utf8' c.atom = solr_response return render('/atom_results.html') - elif format == 'json': + elif res_format == 'json': response.headers['Content-Type'] = 'application/json' response.charset = 'utf8' return solr_response - elif format in ['csv', 'python', 'php']: + elif res_format in ['csv', 'python', 'php']: response.headers['Content-Type'] = 'application/text' response.charset = 'utf8' return solr_response diff --git a/rdfdatabank/lib/app_globals.py b/rdfdatabank/lib/app_globals.py index ce3425a..b0195a8 100644 --- a/rdfdatabank/lib/app_globals.py +++ b/rdfdatabank/lib/app_globals.py @@ -88,6 +88,11 @@ def __init__(self): if config.has_key("naming_rule"): self.naming_rule = config['naming_rule'] + if 
config.has_key("naming_rule_humanized"): + self.naming_rule_humanized = config['naming_rule_humanized'] + elif config.has_key("naming_rule"): + self.naming_rule_humanized = config['naming_rule'] + if config.has_key("metadata.embargoed"): self.metadata_embargoed = config['metadata.embargoed'] if isinstance(self.metadata_embargoed, basestring): diff --git a/rdfdatabank/lib/file_unpack.py b/rdfdatabank/lib/file_unpack.py index 0ac4dca..90a1521 100644 --- a/rdfdatabank/lib/file_unpack.py +++ b/rdfdatabank/lib/file_unpack.py @@ -191,7 +191,7 @@ def unpack_zip_item(target_dataset, current_dataset, zip_item, silo, ident): # -- Step 4 ----------------------------- #Copy unpacked dir as new version - target_dataset.move_directory_as_new_version(unpacked_dir) + target_dataset.move_directory_as_new_version(unpacked_dir, log="Unpacked file %s. Contents"%zip_item) # -- Step 5 ----------------------------- #Add type and isVersionOf metadata @@ -242,12 +242,14 @@ def unpack_zip_item(target_dataset, current_dataset, zip_item, silo, ident): os.remove(mani_file) # -- Step 7 ----------------------------- + #uri_s = "%s/%s" % (current_dataset.uri, zip_item.lstrip(os.sep)) + #uri_p = "%s?version=%s" % (target_dataset.uri, target_dataset.currentversion) + #current_dataset.add_triple(uri_s, "dcterms:hasVersion", uri_p) + #current_dataset.sync() + target_dataset.sync() target_dataset.sync() target_dataset.sync() - current_dataset.add_triple("%s/%s" % (current_dataset.uri, zip_item.lstrip(os.sep)), "dcterms:hasVersion", target_dataset.uri) - current_dataset.sync() - return True """ diff --git a/rdfdatabank/lib/utils.py b/rdfdatabank/lib/utils.py index 0e75975..5ecb04a 100644 --- a/rdfdatabank/lib/utils.py +++ b/rdfdatabank/lib/utils.py @@ -310,6 +310,8 @@ def munge_manifest(manifest_file, item): pass for (s, p, o) in triples: item.add_triple(s, p, o) + manifest_file_name = os.path.basename(manifest_file) + item.manifest['versionlog'][item.currentversion].append('Updated file manifest.rdf') 
item.sync() if seeAlsoFiles: for fileuri in seeAlsoFiles: diff --git a/rdfdatabank/public/static/styles/basic.css b/rdfdatabank/public/static/styles/basic.css index a80293a..a848892 100644 --- a/rdfdatabank/public/static/styles/basic.css +++ b/rdfdatabank/public/static/styles/basic.css @@ -642,7 +642,8 @@ color: #fff; .intro, .pleaseNote, -.message{ +.message, +.about { background: #fdfefe; /* for non-css3 browsers */ filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fdfefe', endColorstr='#f4f7fa'); /* for IE */ background: -webkit-gradient(linear, left top, left bottom, from(#fdfefe), to(#f4f7fa)); /* for webkit browsers */ @@ -667,7 +668,14 @@ color: #fff; font-weight: bold; } +.about { +margin: 0px 20px; +padding: 10px 30px; +} +.about p { + margin-bottom: 20px; +} .create-silo form { border: 1px solid #eee; padding: 5px; @@ -790,32 +798,66 @@ div.response_doc { padding-bottom: 20px; border-bottom: 1px solid #999; } -.response_doc span.stitle a { + +.response_doc .silotitle { font-weight: bold; - color: #002147; - font-size: 14px; + font-size: 1.2em; + margin-bottom: 5px; } -.row { - width: 100%; - clear: both; + +.response_doc .silotitle a { + font-weight: bold; + font-size: 1.2em; + + background: #7e94a5; /* for non-css3 browsers */ + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#7e94a5', endColorstr='#90b6b7'); /* for IE */ + background: -webkit-gradient(linear, left top, left bottom, from(#7e94a5), to(#90b6b7)); /* for webkit browsers */ + background: -moz-linear-gradient(top, #7e94a5, #90b6b7); /* for firefox 3.6+ */ + + color: #fff; + display: block; + padding: 5px; + text-decoration: none; + } -.label { +#results .response_doc .silotitle a:focus, +#results .response_doc .silotitle a:hover { + +text-decoration: underline; + +} + + +.response_doc .silotitle a:visited { + + + background: #425e77; /* for non-css3 browsers */ + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#425e77', 
endColorstr='#609494'); /* for IE */ + background: -webkit-gradient(linear, left top, left bottom, from(#425e77), to(#609494)); /* for webkit browsers */ + background: -moz-linear-gradient(top, #425e77, #609494); /* for firefox 3.6+ */ + + +} + + + + +.response_doc dt { font-weight: bold; - color: #002147; + font-size: 1em; vertical-align: top; width: 150px; float:left; display: inline; } -.value, .value a, .value a:hover, .value a:active, .value a:visited { - display:inline; - padding-left:2px; - text-align:right; - color: #000000; - text-decoration: none; - font-weight: normal; +.response_doc dd { + padding-left:3px; + text-align:left; + font-weight: normal; + font-size: 1em; + margin-bottom: 5px; } .rt { @@ -826,12 +868,12 @@ div.response_doc { display: inline; } +/* facets in results */ #facet_wrapper { position: absolute; width: 160px; - left: 10px; margin-left: 10px; - top: 250px; + top: 150px; font-size: 11px; } @@ -840,10 +882,39 @@ div.response_doc { word-wrap: break-word; } +.facet_title { + /* + background: #BBB; + padding: 10px 5px; + border-bottom: 1px solid #002147 !important; + font-size: 1.3em; + font-weight: bold; + text-align: center; + */ + + background: #425e77; + color: #fff; + padding: 10px 5px; + border-bottom: 1px solid #fff !important; + font-size: 1.3em; + font-weight: bold; + text-align: center; + +} + .facet_results .subheading { + /* background: #BBB; padding: 4px 5px; border-bottom: 1px solid #002147 !important; + */ + background: #425e77; /* for non-css3 browsers */ + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#425e77', endColorstr='#609494'); /* for IE */ + background: -webkit-gradient(linear, left top, left bottom, from(#425e77), to(#609494)); /* for webkit browsers */ + background: -moz-linear-gradient(top, #425e77, #609494); /* for firefox 3.6+ */ + color: #fff; + padding: 4px 5px; + border-bottom: 1px solid #fff !important; } .facet_results .facetlist { @@ -855,7 +926,7 @@ div.response_doc { .facet_results 
.subheading a { text-decoration: none; - color: black; + color: #fff; font-weight: normal; background: url('/static/styles/images/fplus.png') no-repeat 0% 60%; padding-left: 18px; @@ -885,13 +956,78 @@ div.response_doc { font-size: 11px; font-weight: bold; } -#current_search { - font-weight: bold; - background: #002147; - color: white; - font-size: 1.2em; - border-bottom: 1px solid white; - padding: 4px 5px; + +/*Current search */ +.current_search { + + background: #FDFEFE; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fdfefe', endColorstr='#f4f7fa'); + background: -webkit-gradient(linear, left top, left bottom, from(#FDFEFE), to(#F4F7FA)); + background: -moz-linear-gradient(top, #FDFEFE, #F4F7FA); + border: 1px solid #DFE2E4; + -moz-border-radius: 5px; + -webkit-border-radius: 5px; + border-radius: 5px; + padding: 5px; +} + +h2.current_search_header { + + border-bottom: 1px solid #DFE2E4; + font-size: 1.3em; + margin: 0px; + padding: 0px; + +} + +.numRecords { + font-size: 1.2em; + font-weight: bold; +} + + +.current_facet { + margin: 10px; + border-bottom: 1px dotted #cccccc; + clear: both; + height: 25px; +} + +.current_facet .search_remove { + float:left; + position:inline; + font-weight: bold; + width: 50px; + text-align: left; + padding-right: 10px; +} + +.current_facet .search_remove form { + margin: 0px; +} + +.current_facet .search_remove form input{ + margin: 0px; + width: 20px; + font-size: 1em; + font-weight: bold; +} + +.current_facet .label { + float:left; + position:inline; + font-weight: bold; + width: 200px; + text-align: left; + padding: 5px 10px 0px 0px; +} + +.current_facet .value { + float:left; + position:inline; + text-align: left; + margin-bottom: 5px; + padding: 5px 10px 0px 0px; } div#link_to_this_search { @@ -899,6 +1035,8 @@ div#link_to_this_search { margin-bottom: -15px; clear: both; color: #000000; + padding-bottom: 10px; + border-bottom: 1px solid #999999 } div#link_to_this_search a { @@ -1010,6 +1148,14 @@ 
span#itemsppt a.active:hover, span#itemsppb a.active:hover { border: 1px solid #666; background: #FFFF99; } + + +ul.versions li{ +display: inline; +list-style: none; +} + + /* Used in the api */ .List-contents { padding-bottom: 5px; @@ -1086,23 +1232,38 @@ span#itemsppt a.active:hover, span#itemsppb a.active:hover { /* Data package */ #editor_view { float: right; - background: #fff; /* for non-css3 browsers */ - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff', endColorstr='#ccc'); /* for IE */ - background: -webkit-gradient(linear, left top, left bottom, from(#fff), to(#ccc)); /* for webkit browsers */ - background: -moz-linear-gradient(top, #fff, #ccc); /* for firefox 3.6+ */ - border-top: 1px solid #ccc; - box-shadow: #666 0.3em 0.3em 0.2em; /* Opera 10.5, IE 9.0 */ - -webkit-box-shadow: #aaa 0.3em 0.3em 0.2em; /* Chrome Safari */ - -moz-box-shadow: #aaa 0.3em 0.3em 0.2em; /* Firefox F3.5+ */ + background: #FDFEFE; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fdfefe', endColorstr='#f4f7fa'); + background: -webkit-gradient(linear, left top, left bottom, from(#FDFEFE), to(#F4F7FA)); + background: -moz-linear-gradient(top, #FDFEFE, #F4F7FA); + border: 1px solid #DFE2E4; + -moz-border-radius: 5px; + -webkit-border-radius: 5px; + border-radius: 5px; + padding: 0px; clear: both; - margin: 10px 20px 10px 10px; - padding: 5px; + margin: 0px 20px 10px 10px; text-decoration: none; + } -#editor_view .title{ - text-align: center; - font-weight: bold; +#editor_view ul { + margin: 0px; + padding: 5px 2px; +} + +#editor_view h2{ + + background: #425E77; +filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#425e77', endColorstr='#609494'); +background: -webkit-gradient(linear, left top, left bottom, from(#425E77), to(#609494)); +background: -moz-linear-gradient(top, #425E77, #609494); +color: white; +font-size: 1.3em; + margin: 0px; + padding: 3px; + text-align: center; + } #editor_view a { @@ -1173,8 +1334,8 @@ 
span#itemsppt a.active:hover, span#itemsppb a.active:hover { } .metadata td div { - max-height: 150px; - overflow: auto; + /*max-height: 150px; + overflow: auto;*/ } pre { diff --git a/rdfdatabank/templates/about.html b/rdfdatabank/templates/about.html index 48945e3..101d6a6 100644 --- a/rdfdatabank/templates/about.html +++ b/rdfdatabank/templates/about.html @@ -3,34 +3,13 @@ <%def name="head_tags()"> About DataBank +

About Databank

+
+

Databank is being developed by the Bodleian Digital Library Systems and Services of the University of Oxford as a part of the Dataflow project

-

info on project goes here goes here...

+

DataFlow is creating a two-stage data management infrastructure that makes it easy for you and your research group to work with, annotate, publish, and permanently store your research data. You manage this locally using your own instance of DataStage, while allowing your institution to deploy DataBank easily to preserve and publish your most valuable datasets.

-

Background

+

Published datasets have assigned DOIs to make them citable and to gain you academic credit.

- -

Documentation

- - - \ No newline at end of file +

Want to test-drive the system? Help develop it? Learn more and Join us!

+
diff --git a/rdfdatabank/templates/admin_siloview.html b/rdfdatabank/templates/admin_siloview.html index f948663..03118d4 100644 --- a/rdfdatabank/templates/admin_siloview.html +++ b/rdfdatabank/templates/admin_siloview.html @@ -12,6 +12,6 @@

Silo Metadata


Delete this silo - NO UNDO!


-

Alter silo metadata

+

Update silo metadata

<%include file="/alter_silo.html"/> diff --git a/rdfdatabank/templates/admin_user.html b/rdfdatabank/templates/admin_user.html index 8bf256f..e05061f 100644 --- a/rdfdatabank/templates/admin_user.html +++ b/rdfdatabank/templates/admin_user.html @@ -34,6 +34,6 @@

User ${c.username}

% endif % if c.ident['user'].user_name == c.username: -

Update user details for ${c.username}

+

Update user details for ${c.username}

<%include file="/update_user.html"/> % endif diff --git a/rdfdatabank/templates/base.html b/rdfdatabank/templates/base.html index 4f344e2..d22be59 100644 --- a/rdfdatabank/templates/base.html +++ b/rdfdatabank/templates/base.html @@ -44,6 +44,9 @@ ${ h.javascript_link( '/static/js/html5.js' ) } + + + @@ -90,36 +93,33 @@ <% user_logged_in = request.environ.get("repoze.who.identity") + showAdmin = False if user_logged_in: c.user_logged_in_name = user_logged_in['repoze.who.userid'] + if 'role' in user_logged_in and user_logged_in['role'] == "admin": + showAdmin = True current_url = request.environ.get('PATH_INFO') %>