diff --git a/poetry.lock b/poetry.lock
index bfd5dbc5..fbe12062 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -26,7 +26,7 @@ description = "Low-level AMQP client for Python (fork of amqplib)."
name = "amqp"
optional = false
python-versions = ">=3.6"
-version = "5.0.1"
+version = "5.0.2"
[package.dependencies]
vine = "5.0.0"
@@ -96,11 +96,11 @@ description = "Classes Without Boilerplate"
name = "attrs"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "20.2.0"
+version = "20.3.0"
[package.extras]
-dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"]
-docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
+dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"]
+docs = ["furo", "sphinx", "zope.interface"]
tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"]
@@ -191,7 +191,7 @@ description = "Bootstrap helper for Flask/Jinja2."
name = "bootstrap-flask"
optional = false
python-versions = "*"
-version = "1.5"
+version = "1.5.1"
[package.dependencies]
Flask = "*"
@@ -294,7 +294,7 @@ description = "Python package for providing Mozilla's CA Bundle."
name = "certifi"
optional = false
python-versions = "*"
-version = "2020.6.20"
+version = "2020.11.8"
[[package]]
category = "main"
@@ -413,6 +413,22 @@ pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"]
+[[package]]
+category = "main"
+description = "Dublin Core XML generation from Python dictionaries."
+name = "dcxml"
+optional = false
+python-versions = "*"
+version = "0.1.2"
+
+[package.dependencies]
+lxml = ">=4.1.1"
+
+[package.extras]
+all = ["Sphinx (>=1.5.2)", "check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "pydocstyle (>=1.0.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest (>=2.8.0)"]
+docs = ["Sphinx (>=1.5.2)"]
+tests = ["check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "pydocstyle (>=1.0.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest (>=2.8.0)"]
+
[[package]]
category = "main"
description = "Decorators for Humans"
@@ -527,7 +543,7 @@ description = "A robust email syntax and deliverability validation library for P
name = "email-validator"
optional = false
python-versions = "*"
-version = "1.1.1"
+version = "1.1.2"
[package.dependencies]
dnspython = ">=1.15.0"
@@ -1473,7 +1489,7 @@ description = "Database management for Invenio."
name = "invenio-db"
optional = false
python-versions = "*"
-version = "1.0.6"
+version = "1.0.7"
[package.dependencies]
Flask-Alembic = ">=2.0.1"
@@ -1484,19 +1500,19 @@ invenio-base = ">=1.2.3"
[package.dependencies.SQLAlchemy-Continuum]
optional = true
-version = ">=1.3.6"
+version = ">=1.3.11"
[package.dependencies.psycopg2-binary]
optional = true
-version = ">=2.7.4"
+version = ">=2.8.6"
[package.extras]
-all = ["Sphinx (>=3.0.0)", "pymysql (>=0.6.7)", "psycopg2-binary (>=2.7.4)", "SQLAlchemy-Continuum (>=1.3.6)", "pytest-invenio (>=1.4.0)", "cryptography (>=2.1.4)", "mock (>=1.3.0)"]
+all = ["Sphinx (>=3.0.0)", "pymysql (>=0.10.1)", "psycopg2-binary (>=2.8.6)", "SQLAlchemy-Continuum (>=1.3.11)", "pytest-invenio (>=1.4.0)", "cryptography (>=2.1.4)", "mock (>=4.0.0)"]
docs = ["Sphinx (>=3.0.0)"]
-mysql = ["pymysql (>=0.6.7)"]
-postgresql = ["psycopg2-binary (>=2.7.4)"]
-tests = ["pytest-invenio (>=1.4.0)", "cryptography (>=2.1.4)", "mock (>=1.3.0)"]
-versioning = ["SQLAlchemy-Continuum (>=1.3.6)"]
+mysql = ["pymysql (>=0.10.1)"]
+postgresql = ["psycopg2-binary (>=2.8.6)"]
+tests = ["pytest-invenio (>=1.4.0)", "cryptography (>=2.1.4)", "mock (>=4.0.0)"]
+versioning = ["SQLAlchemy-Continuum (>=1.3.11)"]
[[package]]
category = "main"
@@ -2432,11 +2448,11 @@ description = "A lightweight library for converting complex datatypes to and fro
name = "marshmallow"
optional = false
python-versions = ">=3.5"
-version = "3.9.0"
+version = "3.9.1"
[package.extras]
dev = ["pytest", "pytz", "simplejson", "mypy (0.790)", "flake8 (3.8.4)", "flake8-bugbear (20.1.4)", "pre-commit (>=2.4,<3.0)", "tox"]
-docs = ["sphinx (3.2.1)", "sphinx-issues (1.2.0)", "alabaster (0.7.12)", "sphinx-version-warning (1.1.2)", "autodocsumm (0.2.1)"]
+docs = ["sphinx (3.3.0)", "sphinx-issues (1.2.0)", "alabaster (0.7.12)", "sphinx-version-warning (1.1.2)", "autodocsumm (0.2.1)"]
lint = ["mypy (0.790)", "flake8 (3.8.4)", "flake8-bugbear (20.1.4)", "pre-commit (>=2.4,<3.0)"]
tests = ["pytest", "pytz", "simplejson"]
@@ -3455,6 +3471,10 @@ version = "0.35.0"
SQLAlchemy = ">=1.0"
six = "*"
+[package.dependencies.cryptography]
+optional = true
+version = ">=0.6"
+
[package.extras]
anyjson = ["anyjson (>=0.3.3)"]
arrow = ["arrow (>=0.3.4)"]
@@ -3802,7 +3822,7 @@ docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
[metadata]
-content-hash = "35a53712fb4f553ba1328ab1d305f7954858ff443198a223d3087b003a3df488"
+content-hash = "6702810a4c6990e934951b9225ef711151df61c618b69e51bb9c6520a2c97cf7"
lock-version = "1.0"
python-versions = "^3.6"
@@ -3816,8 +3836,8 @@ alembic = [
{file = "alembic-1.4.3.tar.gz", hash = "sha256:5334f32314fb2a56d86b4c4dd1ae34b08c03cae4cb888bc699942104d66bc245"},
]
amqp = [
- {file = "amqp-5.0.1-py2.py3-none-any.whl", hash = "sha256:a8fb8151eb9d12204c9f1784c0da920476077609fa0a70f2468001e3a4258484"},
- {file = "amqp-5.0.1.tar.gz", hash = "sha256:9881f8e6fe23e3db9faa6cfd8c05390213e1d1b95c0162bc50552cad75bffa5f"},
+ {file = "amqp-5.0.2-py3-none-any.whl", hash = "sha256:5b9062d5c0812335c75434bf17ce33d7a20ecfedaa0733faec7379868eb4068a"},
+ {file = "amqp-5.0.2.tar.gz", hash = "sha256:fcd5b3baeeb7fc19b3486ff6d10543099d40ae1f5c9196eae695d1cde1b2f784"},
]
angular-gettext-babel = [
{file = "angular_gettext_babel-0.3-py2.py3-none-any.whl", hash = "sha256:9ff197829501e994ac962c0b22aba99459dcf7b0018bf6514a0de15796ba37d9"},
@@ -3843,8 +3863,8 @@ atomicwrites = [
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
- {file = "attrs-20.2.0-py2.py3-none-any.whl", hash = "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc"},
- {file = "attrs-20.2.0.tar.gz", hash = "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594"},
+ {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"},
+ {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"},
]
babel = [
{file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"},
@@ -3875,8 +3895,8 @@ blinker = [
{file = "blinker-1.4.tar.gz", hash = "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"},
]
bootstrap-flask = [
- {file = "Bootstrap-Flask-1.5.tar.gz", hash = "sha256:94b8e67f7ba15e8e6ba83e7ca30aa784f45c8d713a18d8fbf013a59ce9370954"},
- {file = "Bootstrap_Flask-1.5-py2.py3-none-any.whl", hash = "sha256:77f26a4ecd749063433b0e8780652c41c1c29bc91bf88756f07330fef3158cbb"},
+ {file = "Bootstrap-Flask-1.5.1.tar.gz", hash = "sha256:4813ab4b7cd35fe16ebd15789b4edb45ed6e528c8eb6f44790efba48e8410ebc"},
+ {file = "Bootstrap_Flask-1.5.1-py2.py3-none-any.whl", hash = "sha256:b2716e60289fa7d636b633614717a338dc5cae144bdf8983680403193b891305"},
]
build = [
{file = "build-0.1.0-py2.py3-none-any.whl", hash = "sha256:2390c690a53bc22a09cbd35f70ece69d40cc8553e267ece046db4a5a1d32d856"},
@@ -3922,8 +3942,8 @@ celery = [
{file = "celery-5.0.2.tar.gz", hash = "sha256:012c814967fe89e3f5d2cf49df2dba3de5f29253a7f4f2270e8fce6b901b4ebf"},
]
certifi = [
- {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"},
- {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"},
+ {file = "certifi-2020.11.8-py2.py3-none-any.whl", hash = "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd"},
+ {file = "certifi-2020.11.8.tar.gz", hash = "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4"},
]
cffi = [
{file = "cffi-1.14.3-2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3eeeb0405fd145e714f7633a5173318bd88d8bbfc3dd0a5751f8c4f70ae629bc"},
@@ -4048,6 +4068,10 @@ cryptography = [
{file = "cryptography-3.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:d25cecbac20713a7c3bc544372d42d8eafa89799f492a43b79e1dfd650484851"},
{file = "cryptography-3.2.1.tar.gz", hash = "sha256:d3d5e10be0cf2a12214ddee45c6bd203dab435e3d83b4560c03066eda600bfe3"},
]
+dcxml = [
+ {file = "dcxml-0.1.2-py2.py3-none-any.whl", hash = "sha256:36a394f09ebfbb52c2931f259873a7b4ef5468f54b0bc3df66dd0d2fd2633092"},
+ {file = "dcxml-0.1.2.tar.gz", hash = "sha256:484b812517afebf4e119175b5ac2efaee5a9caa2c0b62323e451e49f541e5c17"},
+]
decorator = [
{file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"},
{file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"},
@@ -4081,8 +4105,8 @@ elasticsearch-dsl = [
{file = "elasticsearch_dsl-7.3.0-py2.py3-none-any.whl", hash = "sha256:9390d8e5cf82ebad3505e7f656e407259cf703f5a4035f211cef454127672572"},
]
email-validator = [
- {file = "email_validator-1.1.1-py2.py3-none-any.whl", hash = "sha256:5f246ae8d81ce3000eade06595b7bb55a4cf350d559e890182a1466a21f25067"},
- {file = "email_validator-1.1.1.tar.gz", hash = "sha256:63094045c3e802c3d3d575b18b004a531c36243ca8d1cec785ff6bfcb04185bb"},
+ {file = "email-validator-1.1.2.tar.gz", hash = "sha256:1a13bd6050d1db4475f13e444e169b6fe872434922d38968c67cea9568cce2f0"},
+ {file = "email_validator-1.1.2-py2.py3-none-any.whl", hash = "sha256:094b1d1c60d790649989d38d34f69e1ef07792366277a2cf88684d03495d018f"},
]
entrypoints = [
{file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"},
@@ -4281,8 +4305,8 @@ invenio-config = [
{file = "invenio_config-1.0.3-py2.py3-none-any.whl", hash = "sha256:238ab074991e7f0d6ee7ebc6eb2f5e41658749dd977ab6e86476e862c0efaf28"},
]
invenio-db = [
- {file = "invenio-db-1.0.6.tar.gz", hash = "sha256:da7c3fcecfcfed7b4aaf241c4f8542c04dbe7e5860dc5bf62298d9dcad64ffa8"},
- {file = "invenio_db-1.0.6-py2.py3-none-any.whl", hash = "sha256:d67bfaf1169289bf3afb4ff88b6eeaa39b34ed0f42122f5e1a2de2d3484cd223"},
+ {file = "invenio-db-1.0.7.tar.gz", hash = "sha256:0a5a17fc2ae54c14b689b821c58f7ec04d8ada90844df6bcf0840566a75bf2cf"},
+ {file = "invenio_db-1.0.7-py2.py3-none-any.whl", hash = "sha256:67b31298d9dda57812f24b97449b5b20dbe0e0864223b1c2217366bd55ab695b"},
]
invenio-files-rest = [
{file = "invenio-files-rest-1.2.0.tar.gz", hash = "sha256:bb308d13b52d330aab04aabb6caf611e915e8788b931119c7d8ebc712742b2ab"},
@@ -4528,8 +4552,8 @@ markupsafe = [
{file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"},
]
marshmallow = [
- {file = "marshmallow-3.9.0-py2.py3-none-any.whl", hash = "sha256:4bc31ab18133083b12893c61f2fc38b93c390d3fd6ae2ac61980b7dc936a1afa"},
- {file = "marshmallow-3.9.0.tar.gz", hash = "sha256:97ad6acaf727be986330969cff16040fce051510759ca709de9cd48093c55d04"},
+ {file = "marshmallow-3.9.1-py2.py3-none-any.whl", hash = "sha256:e26763201474b588d144dae9a32bdd945cd26a06c943bc746a6882e850475378"},
+ {file = "marshmallow-3.9.1.tar.gz", hash = "sha256:73facc37462dfc0b27f571bdaffbef7709e19f7a616beb3802ea425b07843f4e"},
]
maxminddb = [
{file = "maxminddb-2.0.3.tar.gz", hash = "sha256:47e86a084dd814fac88c99ea34ba3278a74bc9de5a25f4b815b608798747c7dc"},
diff --git a/pyproject.toml b/pyproject.toml
index 356fef92..b72723be 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,6 +30,7 @@ flask-cors = ">3.0.8"
nbconvert = {version = ">=5.6.1,<6.0.0", extras = ["execute"]}
cryptography = ">=3.2"
netaddr = "*"
+dcxml = "*"
[tool.poetry.dev-dependencies]
Flask-Debugtoolbar = ">=0.10.1"
diff --git a/sonar/config.py b/sonar/config.py
index b94d2bcd..b4bf502e 100644
--- a/sonar/config.py
+++ b/sonar/config.py
@@ -665,3 +665,13 @@ def _(x):
OAISERVER_ID_PREFIX = 'oai:sonar.ch:'
#: XSL file
OAISERVER_XSL_URL = 'static/xsl/oai2.xsl'
+#: OAI-PMH metadata formats
+OAISERVER_METADATA_FORMATS = {
+ 'oai_dc': {
+ 'namespace': 'http://www.openarchives.org/OAI/2.0/oai_dc/',
+ 'schema': 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd',
+ 'serializer': 'sonar.modules.documents.serializers.oaipmh_oai_dc',
+ }
+}
+#: Number of records to return per page in OAI-PMH results.
+OAISERVER_PAGE_SIZE = 100
diff --git a/sonar/modules/api.py b/sonar/modules/api.py
index 99005218..07f3a5d0 100644
--- a/sonar/modules/api.py
+++ b/sonar/modules/api.py
@@ -31,7 +31,9 @@
from invenio_jsonschemas import current_jsonschemas
from invenio_pidstore.errors import PIDDoesNotExistError
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
-from invenio_records_files.api import FilesMixin, Record
+from invenio_records_files.api import FileObject as InvenioFileObject
+from invenio_records_files.api import FilesMixin as InvenioFilesMixin
+from invenio_records_files.api import Record
from invenio_records_files.models import RecordsBuckets
from invenio_records_rest.utils import obj_or_import_string
from invenio_search import current_search
@@ -39,6 +41,22 @@
from sqlalchemy.orm.exc import NoResultFound
+class FileObject(InvenioFileObject):
+ """Wrapper for files."""
+
+ def dumps(self):
+ """Create a dump of the metadata associated to the record."""
+ super(FileObject, self).dumps()
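+        # The parent dumps() refreshes self.data; add the MIME type so it
+        # is available in record dumps and in the search index.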
+ self.data.update({'mimetype': self.obj.mimetype})
+ return self.data
+
+
+class FilesMixin(InvenioFilesMixin):
+ """Implement files attribute for Record models."""
+
+ file_cls = FileObject
+
+
class SonarRecord(Record, FilesMixin):
"""SONAR Record."""
diff --git a/sonar/modules/deposits/jsonschemas/deposits/deposit-v1.0.0_src.json b/sonar/modules/deposits/jsonschemas/deposits/deposit-v1.0.0_src.json
index 47eb1634..4b0b46f5 100644
--- a/sonar/modules/deposits/jsonschemas/deposits/deposit-v1.0.0_src.json
+++ b/sonar/modules/deposits/jsonschemas/deposits/deposit-v1.0.0_src.json
@@ -50,6 +50,11 @@
"type": "string",
"minLength": 1
},
+ "mimetype": {
+ "title": "MIME type",
+ "type": "string",
+ "minLength": 1
+ },
"checksum": {
"title": "Checksum",
"description": "MD5 checksum of the file.",
diff --git a/sonar/modules/deposits/mappings/v7/deposits/deposit-v1.0.0.json b/sonar/modules/deposits/mappings/v7/deposits/deposit-v1.0.0.json
index aba5b75a..3f75bea3 100644
--- a/sonar/modules/deposits/mappings/v7/deposits/deposit-v1.0.0.json
+++ b/sonar/modules/deposits/mappings/v7/deposits/deposit-v1.0.0.json
@@ -27,6 +27,9 @@
"key": {
"type": "keyword"
},
+ "mimetype": {
+ "type": "keyword"
+ },
"checksum": {
"type": "keyword"
},
diff --git a/sonar/modules/documents/jsonschemas/documents/document-v1.0.0_src.json b/sonar/modules/documents/jsonschemas/documents/document-v1.0.0_src.json
index 0d2cea26..f39dc492 100644
--- a/sonar/modules/documents/jsonschemas/documents/document-v1.0.0_src.json
+++ b/sonar/modules/documents/jsonschemas/documents/document-v1.0.0_src.json
@@ -49,6 +49,11 @@
"type": "string",
"minLength": 1
},
+ "mimetype": {
+ "title": "MIME type",
+ "type": "string",
+ "minLength": 1
+ },
"checksum": {
"title": "Checksum",
"description": "MD5 checksum of the file.",
diff --git a/sonar/modules/documents/mappings/v7/documents/document-v1.0.0.json b/sonar/modules/documents/mappings/v7/documents/document-v1.0.0.json
index 6edd745f..3ac1d250 100644
--- a/sonar/modules/documents/mappings/v7/documents/document-v1.0.0.json
+++ b/sonar/modules/documents/mappings/v7/documents/document-v1.0.0.json
@@ -37,6 +37,9 @@
"key": {
"type": "keyword"
},
+ "mimetype": {
+ "type": "keyword"
+ },
"checksum": {
"type": "keyword"
},
diff --git a/sonar/modules/documents/marshmallow/json.py b/sonar/modules/documents/marshmallow/json.py
index 76e0e5d5..e58680fd 100644
--- a/sonar/modules/documents/marshmallow/json.py
+++ b/sonar/modules/documents/marshmallow/json.py
@@ -56,6 +56,7 @@ class Meta:
file_id = SanitizedUnicode()
version_id = SanitizedUnicode()
key = SanitizedUnicode()
+ mimetype = SanitizedUnicode()
checksum = SanitizedUnicode()
size = fields.Number()
label = SanitizedUnicode()
diff --git a/sonar/modules/documents/serializers/__init__.py b/sonar/modules/documents/serializers/__init__.py
index 150f0a76..d0e18c0f 100644
--- a/sonar/modules/documents/serializers/__init__.py
+++ b/sonar/modules/documents/serializers/__init__.py
@@ -25,6 +25,8 @@
from invenio_records_rest.serializers.response import record_responsify, \
search_responsify
+from sonar.modules.documents.serializers.dc import SonarDublinCoreSerializer
+from sonar.modules.documents.serializers.schemas.dc import DublinCoreV1
from sonar.modules.organisations.api import OrganisationRecord
from sonar.modules.serializers import JSONSerializer as _JSONSerializer
from sonar.modules.users.api import current_user_record
@@ -72,6 +74,8 @@ def post_process_serialize_search(self, results, pid_fetcher):
# ===========
#: JSON serializer definition.
json_v1 = JSONSerializer(DocumentSchemaV1)
+#: Dublin Core serializer
+dc_v1 = SonarDublinCoreSerializer(DublinCoreV1, replace_refs=True)
# Records-REST serializers
# ========================
@@ -85,3 +89,8 @@ def post_process_serialize_search(self, results, pid_fetcher):
'json_v1_response',
'json_v1_search',
)
+
+# OAI-PMH record serializers.
+# ===========================
+#: OAI-PMH OAI Dublin Core record serializer.
+oaipmh_oai_dc = dc_v1.serialize_oaipmh
diff --git a/sonar/modules/documents/serializers/dc.py b/sonar/modules/documents/serializers/dc.py
new file mode 100644
index 00000000..db2c9b30
--- /dev/null
+++ b/sonar/modules/documents/serializers/dc.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+#
+# Swiss Open Access Repository
+# Copyright (C) 2019 RERO
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, version 3 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Dublin Core serializer."""
+
+from invenio_records_rest.serializers.dc import DublinCoreSerializer
+
+
+class SonarDublinCoreSerializer(DublinCoreSerializer):
+ """Marshmallow based DublinCore serializer for records."""
+
+ def dump(self, obj, context=None):
+ """Serialize object with schema.
+
+        This override is required because invenio-records-rest does not
+        dump objects in a way that is compatible with marshmallow 3.x.
+ """
+ return self.schema_class(context=context).dump(obj)
diff --git a/sonar/modules/documents/serializers/schemas/dc.py b/sonar/modules/documents/serializers/schemas/dc.py
new file mode 100644
index 00000000..23ae7aff
--- /dev/null
+++ b/sonar/modules/documents/serializers/schemas/dc.py
@@ -0,0 +1,322 @@
+# -*- coding: utf-8 -*-
+#
+# Swiss Open Access Repository
+# Copyright (C) 2019 RERO
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, version 3 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Dublin Core marshmallow schema."""
+
+import re
+
+from flask import request
+from marshmallow import Schema, fields, pre_dump
+
+from sonar.modules.documents.api import DocumentRecord
+from sonar.modules.documents.utils import has_external_urls_for_files, \
+ populate_files_properties
+from sonar.modules.documents.views import part_of_format
+
+
+class DublinCoreV1(Schema):
+ """Schema for records v1 in JSON."""
+
+ contributors = fields.Method('get_contributors')
+ creators = fields.Method('get_creators')
+ dates = fields.Method('get_dates')
+ descriptions = fields.Method('get_descriptions')
+ formats = fields.Method('get_formats')
+ identifiers = fields.Method('get_identifiers')
+ languages = fields.Method('get_languages')
+ publishers = fields.Method('get_publishers')
+ relations = fields.Method('get_relations')
+ rights = fields.Method('get_rights')
+ sources = fields.Method('get_sources')
+ subjects = fields.Method('get_subjects')
+ titles = fields.Method('get_titles')
+ types = fields.Method('get_types')
+
+ @pre_dump
+ def pre_dump(self, item, **kwargs):
+ """Do some transformations in record before dumping it.
+
+ - Store the main file to use it in methods.
+ - Check if files must point to an external URL.
+ - Populate restrictions, thumbnail and URL in files.
+
+ :param item: Item object to process
+ :returns: Modified item
+ """
+ if not item['metadata'].get('_files'):
+ return item
+
+ # Store the main file
+ main_file = self.get_main_file(item)
+ if main_file:
+ item['metadata']['mainFile'] = main_file
+
+        # Check if the organisation forces files to point to an external URL
+ item['metadata']['external_url'] = has_external_urls_for_files(
+ item['metadata'])
+
+ # Add restriction, link and thumbnail to files
+ populate_files_properties(item['metadata'])
+
+ return item
+
+ def get_contributors(self, obj):
+ """Get contributors."""
+ items = []
+ for contributor in obj['metadata'].get('contribution', []):
+ if contributor['role'][0] != 'cre' and contributor['agent'].get(
+ 'preferred_name'):
+ items.append(self.format_contributor(contributor))
+
+ return items
+
+ def get_creators(self, obj):
+ """Get creators."""
+ items = []
+ for contributor in obj['metadata'].get('contribution', []):
+ if contributor['role'][0] == 'cre' and contributor['agent'].get(
+ 'preferred_name'):
+ items.append(self.format_contributor(contributor))
+
+ return items
+
+ def get_dates(self, obj):
+ """Get dates."""
+ items = []
+
+ for provision_activity in obj['metadata'].get('provisionActivity', []):
+ if provision_activity[
+ 'type'] == 'bf:Publication' and provision_activity.get(
+ 'startDate'):
+ items.append(provision_activity['startDate'])
+
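+        # Embargo end date of the main file, in the info:eu-repo date scheme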
+ if obj['metadata'].get('mainFile') and obj['metadata']['mainFile'][
+ 'restriction']['date']:
+ items.append('info:eu-repo/date/embargoEnd/{date}'.format(
+ date=obj['metadata']['mainFile']['embargo_date']))
+
+ return items
+
+ def get_descriptions(self, obj):
+ """Get descriptions."""
+        return [
+            abstract['value']
+            for abstract in obj['metadata'].get('abstracts', [])
+        ]
+
+ def get_formats(self, obj):
+ """Get formats."""
+ main_file = obj['metadata'].get('mainFile')
+
+ if main_file and main_file.get('mimetype'):
+ return [main_file['mimetype']]
+
+ return []
+
+ def get_identifiers(self, obj):
+ """Get identifiers."""
+ return [
+ DocumentRecord.get_permanent_link(request.host_url,
+ obj['metadata']['pid'])
+ ]
+
+ def get_languages(self, obj):
+ """Get languages."""
+ return [
+ language['value']
+ for language in obj['metadata'].get('language', [])
+ ]
+
+ def get_publishers(self, obj):
+ """Get publishers."""
+ if not obj['metadata'].get('provisionActivity'):
+ return []
+
+ items = []
+
+ for provision_activity in obj['metadata']['provisionActivity']:
+ if provision_activity[
+ 'type'] == 'bf:Publication' and provision_activity.get(
+ 'statement'):
+ for statement in provision_activity['statement']:
+ if statement['type'] == 'bf:Agent':
+ items.append(statement['label'][0]['value'])
+
+ return items
+
+ def get_relations(self, obj):
+ """Get relations."""
+ items = [
+ other_edition['document']['electronicLocator']
+ for other_edition in obj['metadata'].get('otherEdition', [])
+ ]
+
+ result = 'info:eu-repo/semantics/altIdentifier/{schema}/{identifier}'
+
+ # Identifiers
+ for identifier in obj['metadata'].get('identifiedBy', []):
+ # ARK
+ matches = re.match(r'^ark\:\/(.*)$', identifier['value'])
+ if matches:
+ items.append(
+ result.format(schema='ark', identifier=matches.group(1)))
+
+ # DOI
+ matches = re.match(r'^(10\..*)$', identifier['value'])
+ if identifier['type'] == 'bf:Doi' and matches:
+ items.append(
+ result.format(schema='doi', identifier=matches.group(1)))
+
+ # ISBN
+ if identifier['type'] == 'bf:Isbn':
+ items.append(
+ result.format(schema='isbn',
+ identifier=identifier['value']))
+
+ # ISSN
+ if identifier['type'] == 'bf:Issn':
+ items.append(
+ result.format(schema='issn',
+ identifier=identifier['value']))
+
+ # PMID
+ if identifier['type'] == 'bf:Local' and identifier.get(
+ 'source'
+ ) and identifier['source'].lower().find('pmid') != -1:
+ items.append(
+ result.format(schema='pmid',
+ identifier=identifier['value']))
+
+ # URN
+ if identifier['type'] == 'bf:Urn':
+ items.append(
+ result.format(schema='urn',
+ identifier=identifier['value']))
+
+ return items
+
+ def get_rights(self, obj):
+ """Get rights."""
+ items = []
+
+ # Main file
+ result = 'info:eu-repo/semantics/{access}'
+
+ main_file = obj['metadata'].get('mainFile')
+ if main_file:
+ if main_file['restriction']['restricted']:
+ # Embargo
+ if main_file['restriction']['date']:
+ items.append(result.format(access='embargoedAccess'))
+ # Restricted
+ else:
+ items.append(result.format(access='restrictedAccess'))
+ # No restriction
+ else:
+ items.append(result.format(access='openAccess'))
+
+        # Usage and access policy
+        if obj['metadata'].get('usageAndAccessPolicy'):
+            policy = [obj['metadata']['usageAndAccessPolicy']['license']]
+
+            if obj['metadata']['usageAndAccessPolicy'].get('label'):
+                policy.append(obj['metadata']['usageAndAccessPolicy']['label'])
+
+            items.append(', '.join(policy))
+
+ return items
+
+ def get_sources(self, obj):
+ """Get sources."""
+ return [
+ part_of_format(part_of)
+ for part_of in obj['metadata'].get('partOf', [])
+ ]
+
+ def get_subjects(self, obj):
+ """Get subjects."""
+ items = []
+
+ # Subjects
+ for subjects in obj['metadata'].get('subjects', []):
+ items = items + subjects['label']['value']
+
+ # Classification
+ for classification in obj['metadata'].get('classification', []):
+ classification_type = 'udc'
+
+ if classification['type'] == 'bf:ClassificationDdc':
+ classification_type = 'ddc'
+
+ items.append(
+ 'info:eu-repo/classification/{type}/{classification}'.format(
+ type=classification_type,
+ classification=classification['classificationPortion']))
+
+ return items
+
+ def get_titles(self, obj):
+ """Get titles."""
+ title = [obj['metadata']['title'][0]['mainTitle'][0]['value']]
+
+ if obj['metadata']['title'][0].get('subtitle'):
+ title.append(obj['metadata']['title'][0]['subtitle'][0]['value'])
+
+ return [' : '.join(title)]
+
+ def get_types(self, obj):
+ """Get types."""
+ if obj['metadata'].get('documentType'):
+ return [
+ 'http://purl.org/coar/resource_type/{type}'.format(
+ type=obj['metadata']['documentType'].split(':')[1])
+ ]
+
+ return []
+
+ def format_contributor(self, contributor):
+ """Format contributor item.
+
+ :param contributor: Contributor dict.
+ :returns: Formatted string representing the contributor.
+ """
+ data = contributor['agent']['preferred_name']
+
+ info = []
+ if contributor['agent'].get('number'):
+ info.append(contributor['agent']['number'])
+
+ if contributor['agent'].get('date'):
+ info.append(contributor['agent']['date'])
+
+ if contributor['agent'].get('place'):
+ info.append(contributor['agent']['place'])
+
+ if info:
+ data += ' ({info})'.format(info=' : '.join(info))
+
+ return data
+
+ def get_main_file(self, obj):
+ """Return the main file.
+
+ :param obj: Record dict.
+ :returns: Main file or None.
+ """
+ files = [
+ file for file in obj['metadata'].get('_files', [])
+ if file.get('type') == 'file'
+ ]
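+        # The file with the lowest 'order' wins (default 100)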
+ files = sorted(files, key=lambda file: file.get('order', 100))
+ return files[0] if files else None
diff --git a/sonar/modules/documents/utils.py b/sonar/modules/documents/utils.py
index 30cab0bb..6df919f0 100644
--- a/sonar/modules/documents/utils.py
+++ b/sonar/modules/documents/utils.py
@@ -147,7 +147,8 @@ def is_allowed_by_scope():
not_restricted = {'restricted': False, 'date': None}
# We are in admin, no restrictions are applied.
- if not request.args.get('view') and not request.view_args.get('view'):
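+    # OAI-PMH requests (/oai2d) have no view argument, but file
+    # restrictions must still apply to them.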
+ if not request.args.get('view') and not request.view_args.get(
+ 'view') and request.url_rule.rule != '/oai2d':
return not_restricted
# No specific access or specific access is open access
diff --git a/sonar/modules/documents/views.py b/sonar/modules/documents/views.py
index 96f6cc01..9f5d71d6 100644
--- a/sonar/modules/documents/views.py
+++ b/sonar/modules/documents/views.py
@@ -171,13 +171,13 @@ def part_of_format(part_of):
items.append('{label} {value}'.format(
label=_('vol.'), value=part_of['numberingVolume']))
- if 'numberingIssue' in part_of:
- items.append('{label} {value}'.format(label=_('no.'),
- value=part_of['numberingIssue']))
+ if 'numberingIssue' in part_of:
+ items.append('{label} {value}'.format(
+ label=_('no.'), value=part_of['numberingIssue']))
- if 'numberingPages' in part_of:
- items.append('{label} {value}'.format(label=_('p.'),
- value=part_of['numberingPages']))
+ if 'numberingPages' in part_of:
+ items.append('{label} {value}'.format(
+ label=_('p.'), value=part_of['numberingPages']))
return ', '.join(items)
diff --git a/sonar/modules/organisations/jsonschemas/organisations/organisation-v1.0.0.json b/sonar/modules/organisations/jsonschemas/organisations/organisation-v1.0.0.json
index 5f4e0e0b..d93e6a05 100644
--- a/sonar/modules/organisations/jsonschemas/organisations/organisation-v1.0.0.json
+++ b/sonar/modules/organisations/jsonschemas/organisations/organisation-v1.0.0.json
@@ -108,6 +108,11 @@
"type": "string",
"minLength": 1
},
+ "mimetype": {
+ "title": "MIME type",
+ "type": "string",
+ "minLength": 1
+ },
"checksum": {
"title": "Checksum",
"description": "MD5 checksum of the file.",
diff --git a/tests/ui/documents/test_dc_schema.py b/tests/ui/documents/test_dc_schema.py
new file mode 100644
index 00000000..bd8cccca
--- /dev/null
+++ b/tests/ui/documents/test_dc_schema.py
@@ -0,0 +1,460 @@
+# -*- coding: utf-8 -*-
+#
+# Swiss Open Access Repository
+# Copyright (C) 2019 RERO
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, version 3 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Test Dublic Core marshmallow schema."""
+
+from io import BytesIO
+
+import pytest
+
+from sonar.modules.documents.api import DocumentRecord
+from sonar.modules.documents.serializers import dc_v1
+
+
+@pytest.fixture()
+def minimal_document(db, bucket_location, organisation):
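+    """Document fixture with only the required properties."""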
+ record = DocumentRecord.create(
+ {
+ 'pid':
+ '1000',
+ 'title': [{
+ 'type':
+ 'bf:Title',
+ 'mainTitle': [{
+ 'language': 'eng',
+ 'value': 'Title of the document'
+ }]
+ }],
+ 'organisation': {
+ '$ref': 'https://sonar.ch/api/organisations/org'
+ }
+ },
+ dbcommit=True,
+ with_bucket=True)
+ record.commit()
+ db.session.commit()
+ return record
+
+
+@pytest.fixture()
+def contributors():
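+    """Contribution entries covering the various roles."""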
+ return [{
+ 'agent': {
+ 'preferred_name': 'Creator 1'
+ },
+ 'role': ['cre'],
+ }, {
+ 'agent': {
+ 'preferred_name': 'Creator 2',
+ 'number': '123',
+ 'date': '2019',
+ 'place': 'Martigny'
+ },
+ 'role': ['cre'],
+ }, {
+ 'agent': {
+ 'preferred_name': 'Contributor 1'
+ },
+ 'role': ['ctb'],
+ }, {
+ 'agent': {
+ 'preferred_name': 'Contributor 2',
+ 'number': '999',
+ 'date': '2010',
+ 'place': 'Sion'
+ },
+ 'role': ['ctb'],
+ }, {
+ 'agent': {
+ 'preferred_name': 'Degree supervisor'
+ },
+ 'role': ['dgs'],
+ }, {
+ 'agent': {
+ 'preferred_name': 'Printer'
+ },
+ 'role': ['prt'],
+ }, {
+ 'agent': {
+ 'preferred_name': 'Editor'
+ },
+ 'role': ['edt'],
+ }]
+
+
+def test_contributors(minimal_document, contributors):
+ """Test contributors serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['contributors'] == []
+
+ minimal_document.update({'contribution': contributors})
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['contributors'] == [
+ 'Contributor 1',
+ 'Contributor 2 (999 : 2010 : Sion)',
+ 'Degree supervisor',
+ 'Printer',
+ 'Editor',
+ ]
+
+
+def test_creators(minimal_document, contributors):
+ """Test creators serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+    assert result['creators'] == []
+
+ minimal_document.update({'contribution': contributors})
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['creators'] == [
+ 'Creator 1', 'Creator 2 (123 : 2019 : Martigny)'
+ ]
+
+
+def test_dates(app, minimal_document):
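+    """Test dates serialization."""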
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['dates'] == []
+
+ minimal_document.update({
+ 'provisionActivity': [{
+ 'type': 'bf:Agent',
+ 'startDate': '2019'
+ }, {
+ 'type': 'bf:Publication',
+ }, {
+ 'type': 'bf:Publication',
+ 'startDate': '2019'
+ }, {
+ 'type': 'bf:Publication',
+ 'startDate': '2020-01-01'
+ }]
+ })
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['dates'] == ['2019', '2020-01-01']
+
+ minimal_document.pop('provisionActivity', None)
+
+ minimal_document.files['test.pdf'] = BytesIO(b'File content')
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['dates'] == []
+
+ minimal_document.files['test.pdf']['type'] = 'file'
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['dates'] == []
+
+ with app.test_request_context() as req:
+ req.request.args = {'view': 'global'}
+ minimal_document.files['test.pdf']['type'] = 'file'
+ minimal_document.files['test.pdf']['access'] = 'coar:c_f1cf'
+ minimal_document.files['test.pdf']['restricted'] = 'full'
+ minimal_document.files['test.pdf']['embargo_date'] = '2022-01-01'
+ result = dc_v1.transform_record(minimal_document['pid'],
+ minimal_document)
+ assert result['dates'] == ['info:eu-repo/date/embargoEnd/2022-01-01']
+
+
+def test_descriptions(minimal_document):
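+    """Test descriptions serialization."""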
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['descriptions'] == []
+
+ minimal_document['abstracts'] = [{
+ 'value': 'Description 1'
+ }, {
+ 'value': 'Description 2'
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['descriptions'] == ['Description 1', 'Description 2']
+
+
+def test_formats(minimal_document):
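+    """Test formats serialization."""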
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['formats'] == []
+
+ minimal_document.files['test.pdf'] = BytesIO(b'File content')
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['formats'] == []
+
+ minimal_document.files['test.pdf'] = BytesIO(b'File content')
+ minimal_document.files['test.pdf']['type'] = 'file'
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['formats'] == ['application/pdf']
+
+
+def test_identifiers(minimal_document):
+ """Test identifiers serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['identifiers'] == ['http://localhost/global/documents/1000']
+
+
+def test_languages(minimal_document):
+ """Test languages serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['languages'] == []
+
+ minimal_document['language'] = [{'value': 'eng'}, {'value': 'fre'}]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['languages'] == ['eng', 'fre']
+
+
+def test_publishers(minimal_document):
+ """Test publishers serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['publishers'] == []
+
+ minimal_document['provisionActivity'] = [{
+ 'type':
+ 'bf:Manufacture',
+ 'statement': [{
+ 'type': 'bf:Agent',
+ 'label': [{
+ 'value': 'Publisher'
+ }]
+ }]
+ }, {
+ 'type': 'bf:Publication'
+ }, {
+ 'type':
+ 'bf:Publication',
+ 'statement': [{
+ 'type': 'bf:Place',
+ 'label': [{
+ 'value': 'Place'
+ }]
+ }]
+ }, {
+ 'type':
+ 'bf:Publication',
+ 'statement': [{
+ 'type': 'bf:Agent',
+ 'label': [{
+ 'value': 'Publisher 1'
+ }]
+ }]
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['publishers'] == ['Publisher 1']
+
+
+def test_relations(minimal_document):
+ """Test relations serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['relations'] == []
+
+ minimal_document['otherEdition'] = [{
+ 'document': {
+ 'electronicLocator': 'https://some.url.1'
+ }
+ }, {
+ 'document': {
+ 'electronicLocator': 'https://some.url.2'
+ }
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['relations'] == ['https://some.url.1', 'https://some.url.2']
+
+ minimal_document.pop('otherEdition', None)
+ minimal_document['identifiedBy'] = [{
+ 'type': 'bf:Identifier',
+ 'value': 'ark:/13030/tf5p30086k'
+ }, {
+ 'type': 'bf:Local',
+ 'value': '10.1186'
+ }, {
+ 'type': 'bf:Doi',
+ 'value': '09.1186'
+ }, {
+ 'type': 'bf:Doi',
+ 'value': '10.1186/2041-1480-3-9'
+ }, {
+ 'type': 'bf:Isbn',
+ 'value': '123456'
+ }, {
+ 'type': 'bf:Issn',
+ 'value': '987654321'
+ }, {
+ 'type': 'bf:Local',
+ 'source': 'some pmid',
+ 'value': '1111111'
+ }, {
+ 'type': 'bf:Local',
+ 'source': 'PMID',
+ 'value': '2222222'
+ }, {
+ 'type': 'bf:Urn',
+ 'value': '1.2.3.4'
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['relations'] == [
+ 'info:eu-repo/semantics/altIdentifier/ark/13030/tf5p30086k',
+ 'info:eu-repo/semantics/altIdentifier/doi/10.1186/2041-1480-3-9',
+ 'info:eu-repo/semantics/altIdentifier/isbn/123456',
+ 'info:eu-repo/semantics/altIdentifier/issn/987654321',
+ 'info:eu-repo/semantics/altIdentifier/pmid/1111111',
+ 'info:eu-repo/semantics/altIdentifier/pmid/2222222',
+ 'info:eu-repo/semantics/altIdentifier/urn/1.2.3.4'
+ ]
+
+
+def test_rights(app, minimal_document):
+ """Test rights serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['rights'] == []
+
+ minimal_document['usageAndAccessPolicy'] = {'license': 'CC BY-NC-SA'}
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['rights'] == ['CC BY-NC-SA']
+
+ minimal_document['usageAndAccessPolicy'] = {
+ 'license': 'Other OA / license undefined',
+ 'label': 'Custom license'
+ }
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['rights'] == ['Other OA / license undefined, Custom license']
+
+ minimal_document.pop('usageAndAccessPolicy', None)
+ with app.test_request_context() as req:
+ req.request.args = {'view': 'global'}
+
+ minimal_document.files['test.pdf'] = BytesIO(b'File content')
+ minimal_document.files['test.pdf']['type'] = 'file'
+ result = dc_v1.transform_record(minimal_document['pid'],
+ minimal_document)
+ assert result['rights'] == ['info:eu-repo/semantics/openAccess']
+
+ minimal_document.files['test.pdf']['access'] = 'coar:c_16ec'
+ minimal_document.files['test.pdf']['restricted'] = 'full'
+ result = dc_v1.transform_record(minimal_document['pid'],
+ minimal_document)
+ assert result['rights'] == ['info:eu-repo/semantics/restrictedAccess']
+
+ minimal_document.files['test.pdf']['access'] = 'coar:c_f1cf'
+ minimal_document.files['test.pdf']['embargo_date'] = '2022-01-01'
+ result = dc_v1.transform_record(minimal_document['pid'],
+ minimal_document)
+ assert result['rights'] == ['info:eu-repo/semantics/embargoedAccess']
+
+
+def test_sources(minimal_document):
+ """Test sources serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['sources'] == []
+
+ minimal_document['partOf'] = [{
+ 'document': {
+ 'title': 'Document 1'
+ },
+ 'numberingYear': '2020'
+ }, {
+ 'document': {
+ 'title': 'Document 2'
+ },
+ 'numberingYear': '2020',
+ 'numberingVolume': '6',
+ 'numberingPages': '135-139',
+ 'numberingIssue': '12'
+ }, {
+ 'document': {
+ 'title': 'Document 3'
+ },
+ 'numberingYear': '2019',
+ 'numberingPages': '135-139',
+ 'numberingIssue': '12'
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['sources'] == [
+ 'Document 1, 2020',
+ 'Document 2, 2020, vol. 6, no. 12, p. 135-139',
+ 'Document 3, 2019',
+ ]
+
+
+def test_subjects(minimal_document):
+ """Test subjects serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['subjects'] == []
+
+ minimal_document['subjects'] = [{
+ 'label': {
+ 'language': 'eng',
+ 'value': ['Subject 1', 'Subject 2']
+ }
+ }, {
+ 'label': {
+ 'language': 'fre',
+ 'value': ['Sujet 1', 'Sujet 2']
+ }
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['subjects'] == [
+ 'Subject 1', 'Subject 2', 'Sujet 1', 'Sujet 2'
+ ]
+
+ minimal_document.pop('subjects', None)
+ minimal_document['classification'] = [{
+ 'type': 'bf:ClassificationUdc',
+ 'classificationPortion': '54'
+ }, {
+ 'type': 'bf:ClassificationDdc',
+ 'classificationPortion': 'Portion'
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['subjects'] == [
+ 'info:eu-repo/classification/udc/54',
+ 'info:eu-repo/classification/ddc/Portion'
+ ]
+
+
+def test_titles(minimal_document):
+ """Test titles serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['titles'] == ['Title of the document']
+
+ minimal_document['title'] = [{
+ 'mainTitle': [{
+ 'language': 'eng',
+ 'value': 'Title 1'
+ }]
+ }, {
+ 'mainTitle': [{
+ 'language': 'eng',
+ 'value': 'Title 2'
+ }]
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['titles'] == ['Title 1']
+
+ minimal_document['title'] = [{
+ 'mainTitle': [{
+ 'language': 'eng',
+ 'value': 'Title 1'
+ }],
+ 'subtitle': [{
+ 'language': 'eng',
+ 'value': 'Subtitle 1'
+ }]
+ }]
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['titles'] == ['Title 1 : Subtitle 1']
+
+
+def test_types(minimal_document):
+ """Test types serialization."""
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['types'] == []
+
+ minimal_document['documentType'] = 'coar:c_2f33'
+ result = dc_v1.transform_record(minimal_document['pid'], minimal_document)
+ assert result['types'] == ['http://purl.org/coar/resource_type/c_2f33']
diff --git a/tests/ui/documents/test_documents_views.py b/tests/ui/documents/test_documents_views.py
index 895def6a..54ff98cd 100644
--- a/tests/ui/documents/test_documents_views.py
+++ b/tests/ui/documents/test_documents_views.py
@@ -201,6 +201,15 @@ def test_part_of_format():
'numberingPages': '469-480'
}) == 'Mehr oder weniger Staat?, 2015, vol. 28, no. 2, p. 469-480'
+ assert views.part_of_format({
+ 'document': {
+ 'title': 'Mehr oder weniger Staat?'
+ },
+ 'numberingYear': '2015',
+ 'numberingIssue': '2',
+ 'numberingPages': '469-480'
+ }) == 'Mehr oder weniger Staat?, 2015'
+
assert views.part_of_format({
'numberingYear': '2015',
}) == '2015'