From 6cfe538cac9ed16873e277d117eacfd1e4fd7fd9 Mon Sep 17 00:00:00 2001 From: bencomp Date: Tue, 18 Oct 2022 00:03:48 +0200 Subject: [PATCH 001/252] Update HTTP URL to HTTPS URL --- .../source/_static/navbarscroll.js | 2 +- .../source/_templates/navbar.html | 32 +++++++++---------- doc/sphinx-guides/source/admin/monitoring.rst | 2 +- .../source/api/client-libraries.rst | 6 ++-- .../source/api/external-tools.rst | 4 +-- .../source/api/getting-started.rst | 2 +- doc/sphinx-guides/source/api/intro.rst | 2 +- doc/sphinx-guides/source/api/native-api.rst | 12 +++---- doc/sphinx-guides/source/api/sword.rst | 12 +++---- doc/sphinx-guides/source/conf.py | 2 +- .../source/developers/dev-environment.rst | 8 ++--- .../source/developers/documentation.rst | 6 ++-- doc/sphinx-guides/source/developers/intro.rst | 6 ++-- .../source/developers/testing.rst | 8 ++--- doc/sphinx-guides/source/developers/tools.rst | 8 ++--- .../source/developers/unf/index.rst | 2 +- .../source/developers/unf/unf-v3.rst | 6 ++-- .../source/developers/unf/unf-v6.rst | 2 +- .../source/developers/version-control.rst | 2 +- doc/sphinx-guides/source/index.rst | 4 +-- .../source/installation/config.rst | 26 +++++++-------- .../source/installation/installation-main.rst | 6 ++-- .../source/installation/intro.rst | 4 +-- .../source/installation/oauth2.rst | 2 +- .../source/installation/oidc.rst | 2 +- .../source/installation/prerequisites.rst | 4 +-- .../source/installation/shibboleth.rst | 6 ++-- .../source/style/foundations.rst | 16 +++++----- doc/sphinx-guides/source/style/patterns.rst | 28 ++++++++-------- doc/sphinx-guides/source/user/account.rst | 2 +- doc/sphinx-guides/source/user/appendix.rst | 10 +++--- .../source/user/dataset-management.rst | 6 ++-- .../user/tabulardataingest/ingestprocess.rst | 4 +-- 33 files changed, 122 insertions(+), 122 deletions(-) diff --git a/doc/sphinx-guides/source/_static/navbarscroll.js b/doc/sphinx-guides/source/_static/navbarscroll.js index 66c9d4d7995..735f80870cd 100644 
--- a/doc/sphinx-guides/source/_static/navbarscroll.js +++ b/doc/sphinx-guides/source/_static/navbarscroll.js @@ -1,6 +1,6 @@ /* Use to fix hidden section headers behind the navbar when using links with targets - See: http://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header + See: https://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header */ $jqTheme(document).ready(function() { $jqTheme('a[href*="#"]:not([href="#"])').on('click', function() { diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html index 538cccf74d7..c7b81dcb937 100644 --- a/doc/sphinx-guides/source/_templates/navbar.html +++ b/doc/sphinx-guides/source/_templates/navbar.html @@ -15,7 +15,7 @@ - Dataverse Project + Dataverse Project @@ -24,15 +24,15 @@
  • - + Community
  • @@ -49,18 +49,18 @@
  • - + Contact
  • diff --git a/doc/sphinx-guides/source/admin/monitoring.rst b/doc/sphinx-guides/source/admin/monitoring.rst index a4affda1302..e902d5fdcc9 100644 --- a/doc/sphinx-guides/source/admin/monitoring.rst +++ b/doc/sphinx-guides/source/admin/monitoring.rst @@ -14,7 +14,7 @@ In production you'll want to monitor the usual suspects such as CPU, memory, fre Munin +++++ -http://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps. +https://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps. Enable the EPEL yum repo (if you haven't already): diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index 634f03a8125..388a9d641ed 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -13,7 +13,7 @@ Python There are two Python modules for interacting with Dataverse Software APIs. -`pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_. +`pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_. `dataverse-client-python `_ had its initial release in 2015. 
`Robert Liebowitz `_ created this library while at the `Center for Open Science (COS) `_ and the COS uses it to integrate the `Open Science Framework (OSF) `_ with a Dataverse installation via an add-on which itself is open source and listed on the :doc:`/api/apps` page. @@ -30,14 +30,14 @@ R https://github.com/IQSS/dataverse-client-r is the official R package for Dataverse Software APIs. The latest release can be installed from `CRAN `_. The R client can search and download datasets. It is useful when automatically (instead of manually) downloading data files as part of a script. For bulk edit and upload operations, we currently recommend pyDataverse. -The package is currently maintained by `Shiro Kuriwaki `_. It was originally created by `Thomas Leeper `_ and then formerly maintained by `Will Beasley `_. +The package is currently maintained by `Shiro Kuriwaki `_. It was originally created by `Thomas Leeper `_ and then formerly maintained by `Will Beasley `_. Java ---- https://github.com/IQSS/dataverse-client-java is the official Java library for Dataverse Software APIs. -`Richard Adams `_ from `ResearchSpace `_ created and maintains this library. +`Richard Adams `_ from `ResearchSpace `_ created and maintains this library. Ruby ---- diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index d72a6f62004..8c6c9fa8d46 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -11,7 +11,7 @@ Introduction External tools are additional applications the user can access or open from your Dataverse installation to preview, explore, and manipulate data files and datasets. The term "external" is used to indicate that the tool is not part of the main Dataverse Software. -Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. 
For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org +Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org In short, you will be creating a manifest in JSON format that describes not only how to construct URLs for your tool, but also what types of files your tool operates on, where it should appear in the Dataverse installation web interfaces, etc. @@ -94,7 +94,7 @@ Terminology toolParameters **Query parameters** are supported and described below. - queryParameters **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org``. + queryParameters **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org``. query parameter keys An **arbitrary string** to associate with a value that is populated with a reserved word (described below). As the author of the tool, you have control over what "key" you would like to be passed to your tool. For example, if you want to have your tool receive and operate on the query parameter "dataverseFileId=42" instead of just "fileId=42", that's fine. 
diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst index c465b726421..fd7c561cdf0 100644 --- a/doc/sphinx-guides/source/api/getting-started.rst +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -9,7 +9,7 @@ If you are a researcher or curator who wants to automate parts of your workflow, Servers You Can Test With ------------------------- -Rather than using a production Dataverse installation, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. +Rather than using a production Dataverse installation, API users are welcome to use https://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`. diff --git a/doc/sphinx-guides/source/api/intro.rst b/doc/sphinx-guides/source/api/intro.rst index 933932cd7b9..6c61bb8c20d 100755 --- a/doc/sphinx-guides/source/api/intro.rst +++ b/doc/sphinx-guides/source/api/intro.rst @@ -237,7 +237,7 @@ Dataverse Software API questions are on topic in all the usual places: - The dataverse-community Google Group: https://groups.google.com/forum/#!forum/dataverse-community - The Dataverse Project community calls: https://dataverse.org/community-calls -- The Dataverse Project chat room: http://chat.dataverse.org +- The Dataverse Project chat room: https://chat.dataverse.org - The Dataverse Project ticketing system: support@dataverse.org After your question has been answered, you are welcome to help improve the :doc:`faq` section of this guide. 
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 93e1c36f179..578b35011ff 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -9,7 +9,7 @@ The Dataverse Software exposes most of its GUI functionality via a REST-based AP .. _CORS: https://www.w3.org/TR/cors/ -.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``http://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded. +.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``https://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded. .. contents:: |toctitle| :local: @@ -508,7 +508,7 @@ The fully expanded example above (without environment variables) looks like this curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot -.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. +.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST https://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. .. 
_create-dataset-command: @@ -720,7 +720,7 @@ Getting its draft version: export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB - curl -H "X-Dataverse-key:$API_TOKEN" http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" https://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER The fully expanded example above (without environment variables) looks like this: @@ -2226,7 +2226,7 @@ The fully expanded example above (without environment variables) looks like this Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). -- JHOVE: http://jhove.openpreservation.org +- JHOVE: https://jhove.openpreservation.org - The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. @@ -2413,7 +2413,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ - http://demo.dataverse.org/api/files/24/metadata + https://demo.dataverse.org/api/files/24/metadata A curl example using a ``PERSISTENT_ID`` @@ -2614,7 +2614,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/prov-freeform?persistentId=doi:10.5072/FK2/AAA000" -H "Content-type:application/json" --upload-file provenance.json -See a sample JSON file :download:`file-provenance.json <../_static/api/file-provenance.json>` from http://openprovenance.org (c.f. 
Huynh, Trung Dong and Moreau, Luc (2014) ProvStore: a public provenance repository. At 5th International Provenance and Annotation Workshop (IPAW'14), Cologne, Germany, 09-13 Jun 2014. pp. 275-277). +See a sample JSON file :download:`file-provenance.json <../_static/api/file-provenance.json>` from https://openprovenance.org (c.f. Huynh, Trung Dong and Moreau, Luc (2014) ProvStore: a public provenance repository. At 5th International Provenance and Annotation Workshop (IPAW'14), Cologne, Germany, 09-13 Jun 2014. pp. 275-277). Delete Provenance JSON for an uploaded file ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst index 11b43e98774..c9ac83bc204 100755 --- a/doc/sphinx-guides/source/api/sword.rst +++ b/doc/sphinx-guides/source/api/sword.rst @@ -9,19 +9,19 @@ SWORD_ stands for "Simple Web-service Offering Repository Deposit" and is a "pro About ----- -Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`. +Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. 
For details, see :ref:`incompatible`. -The Dataverse Software implements most of SWORDv2_, which is specified at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. +The Dataverse Software implements most of SWORDv2_, which is specified at https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. As a profile of AtomPub, XML is used throughout SWORD. As of Dataverse Software 4.0 datasets can also be created via JSON using the "native" API. SWORD is limited to the dozen or so fields listed below in the crosswalk, but the native API allows you to populate all metadata fields available in a Dataverse installation. -.. _SWORD: http://en.wikipedia.org/wiki/SWORD_%28protocol%29 +.. _SWORD: https://en.wikipedia.org/wiki/SWORD_%28protocol%29 .. _SWORDv2: http://swordapp.org/sword-v2/sword-v2-specifications/ .. _RFC 5023: https://tools.ietf.org/html/rfc5023 -.. _SWORDv2 specification: http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html +.. _SWORDv2 specification: https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html .. _sword-auth: @@ -86,7 +86,7 @@ New features as of v1.1 - "Contact E-mail" is automatically populated from dataset owner's email. -- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References `_. Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A". +- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References `_. 
Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A". - Zero-length files are now allowed (but not necessarily encouraged). @@ -127,7 +127,7 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse Project DB Element Cr +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:creator | authorName (LastName, FirstName) | Y | Author(s) for the Dataset. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ -|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:description | dsDescriptionValue | Y | Describing the purpose, scope or nature of the Dataset. Can also use dcterms:abstract. 
| +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 880ed561720..5ff538c3c46 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -432,7 +432,7 @@ # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'http://docs.python.org/': None} +intersphinx_mapping = {'https://docs.python.org/': None} # Suppress "WARNING: unknown mimetype for ..." https://github.com/IQSS/dataverse/issues/3391 suppress_warnings = ['epub.unknown_project_files'] rst_prolog = """ diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index e44a70a405f..2139b85c64a 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -34,7 +34,7 @@ On Linux, you are welcome to use the OpenJDK available from package managers. Install Netbeans or Maven ~~~~~~~~~~~~~~~~~~~~~~~~~ -NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. +NetBeans IDE is recommended, and can be downloaded from https://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section. 
@@ -74,7 +74,7 @@ On Mac, run this command: ``brew install jq`` -On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/ +On Linux, install ``jq`` from your package manager or download a binary from https://stedolan.github.io/jq/ Install Payara ~~~~~~~~~~~~~~ @@ -117,7 +117,7 @@ On Linux, you should just install PostgreSQL using your favorite package manager Install Solr ~~~~~~~~~~~~ -`Solr `_ 8.11.1 is required. +`Solr `_ 8.11.1 is required. To install Solr, execute the following commands: @@ -127,7 +127,7 @@ To install Solr, execute the following commands: ``cd /usr/local/solr`` -``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` +``curl -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` ``tar xvfz solr-8.11.1.tgz`` diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index b20fd112533..46fc268461b 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -34,7 +34,7 @@ If you would like to read more about the Dataverse Project's use of GitHub, plea Building the Guides with Sphinx ------------------------------- -The Dataverse guides are written using Sphinx (http://sphinx-doc.org). We recommend installing Sphinx and building the guides locally so you can get an accurate preview of your changes. +The Dataverse guides are written using Sphinx (https://sphinx-doc.org). We recommend installing Sphinx and building the guides locally so you can get an accurate preview of your changes. Installing Sphinx ~~~~~~~~~~~~~~~~~ @@ -58,7 +58,7 @@ In some parts of the documentation, graphs are rendered as images using the Sphi Building the guides requires the ``dot`` executable from GraphViz. 
-This requires having `GraphViz `_ installed and either having ``dot`` on the path or +This requires having `GraphViz `_ installed and either having ``dot`` on the path or `adding options to the make call `_. Editing and Building the Guides @@ -67,7 +67,7 @@ Editing and Building the Guides To edit the existing documentation: - Create a branch (see :ref:`how-to-make-a-pull-request`). -- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to http://guides.dataverse.org. +- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to https://guides.dataverse.org. - Using your preferred text editor, open and edit the necessary files, or create new ones. Once you are done, open a terminal, change directories to ``doc/sphinx-guides``, activate (or reactivate) your Python virtual environment, and build the guides. diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index 7f4e8c1ba34..6469a43b5ab 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -2,7 +2,7 @@ Introduction ============ -Welcome! `The Dataverse Project `_ is an `open source `_ project that loves `contributors `_! +Welcome! `The Dataverse Project `_ is an `open source `_ project that loves `contributors `_! .. contents:: |toctitle| :local: @@ -19,7 +19,7 @@ To get started, you'll want to set up your :doc:`dev-environment` and make sure Getting Help ------------ -If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as http://chat.dataverse.org, the `dataverse-dev `_ mailing list, `community calls `_, or support@dataverse.org. 
+If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as https://chat.dataverse.org, the `dataverse-dev `_ mailing list, `community calls `_, or support@dataverse.org. .. _core-technologies: @@ -52,7 +52,7 @@ Related Guides If you are a developer who wants to make use of the Dataverse Software APIs, please see the :doc:`/api/index`. If you have front-end UI questions, please see the :doc:`/style/index`. -If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). +If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). Related Projects ---------------- diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4b3d5fd0a55..132120291c2 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -46,7 +46,7 @@ The main takeaway should be that we care about unit testing enough to measure th Writing Unit Tests with JUnit ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -We are aware that there are newer testing tools such as TestNG, but we use `JUnit `_ because it's tried and true. +We are aware that there are newer testing tools such as TestNG, but we use `JUnit `_ because it's tried and true. We support both (legacy) JUnit 4.x tests (forming the majority of our tests) and newer JUnit 5 based testing. @@ -238,11 +238,11 @@ Remember, it’s only a test (and it's not graded)! 
Some guidelines to bear in m - Map out which logical functions you want to test - Understand what’s being tested and ensure it’s repeatable - Assert the conditions of success / return values for each operation - * A useful resource would be `HTTP status codes `_ + * A useful resource would be `HTTP status codes `_ - Let the code do the labor; automate everything that happens when you run your test file. - Just as with any development, if you’re stuck: ask for help! -To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven `_. You should have Maven installed as per the `Development Environment `_ guide, but if not it’s easily done via Homebrew: ``brew install maven``. +To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven `_. You should have Maven installed as per the `Development Environment `_ guide, but if not it’s easily done via Homebrew: ``brew install maven``. Once installed, you may run commands with ``mvn [options] [] []``. 
@@ -487,7 +487,7 @@ Future Work on Integration Tests - Automate testing of dataverse-client-python: https://github.com/IQSS/dataverse-client-python/issues/10 - Work with @leeper on testing the R client: https://github.com/IQSS/dataverse-client-r - Review and attempt to implement "API Test Checklist" from @kcondon at https://docs.google.com/document/d/199Oq1YwQ4pYCguaeW48bIN28QAitSk63NbPYxJHCCAE/edit?usp=sharing -- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and http://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/ +- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and https://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/ - Consistent logging of API Tests. Show test name at the beginning and end and status codes returned. - expected passing and known/expected failing integration tests: https://github.com/IQSS/dataverse/issues/4438 diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst index cbd27d6e8d2..17673ae499e 100755 --- a/doc/sphinx-guides/source/developers/tools.rst +++ b/doc/sphinx-guides/source/developers/tools.rst @@ -43,20 +43,20 @@ On Windows if you see an error like ``/usr/bin/perl^M: bad interpreter`` you mig PlantUML ++++++++ -PlantUML is used to create diagrams in the guides and other places. Download it from http://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``. +PlantUML is used to create diagrams in the guides and other places. 
Download it from https://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``. Eclipse Memory Analyzer Tool (MAT) ++++++++++++++++++++++++++++++++++ The Memory Analyzer Tool (MAT) from Eclipse can help you analyze heap dumps, showing you "leak suspects" such as seen at https://github.com/payara/Payara/issues/350#issuecomment-115262625 -It can be downloaded from http://www.eclipse.org/mat +It can be downloaded from https://www.eclipse.org/mat If the heap dump provided to you was created with ``gcore`` (such as with ``gcore -o /tmp/app.core $app_pid``) rather than ``jmap``, you will need to convert the file before you can open it in MAT. Using ``app.core.13849`` as example of the original 33 GB file, here is how you could convert it into a 26 GB ``app.core.13849.hprof`` file. Please note that this operation took almost 90 minutes: ``/usr/java7/bin/jmap -dump:format=b,file=app.core.13849.hprof /usr/java7/bin/java app.core.13849`` -A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also http://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer +A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". 
If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also https://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer PageKite ++++++++ @@ -73,7 +73,7 @@ The first time you run ``./pagekite.py`` a file at ``~/.pagekite.rc`` will be created. You can edit this file to configure PageKite to serve up port 8080 (the default app server HTTP port) or the port of your choosing. -According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software the meets http://opensource.org/docs/definition.php such as the Dataverse Project. +According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software that meets https://opensource.org/docs/definition.php such as the Dataverse Project. MSV +++ diff --git a/doc/sphinx-guides/source/developers/unf/index.rst b/doc/sphinx-guides/source/developers/unf/index.rst index 2423877348f..856de209e82 100644 --- a/doc/sphinx-guides/source/developers/unf/index.rst +++ b/doc/sphinx-guides/source/developers/unf/index.rst @@ -27,7 +27,7 @@ with Dataverse Software 2.0 and throughout the 3.* lifecycle, UNF v.5 UNF v.6. Two parallel implementation, in R and Java, will be available, for cross-validation. -Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13. Publisher’s Version Copy at http://j.mp/2ovSzoT +Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13.
Publisher’s Version Copy at https://j.mp/2ovSzoT **Contents:** diff --git a/doc/sphinx-guides/source/developers/unf/unf-v3.rst b/doc/sphinx-guides/source/developers/unf/unf-v3.rst index 3f0018d7fa5..98c07b398e0 100644 --- a/doc/sphinx-guides/source/developers/unf/unf-v3.rst +++ b/doc/sphinx-guides/source/developers/unf/unf-v3.rst @@ -34,11 +34,11 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n 1. Terminate character strings representing nonmissing values with a POSIX end-of-line character. -2. Encode each character string with `Unicode bit encoding `_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8. +2. Encode each character string with `Unicode bit encoding `_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8. 3. Combine the vector of character strings into a single sequence, with each character string separated by a POSIX end-of-line character and a null byte. -4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 `_ for Version 4. The resulting hash is `base64 `_ encoded to support readability. +4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 `_ for Version 4. The resulting hash is `base64 `_ encoded to support readability. 5. Calculate the UNF for each lower-level data object, using a consistent UNF version and level of precision across the individual UNFs being combined. @@ -49,4 +49,4 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n 8. Combine UNFs from multiple variables to form a single UNF for an entire data frame, and then combine UNFs for a set of data frames to form a single UNF that represents an entire research study. Learn more: -Software for computing UNFs is available in an R Module, which includes a Windows standalone tool and code for Stata and SAS languages. Also see the following for more details: Micah Altman and Gary King. 
2007. "A Proposed Standard for the Scholarly Citation of Quantitative Data," D-Lib Magazine, Vol. 13, No. 3/4 (March). (Abstract: `HTML `_ | Article: `PDF `_) +Software for computing UNFs is available in an R Module, which includes a Windows standalone tool and code for Stata and SAS languages. Also see the following for more details: Micah Altman and Gary King. 2007. "A Proposed Standard for the Scholarly Citation of Quantitative Data," D-Lib Magazine, Vol. 13, No. 3/4 (March). (Abstract: `HTML `_ | Article: `PDF `_) diff --git a/doc/sphinx-guides/source/developers/unf/unf-v6.rst b/doc/sphinx-guides/source/developers/unf/unf-v6.rst index 9648bae47c8..b2495ff3dd9 100644 --- a/doc/sphinx-guides/source/developers/unf/unf-v6.rst +++ b/doc/sphinx-guides/source/developers/unf/unf-v6.rst @@ -156,7 +156,7 @@ For example, to specify a non-default precision the parameter it is specified us | Allowed values are {``128`` , ``192`` , ``196`` , ``256``} with ``128`` being the default. | ``R1`` - **truncate** numeric values to ``N`` digits, **instead of rounding**, as previously described. -`Dr. Micah Altman's classic UNF v5 paper `_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. (Not implemented in UNF Version 6 by the Dataverse Project). +`Dr. Micah Altman's classic UNF v5 paper `_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. 
(Not implemented in UNF Version 6 by the Dataverse Project). Note: we do not recommend truncating character strings at fewer bytes than the default ``128`` (the ``X`` parameter). At the very least this number **must** be high enough so that the printable UNFs of individual variables or files are not truncated, when calculating combined UNFs of files or datasets, respectively. diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index aacc245af5a..31fc0a4e602 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -24,7 +24,7 @@ The goals of the Dataverse Software branching strategy are: - allow for concurrent development - only ship stable code -We follow a simplified "git flow" model described at http://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix". +We follow a simplified "git flow" model described at https://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix". Branches ~~~~~~~~ diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst index f7e81756e5b..37bb2353ff7 100755 --- a/doc/sphinx-guides/source/index.rst +++ b/doc/sphinx-guides/source/index.rst @@ -42,7 +42,7 @@ Other Resources Additional information about the Dataverse Project itself including presentations, information about upcoming releases, data management and citation, and announcements can be found at -`http://dataverse.org/ `__ +`https://dataverse.org/ `__ **User Group** @@ -65,7 +65,7 @@ The support email address is `support@dataverse.org `__ -or use `GitHub pull requests `__, +or use `GitHub pull requests `__, if you have some code, scripts or documentation that you'd like to share. 
If you have a **security issue** to report, please email `security@dataverse.org `__. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f2de9d5702f..0edb09784e1 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -112,7 +112,7 @@ The need to redirect port HTTP (port 80) to HTTPS (port 443) for security has al Your decision to proxy or not should primarily be driven by which features of the Dataverse Software you'd like to use. If you'd like to use Shibboleth, the decision is easy because proxying or "fronting" Payara with Apache is required. The details are covered in the :doc:`shibboleth` section. -Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simply the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including a some `notes used by the Dataverse Project team `_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips `_ on the web. +Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simplify the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including some `notes used by the Dataverse Project team `_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips `_ on the web. Still not convinced you should put Payara behind another web server? Even if you manage to get your SSL certificate into Payara, how are you going to run Payara on low ports such as 80 and 443?
Are you going to run Payara as root? Bad idea. This is a security risk. Under "Additional Recommendations" under "Securing Your Installation" above you are advised to configure Payara to run as a user other than root. @@ -124,7 +124,7 @@ If you really don't want to front Payara with any proxy (not recommended), you c ``./asadmin set server-config.network-config.network-listeners.network-listener.http-listener-2.port=443`` -What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader. Answers here may be helpful: http://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to +What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader. Answers here may be helpful: https://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to If you are running an installation with Apache and Payara on the same server, and would like to restrict Payara from responding to any requests to port 8080 from external hosts (in other words, not through Apache), you can restrict the AJP listener to localhost only with: @@ -157,7 +157,7 @@ and restart Payara. 
The prefix can be configured via the API (where it is referr Once this is done, you will be able to publish datasets and files, but the persistent identifiers will not be citable, and they will only resolve from the DataCite test environment (and then only if the Dataverse installation from which you published them is accessible - DOIs minted from your laptop will not resolve). Note that any datasets or files created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured. -To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataversecommunity.global for more on joining DataCite) but **EZID** (http://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider. +To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataversecommunity.global for more on joining DataCite) but **EZID** (https://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider. Once you have your DOI or Handle account credentials and a namespace, configure your Dataverse installation to use them using the JVM options and database settings below. 
@@ -205,7 +205,7 @@ Here are the configuration options for handles: - :ref:`:IndependentHandleService <:IndependentHandleService>` (optional) - :ref:`:HandleAuthHandle <:HandleAuthHandle>` (optional) -Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation `_. +Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation `_. .. _auth-modes: @@ -288,7 +288,7 @@ Multiple file stores should specify different directories (which would nominally Swift Storage +++++++++++++ -Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage `_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container. +Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage `_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container. **In order to configure a Swift installation,** you need to complete these steps to properly modify the JVM options: @@ -304,7 +304,7 @@ First, run all the following create commands with your Swift endpoint informatio ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..username.endpoint1=your-username" ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..endpoint.endpoint1=your-swift-endpoint" -``auth_type`` can either be ``keystone``, ``keystone_v3``, or it will assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). 
``swift_endpoint`` is a URL that looks something like ``http://rdgw.swift.example.org/swift/v1``. +``auth_type`` can either be ``keystone``, ``keystone_v3``, or it will be assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). ``swift_endpoint`` is a URL that looks something like ``https://rdgw.swift.example.org/swift/v1``. Then create a password alias by running (without changes): @@ -400,7 +400,7 @@ You'll need an AWS account with an associated S3 bucket for your installation to **Make note** of the **bucket's name** and the **region** its data is hosted in. To **create a user** with full S3 access and nothing more for security reasons, we recommend using IAM -(Identity and Access Management). See `IAM User Guide `_ +(Identity and Access Management). See `IAM User Guide `_ for more info on this process. **Generate the user keys** needed for a Dataverse installation afterwards by clicking on the created user. @@ -410,7 +410,7 @@ If you are hosting your Dataverse installation on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead of the credentials file (the file at ``~/.aws/credentials`` mentioned below). Please note that you will still need the ``~/.aws/config`` file to specify the region.
For more information on this option, see - http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html Preparation When Using Custom S3-Compatible Service ################################################### @@ -471,7 +471,7 @@ Additional profiles can be added to these files by appending the relevant inform aws_access_key_id = aws_secret_access_key = -Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation `_: +Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation `_: "In order to separate credentials from less sensitive options, region and output format are stored in a separate file named config in the same folder") @@ -598,7 +598,7 @@ You may provide the values for these via any of the Reported Working S3-Compatible Storage ###################################### -`Minio v2018-09-12 `_ +`Minio v2018-09-12 `_ Set ``dataverse.files..path-style-access=true``, as Minio works path-based. Works pretty smooth, easy to setup. **Can be used for quick testing, too:** just use the example values above. Uses the public (read: unsecure and possibly slow) https://play.minio.io:9000 service. @@ -2063,7 +2063,7 @@ Note: by default, the URL is composed from the settings ``:GuidesBaseUrl`` and ` :GuidesBaseUrl ++++++++++++++ -Set ``:GuidesBaseUrl`` to override the default value "http://guides.dataverse.org". If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful. +Set ``:GuidesBaseUrl`` to override the default value "https://guides.dataverse.org". 
If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful. ``curl -X PUT -d http://dataverse.example.edu http://localhost:8080/api/admin/settings/:GuidesBaseUrl`` @@ -2084,14 +2084,14 @@ Set ``:NavbarSupportUrl`` to a fully-qualified URL which will be used for the "S Note that this will override the default behaviour for the "Support" menu option, which is to display the Dataverse collection 'feedback' dialog. -``curl -X PUT -d http://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl`` +``curl -X PUT -d https://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl`` :MetricsUrl +++++++++++ Make the metrics component on the root Dataverse collection a clickable link to a website where you present metrics on your Dataverse installation, perhaps one of the community-supported tools mentioned in the :doc:`/admin/reporting-tools-and-queries` section of the Admin Guide. -``curl -X PUT -d http://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl`` +``curl -X PUT -d https://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl`` .. _:MaxFileUploadSizeInBytes: diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index 4b000f1ef9e..5cb6e7153d4 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -98,7 +98,7 @@ The supplied site URL will be saved under the JVM option :ref:`dataverse.siteUrl The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. 
The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara5/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara. -.. _JHOVE: http://jhove.openpreservation.org +.. _JHOVE: https://jhove.openpreservation.org Logging In ---------- @@ -118,7 +118,7 @@ Use the following credentials to log in: - username: dataverseAdmin - password: admin -Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg `_ asking to be added to the map at http://dataverse.org :) +Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg `_ asking to be added to the map at https://dataverse.org :) Trouble? See if you find an answer in the troubleshooting section below. @@ -197,7 +197,7 @@ Be sure you save the changes made here and then restart your Payara server to te UnknownHostException While Deploying ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also http://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873 +If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also https://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873 .. 
_fresh-reinstall: diff --git a/doc/sphinx-guides/source/installation/intro.rst b/doc/sphinx-guides/source/installation/intro.rst index 2251af7b81b..e5b10883d4b 100644 --- a/doc/sphinx-guides/source/installation/intro.rst +++ b/doc/sphinx-guides/source/installation/intro.rst @@ -2,7 +2,7 @@ Introduction ============ -Welcome! Thanks for installing `The Dataverse Project `_! +Welcome! Thanks for installing `The Dataverse Project `_! .. contents:: |toctitle| :local: @@ -36,7 +36,7 @@ Getting Help To get help installing or configuring a Dataverse installation, please try one or more of: - posting to the `dataverse-community `_ Google Group. -- asking at http://chat.dataverse.org +- asking at https://chat.dataverse.org - emailing support@dataverse.org to open a private ticket at https://help.hmdc.harvard.edu Information to Send to Support When Installation Fails diff --git a/doc/sphinx-guides/source/installation/oauth2.rst b/doc/sphinx-guides/source/installation/oauth2.rst index 0dfdb0393e0..cd765c91b7f 100644 --- a/doc/sphinx-guides/source/installation/oauth2.rst +++ b/doc/sphinx-guides/source/installation/oauth2.rst @@ -11,7 +11,7 @@ As explained under "Auth Modes" in the :doc:`config` section, OAuth2 is one of t `OAuth2 `_ is an authentication protocol that allows systems to share user data, while letting the users control what data is being shared. When you see buttons stating "login with Google" or "login through Facebook", OAuth2 is probably involved. For the purposes of this section, we will shorten "OAuth2" to just "OAuth." OAuth can be compared and contrasted with :doc:`shibboleth`. -The Dataverse Software supports four OAuth providers: `ORCID `_, `Microsoft Azure Active Directory (AD) `_, `GitHub `_, and `Google `_. +The Dataverse Software supports four OAuth providers: `ORCID `_, `Microsoft Azure Active Directory (AD) `_, `GitHub `_, and `Google `_. In addition :doc:`oidc` are supported, using a standard based on OAuth2. 
diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index a40ef758dc7..ee154ca9b9c 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -51,7 +51,7 @@ Just like with :doc:`oauth2` you need to obtain a *Client ID* and a *Client Secr You need to apply for credentials out-of-band. The Dataverse installation will discover all necessary metadata for a given provider on its own (this is `part of the standard -`_). +`_). To enable this, you need to specify an *Issuer URL* when creating the configuration for your provider (see below). diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 3cf876a2251..7d458bbc37b 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -26,7 +26,7 @@ Installing Java The Dataverse Software should run fine with only the Java Runtime Environment (JRE) installed, but installing the Java Development Kit (JDK) is recommended so that useful tools for troubleshooting production environments are available. We recommend using Oracle JDK or OpenJDK. 
-The Oracle JDK can be downloaded from http://www.oracle.com/technetwork/java/javase/downloads/index.html +The Oracle JDK can be downloaded from https://www.oracle.com/technetwork/java/javase/downloads/index.html On a RHEL/derivative, install OpenJDK (devel version) using yum:: @@ -261,7 +261,7 @@ Installing jq or you may install it manually:: # cd /usr/bin - # wget http://stedolan.github.io/jq/download/linux64/jq + # wget https://stedolan.github.io/jq/download/linux64/jq # chmod +x jq # jq --version diff --git a/doc/sphinx-guides/source/installation/shibboleth.rst b/doc/sphinx-guides/source/installation/shibboleth.rst index cd0fbda77a6..3a2e1b99c70 100644 --- a/doc/sphinx-guides/source/installation/shibboleth.rst +++ b/doc/sphinx-guides/source/installation/shibboleth.rst @@ -76,7 +76,7 @@ A ``jk-connector`` network listener should have already been set up when you ran You can verify this with ``./asadmin list-network-listeners``. -This enables the `AJP protocol `_ used in Apache configuration files below. +This enables the `AJP protocol `_ used in Apache configuration files below. SSLEngine Warning Workaround ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -93,7 +93,7 @@ Configure Apache Enforce HTTPS ~~~~~~~~~~~~~ -To prevent attacks such as `FireSheep `_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. You **could** copy and paste that those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf`` but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place. +To prevent attacks such as `FireSheep `_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. 
You **could** copy and paste those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf`` but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place. Below is an example of how "rewrite rule" lines look within a ``VirtualHost`` block. Download a :download:`sample file <../_static/installation/files/etc/httpd/conf.d/dataverse.example.edu.conf>` , edit it to substitute your own hostname under ``ServerName``, and place it at ``/etc/httpd/conf.d/dataverse.example.edu.conf`` or a filename that matches your hostname. The file must be in ``/etc/httpd/conf.d`` and must end in ".conf" to be included in Apache's configuration. @@ -235,7 +235,7 @@ Run semodule Silent is golden. No output is expected. This will place a file in ``/etc/selinux/targeted/modules/active/modules/shibboleth.pp`` and include "shibboleth" in the output of ``semodule -l``. See the ``semodule`` man page if you ever want to remove or disable the module you just added. -Congrats! You've made the creator of http://stopdisablingselinux.com proud. :) +Congrats! You've made the creator of https://stopdisablingselinux.com proud. :) Restart Apache and Shibboleth ----------------------------- diff --git a/doc/sphinx-guides/source/style/foundations.rst b/doc/sphinx-guides/source/style/foundations.rst index 31e0c314a05..cc193666868 100755 --- a/doc/sphinx-guides/source/style/foundations.rst +++ b/doc/sphinx-guides/source/style/foundations.rst @@ -9,7 +9,7 @@ Foundation elements are the very basic building blocks to create a page in Datav Grid Layout =========== -`Bootstrap `__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts. +`Bootstrap `__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts. We use the fixed-width ``.container`` class which provides responsive widths (i.e.
auto, 750px, 970px or 1170px) based on media queries for the page layout, with a series of rows and columns for the content. @@ -42,7 +42,7 @@ The grid layout uses ``.col-sm-*`` classes for horizontal groups of columns, ins Typography ========== -The typeface, text size, and line-height are set in the `Bootstrap CSS `__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ```` and all paragraphs. +The typeface, text size, and line-height are set in the `Bootstrap CSS `__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ```` and all paragraphs. .. code-block:: css @@ -57,7 +57,7 @@ The typeface, text size, and line-height are set in the `Bootstrap CSS `__. It provides the background, border, text and link colors used across the application. +The default color palette is set in the `Bootstrap CSS `__. It provides the background, border, text and link colors used across the application. Brand Colors @@ -138,7 +138,7 @@ We use our brand color, a custom burnt orange ``{color:#C55B28;}``, which is set Text Colors ----------- -Text color is the default setting from `Bootstrap CSS `__. +Text color is the default setting from `Bootstrap CSS `__. .. code-block:: css @@ -163,7 +163,7 @@ Text color is the default setting from `Bootstrap CSS `__. The hover state color is set to 15% darker. +Link color is the default setting from `Bootstrap CSS `__. The hover state color is set to 15% darker. **Please note**, there is a CSS override issue with the link color due to the use of both a Bootstrap stylesheet and a PrimeFaces stylesheet in the UI. We've added CSS such as ``.ui-widget-content a {color: #428BCA;}`` to our stylesheet to keep the link color consistent. @@ -204,7 +204,7 @@ Link color is the default setting from `Bootstrap CSS `__ can be used to style background and text colors. 
Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes. +Contextual classes from `Bootstrap CSS `__ can be used to style background and text colors. Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes. .. raw:: html @@ -259,7 +259,7 @@ We use various icons across the application, which we get from Bootstrap, FontCu Bootstrap Glyphicons -------------------- -There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap `__. We utilize these mainly as icons inside of buttons and in message blocks. +There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap `__. We utilize these mainly as icons inside of buttons and in message blocks. .. raw:: html @@ -305,7 +305,7 @@ The :doc:`/developers/fontcustom` section of the Developer Guide explains how to Socicon Icon Font ----------------- -We use `Socicon `__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels. +We use `Socicon `__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels. .. 
raw:: html diff --git a/doc/sphinx-guides/source/style/patterns.rst b/doc/sphinx-guides/source/style/patterns.rst index e96f17dc2ec..c6602ffa26e 100644 --- a/doc/sphinx-guides/source/style/patterns.rst +++ b/doc/sphinx-guides/source/style/patterns.rst @@ -1,7 +1,7 @@ Patterns ++++++++ -Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap `__ like tooltips and dropdowns, and AJAX components from `PrimeFaces `__ like datatables and commandlinks. +Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap `__ like tooltips and dropdowns, and AJAX components from `PrimeFaces `__ like datatables and commandlinks. .. contents:: |toctitle| :local: @@ -9,7 +9,7 @@ Patterns are what emerge when using the foundation elements together with basic Navbar ====== -The `Navbar component `__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right. +The `Navbar component `__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right. When logged in, the account name is a dropdown menu, linking the user to account-specific content and the log out link. @@ -74,7 +74,7 @@ When logged in, the account name is a dropdown menu, linking the user to account Breadcrumbs =========== -The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component `_ to iterate through the breadcrumbs. 
+The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component `_ to iterate through the breadcrumbs. .. raw:: html @@ -108,7 +108,7 @@ The breadcrumbs are displayed under the header, and provide a trail of links for Tables ====== -Most tables use the `DataTable components `__ from PrimeFaces and are styled using the `Tables component `__ from Bootstrap. +Most tables use the `DataTable components `__ from PrimeFaces and are styled using the `Tables component `__ from Bootstrap. .. raw:: html @@ -187,7 +187,7 @@ Most tables use the `DataTable components `__ from Bootstrap. Form elements like the `InputText component `__ from PrimeFaces are kept looking clean and consistent across each page. +Forms fulfill various functions across the site, but we try to style them consistently. We use the ``.form-horizontal`` layout, which uses ``.form-group`` to create a grid of rows for the labels and inputs. The consistent style of forms is maintained using the `Forms component `__ from Bootstrap. Form elements like the `InputText component `__ from PrimeFaces are kept looking clean and consistent across each page. .. raw:: html @@ -289,7 +289,7 @@ Here are additional form elements that are common across many pages, including r Buttons ======= -There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component `__ and `CommandLink component `__ from PrimeFaces, as well as the basic JSF `Link component `__ and `OutputLink component `__. Those are styled using the `Buttons component `__, `Button Groups component `__ and `Buttons Dropdowns component `__ from Bootstrap. 
+There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component `__ and `CommandLink component `__ from PrimeFaces, as well as the basic JSF `Link component `__ and `OutputLink component `__. Those are styled using the `Buttons component `__, `Button Groups component `__ and `Buttons Dropdowns component `__ from Bootstrap. Action Buttons -------------- @@ -668,7 +668,7 @@ Another variation of icon-only buttons uses the ``.btn-link`` style class from B Pagination ========== -We use the `Pagination component `__ from Bootstrap for paging through search results. +We use the `Pagination component `__ from Bootstrap for paging through search results. .. raw:: html @@ -738,7 +738,7 @@ We use the `Pagination component `__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial). +The `Labels component `__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial). .. raw:: html @@ -768,7 +768,7 @@ The `Labels component `__ from Boots Alerts ====== -For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component `__ from Bootstrap. +For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component `__ from Bootstrap. .. raw:: html @@ -859,9 +859,9 @@ Style classes can be added to ``p``, ``div``, ``span`` and other elements to add Images ====== -For images, we use the `GraphicImage component `__ from PrimeFaces, or the basic JSF `GraphicImage component `__. +For images, we use the `GraphicImage component `__ from PrimeFaces, or the basic JSF `GraphicImage component `__. 
-To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class `__ from Bootstrap. +To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class `__ from Bootstrap. .. raw:: html @@ -879,7 +879,7 @@ To display images in a responsive way, they are styled with ``.img-responsive``, Panels ====== -The most common of our containers, the `Panels component `__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the `Collapse plugin `__ from Bootstrap. +The most common of our containers, the `Panels component `__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the `Collapse plugin `__ from Bootstrap. .. raw:: html @@ -943,7 +943,7 @@ Tabs Tabs are used to provide content panes on a page that allow the user to view different sections of content without navigating to a different page. -We use the `TabView component `__ from PrimeFaces, which is styled using the `Tab component `__ from Bootstrap. +We use the `TabView component `__ from PrimeFaces, which is styled using the `Tab component `__ from Bootstrap. .. raw:: html @@ -989,7 +989,7 @@ Modals are dialog prompts that act as popup overlays, but don't create a new bro Buttons usually provide the UI prompt. A user clicks the button, which then opens a `Dialog component `__ or `Confirm Dialog component `__ from PrimeFaces that displays the modal with the necessary information and actions to take. -The modal is styled using the `Modal component `__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options. 
+The modal is styled using the `Modal component `__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options. .. raw:: html diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst index 12cc54c7fde..792fad730cf 100755 --- a/doc/sphinx-guides/source/user/account.rst +++ b/doc/sphinx-guides/source/user/account.rst @@ -109,7 +109,7 @@ If you are leaving your institution and need to convert your Dataverse installat ORCID Log In ~~~~~~~~~~~~~ -You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org `_. +You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org `_. 
Create a Dataverse installation account using ORCID ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst index b05459b6aaf..ae0ec37aff3 100755 --- a/doc/sphinx-guides/source/user/appendix.rst +++ b/doc/sphinx-guides/source/user/appendix.rst @@ -22,13 +22,13 @@ Supported Metadata Detailed below are what metadata schemas we support for Citation and Domain Specific Metadata in the Dataverse Project: -- `Citation Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 3.1 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. -- `Geospatial Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. +- `Citation Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 3.1 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. +- `Geospatial Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. - `Social Science & Humanities Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, and Dublin Core. - `Astronomy and Astrophysics Metadata `__ (`see .tsv version `__): These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) - `VOResource Schema format `__ and is based on - `Virtual Observatory (VO) Discovery and Provenance Metadata `__. -- `Life Sciences Metadata `__ (`see .tsv version `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. 
+ `VOResource Schema format `__ and is based on + `Virtual Observatory (VO) Discovery and Provenance Metadata `__. +- `Life Sciences Metadata `__ (`see .tsv version `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. - `Journal Metadata `__ (`see .tsv version `__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 `__. Experimental Metadata diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 77a760ef838..a3637154050 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -192,7 +192,7 @@ Additional download options available for tabular data (found in the same drop-d - As tab-delimited data (with the variable names in the first row); - The original file uploaded by the user; - Saved as R data (if the original file was not in R format); -- Variable Metadata (as a `DDI Codebook `_ XML file); +- Variable Metadata (as a `DDI Codebook `_ XML file); - Data File Citation (currently in either RIS, EndNote XML, or BibTeX format); - All of the above, as a zipped bundle. @@ -297,7 +297,7 @@ You can also search for files within datasets that have been tagged as "Workflow Astronomy (FITS) ---------------- -Metadata found in the header section of `Flexible Image Transport System (FITS) files `_ are automatically extracted by the Dataverse Software, aggregated and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. This FITS file metadata, is therefore searchable and browsable (facets) at the Dataset-level. +Metadata found in the header section of `Flexible Image Transport System (FITS) files `_ are automatically extracted by the Dataverse Software, aggregated and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. 
This FITS file metadata, is therefore searchable and browsable (facets) at the Dataset-level. Compressed Files ---------------- @@ -388,7 +388,7 @@ Choosing a License ------------------ Each Dataverse installation provides a set of license(s) data can be released under, and whether users can specify custom terms instead (see below). -One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication `_) serves as the default if you do not make an explicit choice. +One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication `_) serves as the default if you do not make an explicit choice. If you want to apply one of the other available licenses to your dataset, you can change it on the Terms tab of your Dataset page. License Selection and Professional Norms diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index f1d5611ede9..33ae9b555e6 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -27,7 +27,7 @@ separately, in a relational database, so that it can be accessed efficiently by the application. For the purposes of archival preservation it can be exported, in plain text XML files, using a standardized, open `DDI Codebook -`_ +`_ format. (more info below) @@ -53,6 +53,6 @@ Tabular Metadata in the Dataverse Software The structure of the metadata defining tabular data variables used in the Dataverse Software was originally based on the `DDI Codebook -`_ format. +`_ format. You can see an example of DDI output under the :ref:`data-variable-metadata-access` section of the :doc:`/api/dataaccess` section of the API Guide. 
From 044ed40c17e1fa5fdbb7c8745a4671add25414c0 Mon Sep 17 00:00:00 2001 From: bencomp Date: Tue, 18 Oct 2022 00:32:55 +0200 Subject: [PATCH 002/252] Align table boundary in SWORD doc --- doc/sphinx-guides/source/api/sword.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst index c9ac83bc204..51391784bde 100755 --- a/doc/sphinx-guides/source/api/sword.rst +++ b/doc/sphinx-guides/source/api/sword.rst @@ -127,7 +127,7 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse Project DB Element Cr +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:creator | authorName (LastName, FirstName) | Y | Author(s) for the Dataset. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ -|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:description | dsDescriptionValue | Y | Describing the purpose, scope or nature of the Dataset. Can also use dcterms:abstract. 
| +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ From 98e5b3fbae8871ef0fecbd0550ad8fefb00e2b22 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 12:34:37 +0200 Subject: [PATCH 003/252] fix(ct): enable sane default for upload storage location in containers The default from microprofile-config.properties does NOT work, as the location must already be resolvable while the servlet is being initialized - the app shipped defaults file is not yet read at this point. This is similar to the database options, which must be set using one of the other Payara included config sources. (Non-easily resolvable timing issue). The solution for containers is to add an env var to the docker file, which can be overriden by any env var from compose or K8s etc. (Problem is the high ordinal of the env source though) --- src/main/docker/Dockerfile | 4 +++- src/main/resources/META-INF/microprofile-config.properties | 1 - 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index 88020a118b5..f64e88cb414 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -27,7 +27,9 @@ FROM $BASE_IMAGE # Make Payara use the "ct" profile for MicroProfile Config. Will switch various defaults for the application # setup in META-INF/microprofile-config.properties. 
# See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile -ENV MP_CONFIG_PROFILE=ct +ENV MP_CONFIG_PROFILE=ct \ + # NOTE: this cannot be provided as default from microprofile-config.properties as not yet avail when servlet starts + DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 7c16495f870..748ed6de55a 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -12,7 +12,6 @@ dataverse.build= dataverse.files.directory=/tmp/dataverse # The variables are replaced with the environment variables from our base image, but still easy to override %ct.dataverse.files.directory=${STORAGE_DIR} -%ct.dataverse.files.uploads=${STORAGE_DIR}/uploads # SEARCH INDEX dataverse.solr.host=localhost From d71cdf2d427011fc660794bb12afbab9db1c2bc7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 16:07:03 +0200 Subject: [PATCH 004/252] fix(ct,conf): switch to different approach to default upload location Instead of trying to provide a default using STORAGE_DIR env var from microprofile-config.properties as before, using this env var reference in glassfish-web.xml directly now. By defaulting to "." if not present (as in classic installations), it is fully equivalent to the former hardcoded default value. Providing a synced variant of it in microprofile-config.properties and leaving a hint about the pitfalls, we can reuse the setting for other purposes within the codebase as well (and expect the same behaviour because same defaults). 
--- src/main/docker/Dockerfile | 4 +--- src/main/resources/META-INF/microprofile-config.properties | 6 ++++++ src/main/webapp/WEB-INF/glassfish-web.xml | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index f64e88cb414..88020a118b5 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -27,9 +27,7 @@ FROM $BASE_IMAGE # Make Payara use the "ct" profile for MicroProfile Config. Will switch various defaults for the application # setup in META-INF/microprofile-config.properties. # See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile -ENV MP_CONFIG_PROFILE=ct \ - # NOTE: this cannot be provided as default from microprofile-config.properties as not yet avail when servlet starts - DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" +ENV MP_CONFIG_PROFILE=ct # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 748ed6de55a..f3745126cb2 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -12,6 +12,12 @@ dataverse.build= dataverse.files.directory=/tmp/dataverse # The variables are replaced with the environment variables from our base image, but still easy to override %ct.dataverse.files.directory=${STORAGE_DIR} +# NOTE: the following uses STORAGE_DIR for both containers and classic installations. By defaulting to "." if not +# present, it equals the hardcoded default from before again. Also, be aware that this props file cannot provide +# any value for lookups in glassfish-web.xml during servlet initialization, as this file will not have +# been read yet! 
The names and their values are in sync here and over there to ensure the config checker +# is able to check for the directories (exist + writeable). +dataverse.files.uploads=${STORAGE_DIR:.}/uploads # SEARCH INDEX dataverse.solr.host=localhost diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index e56d7013abf..8041ebd4447 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -18,5 +18,5 @@ This folder is not only holding compiled JSP pages but also the place where file streams are stored during uploads. As Dataverse does not use any JSP, there will only be uploads stored here. --> - + From a4ec3a66e76aa1559aea0c05cedc2da2b38d7b03 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 16:44:08 +0200 Subject: [PATCH 005/252] feat(conf): introduce ConfigCheckService to validate config on startup #9572 Starting with important local storage locations for the Dataverse application, this service uses EJB startup mechanisms to verify configuration bits on startup. Checks for the temp storage location and JSF upload location as crucial parts of the app, which, if not exist or write protected, while only cause errors and failures on the first data upload attempt. This is not desirable as it might cause users to be blocked. 
--- .../settings/ConfigCheckService.java | 65 +++++++++++++++++++ .../iq/dataverse/settings/JvmSettings.java | 1 + .../harvard/iq/dataverse/util/FileUtil.java | 29 ++++----- 3 files changed, 77 insertions(+), 18 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java new file mode 100644 index 00000000000..4ba028903b0 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -0,0 +1,65 @@ +package edu.harvard.iq.dataverse.settings; + +import edu.harvard.iq.dataverse.util.FileUtil; + +import javax.annotation.PostConstruct; +import javax.ejb.DependsOn; +import javax.ejb.Singleton; +import javax.ejb.Startup; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@Startup +@Singleton +@DependsOn("StartupFlywayMigrator") +public class ConfigCheckService { + + private static final Logger logger = Logger.getLogger(ConfigCheckService.class.getCanonicalName()); + + public static class ConfigurationError extends RuntimeException { + public ConfigurationError(String message) { + super(message); + } + } + + @PostConstruct + public void startup() { + if (!checkSystemDirectories()) { + throw new ConfigurationError("Not all configuration checks passed successfully. See logs above."); + } + } + + /** + * In this method, we check the existence and write-ability of all important directories we use during + * normal operations. It does not include checks for the storage system. If directories are not available, + * try to create them (and fail when not allowed to). + * + * @return True if all checks successful, false otherwise. 
+ */ + public boolean checkSystemDirectories() { + Map paths = Map.of( + Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")", + Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")"); + + boolean success = true; + for (Path path : paths.keySet()) { + if (Files.notExists(path)) { + try { + Files.createDirectories(path); + } catch (IOException e) { + logger.log(Level.SEVERE, () -> "Could not create directory " + path + " for " + paths.get(path)); + success = false; + } + } else if (!Files.isWritable(path)) { + logger.log(Level.SEVERE, () -> "Directory " + path + " for " + paths.get(path) + " exists, but is not writeable"); + success = false; + } + } + return success; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index ff04a633ea7..c5c5682821a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -49,6 +49,7 @@ public enum JvmSettings { // FILES SETTINGS SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), + UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6bb7e1d583b..ee1ee5a6a1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -40,6 +40,7 @@ import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; import edu.harvard.iq.dataverse.ingest.IngestableDataChecker; import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.settings.ConfigCheckService; import 
edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.file.BagItFileHandler; import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; @@ -1478,25 +1479,17 @@ public static boolean canIngestAsTabular(String mimeType) { } } + /** + * Return the location where data should be stored temporarily after uploading (UI or API) + * for local processing (ingest, unzip, ...) and transfer to final destination (see storage subsystem). + * + * This location is checked to be configured, does exist, and is writeable via + * {@link ConfigCheckService#checkSystemDirectories()}. + * + * @return String with a path to the temporary location. Will not be null (former versions did to indicate failure) + */ public static String getFilesTempDirectory() { - - String filesRootDirectory = JvmSettings.FILES_DIRECTORY.lookup(); - String filesTempDirectory = filesRootDirectory + "/temp"; - - if (!Files.exists(Paths.get(filesTempDirectory))) { - /* Note that "createDirectories()" must be used - not - * "createDirectory()", to make sure all the parent - * directories that may not yet exist are created as well. - */ - try { - Files.createDirectories(Paths.get(filesTempDirectory)); - } catch (IOException ex) { - logger.severe("Failed to create filesTempDirectory: " + filesTempDirectory ); - return null; - } - } - - return filesTempDirectory; + return JvmSettings.FILES_DIRECTORY.lookup() + File.separator + "temp"; } public static void generateS3PackageStorageIdentifier(DataFile dataFile) { From 6999093dcea8e889a24aafbe84dd6035e8a4b5db Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 17:37:40 +0200 Subject: [PATCH 006/252] feat(conf): make docroot location configurable #9662 Add JVM Setting and add to config checker on startup to ensure target location is in good shape. 
--- .../harvard/iq/dataverse/settings/ConfigCheckService.java | 3 ++- .../edu/harvard/iq/dataverse/settings/JvmSettings.java | 1 + .../resources/META-INF/microprofile-config.properties | 1 + src/main/webapp/WEB-INF/glassfish-web.xml | 8 ++++---- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java index 4ba028903b0..443d12fc17a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -43,7 +43,8 @@ public void startup() { public boolean checkSystemDirectories() { Map paths = Map.of( Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")", - Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")"); + Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")", + Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup()), "docroot space (see " + JvmSettings.DOCROOT_DIRECTORY.getScopedKey() + ")"); boolean success = true; for (Path path : paths.keySet()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index c5c5682821a..540dc8201a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -50,6 +50,7 @@ public enum JvmSettings { SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), + DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), diff --git a/src/main/resources/META-INF/microprofile-config.properties 
b/src/main/resources/META-INF/microprofile-config.properties index f3745126cb2..597d50b2e0c 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -18,6 +18,7 @@ dataverse.files.directory=/tmp/dataverse # been read yet! The names and their values are in sync here and over there to ensure the config checker # is able to check for the directories (exist + writeable). dataverse.files.uploads=${STORAGE_DIR:.}/uploads +dataverse.files.docroot=${STORAGE_DIR:.}/docroot # SEARCH INDEX dataverse.solr.host=localhost diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index 8041ebd4447..5088e5a7fba 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -10,10 +10,10 @@ - - - - + + + + + From 2913a52f35645621bace35c93a9c0b2707004da1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 18:32:55 +0200 Subject: [PATCH 010/252] refactor(conf): simplify sitemap output location lookup using new docroot setting --- .../iq/dataverse/sitemap/SiteMapUtil.java | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java index e32b811ee2c..86ae697f771 100644 --- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java @@ -3,6 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObjectContainer; +import edu.harvard.iq.dataverse.settings.ConfigCheckService; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.xml.XmlValidator; import java.io.File; @@ -210,16 +212,17 @@ public static boolean 
stageFileExists() { } return false; } - + + /** + * Lookup the location where to generate the sitemap. + * + * Note: the location is checked to be configured, does exist and is writeable in + * {@link ConfigCheckService#checkSystemDirectories()} + * + * @return Sitemap storage location ([docroot]/sitemap) + */ private static String getSitemapPathString() { - String sitemapPathString = "/tmp"; - // i.e. /usr/local/glassfish4/glassfish/domains/domain1 - String domainRoot = System.getProperty("com.sun.aas.instanceRoot"); - if (domainRoot != null) { - // Note that we write to a directory called "sitemap" but we serve just "/sitemap.xml" using PrettyFaces. - sitemapPathString = domainRoot + File.separator + "docroot" + File.separator + "sitemap"; - } - return sitemapPathString; + return JvmSettings.DOCROOT_DIRECTORY.lookup() + File.separator + "sitemap"; } } From 2ffec04318861c5e12f63e100514cda1c793f41d Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 25 Jul 2023 10:54:15 -0400 Subject: [PATCH 011/252] #9724 linking a dataset requires Publish Dataset permission --- doc/sphinx-guides/source/user/dataverse-management.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/dataverse-management.rst b/doc/sphinx-guides/source/user/dataverse-management.rst index b5e8d8f4fc9..b039c6c65b1 100755 --- a/doc/sphinx-guides/source/user/dataverse-management.rst +++ b/doc/sphinx-guides/source/user/dataverse-management.rst @@ -212,7 +212,7 @@ Dataset linking allows a Dataverse collection owner to "link" their Dataverse co For example, researchers working on a collaborative study across institutions can each link their own individual institutional Dataverse collections to the one collaborative dataset, making it easier for interested parties from each institution to find the study. -In order to link a dataset, you will need your account to have the "Add Dataset" permission on the Dataverse collection that is doing the linking. 
If you created the Dataverse collection then you should have this permission already, but if not then you will need to ask the admin of that Dataverse collection to assign that permission to your account. You do not need any special permissions on the dataset being linked. +In order to link a dataset, you will need your account to have the "Publish Dataset" permission on the Dataverse collection that is doing the linking. If you created the Dataverse collection then you should have this permission already, but if not then you will need to ask the admin of that Dataverse collection to assign that permission to your account. You do not need any special permissions on the dataset being linked. To link a dataset to your Dataverse collection, you must navigate to that dataset and click the white "Link" button in the upper-right corner of the dataset page. This will open up a window where you can type in the name of the Dataverse collection that you would like to link the dataset to. Select your Dataverse collection and click the save button. This will establish the link, and the dataset will now appear under your Dataverse collection. From a835f5db1dc2ed3fd307c012d8b1535dae24523f Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 18 Aug 2023 18:42:30 -0400 Subject: [PATCH 012/252] added pagination to the /versions api. dropped the files section from the (default) output of the api. 
(#9763) --- doc/sphinx-guides/source/api/native-api.rst | 9 +- .../iq/dataverse/DatasetServiceBean.java | 2 +- .../harvard/iq/dataverse/DatasetVersion.java | 6 +- .../dataverse/DatasetVersionServiceBean.java | 112 +++++++++++++++++- .../harvard/iq/dataverse/api/Datasets.java | 36 +++--- .../iq/dataverse/dataset/DatasetUtil.java | 2 +- .../command/impl/ListVersionsCommand.java | 48 +++++--- .../iq/dataverse/util/json/JsonPrinter.java | 16 +-- 8 files changed, 188 insertions(+), 43 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4d9466703e4..da3fbfffa73 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -830,7 +830,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl "https://demo.dataverse.org/api/datasets/24/versions" + curl "https://demo.dataverse.org/api/datasets/24/versions?includeFiles=true" It returns a list of versions with their metadata, and file list: @@ -883,6 +883,10 @@ It returns a list of versions with their metadata, and file list: ] } +The optional ``includeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``false``. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. + +The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list in a dataset with a large number of versions. 
+ Get Version of a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -903,6 +907,9 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0" +The optional ``includeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``false``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. + + .. _export-dataset-metadata-api: Export Metadata of a Dataset in Various Formats diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 52eb5868c35..ceb5902defa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -137,7 +137,7 @@ public Dataset findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments") .getSingleResult(); } - + public List findByOwnerId(Long ownerId) { return findByOwnerId(ownerId, false); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 5836bd9e175..8d4dafad62a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -68,7 +68,11 @@ query = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.dataset.harvestedFrom IS NULL and o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL" ), @NamedQuery(name = "DatasetVersion.findById", - query = "SELECT o FROM DatasetVersion o LEFT JOIN FETCH o.fileMetadatas WHERE o.id=:id")}) + query = "SELECT o FROM DatasetVersion o LEFT JOIN FETCH o.fileMetadatas WHERE o.id=:id"), + @NamedQuery(name = "DatasetVersion.findByDataset", + query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC"), + @NamedQuery(name 
= "DatasetVersion.findReleasedByDataset", + query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId AND o.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")}) @Entity diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 28243c37eee..27a4f4773d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -48,7 +48,23 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final Logger logger = Logger.getLogger(DatasetVersionServiceBean.class.getCanonicalName()); private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - + + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" + + " WHERE fm.datasetVersion.id=:datasetVersionId" + + " ORDER BY fm.label"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = dvo.id" + + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.filesize"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.contentType"; + @EJB DatasetServiceBean datasetService; @@ -149,7 +165,19 @@ public 
DatasetVersion getDatasetVersion(){ return this.datasetVersionForResponse; } } // end RetrieveDatasetVersionResponse - + + /** + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} + */ + public enum FileMetadatasOrderCriteria { + NameAZ, + NameZA, + Newest, + Oldest, + Size, + Type + } + public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -168,7 +196,39 @@ public DatasetVersion findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.creator") .getSingleResult(); } - + + /** + * Performs the same database lookup as the one behind Dataset.getVersions(). + * Additionally, provides the arguments for selecting a partial list of + * (length-offset) versions for pagination, plus the ability to pre-select + * only the publicly-viewable versions. + * @param datasetId + * @param offset for pagination through long lists of versions + * @param length for pagination through long lists of versions + * @param includeUnpublished retrieves all the versions, including drafts and deaccessioned. 
+ * @return (partial) list of versions + */ + public List findVersions(Long datasetId, Integer offset, Integer length, boolean includeUnpublished) { + TypedQuery query; + if (includeUnpublished) { + query = em.createNamedQuery("DatasetVersion.findByDataset", DatasetVersion.class); + } else { + query = em.createNamedQuery("DatasetVersion.findReleasedByDataset", DatasetVersion.class) + .setParameter("datasetId", datasetId); + } + + query.setParameter("datasetId", datasetId); + + if (offset != null) { + query.setFirstResult(offset); + } + if (length != null) { + query.setMaxResults(length); + } + + return query.getResultList(); + } + public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendlyVersionNumber) { Long majorVersionNumber = null; Long minorVersionNumber = null; @@ -1224,4 +1284,50 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @return a FileMetadata list of the specified DatasetVersion + */ + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { + TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) + .setParameter("datasetVersionId", datasetVersion.getId()); + if (limit != null) { + query.setMaxResults(limit); + } + if (offset != null) { + query.setFirstResult(offset); + } + return query.getResultList(); + } + + private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { + String queryString; + switch (orderCriteria) { + case NameZA: + queryString = 
QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; + break; + case Newest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; + break; + case Oldest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; + break; + case Size: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; + break; + case Type: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; + break; + default: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; + break; + } + return queryString; + } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index dbea63cb1c8..25d077f9807 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -260,7 +260,7 @@ public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id" MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); mdcLogService.logEntry(entry); } - return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null)); + return ok(jsonbuilder.add("latestVersion", (latest != null) ? 
json(latest, true) : null)); }, getRequestUser(crc)); } @@ -466,31 +466,39 @@ public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @Pa @GET @AuthRequired @Path("{id}/versions") - public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id ) { + public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("includeFiles") Boolean includeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { return response( req -> - ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id)) ) + ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id), offset, limit) ) .stream() - .map( d -> json(d) ) + .map( d -> json(d, includeFiles == null ? false : includeFiles) ) .collect(toJsonArray())), getRequestUser(crc)); } @GET @AuthRequired @Path("{id}/versions/{versionId}") - public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("includeFiles") Boolean includeFiles, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response( req -> { DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") - : ok(json(dsv)); + : ok(json(dsv, includeFiles == null ? 
false : includeFiles)); }, getRequestUser(crc)); } @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> ok( jsonFileMetadatas( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas())), getRequestUser(crc)); + public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response( req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + try { + fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + } + return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, fileMetadatasOrderCriteria))); + }, getRequestUser(crc)); } @GET @@ -708,7 +716,7 @@ public Response updateDraftVersion(@Context ContainerRequestContext crc, String } managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion)); } - return ok( json(managedVersion) ); + return ok( json(managedVersion, true) ); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex); @@ -943,7 +951,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); - return ok(json(managedVersion)); + return ok(json(managedVersion, true)); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex); @@ -1092,7 +1100,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque } DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); - return ok(json(managedVersion)); + return ok(json(managedVersion, true)); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex); @@ -3848,9 +3856,9 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String JsonObjectBuilder responseJson; if (isAnonymizedAccess) { List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); - responseJson = json(dsv, anonymizedFieldTypeNamesList); 
+ responseJson = json(dsv, anonymizedFieldTypeNamesList, true); } else { - responseJson = json(dsv); + responseJson = json(dsv, true); } return ok(responseJson); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index adbd132bce8..e36ba34a364 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -521,7 +521,7 @@ public static boolean validateDatasetMetadataExternally(Dataset ds, String execu // for the filter to whitelist by these attributes. try { - jsonMetadata = json(ds).add("datasetVersion", json(ds.getLatestVersion())) + jsonMetadata = json(ds).add("datasetVersion", json(ds.getLatestVersion(), true)) .add("sourceAddress", sourceAddressLabel) .add("userIdentifier", userIdentifier) .add("parentAlias", ds.getOwner().getAlias()) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java index 51283f29156..80a5fe9b080 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java @@ -23,23 +23,41 @@ */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class ListVersionsCommand extends AbstractCommand>{ - +public class ListVersionsCommand extends AbstractCommand> { + private final Dataset ds; + private final Integer limit; + private final Integer offset; - public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset) { - super(aRequest, aDataset); - ds = aDataset; - } + public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset) { + this(aRequest, aDataset, null, null); + } + + public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset, Integer offset, Integer limit) 
{ + super(aRequest, aDataset); + ds = aDataset; + this.offset = offset; + this.limit = limit; + } - @Override - public List execute(CommandContext ctxt) throws CommandException { - List outputList = new LinkedList<>(); - for ( DatasetVersion dsv : ds.getVersions() ) { - if (dsv.isReleased() || ctxt.permissions().request( getRequest() ).on(ds).has(Permission.EditDataset)) { - outputList.add(dsv); + @Override + public List execute(CommandContext ctxt) throws CommandException { + + boolean includeUnpublished = ctxt.permissions().request(getRequest()).on(ds).has(Permission.EditDataset); + + if (offset == null && limit == null) { + // @todo: this fragment can be dropped, and the service-based method below + // can be used for both cases. + List outputList = new LinkedList<>(); + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || includeUnpublished) { + outputList.add(dsv); + } } - } - return outputList; - } + return outputList; + } else { + // Only a partial list (one "page"-worth) of versions is being requested + return ctxt.datasetVersion().findVersions(ds.getId(), offset, limit, includeUnpublished); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index b6026998bb7..dc8971c9539 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -368,11 +368,11 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) { .add("mime",ds.getMime())); } - public static JsonObjectBuilder json(DatasetVersion dsv) { - return json(dsv, null); + public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { + return json(dsv, null, includeFiles); } - public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList) { + public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean 
includeFiles) { Dataset dataset = dsv.getDataset(); JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()).add("datasetId", dataset.getId()) @@ -415,7 +415,9 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList) : jsonByBlocks(dsv.getDatasetFields()) ); - bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas())); + if (includeFiles) { + bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas())); + } return bld; } @@ -447,8 +449,8 @@ public static JsonObjectBuilder jsonDataFileList(List dataFiles){ * to the regular `json` method for DatasetVersion? Will anything break? * Unit tests for that method could not be found. */ - public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv) { - JsonObjectBuilder dsvWithCitation = JsonPrinter.json(dsv); + public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv, boolean includeFiles) { + JsonObjectBuilder dsvWithCitation = JsonPrinter.json(dsv, includeFiles); dsvWithCitation.add("citation", dsv.getCitation()); return dsvWithCitation; } @@ -467,7 +469,7 @@ public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv) { */ public static JsonObjectBuilder jsonAsDatasetDto(DatasetVersion dsv) { JsonObjectBuilder datasetDtoAsJson = JsonPrinter.json(dsv.getDataset()); - datasetDtoAsJson.add("datasetVersion", jsonWithCitation(dsv)); + datasetDtoAsJson.add("datasetVersion", jsonWithCitation(dsv, true)); return datasetDtoAsJson; } From de35ae7c65fc4b77704ab0cd5df4a9d31ec0dbad Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 21 Aug 2023 00:17:17 -0400 Subject: [PATCH 013/252] added left join hints to the full filemetadatas lookup. 
#9763 --- .../dataverse/DatasetVersionServiceBean.java | 25 +++++++++++++++---- .../harvard/iq/dataverse/api/Datasets.java | 1 + 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 27a4f4773d4..1edc281fa3e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -202,6 +202,8 @@ public DatasetVersion findDeep(Object pk) { * Additionally, provides the arguments for selecting a partial list of * (length-offset) versions for pagination, plus the ability to pre-select * only the publicly-viewable versions. + * It is recommended that individual software components utilize the + * ListVersionsCommand, instead of calling this service method directly. * @param datasetId * @param offset for pagination through long lists of versions * @param length for pagination through long lists of versions @@ -1297,11 +1299,24 @@ public List getUnarchivedDatasetVersions(){ public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) .setParameter("datasetVersionId", datasetVersion.getId()); - if (limit != null) { - query.setMaxResults(limit); - } - if (offset != null) { - query.setFirstResult(offset); + + if (limit == null && offset == null) { + query.setHint("eclipselink.left-join-fetch", "fm.dataFile.ingestRequest") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.thumbnailForDataset") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataTables") + .setHint("eclipselink.left-join-fetch", "fm.fileCategories") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.embargo") + .setHint("eclipselink.left-join-fetch", 
"fm.datasetVersion") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.releaseUser") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.creator"); + } else { + // @todo: is there really no way to use offset-limit with left join hints? + if (limit != null) { + query.setMaxResults(limit); + } + if (offset != null) { + query.setFirstResult(offset); + } } return query.getResultList(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 25d077f9807..48755d4ea8a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -479,6 +479,7 @@ public Response listVersions(@Context ContainerRequestContext crc, @PathParam("i @Path("{id}/versions/{versionId}") public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("includeFiles") Boolean includeFiles, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response( req -> { + // @todo: consider using DatasetVersionServiceBean.findDeep() here  DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") : ok(json(dsv, includeFiles == null ? 
false : includeFiles)); From 1db004245edd2f62f8d76290f4a183f095af36cc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 22 Aug 2023 14:24:55 +0200 Subject: [PATCH 014/252] build(settings): migrate ConfigCheckService to Jakarta EE 10 --- .../harvard/iq/dataverse/settings/ConfigCheckService.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java index 9e2a82d6b58..b175eafc3e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -2,10 +2,10 @@ import edu.harvard.iq.dataverse.util.FileUtil; -import javax.annotation.PostConstruct; -import javax.ejb.DependsOn; -import javax.ejb.Singleton; -import javax.ejb.Startup; +import jakarta.annotation.PostConstruct; +import jakarta.ejb.DependsOn; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; import java.io.IOException; import java.nio.file.FileSystemException; import java.nio.file.Files; From 72722e77afb848131dc38042d23b7cf44156a6e8 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 22 Aug 2023 15:01:05 +0200 Subject: [PATCH 015/252] refactor(collection): make logo location non-static #9662 By introducing a new static method ThemeWidgetFragment.getLogoDir all other places (api.Access, api.Dataverse, UpdateDataverseThemeCommand, DataverseServiceBean) can use a lookup function from one central place instead of building the path on their own. Reducing code duplication also means we can more easily get the location from a setting, enabling relocation of the data. That is especially important for container usage. Also, we can now use ConfigCheckService to detect if the folders we configured are read/write accessible to us. 
--- .../iq/dataverse/DataverseServiceBean.java | 11 +---------- .../iq/dataverse/ThemeWidgetFragment.java | 18 ++++++++++++++---- .../edu/harvard/iq/dataverse/api/Access.java | 12 ++---------- .../harvard/iq/dataverse/api/Dataverses.java | 2 ++ .../impl/UpdateDataverseThemeCommand.java | 4 ++-- 5 files changed, 21 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 7194a1ef31e..549b8310122 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -399,16 +399,7 @@ private File getLogo(Dataverse dataverse) { DataverseTheme theme = dataverse.getDataverseTheme(); if (theme != null && theme.getLogo() != null && !theme.getLogo().isEmpty()) { - Properties p = System.getProperties(); - String domainRoot = p.getProperty("com.sun.aas.instanceRoot"); - - if (domainRoot != null && !"".equals(domainRoot)) { - return new File (domainRoot + File.separator + - "docroot" + File.separator + - "logos" + File.separator + - dataverse.getLogoOwnerId() + File.separator + - theme.getLogo()); - } + return ThemeWidgetFragment.getLogoDir(dataverse.getLogoOwnerId()).resolve(theme.getLogo()).toFile(); } return null; diff --git a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java index 9a62a99722a..f30051e26ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseThemeCommand; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import java.io.File; @@ -14,6 +15,7 @@ 
import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.logging.Level; @@ -49,6 +51,8 @@ public class ThemeWidgetFragment implements java.io.Serializable { static final String DEFAULT_TEXT_COLOR = "888888"; private static final Logger logger = Logger.getLogger(ThemeWidgetFragment.class.getCanonicalName()); + public static final String LOGOS_SUBDIR = "logos"; + public static final String LOGOS_TEMP_SUBDIR = LOGOS_SUBDIR + File.separator + "temp"; private File tempDir; private File uploadedFile; @@ -86,12 +90,18 @@ public void setTaglineInput(HtmlInputText taglineInput) { } - + public static Path getLogoDir(String ownerId) { + return Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup(), LOGOS_SUBDIR, ownerId); + } - private void createTempDir() { + private void createTempDir() { try { - File tempRoot = Files.createDirectories(Paths.get("../docroot/logos/temp")).toFile(); - tempDir = Files.createTempDirectory(tempRoot.toPath(),editDv.getId().toString()).toFile(); + // Create the temporary space if not yet existing (will silently ignore preexisting) + // Note that the docroot directory is checked within ConfigCheckService for presence and write access. 
+ Path tempRoot = Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup(), LOGOS_TEMP_SUBDIR); + Files.createDirectories(tempRoot); + + this.tempDir = Files.createTempDirectory(tempRoot, editDv.getId().toString()).toFile(); } catch (IOException e) { throw new RuntimeException("Error creating temp directory", e); // improve error handling } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 0341f8c1127..ce7cfb6b254 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -31,6 +31,7 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import edu.harvard.iq.dataverse.ThemeWidgetFragment; import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; @@ -1196,16 +1197,7 @@ private File getLogo(Dataverse dataverse) { DataverseTheme theme = dataverse.getDataverseTheme(); if (theme != null && theme.getLogo() != null && !theme.getLogo().equals("")) { - Properties p = System.getProperties(); - String domainRoot = p.getProperty("com.sun.aas.instanceRoot"); - - if (domainRoot != null && !"".equals(domainRoot)) { - return new File (domainRoot + File.separator + - "docroot" + File.separator + - "logos" + File.separator + - dataverse.getLogoOwnerId() + File.separator + - theme.getLogo()); - } + return ThemeWidgetFragment.getLogoDir(dataverse.getLogoOwnerId()).resolve(theme.getLogo()).toFile(); } return null; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a60775cbd38..30c14535251 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -976,6 +976,8 @@ public Response listAssignments(@Context ContainerRequestContext crc, @PathParam */ // File tempDir; 
// +// TODO: Code duplicate in ThemeWidgetFragment. Maybe extract, make static and put some place else? +// Important: at least use JvmSettings.DOCROOT_DIRECTORY and not the hardcoded location! // private void createTempDir(Dataverse editDv) { // try { // File tempRoot = java.nio.file.Files.createDirectories(Paths.get("../docroot/logos/temp")).toFile(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java index add7b825659..9ef9fed4b1b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.ThemeWidgetFragment; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -22,7 +23,6 @@ public class UpdateDataverseThemeCommand extends AbstractCommand { private final Dataverse editedDv; private final File uploadedFile; - private final Path logoPath = Paths.get("../docroot/logos"); private String locate; public UpdateDataverseThemeCommand(Dataverse editedDv, File uploadedFile, DataverseRequest aRequest, String location) { @@ -44,7 +44,7 @@ public UpdateDataverseThemeCommand(Dataverse editedDv, File uploadedFile, Datave public Dataverse execute(CommandContext ctxt) throws CommandException { // Get current dataverse, so we can delete current logo file if necessary Dataverse currentDv = ctxt.dataverses().find(editedDv.getId()); - File logoFileDir = new File(logoPath.toFile(), editedDv.getId().toString()); + File logoFileDir = ThemeWidgetFragment.getLogoDir(editedDv.getId().toString()).toFile(); File 
currentFile=null; if (locate.equals("FOOTER")){ From 431afed3d78fd514151e8e25c9389fcbfea79f22 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 22 Aug 2023 16:12:22 +0200 Subject: [PATCH 016/252] fix(webapp): revert to hardcoded default for dirs in glassfish-web.xml #9662 Payara does not support looking up variables in default values of a lookup. As a consequence, we must return to the hardcoded "./docroot" and "./uploads" and instead supply default values using two environment variables in the applications' Dockerfile. This way they stay configurable from cmdline or other sources of env vars. --- src/main/docker/Dockerfile | 5 +++++ .../META-INF/microprofile-config.properties | 4 ++-- src/main/webapp/WEB-INF/glassfish-web.xml | 14 +++++++++----- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index 88020a118b5..201f164d961 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -29,6 +29,11 @@ FROM $BASE_IMAGE # See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile ENV MP_CONFIG_PROFILE=ct +# Workaround to configure upload directories by default to useful place until we can have variable lookups in +# defaults for glassfish-web.xml and other places. 
+ENV DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" +ENV DATAVERSE_FILES_DOCROOT="${STORAGE_DIR}/docroot" + # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ COPY --chown=payara:payara maven/app ${DEPLOY_DIR}/dataverse/ diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index b58c728316b..11471663fc3 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -12,8 +12,8 @@ dataverse.build= dataverse.files.directory=/tmp/dataverse # The variables are replaced with the environment variables from our base image, but still easy to override %ct.dataverse.files.directory=${STORAGE_DIR} -# NOTE: the following uses STORAGE_DIR for both containers and classic installations. By defaulting to -# "com.sun.aas.instanceRoot" if not present, it equals the hardcoded former default "." in glassfish-web.xml +# NOTE: The following uses STORAGE_DIR for both containers and classic installations. By defaulting to +# "com.sun.aas.instanceRoot" if not present, it equals the hardcoded default "." in glassfish-web.xml # (which is relative to the domain root folder). # Also, be aware that this props file cannot provide any value for lookups in glassfish-web.xml during servlet # initialization, as this file will not have been read yet! 
The names and their values are in sync here and over diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index 9677bf089e2..015a309fd6b 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -11,13 +11,17 @@ - - - - + + + + + - + + From ec131f8b93463047811ff3b0ed626f61ee831041 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 22 Aug 2023 16:15:28 +0200 Subject: [PATCH 017/252] fix(settings): make ConfigCheckService use Files.exist #9662 With Files.notExist if some folder does not have the "execute" attribute, it cannot detect a folder does not exist. Inverting the Files.exists call solves the problem. Also adding tests for the business logic. --- .../settings/ConfigCheckService.java | 2 +- .../settings/ConfigCheckServiceTest.java | 92 +++++++++++++++++++ 2 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java index b175eafc3e0..83c3f6ac90d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -49,7 +49,7 @@ public boolean checkSystemDirectories() { boolean success = true; for (Path path : paths.keySet()) { - if (Files.notExists(path)) { + if (! 
Files.exists(path)) { try { Files.createDirectories(path); } catch (IOException e) { diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java new file mode 100644 index 00000000000..796448e579a --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java @@ -0,0 +1,92 @@ +package edu.harvard.iq.dataverse.settings; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Set; + +import static java.nio.file.attribute.PosixFilePermission.GROUP_READ; +import static java.nio.file.attribute.PosixFilePermission.OWNER_READ; + +class ConfigCheckServiceTest { + + @Nested + class TestDirNotWritable { + @TempDir + Path testDir; + + private String oldUploadDirSetting; + + @BeforeEach + void setUp() throws IOException { + Files.setPosixFilePermissions(this.testDir, Set.of(OWNER_READ, GROUP_READ)); + + // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for + // @JvmSetting from static methods. Should be deleted. + this.oldUploadDirSetting = System.getProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey()); + System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.testDir.toString()); + } + + @AfterEach + void tearDown() { + // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for + // @JvmSetting from static methods. Should be deleted. 
+ if (this.oldUploadDirSetting != null) + System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.oldUploadDirSetting); + } + + @Test + void writeCheckFails() { + Assumptions.assumeTrue(Files.exists(this.testDir)); + + ConfigCheckService sut = new ConfigCheckService(); + Assertions.assertFalse(sut.checkSystemDirectories()); + } + } + + @Nested + class TestDirNotExistent { + @TempDir + Path testDir; + String subFolder = "foobar"; + + String oldUploadDirSetting; + + @BeforeEach + void setUp() throws IOException { + // Make test dir not writeable, so the subfolder cannot be created + Files.setPosixFilePermissions(this.testDir, Set.of(OWNER_READ, GROUP_READ)); + + // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for + // @JvmSetting from static methods. Should be deleted. + oldUploadDirSetting = System.getProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey()); + System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.testDir.resolve(this.subFolder).toString()); + } + + @AfterEach + void tearDown() { + // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for + // @JvmSetting from static methods. Should be deleted. 
+ if (this.oldUploadDirSetting != null) + System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.oldUploadDirSetting); + } + + @Test + void mkdirFails() { + Assumptions.assumeTrue(Files.exists(this.testDir)); + Assumptions.assumeFalse(Files.exists(this.testDir.resolve(this.subFolder))); + + ConfigCheckService sut = new ConfigCheckService(); + Assertions.assertFalse(sut.checkSystemDirectories()); + } + } + +} \ No newline at end of file From 4cd62eb6ed0812fec031e9328d3595dd13616225 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 22 Aug 2023 13:23:49 -0400 Subject: [PATCH 018/252] (ongoing experiments; a lot of these changes are temporary and will be deleted) #9763 --- .../harvard/iq/dataverse/DatasetVersion.java | 4 ++- .../dataverse/DatasetVersionServiceBean.java | 31 +++++++++++++++++-- .../harvard/iq/dataverse/api/Datasets.java | 20 ++++++++++++ .../command/impl/ListVersionsCommand.java | 4 ++- .../search/SearchIncludeFragment.java | 1 + .../iq/dataverse/util/json/JsonPrinter.java | 5 +++ 6 files changed, 60 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 8d4dafad62a..f547f2963d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -72,7 +72,9 @@ @NamedQuery(name = "DatasetVersion.findByDataset", query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC"), @NamedQuery(name = "DatasetVersion.findReleasedByDataset", - query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId AND o.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")}) + query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId AND 
o.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")/*, + @NamedQuery(name = "DatasetVersion.findVersionElements", + query = "SELECT o.id, o.versionState, o.versionNumber, o.minorVersionNumber FROM DatasetVersion o WHERE o.dataset.id=:datasetId ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")*/}) @Entity diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 1edc281fa3e..fbed7d93cdd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1301,7 +1301,7 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege .setParameter("datasetVersionId", datasetVersion.getId()); if (limit == null && offset == null) { - query.setHint("eclipselink.left-join-fetch", "fm.dataFile.ingestRequest") + query = query.setHint("eclipselink.left-join-fetch", "fm.dataFile.ingestRequest") .setHint("eclipselink.left-join-fetch", "fm.dataFile.thumbnailForDataset") .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataTables") .setHint("eclipselink.left-join-fetch", "fm.fileCategories") @@ -1312,10 +1312,35 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege } else { // @todo: is there really no way to use offset-limit with left join hints? 
if (limit != null) { - query.setMaxResults(limit); + query = query.setMaxResults(limit); } if (offset != null) { - query.setFirstResult(offset); + query = query.setFirstResult(offset); + } + } + return query.getResultList(); + } + + public List getFileMetadatasByDbId(Long versionId, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { + TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) + .setParameter("datasetVersionId", versionId); + + if (limit == null && offset == null) { + query = query.setHint("eclipselink.left-join-fetch", "fm.dataFile.ingestRequest") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.thumbnailForDataset") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataTables") + .setHint("eclipselink.left-join-fetch", "fm.fileCategories") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.embargo") + .setHint("eclipselink.left-join-fetch", "fm.datasetVersion") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.releaseUser") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.creator"); + } else { + // @todo: is there really no way to use offset-limit with left join hints? 
+ if (limit != null) { + query = query.setMaxResults(limit); + } + if (offset != null) { + query = query.setFirstResult(offset); } } return query.getResultList(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 48755d4ea8a..47c249b7c8a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -467,6 +467,9 @@ public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @Pa @AuthRequired @Path("{id}/versions") public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("includeFiles") Boolean includeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { + // @todo: when full versions list - including files - is requested, consider + // using datasetservice.findDeep() (needs testing on "monstrous" datasets + // with a lot of versions!) return response( req -> ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id), offset, limit) ) .stream() @@ -502,6 +505,23 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam }, getRequestUser(crc)); } + //@todo: remember to delete this! (for experiments only!) 
+ @GET + @AuthRequired + @Path("{id}/versions/{versionId}/files2") + public Response getVersionFiles2(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") Long versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response( req -> { + //DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + try { + fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + } + return ok(jsonFileMetadatas(datasetversionService.getFileMetadatasByDbId(versionId, limit, offset, fileMetadatasOrderCriteria))); + }, getRequestUser(crc)); + } + @GET @AuthRequired @Path("{id}/dirindex") diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java index 80a5fe9b080..d3675a8f206 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java @@ -47,7 +47,9 @@ public List execute(CommandContext ctxt) throws CommandException if (offset == null && limit == null) { // @todo: this fragment can be dropped, and the service-based method below - // can be used for both cases. + // can be used for both cases. + // @todo: on the other hand, consider using datasetservice.findDeep() + // when a full list of versions is requested. 
List outputList = new LinkedList<>(); for (DatasetVersion dsv : ds.getVersions()) { if (dsv.isReleased() || includeUnpublished) { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index e249b81c983..5c5dc8b5171 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -205,6 +205,7 @@ public String searchRedirect(String dataverseRedirectPage, Dataverse dataverseIn */ dataverse = dataverseIn; + logger.info("redirect page: "+dataverseRedirectPage); dataverseRedirectPage = StringUtils.isBlank(dataverseRedirectPage) ? "dataverse.xhtml" : dataverseRedirectPage; String optionalDataverseScope = "&alias=" + dataverse.getAlias(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index dc8971c9539..68f0be3a067 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -373,6 +373,9 @@ public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { } public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles) { + /* return json(dsv, null, includeFiles, null); + } + public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, boolean includeFiles, Long numberOfFiles) {*/ Dataset dataset = dsv.getDataset(); JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()).add("datasetId", dataset.getId()) @@ -388,6 +391,8 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized .add("alternativePersistentId", dataset.getAlternativePersistentIdentifier()) .add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD()) .add("citationDate", 
dataset.getCitationDateFormattedYYYYMMDD()); + //.add("numberOfFiles", numberOfFiles); + License license = DatasetUtil.getLicense(dsv); if (license != null) { bld.add("license", jsonLicense(dsv)); From 848f564a1c63656381352a6a0f52443bd05f9cb7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 22 Aug 2023 23:41:42 +0200 Subject: [PATCH 019/252] test(settings): make ConfigCheckService actually testable #9662 --- .../settings/ConfigCheckServiceTest.java | 82 ++++++++++--------- .../META-INF/microprofile-config.properties | 7 +- 2 files changed, 50 insertions(+), 39 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java index 796448e579a..1018ad8d47b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.settings; -import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assumptions; import org.junit.jupiter.api.BeforeEach; @@ -17,35 +17,32 @@ import static java.nio.file.attribute.PosixFilePermission.OWNER_READ; class ConfigCheckServiceTest { + + @TempDir + static Path testDir; + + private static final String testDirProp = "test.filesDir"; + + @AfterAll + static void tearDown() { + System.clearProperty(testDirProp); + } @Nested class TestDirNotWritable { - @TempDir - Path testDir; - private String oldUploadDirSetting; + Path notWriteableSubfolder = testDir.resolve("readonly"); @BeforeEach void setUp() throws IOException { - Files.setPosixFilePermissions(this.testDir, Set.of(OWNER_READ, GROUP_READ)); - - // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for - // @JvmSetting from static methods. Should be deleted. 
- this.oldUploadDirSetting = System.getProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey()); - System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.testDir.toString()); - } - - @AfterEach - void tearDown() { - // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for - // @JvmSetting from static methods. Should be deleted. - if (this.oldUploadDirSetting != null) - System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.oldUploadDirSetting); + Files.createDirectory(notWriteableSubfolder); + Files.setPosixFilePermissions(notWriteableSubfolder, Set.of(OWNER_READ, GROUP_READ)); + System.setProperty(testDirProp, notWriteableSubfolder.toString()); } @Test void writeCheckFails() { - Assumptions.assumeTrue(Files.exists(this.testDir)); + Assumptions.assumeTrue(Files.exists(notWriteableSubfolder)); ConfigCheckService sut = new ConfigCheckService(); Assertions.assertFalse(sut.checkSystemDirectories()); @@ -54,38 +51,47 @@ void writeCheckFails() { @Nested class TestDirNotExistent { - @TempDir - Path testDir; - String subFolder = "foobar"; - String oldUploadDirSetting; + Path notExistTestfolder = testDir.resolve("parent-readonly"); + Path notExistConfigSubfolder = notExistTestfolder.resolve("foobar"); @BeforeEach void setUp() throws IOException { + Files.createDirectory(notExistTestfolder); // Make test dir not writeable, so the subfolder cannot be created - Files.setPosixFilePermissions(this.testDir, Set.of(OWNER_READ, GROUP_READ)); + Files.setPosixFilePermissions(notExistTestfolder, Set.of(OWNER_READ, GROUP_READ)); + System.setProperty(testDirProp, notExistConfigSubfolder.toString()); + } + + @Test + void mkdirFails() { + Assumptions.assumeTrue(Files.exists(notExistTestfolder)); + Assumptions.assumeFalse(Files.exists(notExistConfigSubfolder)); - // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for - // @JvmSetting from static methods. Should be deleted. 
- oldUploadDirSetting = System.getProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey()); - System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.testDir.resolve(this.subFolder).toString()); + ConfigCheckService sut = new ConfigCheckService(); + Assertions.assertFalse(sut.checkSystemDirectories()); } + } + + @Nested + class TestDirCreated { + + Path missingToBeCreatedTestfolder = testDir.resolve("create-me"); + Path missingToBeCreatedSubfolder = missingToBeCreatedTestfolder.resolve("foobar"); - @AfterEach - void tearDown() { - // TODO: This is a workaround until PR #9273 is merged, providing the ability to lookup values for - // @JvmSetting from static methods. Should be deleted. - if (this.oldUploadDirSetting != null) - System.setProperty(JvmSettings.UPLOADS_DIRECTORY.getScopedKey(), this.oldUploadDirSetting); + @BeforeEach + void setUp() throws IOException { + Files.createDirectory(missingToBeCreatedTestfolder); + System.setProperty(testDirProp, missingToBeCreatedSubfolder.toString()); } @Test - void mkdirFails() { - Assumptions.assumeTrue(Files.exists(this.testDir)); - Assumptions.assumeFalse(Files.exists(this.testDir.resolve(this.subFolder))); + void mkdirSucceeds() { + Assumptions.assumeTrue(Files.exists(missingToBeCreatedTestfolder)); + Assumptions.assumeFalse(Files.exists(missingToBeCreatedSubfolder)); ConfigCheckService sut = new ConfigCheckService(); - Assertions.assertFalse(sut.checkSystemDirectories()); + Assertions.assertTrue(sut.checkSystemDirectories()); } } diff --git a/src/test/resources/META-INF/microprofile-config.properties b/src/test/resources/META-INF/microprofile-config.properties index 21f70b53896..8e5521f8287 100644 --- a/src/test/resources/META-INF/microprofile-config.properties +++ b/src/test/resources/META-INF/microprofile-config.properties @@ -8,4 +8,9 @@ dataverse.pid.ezid.api-url=http://example.org # Also requires the username and the password to be present when used in production, use a default for unit testing. 
dataverse.pid.ezid.username=Dataverse Unit Test -dataverse.pid.ezid.password=supersecret \ No newline at end of file +dataverse.pid.ezid.password=supersecret + +# To test ConfigCheckService, point our files directories to a common test dir +dataverse.files.directory=${test.filesDir} +dataverse.files.uploads=${test.filesDir}/uploads +dataverse.files.docroot=${test.filesDir}/docroot From c49ee0ab86891521801969a73367fd1cb50817ee Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 22 Aug 2023 23:47:45 +0200 Subject: [PATCH 020/252] test(settings): make ConfigCheckService require absolute paths #9662 --- .../iq/dataverse/settings/ConfigCheckService.java | 7 +++++++ .../iq/dataverse/settings/ConfigCheckServiceTest.java | 10 ++++++++++ 2 files changed, 17 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java index 83c3f6ac90d..a2c3f53d59d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -49,6 +49,13 @@ public boolean checkSystemDirectories() { boolean success = true; for (Path path : paths.keySet()) { + // Check if the configured path is absolute - avoid potential problems with relative paths this way + if (! path.isAbsolute()) { + logger.log(Level.SEVERE, () -> "Configured directory " + path + " for " + paths.get(path) + " is not absolute"); + success = false; + continue; + } + if (! 
Files.exists(path)) { try { Files.createDirectories(path); diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java index 1018ad8d47b..b031b9429c6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java @@ -27,6 +27,16 @@ class ConfigCheckServiceTest { static void tearDown() { System.clearProperty(testDirProp); } + + @Nested + class TestDirNotAbsolute { + @Test + void nonAbsolutePathForTestDir() { + System.setProperty(testDirProp, "foobar"); + ConfigCheckService sut = new ConfigCheckService(); + Assertions.assertFalse(sut.checkSystemDirectories()); + } + } @Nested class TestDirNotWritable { From 28ddccc6d0f0baf4a3e48243b81c1ed001428b13 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 23 Aug 2023 10:18:27 +0200 Subject: [PATCH 021/252] fix(test,settings): make SiteMapUtilTest use test.filesDir property This is also used in ConfigCheckServiceTest to verify the checks are working. 
This property is picked up when sitemap util looks up the storage location via MPCONFIG, parsing the default values during testing from src/test/resources/META-INF/microprofile-config.properties --- .../edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java index 4f2b00bbea4..2ded6cb6a33 100644 --- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java @@ -82,7 +82,8 @@ public void testUpdateSiteMap() throws IOException, ParseException { String tmpDir = tmpDirPath.toString(); File docroot = new File(tmpDir + File.separator + "docroot"); docroot.mkdirs(); - System.setProperty("com.sun.aas.instanceRoot", tmpDir); + // TODO: this and the above should be replaced with JUnit 5 @TestDir + System.setProperty("test.filesDir", tmpDir); SiteMapUtil.updateSiteMap(dataverses, datasets); @@ -117,7 +118,7 @@ public void testUpdateSiteMap() throws IOException, ParseException { assertFalse(sitemapString.contains(harvestedPid)); assertFalse(sitemapString.contains(deaccessionedPid)); - System.clearProperty("com.sun.aas.instanceRoot"); + System.clearProperty("test.filesDir"); } From ba8e6d221ad9e0ce5de4391e96757d120e0313b4 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 23 Aug 2023 10:20:29 +0200 Subject: [PATCH 022/252] fix(test,settings): provide default for test.filesDir By providing a sane default unter /tmp, we enable a few tests that do not use a custom testclass scoped directory to run --- src/test/resources/META-INF/microprofile-config.properties | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/test/resources/META-INF/microprofile-config.properties b/src/test/resources/META-INF/microprofile-config.properties index 8e5521f8287..113a098a1fe 100644 --- 
a/src/test/resources/META-INF/microprofile-config.properties +++ b/src/test/resources/META-INF/microprofile-config.properties @@ -10,7 +10,9 @@ dataverse.pid.ezid.api-url=http://example.org dataverse.pid.ezid.username=Dataverse Unit Test dataverse.pid.ezid.password=supersecret -# To test ConfigCheckService, point our files directories to a common test dir +# To test ConfigCheckService, point our files directories to a common test dir by overriding the +# property test.filesDir via system properties +test.filesDir=/tmp/dataverse dataverse.files.directory=${test.filesDir} dataverse.files.uploads=${test.filesDir}/uploads dataverse.files.docroot=${test.filesDir}/docroot From 396ffff1c5dc2b38435c140ba5860fe5fbee9fd6 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 23 Aug 2023 11:45:49 +0200 Subject: [PATCH 023/252] style(settings): unify default dataverse.files dir options - No more profile to work around Payaras bug with overriding profiled values - Group together because every item using $STORAGE_DIR and a default to match classic installs now --- src/main/resources/META-INF/microprofile-config.properties | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 11471663fc3..f51a728332e 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -9,15 +9,13 @@ dataverse.build= %ct.dataverse.siteUrl=http://${dataverse.fqdn}:8080 # FILES -dataverse.files.directory=/tmp/dataverse -# The variables are replaced with the environment variables from our base image, but still easy to override -%ct.dataverse.files.directory=${STORAGE_DIR} -# NOTE: The following uses STORAGE_DIR for both containers and classic installations. By defaulting to +# NOTE: The following uses STORAGE_DIR for both containers and classic installations. 
When defaulting to # "com.sun.aas.instanceRoot" if not present, it equals the hardcoded default "." in glassfish-web.xml # (which is relative to the domain root folder). # Also, be aware that this props file cannot provide any value for lookups in glassfish-web.xml during servlet # initialization, as this file will not have been read yet! The names and their values are in sync here and over # there to ensure the config checker is able to check for the directories (exist + writeable). +dataverse.files.directory=${STORAGE_DIR:/tmp/dataverse} dataverse.files.uploads=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/uploads dataverse.files.docroot=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/docroot From d37eedfd7898eecb9fecb8bbe564f8efd76d5e8b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 23 Aug 2023 11:48:26 +0200 Subject: [PATCH 024/252] docs(settings): refactor docs on the important directories and how to configure them #9662 --- .../source/installation/config.rst | 49 ++++++++++++++----- 1 file changed, 38 insertions(+), 11 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f9fe74afc7c..dc31b1afae8 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1759,8 +1759,8 @@ protocol, host, and port number and should not include a trailing slash. dataverse.files.directory +++++++++++++++++++++++++ -Please provide an absolute path to a directory backed by some mounted file system. This directory is used for a number -of purposes: +Providing an explicit location here makes it easier to reuse some mounted filesystem and we recommend doing so +to avoid filled up disks, aid in performance, etc. This directory is used for a number of purposes: 1. ``/temp`` after uploading, data is temporarily stored here for ingest and/or before shipping to the final storage destination. 
@@ -1773,24 +1773,51 @@ of purposes: under certain conditions. This directory may also be used by file stores for :ref:`permanent file storage `, but this is controlled by other, store-specific settings. -Defaults to ``/tmp/dataverse``. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable -``DATAVERSE_FILES_DIRECTORY``. Defaults to ``${STORAGE_DIR}`` for profile ``ct``, important for the -:ref:`Dataverse Application Image `. +Notes: + +- Please provide an absolute path to a directory backed by some mounted file system. +- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_DIRECTORY``. +- Defaults to ``/tmp/dataverse`` in a :doc:`default installation `. +- Defaults to ``${STORAGE_DIR}`` using our :ref:`Dataverse container ` (resolving to ``/dv``). +- During startup, this directory will be checked for existence and write access. It will be created for you + if missing. If it cannot be created or does not have proper write access, application deployment will fail. .. _dataverse.files.uploads: dataverse.files.uploads +++++++++++++++++++++++ -Configure a folder to store the incoming file stream during uploads (before transfering to `${dataverse.files.directory}/temp`). +Configure a folder to store the incoming file stream during uploads (before transferring to ``${dataverse.files.directory}/temp``). +Providing an explicit location here makes it easier to reuse some mounted filesystem. Please also see :ref:`temporary-file-storage` for more details. -You can use an absolute path or a relative, which is relative to the application server domain directory. -Defaults to ``./uploads``, which resolves to ``/usr/local/payara6/glassfish/domains/domain1/uploads`` in a default -installation. +Notes: +
+- Defaults to ``${com.sun.aas.instanceRoot}/uploads`` in a :doc:`default installation ` + (resolving to ``/usr/local/payara6/glassfish/domains/domain1/uploads``). +- Defaults to ``${STORAGE_DIR}/uploads`` using our :ref:`Dataverse container ` (resolving to ``/dv/uploads``). +- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``. +- During startup, this directory will be checked for existence and write access. It will be created for you + if missing. If it cannot be created or does not have proper write access, application deployment will fail. + +.. _dataverse.files.docroot: + +dataverse.files.docroot ++++++++++++++++++++++++ + +Configure a folder to store and retrieve additional materials like user uploaded collection logos, generated sitemaps, +and so on. Providing an explicit location here makes it easier to reuse some mounted filesystem. +See also logo customization above. + +Notes: -Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``. -Defaults to ``${STORAGE_DIR}/uploads`` for profile ``ct``, important for the :ref:`Dataverse Application Image `. +- Defaults to ``${com.sun.aas.instanceRoot}/docroot`` in a :doc:`default installation ` + (resolves to ``/usr/local/payara6/glassfish/domains/domain1/docroot``). +- Defaults to ``${STORAGE_DIR}/docroot`` using our :ref:`Dataverse container ` (resolving to ``/dv/docroot``). +- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_DOCROOT``. +- During startup, this directory will be checked for existence and write access. It will be created for you + if missing. If it cannot be created or does not have proper write access, application deployment will fail. 
dataverse.auth.password-reset-timeout-in-minutes ++++++++++++++++++++++++++++++++++++++++++++++++ From a98395a167d6f1350a7684553517d400e28a8859 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Wed, 23 Aug 2023 17:50:27 +0200 Subject: [PATCH 025/252] Update HTTP to HTTPS URLs in classic-dev-env.rst --- doc/sphinx-guides/source/developers/classic-dev-env.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst index 6feca558267..363fc87ae17 100755 --- a/doc/sphinx-guides/source/developers/classic-dev-env.rst +++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst @@ -46,7 +46,7 @@ On Linux, you are welcome to use the OpenJDK available from package managers. Install Netbeans or Maven ~~~~~~~~~~~~~~~~~~~~~~~~~ -NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. +NetBeans IDE is recommended, and can be downloaded from https://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section. 
@@ -86,7 +86,7 @@ On Mac, run this command: ``brew install jq`` -On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/ +On Linux, install ``jq`` from your package manager or download a binary from https://stedolan.github.io/jq/ Install Payara ~~~~~~~~~~~~~~ @@ -134,7 +134,7 @@ On Linux, you should just install PostgreSQL using your favorite package manager Install Solr ^^^^^^^^^^^^ -`Solr `_ 8.11.1 is required. +`Solr `_ 8.11.1 is required. To install Solr, execute the following commands: @@ -144,7 +144,7 @@ To install Solr, execute the following commands: ``cd /usr/local/solr`` -``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` +``curl -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` ``tar xvfz solr-8.11.1.tgz`` From 4677cff58b63bdf5b3f0439b8f62ea01dcc91a90 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Thu, 24 Aug 2023 10:18:21 +0200 Subject: [PATCH 026/252] Use HTTPS URLs in README.md --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index d40e5f228f7..01de5b2c854 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Dataverse® =============== -Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Data Science and Products team](http://www.iq.harvard.edu/people/people/data-science-products) at the [Institute for Quantitative Social Science](http://iq.harvard.edu/) and the [Dataverse community][]). +Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Data Science and Products team](https://www.iq.harvard.edu/people/people/data-science-products) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]). 
[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our development [roadmap][], and more. @@ -26,15 +26,15 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi [dataverse.org]: https://dataverse.org [demo.dataverse.org]: https://demo.dataverse.org [Dataverse community]: https://dataverse.org/developers -[Installation Guide]: http://guides.dataverse.org/en/latest/installation/index.html +[Installation Guide]: https://guides.dataverse.org/en/latest/installation/index.html [latest release]: https://github.com/IQSS/dataverse/releases [features]: https://dataverse.org/software-features [roadmap]: https://www.iq.harvard.edu/roadmap-dataverse-project [integrations]: https://dataverse.org/integrations -[REST APIs]: http://guides.dataverse.org/en/latest/api/index.html +[REST APIs]: https://guides.dataverse.org/en/latest/api/index.html [Contributing Guide]: CONTRIBUTING.md [mailing list]: https://groups.google.com/group/dataverse-community [community call]: https://dataverse.org/community-calls -[chat.dataverse.org]: http://chat.dataverse.org +[chat.dataverse.org]: https://chat.dataverse.org [Dataverse Community Meeting]: https://dataverse.org/events [open source]: LICENSE.md From 4c28979b0b62f9244313a90ce74c8a7e22791671 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 29 Aug 2023 17:48:08 -0400 Subject: [PATCH 027/252] work in progress. 
--- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 47c249b7c8a..b1858b9982f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -470,11 +470,18 @@ public Response listVersions(@Context ContainerRequestContext crc, @PathParam("i // @todo: when full versions list - including files - is requested, consider // using datasetservice.findDeep() (needs testing on "monstrous" datasets // with a lot of versions!) - return response( req -> - ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id), offset, limit) ) + + return response( req -> { + Dataset dataset = findDatasetOrDie(id); + if (includeFiles == null ? false : includeFiles) { + dataset = datasetService.findDeep(dataset.getId()); + } + //return ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id), offset, limit) ) + return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit) ) .stream() .map( d -> json(d, includeFiles == null ? false : includeFiles) ) - .collect(toJsonArray())), getRequestUser(crc)); + .collect(toJsonArray())); + }, getRequestUser(crc)); } @GET From ccd6b7dfd6ea6bc5ae8ec09b0f34819f4adeda59 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 30 Aug 2023 16:58:57 -0400 Subject: [PATCH 028/252] made the "includeFiles" option true by default, cleaned up the ".findDeep()" logic. 
#9763 --- .../edu/harvard/iq/dataverse/Dataset.java | 8 ++-- .../dataverse/DatasetVersionServiceBean.java | 1 + .../harvard/iq/dataverse/api/Datasets.java | 40 ++++++------------- .../command/impl/ListVersionsCommand.java | 27 ++++++++++--- 4 files changed, 38 insertions(+), 38 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 620e66c6c54..a6123a36c9d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -676,11 +676,11 @@ public Timestamp getCitationDate() { Timestamp citationDate = null; //Only calculate if this dataset doesn't use an alternate date field for publication date if (citationDateDatasetFieldType == null) { - List versions = this.versions; + //List versions = this.versions; // TODo - is this ever not version 1.0 (or draft if not published yet) - DatasetVersion oldest = versions.get(versions.size() - 1); + //DatasetVersion oldest = versions.get(versions.size() - 1); citationDate = super.getPublicationDate(); - if (oldest.isPublished()) { + /*if (oldest.isPublished()) { List fms = oldest.getFileMetadatas(); for (FileMetadata fm : fms) { Embargo embargo = fm.getDataFile().getEmbargo(); @@ -691,7 +691,7 @@ public Timestamp getCitationDate() { } } } - } + }*/ } return citationDate; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index fbed7d93cdd..6c514a2405c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -194,6 +194,7 @@ public DatasetVersion findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.datasetVersion") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.releaseUser") .setHint("eclipselink.left-join-fetch", 
"o.fileMetadatas.dataFile.creator") + .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.dataFileTags") .getSingleResult(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b1858b9982f..23de46c1324 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -467,19 +467,13 @@ public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @Pa @AuthRequired @Path("{id}/versions") public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("includeFiles") Boolean includeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { - // @todo: when full versions list - including files - is requested, consider - // using datasetservice.findDeep() (needs testing on "monstrous" datasets - // with a lot of versions!) return response( req -> { Dataset dataset = findDatasetOrDie(id); - if (includeFiles == null ? false : includeFiles) { - dataset = datasetService.findDeep(dataset.getId()); - } - //return ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id), offset, limit) ) - return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit) ) + + return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, (includeFiles == null ? true : includeFiles)) ) .stream() - .map( d -> json(d, includeFiles == null ? false : includeFiles) ) + .map( d -> json(d, includeFiles == null ? 
true : includeFiles) ) .collect(toJsonArray())); }, getRequestUser(crc)); } @@ -491,8 +485,15 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id" return response( req -> { // @todo: consider using DatasetVersionServiceBean.findDeep() here  DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") - : ok(json(dsv, includeFiles == null ? false : includeFiles)); + + if (dsv == null || dsv.getId() == null) { + return notFound("Dataset version not found"); + } + + if (includeFiles == null ? true : includeFiles) { + dsv = datasetversionService.findDeep(dsv.getId()); + } + return ok(json(dsv, includeFiles == null ? true : includeFiles)); }, getRequestUser(crc)); } @@ -512,23 +513,6 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam }, getRequestUser(crc)); } - //@todo: remember to delete this! (for experiments only!) - @GET - @AuthRequired - @Path("{id}/versions/{versionId}/files2") - public Response getVersionFiles2(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") Long versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> { - //DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; - try { - fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; - } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); - } - return ok(jsonFileMetadatas(datasetversionService.getFileMetadatasByDbId(versionId, limit, offset, fileMetadatasOrderCriteria))); - }, getRequestUser(crc)); - } - @GET @AuthRequired @Path("{id}/dirindex") diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java index d3675a8f206..b93833ffdf9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java @@ -14,6 +14,7 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import java.util.LinkedList; import java.util.List; @@ -28,16 +29,22 @@ public class ListVersionsCommand extends AbstractCommand> { private final Dataset ds; private final Integer limit; private final Integer offset; + private final Boolean deepLookup; public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset) { this(aRequest, aDataset, null, null); } - + public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset, Integer offset, Integer limit) { + this(aRequest, aDataset, null, null, false); + } + + public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset, Integer offset, Integer limit, boolean deepLookup) { super(aRequest, aDataset); ds = aDataset; this.offset = offset; this.limit = limit; + this.deepLookup = deepLookup; } @Override @@ -45,14 +52,22 @@ 
public List execute(CommandContext ctxt) throws CommandException boolean includeUnpublished = ctxt.permissions().request(getRequest()).on(ds).has(Permission.EditDataset); - if (offset == null && limit == null) { - // @todo: this fragment can be dropped, and the service-based method below - // can be used for both cases. - // @todo: on the other hand, consider using datasetservice.findDeep() - // when a full list of versions is requested. + if (offset == null && limit == null) { + List outputList = new LinkedList<>(); for (DatasetVersion dsv : ds.getVersions()) { if (dsv.isReleased() || includeUnpublished) { + if (deepLookup) { + // @todo: when "deep"/extended lookup is requested, and + // we call .findDeep() to look up each version again, + // there is probably a more economical way to obtain the + // numeric ids of the versions, by a direct single query, + // rather than go through ds.getVersions() like we are now. + dsv = ctxt.datasetVersion().findDeep(dsv.getId()); + if (dsv == null) { + throw new CommandExecutionException("Failed to look up full list of dataset versions", this); + } + } outputList.add(dsv); } } From 2d27c0392a2da21895aa9ff49bc62515ebb5faa1 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 31 Aug 2023 17:10:45 -0400 Subject: [PATCH 029/252] intermediate changes for the adjusted citation date. 
#9763 --- .../edu/harvard/iq/dataverse/Dataset.java | 21 +++++++++++++++++++ .../dataverse/DatasetVersionServiceBean.java | 1 + 2 files changed, 22 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index a6123a36c9d..f5a2f7cc6fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -158,6 +158,22 @@ public void setCitationDateDatasetFieldType(DatasetFieldType citationDateDataset this.citationDateDatasetFieldType = citationDateDatasetFieldType; } + // Per DataCite best practices, the citation date of a dataset may need + // to be adjusted to reflect the latest embargo availability date of any + // file within the first published version. + // If any files are embargoed in the first version, we will calculate + // the date and cache it here. + private Timestamp embargoCitationDate; + + public Timestamp getEmbargoCitationDate() { + return embargoCitationDate; + } + + public void setEmbargoCitationDate(Timestamp embargoCitationDate) { + this.embargoCitationDate = embargoCitationDate; + } + + @ManyToOne @JoinColumn(name="template_id",nullable = true) @@ -680,6 +696,11 @@ public Timestamp getCitationDate() { // TODo - is this ever not version 1.0 (or draft if not published yet) //DatasetVersion oldest = versions.get(versions.size() - 1); citationDate = super.getPublicationDate(); + if (embargoCitationDate != null) { + if (citationDate.compareTo(embargoCitationDate) < 0) { + return embargoCitationDate; + } + } /*if (oldest.isPublished()) { List fms = oldest.getFileMetadatas(); for (FileMetadata fm : fms) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 6c514a2405c..d1a73358166 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1309,6 +1309,7 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege .setHint("eclipselink.left-join-fetch", "fm.dataFile.embargo") .setHint("eclipselink.left-join-fetch", "fm.datasetVersion") .setHint("eclipselink.left-join-fetch", "fm.dataFile.releaseUser") + .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataFileTags") .setHint("eclipselink.left-join-fetch", "fm.dataFile.creator"); } else { // @todo: is there really no way to use offset-limit with left join hints? From 7b1e799b4eaf3d70328b5237a41dc08622112de0 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 6 Sep 2023 09:54:32 -0400 Subject: [PATCH 030/252] Additional changes needed for the optimized "embargo publication date" aggregate. #9763 --- .../edu/harvard/iq/dataverse/Dataset.java | 4 +++ .../FinalizeDatasetPublicationCommand.java | 33 +++++++++++++++++-- .../V6.0.0.1__9763-embargocitationdate.sql | 14 ++++++++ 3 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index f5a2f7cc6fb..258806dad77 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -692,15 +692,19 @@ public Timestamp getCitationDate() { Timestamp citationDate = null; //Only calculate if this dataset doesn't use an alternate date field for publication date if (citationDateDatasetFieldType == null) { + // @todo: remove this commented-out code once/if the PR passes review - L.A. //List versions = this.versions; // TODo - is this ever not version 1.0 (or draft if not published yet) //DatasetVersion oldest = versions.get(versions.size() - 1); + // - I believe the answer is yes, the oldest versions will always be + // either 1.0 or draft - L.A. 
citationDate = super.getPublicationDate(); if (embargoCitationDate != null) { if (citationDate.compareTo(embargoCitationDate) < 0) { return embargoCitationDate; } } + // @todo: remove this commented-out code once/if the PR passes review - L.A. /*if (oldest.isPublished()) { List fms = oldest.getFileMetadatas(); for (FileMetadata fm : fms) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index f5e70209744..3da087addd9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.DatasetVersionUser; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.Embargo; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -117,9 +118,37 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // is this the first publication of the dataset? if (theDataset.getPublicationDate() == null) { theDataset.setReleaseUser((AuthenticatedUser) getUser()); - } - if ( theDataset.getPublicationDate() == null ) { + theDataset.setPublicationDate(new Timestamp(new Date().getTime())); + + // if there are any embargoed files in this version, we will save + // the latest availability date as the "embargoCitationDate" for future + // reference (if the files are not available yet, as of publishing of + // the dataset, this date will be used as the "citation date" of the dataset, + // instead of the publicationDate, in compliance with the DataCite + // best practices). 
+ // the code below replicates the logic that used to be in the method + // Dataset.getCitationDate() that calculated this adjusted date in real time. + + Timestamp latestEmbargoDate = null; + for (DataFile dataFile : theDataset.getFiles()) { + // this is the first version of the dataset that is being published. + // therefore we can iterate through .getFiles() instead of obtaining + // the DataFiles by going through the FileMetadatas in the current version. + Embargo embargo = dataFile.getEmbargo(); + if (embargo != null) { + // "dataAvailable" is not nullable in the Embargo class, no need for a null check + Timestamp embargoDate = Timestamp.valueOf(embargo.getDateAvailable().atStartOfDay()); + if (latestEmbargoDate == null || latestEmbargoDate.compareTo(embargoDate) < 0) { + latestEmbargoDate = embargoDate; + } + } + } + // the above loop could be easily replaced with a database query; + // but we iterate through .getFiles() elsewhere in the command, when + // updating and/or registering the files, so it should not result in + // an extra performance hit. + theDataset.setEmbargoCitationDate(latestEmbargoDate); } //Clear any external status diff --git a/src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql b/src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql new file mode 100644 index 00000000000..536798015ba --- /dev/null +++ b/src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql @@ -0,0 +1,14 @@ +-- An aggregated timestamp which is the latest of the availability dates of any embargoed files in the first published version, if present +ALTER TABLE dataset ADD COLUMN IF NOT EXISTS embargoCitationDate timestamp without time zone; +-- ... 
and an update query that will populate this column for all the published datasets with embargoed files in the first released version: +UPDATE dataset SET embargocitationdate=o.embargocitationdate +FROM (SELECT d.id, MAX(e.dateavailable) AS embargocitationdate +FROM embargo e, dataset d, datafile f, datasetversion v, filemetadata m +WHERE v.dataset_id = d.id +AND v.versionstate = 'RELEASED' +AND v.versionnumber = 1 +AND v.minorversionnumber = 0 +AND f.embargo_id = e.id +AND m.datasetversion_id = v.id +AND m.datafile_id = f.id GROUP BY d.id) o WHERE o.id = dataset.id; +-- (the query follows the logic that used to be in the method Dataset.getCitationDate() that calculated this adjusted date in real time). From fd30fd53e521a786b59f48df786ab4b17366aa6b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 6 Sep 2023 10:19:05 -0400 Subject: [PATCH 031/252] removing a comment (#9763) --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 23de46c1324..1d7244fd6e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -483,7 +483,6 @@ public Response listVersions(@Context ContainerRequestContext crc, @PathParam("i @Path("{id}/versions/{versionId}") public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("includeFiles") Boolean includeFiles, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response( req -> { - // @todo: consider using DatasetVersionServiceBean.findDeep() here  DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); if (dsv == null || dsv.getId() == null) { From b74affc942e287329bf2aed0e7900d89fcf8bc5e Mon Sep 17 00:00:00 2001 From: Leonid 
Andreev Date: Wed, 6 Sep 2023 10:49:05 -0400 Subject: [PATCH 032/252] a short release note (#9763) --- doc/release-notes/9763-versions-api-improvements.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 doc/release-notes/9763-versions-api-improvements.md diff --git a/doc/release-notes/9763-versions-api-improvements.md b/doc/release-notes/9763-versions-api-improvements.md new file mode 100644 index 00000000000..2c2374dd9b6 --- /dev/null +++ b/doc/release-notes/9763-versions-api-improvements.md @@ -0,0 +1,4 @@ +# Some improvements have been added to the /versions API + +See the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide for more information. + From 2324fe14bdc13c291fdf606ff4187183262e5f0a Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 6 Sep 2023 11:10:16 -0400 Subject: [PATCH 033/252] changed the guide to reflect the fact that the includeFiles flag defaults to "true". (#9763) --- doc/sphinx-guides/source/api/native-api.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index da3fbfffa73..1234e215f0b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -830,7 +830,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl "https://demo.dataverse.org/api/datasets/24/versions?includeFiles=true" + curl "https://demo.dataverse.org/api/datasets/24/versions" It returns a list of versions with their metadata, and file list: @@ -883,7 +883,7 @@ It returns a list of versions with their metadata, and file list: ] } -The optional ``includeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``false``. 
(Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. +The optional ``includeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list in a dataset with a large number of versions. @@ -899,15 +899,15 @@ Get Version of a Dataset export ID=24 export VERSION=1.0 - curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION" + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION?includeFiles=false" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/datasets/24/versions/1.0" + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeFiles=false" -The optional ``includeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``false``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. +The optional ``includeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. .. 
_export-dataset-metadata-api: From 35835e40390442cac77fd5d38731b2e50d7b6560 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 6 Sep 2023 11:32:42 -0400 Subject: [PATCH 034/252] extended the release note. (#9763) --- doc/release-notes/9763-versions-api-improvements.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/9763-versions-api-improvements.md b/doc/release-notes/9763-versions-api-improvements.md index 2c2374dd9b6..191afe8176f 100644 --- a/doc/release-notes/9763-versions-api-improvements.md +++ b/doc/release-notes/9763-versions-api-improvements.md @@ -1,4 +1,8 @@ # Some improvements have been added to the /versions API -See the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide for more information. +- optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions; +- a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output; +- when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. + +This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide. 
From 9a9d7d61e95262be66970b1fda41cdfa15def540 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 6 Sep 2023 11:39:10 -0400 Subject: [PATCH 035/252] cosmetic change in the release note (#9763) --- doc/release-notes/9763-versions-api-improvements.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9763-versions-api-improvements.md b/doc/release-notes/9763-versions-api-improvements.md index 191afe8176f..8d7f6c7a20a 100644 --- a/doc/release-notes/9763-versions-api-improvements.md +++ b/doc/release-notes/9763-versions-api-improvements.md @@ -1,4 +1,4 @@ -# Some improvements have been added to the /versions API +# Improvements in the /versions API - optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions; - a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output; From d465b209c7cded84ff8d08799f7f4f42fb489fb2 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 6 Sep 2023 11:45:58 -0400 Subject: [PATCH 036/252] cosmetic change, comment text (#9763) --- src/main/java/edu/harvard/iq/dataverse/Dataset.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 258806dad77..ca5a8dd2b81 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -161,8 +161,9 @@ public void setCitationDateDatasetFieldType(DatasetFieldType citationDateDataset // Per DataCite best practices, the citation date of a dataset may need // to be adjusted to reflect the latest embargo availability date of any // file within the first published version. 
- // If any files are embargoed in the first version, we will find calculate - // the date and cache it here. + // If any files are embargoed in the first version, this date will be + // calculated and cached here upon its publication, in the + // FinalizeDatasetPublicationCommand. private Timestamp embargoCitationDate; public Timestamp getEmbargoCitationDate() { From ee36dee64a128942ad4412a5f64e1a1336a3063c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 6 Sep 2023 12:53:32 -0400 Subject: [PATCH 037/252] removed a noisy logging line that got checked in by mistake in an earlier PR, as part of a quick fix for #9803 --- .../edu/harvard/iq/dataverse/search/SearchIncludeFragment.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 0dfad74bedf..2ce06541afa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -205,7 +205,6 @@ public String searchRedirect(String dataverseRedirectPage, Dataverse dataverseIn */ dataverse = dataverseIn; - logger.info("redirect page: "+dataverseRedirectPage); dataverseRedirectPage = StringUtils.isBlank(dataverseRedirectPage) ? 
"dataverse.xhtml" : dataverseRedirectPage; String optionalDataverseScope = "&alias=" + dataverse.getAlias(); From 37136c039471d15888609724916e89723394879b Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 17:14:11 +0100 Subject: [PATCH 038/252] Added: optional includeDeaccessioned parameter for getVersionFiles API endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 13 ++++++++---- ...LatestAccessibleDatasetVersionCommand.java | 17 ++++++++------- ...tLatestPublishedDatasetVersionCommand.java | 21 ++++++++++++------- ...pecificPublishedDatasetVersionCommand.java | 18 +++++++++------- 4 files changed, 42 insertions(+), 27 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index d082d9c29da..5064579ebfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -501,10 +501,11 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @QueryParam("categoryName") String categoryName, @QueryParam("searchText") String searchText, @QueryParam("orderCriteria") String orderCriteria, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; try { fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; @@ -2709,11 +2710,15 @@ public static T handleVersion(String versionId, DsVersionHandler hdl) } private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, false); + } + + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse { DatasetVersion dsv = execCommand(handleVersion(versionNumber, new DsVersionHandler>() { @Override public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds); + return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned); } @Override @@ -2723,12 +2728,12 @@ public Command handleDraft() { @Override public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned); } @Override public Command handleLatestPublished() { - return new GetLatestPublishedDatasetVersionCommand(req, ds); + return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned); } })); if (dsv == null || dsv.getId() == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index 680a5c3aaef..1454a4b1fdd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -17,29 +17,30 @@ /** * Get the latest version of a dataset a user can view. + * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand{ +public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand { private final Dataset ds; + private final boolean includeDeaccessioned; public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { + this(aRequest, anAffectedDataset, false); + } + + public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); } - - return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds)); - + return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned)); } - - - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index 18adff2e55c..9765d0945d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -9,26 +9,31 @@ import 
edu.harvard.iq.dataverse.engine.command.exception.CommandException; /** - * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand{ +public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; - + private boolean includeDeaccessioned; + public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { + this(aRequest, anAffectedDataset, false); + } + + public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - for (DatasetVersion dsv: ds.getVersions()) { - if (dsv.isReleased()) { + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { return dsv; - } } - return null; } - } \ No newline at end of file + return null; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java index 3efb38e4a91..879a694ef57 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java @@ -15,27 +15,32 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; /** - * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetSpecificPublishedDatasetVersionCommand extends AbstractCommand{ +public class 
GetSpecificPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final long majorVersion; private final long minorVersion; - + private boolean includeDeaccessioned; + public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum) { + this(aRequest, anAffectedDataset, majorVersionNum, minorVersionNum, false); + } + + public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; majorVersion = majorVersionNum; minorVersion = minorVersionNum; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - for (DatasetVersion dsv: ds.getVersions()) { - if (dsv.isReleased()) { + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { if (dsv.getVersionNumber().equals(majorVersion) && dsv.getMinorVersionNumber().equals(minorVersion)) { return dsv; } @@ -43,5 +48,4 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { } return null; } - -} \ No newline at end of file +} From 829fed9db0333f108fb3bf01782c44ec44cf353f Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 12 Sep 2023 17:16:54 -0400 Subject: [PATCH 039/252] Fixes the issues with the permissions of the docker scripts on Windows reported on the issue #9904 --- modules/container-configbaker/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 564216b3572..44f3806a591 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -38,6 +38,7 @@ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml # Copy 
the data from scripts/api that provide the common base setup you'd get from the installer. # ".dockerignore" will take care of taking only the bare necessities COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ +RUN chmod +x ${BOOTSTRAP_DIR}/*/*.sh # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] From bfe7f9c3537a89b75fd3190d063433c8f6147f96 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 10:56:59 -0400 Subject: [PATCH 040/252] RestAssured tests for the new functionality added to the /versions api. (#9763) --- .../harvard/iq/dataverse/api/DatasetsIT.java | 85 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 47 +++++++++- 2 files changed, 130 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b353b4488d0..d5b3dbca05a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -555,6 +555,91 @@ public void testCreatePublishDestroyDataset() { } + /** + * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} + * are called from other RestAssured tests, in this class and also FileIT. + * But this test is dedicated to this api specifically, and focuses on the + * functionality added to it in 6.1. 
+ */ + @Test + public void testDatasetVersionsAPI() { + // Create user + String apiToken = UtilIT.createRandomUserGetToken(); + + // Create user with no permission + String apiTokenNoPerms = UtilIT.createRandomUserGetToken(); + + // Create Collection + String collectionAlias = UtilIT.createRandomCollectionGetAlias(apiToken); + + // Create Dataset + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(collectionAlias, apiToken); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + // Upload file + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Integer fileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + + // Check that the file we just uploaded is shown by the versions api: + Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .body("data.files.size()", equalTo(1)) + .statusCode(OK.getStatusCode()); + + // Now check that the file is NOT shown, when we ask the versions api to + // skip files: + boolean skipFiles = true; + unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken, skipFiles); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .body("data.files", equalTo(null)) + .statusCode(OK.getStatusCode()); + + // Publish collection and dataset + UtilIT.publishDataverseViaNativeApi(collectionAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); 
+ + // Upload another file: + String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response uploadResponse2 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, apiToken); + uploadResponse2.then().assertThat().statusCode(OK.getStatusCode()); + + // We should now have a published version, and a draft. + + // Call /versions api, *with the owner api token*, make sure both + // versions are listed + Response versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(2)); + + // And now call it with an un-privileged token, to make sure only one + // (the published one) version is shown: + + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)); + + // And now call the "short", no-files version of the same api + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms, skipFiles); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].files", equalTo(null)); + } + + /** * This test requires the root dataverse to be published to pass. 
*/ diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e47971f9b92..678d4e5523b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -9,6 +9,7 @@ import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import static jakarta.ws.rs.core.Response.Status.CREATED; import java.io.File; import java.io.IOException; @@ -51,7 +52,6 @@ import java.util.Collections; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; -import static org.junit.jupiter.api.Assertions.*; public class UtilIT { @@ -119,6 +119,16 @@ public static Response createRandomUser() { return createRandomUser("user"); } + + /** + * A convenience method for creating a random test user, when all you need + * is the api token. + * @return apiToken + */ + public static String createRandomUserGetToken(){ + Response createUser = createRandomUser(); + return getApiTokenFromResponse(createUser); + } public static Response createUser(String username, String email) { logger.info("Creating user " + username); @@ -369,6 +379,20 @@ static Response createRandomDataverse(String apiToken) { String category = null; return createDataverse(alias, category, apiToken); } + + /** + * A convenience method for creating a random collection and getting its + * alias in one step. 
+ * @param apiToken + * @return alias + */ + static String createRandomCollectionGetAlias(String apiToken){ + + Response createCollectionResponse = createRandomDataverse(apiToken); + //createDataverseResponse.prettyPrint(); + createCollectionResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + return UtilIT.getAliasFromResponse(createCollectionResponse); + } static Response showDataverseContents(String alias, String apiToken) { return given() @@ -1403,9 +1427,17 @@ static Response nativeGetUsingPersistentId(String persistentId, String apiToken) } static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken) { + return getDatasetVersion(persistentId, versionNumber, apiToken, false); + } + + static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken, boolean skipFiles) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/:persistentId/versions/" + versionNumber + "?persistentId=" + persistentId); + .get("/api/datasets/:persistentId/versions/" + + versionNumber + + "?persistentId=" + + persistentId + + (skipFiles ? "&includeFiles=false" : "")); } static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) { @@ -1767,6 +1799,10 @@ static Response removeDatasetThumbnail(String datasetPersistentId, String apiTok } static Response getDatasetVersions(String idOrPersistentId, String apiToken) { + return getDatasetVersions(idOrPersistentId, apiToken, false); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, boolean skipFiles) { logger.info("Getting Dataset Versions"); String idInPath = idOrPersistentId; // Assume it's a number. String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. 
@@ -1774,6 +1810,13 @@ static Response getDatasetVersions(String idOrPersistentId, String apiToken) { idInPath = ":persistentId"; optionalQueryParam = "?persistentId=" + idOrPersistentId; } + if (skipFiles) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?includeFiles=false"; + } else { + optionalQueryParam = optionalQueryParam.concat("&includeFiles=false"); + } + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() From 8e894c37a17ce184bb3c59eb027dc03ed0f21274 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 11:42:25 -0400 Subject: [PATCH 041/252] added another test, for the pagination functionality in the /versions api (also being added in 6.1). #9763 --- .../harvard/iq/dataverse/api/DatasetsIT.java | 26 ++++++++++++++----- .../edu/harvard/iq/dataverse/api/UtilIT.java | 22 ++++++++++++++++ 2 files changed, 41 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index d5b3dbca05a..4a0e1c857c7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -557,7 +557,7 @@ public void testCreatePublishDestroyDataset() { /** * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} - * are called from other RestAssured tests, in this class and also FileIT. + * are already called from other RestAssured tests, in this class and also FileIT. * But this test is dedicated to this api specifically, and focuses on the * functionality added to it in 6.1. 
*/ @@ -584,8 +584,6 @@ public void testDatasetVersionsAPI() { String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Integer fileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); // Check that the file we just uploaded is shown by the versions api: Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); @@ -615,13 +613,27 @@ public void testDatasetVersionsAPI() { // We should now have a published version, and a draft. // Call /versions api, *with the owner api token*, make sure both - // versions are listed + // versions are listed; also check that the correct numbers of files + // are shown in each version (2 in the draft, 1 in the published). Response versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken); versionsResponse.prettyPrint(); versionsResponse.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.size()", equalTo(2)); - + .body("data.size()", equalTo(2)) + .body("data[0].files.size()", equalTo(2)) + .body("data[1].files.size()", equalTo(1)); + + // Now call the this api with the new (as of 6.1) pagination parameters + Integer offset = 0; + Integer howmany = 1; + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken, offset, howmany); + // (the above should return only one version, the draft) + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].files.size()", equalTo(2)); + // And now call it with an un-privileged token, to make sure only one // (the published one) version is shown: @@ -630,7 +642,7 @@ public void testDatasetVersionsAPI() { versionsResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(1)); - + // And now 
call the "short", no-files version of the same api versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms, skipFiles); versionsResponse.prettyPrint(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 678d4e5523b..f94cfa8e400 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1803,6 +1803,14 @@ static Response getDatasetVersions(String idOrPersistentId, String apiToken) { } static Response getDatasetVersions(String idOrPersistentId, String apiToken, boolean skipFiles) { + return getDatasetVersions(idOrPersistentId, apiToken, null, null, skipFiles); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit) { + return getDatasetVersions(idOrPersistentId, apiToken, offset, limit, false); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit, boolean skipFiles) { logger.info("Getting Dataset Versions"); String idInPath = idOrPersistentId; // Assume it's a number. String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. 
@@ -1817,6 +1825,20 @@ static Response getDatasetVersions(String idOrPersistentId, String apiToken, boo optionalQueryParam = optionalQueryParam.concat("&includeFiles=false"); } } + if (offset != null) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?offset="+offset; + } else { + optionalQueryParam = optionalQueryParam.concat("&offset="+offset); + } + } + if (limit != null) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?limit="+limit; + } else { + optionalQueryParam = optionalQueryParam.concat("&limit="+limit); + } + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() From b9e99f3e7253d836aadebac8b128efa21027eef8 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 11:43:42 -0400 Subject: [PATCH 042/252] typo in a comment. #9763 --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 4a0e1c857c7..e726337cf8b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -557,7 +557,7 @@ public void testCreatePublishDestroyDataset() { /** * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} - * are already called from other RestAssured tests, in this class and also FileIT. + * are already called from other RestAssured tests, in this class and also in FilesIT. * But this test is dedicated to this api specifically, and focuses on the * functionality added to it in 6.1. */ From f164a681deaf14d27ee5fb35a344805d86ac631b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 11:46:27 -0400 Subject: [PATCH 043/252] more typos in comments. 
(#9763) --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index e726337cf8b..23fc5911ad0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -623,7 +623,7 @@ public void testDatasetVersionsAPI() { .body("data[0].files.size()", equalTo(2)) .body("data[1].files.size()", equalTo(1)); - // Now call the this api with the new (as of 6.1) pagination parameters + // Now call this api with the new (as of 6.1) pagination parameters Integer offset = 0; Integer howmany = 1; versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken, offset, howmany); @@ -635,7 +635,7 @@ public void testDatasetVersionsAPI() { .body("data[0].files.size()", equalTo(2)); // And now call it with an un-privileged token, to make sure only one - // (the published one) version is shown: + // (the published) version is shown: versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms); versionsResponse.prettyPrint(); From efaf5d558b34705f8f6998c56a53a8a3d62050ad Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 15 Sep 2023 14:19:33 +0200 Subject: [PATCH 044/252] refactor(test,sitemap): make SiteMapUtilTest use better JUnit5 checks --- .../iq/dataverse/sitemap/SiteMapUtilTest.java | 73 +++++++++---------- 1 file changed, 35 insertions(+), 38 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java index ac6fa1e5166..41032ffa811 100644 --- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java @@ -10,7 +10,6 @@ import edu.harvard.iq.dataverse.util.xml.XmlValidator; import java.io.File; import 
java.io.IOException; -import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -21,17 +20,39 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import static org.junit.jupiter.api.Assertions.*; + import static org.junit.jupiter.api.Assertions.*; import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.xml.sax.SAXException; -public class SiteMapUtilTest { - +class SiteMapUtilTest { + + @TempDir + Path tempDir; + Path tempDocroot; + + @BeforeEach + void setup() throws IOException { + // NOTE: This might be unsafe for parallel tests, but our @SystemProperty helper does not yet support + // lookups from vars or methods. + System.setProperty("test.filesDir", tempDir.toString()); + this.tempDocroot = tempDir.resolve("docroot"); + Files.createDirectory(tempDocroot); + } + + @AfterEach + void teardown() { + System.clearProperty("test.filesDir"); + } + @Test - public void testUpdateSiteMap() throws IOException, ParseException { - + void testUpdateSiteMap() throws IOException, ParseException, SAXException { + // given List dataverses = new ArrayList<>(); String publishedDvString = "publishedDv1"; Dataverse publishedDataverse = new Dataverse(); @@ -77,40 +98,18 @@ public void testUpdateSiteMap() throws IOException, ParseException { datasetVersions.add(datasetVersion); deaccessioned.setVersions(datasetVersions); datasets.add(deaccessioned); - - Path tmpDirPath = Files.createTempDirectory(null); - String tmpDir = tmpDirPath.toString(); - File docroot = new File(tmpDir + File.separator + "docroot"); - docroot.mkdirs(); - // TODO: this and the above should be replaced with JUnit 5 @TestDir - System.setProperty("test.filesDir", tmpDir); - + + // when SiteMapUtil.updateSiteMap(dataverses, datasets); - - String pathToTest = 
tmpDirPath + File.separator + "docroot" + File.separator + "sitemap"; - String pathToSiteMap = pathToTest + File.separator + "sitemap.xml"; - - Exception wellFormedXmlException = null; - try { - assertTrue(XmlValidator.validateXmlWellFormed(pathToSiteMap)); - } catch (Exception ex) { - System.out.println("Exception caught checking that XML is well formed: " + ex); - wellFormedXmlException = ex; - } - assertNull(wellFormedXmlException); - - Exception notValidAgainstSchemaException = null; - try { - assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"))); - } catch (MalformedURLException | SAXException ex) { - System.out.println("Exception caught validating XML against the sitemap schema: " + ex); - notValidAgainstSchemaException = ex; - } - assertNull(notValidAgainstSchemaException); + + // then + String pathToSiteMap = tempDocroot.resolve("sitemap").resolve("sitemap.xml").toString(); + assertDoesNotThrow(() -> XmlValidator.validateXmlWellFormed(pathToSiteMap)); + assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"))); File sitemapFile = new File(pathToSiteMap); String sitemapString = XmlPrinter.prettyPrintXml(new String(Files.readAllBytes(Paths.get(sitemapFile.getAbsolutePath())))); - System.out.println("sitemap: " + sitemapString); + //System.out.println("sitemap: " + sitemapString); assertTrue(sitemapString.contains("1955-11-12")); assertTrue(sitemapString.contains(publishedPid)); @@ -118,8 +117,6 @@ public void testUpdateSiteMap() throws IOException, ParseException { assertFalse(sitemapString.contains(harvestedPid)); assertFalse(sitemapString.contains(deaccessionedPid)); - System.clearProperty("test.filesDir"); - } } From 129985535d825ceb501cad899c6ba57771d0eee1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Sat, 16 Sep 2023 16:31:08 +0100 Subject: [PATCH 045/252] Stash: deaccessionDataset API endpoint WIP --- 
.../harvard/iq/dataverse/api/Datasets.java | 33 +++++++++++++++++-- ...tLatestPublishedDatasetVersionCommand.java | 5 +-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 10 ++++++ 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 5064579ebfb..48d84ba95d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -29,6 +29,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand; @@ -525,9 +526,9 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{versionId}/files/counts") - public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, 
findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); @@ -3922,4 +3923,32 @@ public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, return response(req -> ok( getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getCitation(true, false)), getRequestUser(crc)); } + + @PUT + @AuthRequired + @Path("{id}/versions/{versionId}/deaccession") + public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + if (":draft".equals(versionId) || ":latest".equals(versionId)) { + return badRequest("Only :latest-published or a specific version can be deaccessioned"); + } + return response(req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); + try (StringReader stringReader = new StringReader(jsonBody)) { + JsonObject jsonObject = Json.createReader(stringReader).readObject(); + datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason")); + String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null); + if (deaccessionForwardURL != null) { + try { + datasetVersion.setArchiveNote(deaccessionForwardURL); + } catch (IllegalArgumentException iae) { + return error(Response.Status.BAD_REQUEST, "Invalid deaccession forward URL: " + iae.getMessage()); + } + } + execCommand(new DeaccessionDatasetVersionCommand(dvRequestService.getDataverseRequest(), datasetVersion, false)); + return ok("Dataset " + datasetId + " deaccessioned for version " + 
versionId); + } catch (JsonParsingException jpe) { + return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index 9765d0945d8..4e4252fd155 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -15,7 +16,7 @@ @RequiredPermissions({}) public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; - private boolean includeDeaccessioned; + private final boolean includeDeaccessioned; public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { this(aRequest, anAffectedDataset, false); @@ -30,7 +31,7 @@ public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Datase @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { for (DatasetVersion dsv : ds.getVersions()) { - if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) { return dsv; } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java 
index d243d3c47f2..e32a813a4d3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3386,4 +3386,14 @@ static Response getHasBeenDeleted(String dataFileId, String apiToken) { .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/files/" + dataFileId + "/hasBeenDeleted"); } + + static Response deaccessionDataset(Integer datasetId, String version, String apiToken) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("deaccessionReason", "Test deaccession."); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); + } } From bbfdff391f63cc412e59734b53f0992a937a594a Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 20 Sep 2023 09:01:08 +0100 Subject: [PATCH 046/252] Added: deaccessionDataset API endpoint (pending IT) --- .../harvard/iq/dataverse/api/Datasets.java | 2 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 83 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 18 +++- 3 files changed, 82 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 48d84ba95d7..b7d09cd5d98 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3944,7 +3944,7 @@ public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathPa return error(Response.Status.BAD_REQUEST, "Invalid deaccession forward URL: " + iae.getMessage()); } } - execCommand(new DeaccessionDatasetVersionCommand(dvRequestService.getDataverseRequest(), datasetVersion, false)); + execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false)); return ok("Dataset " + datasetId + " deaccessioned for version " + 
versionId); } catch (JsonParsingException jpe) { return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6f103df3fe8..1b77e6c09e5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3299,7 +3299,7 @@ public void getVersionFiles() throws IOException { int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, apiToken); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3313,7 +3313,7 @@ public void getVersionFiles() throws IOException { String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3324,7 +3324,7 @@ public void getVersionFiles() throws IOException { assertEquals(testPageSize, fileMetadatasCount); // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, 
testPageSize * 2, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3334,7 +3334,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), false, apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3345,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), false, apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3356,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = 
UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), false, apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3367,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), false, apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3378,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), false, apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3390,13 +3390,13 @@ public void getVersionFiles() throws IOException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, invalidOrderCriteria, 
apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, invalidOrderCriteria, false, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); // Test Content Type - Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, apiToken); + Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, false, apiToken); getVersionFilesResponseContentType.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3412,7 +3412,7 @@ public void getVersionFiles() throws IOException { setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, apiToken); + Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, false, apiToken); getVersionFilesResponseCategoryName.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3427,7 +3427,7 @@ public void getVersionFiles() throws IOException { restrictFileResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, apiToken); + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, 
DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3452,7 +3452,7 @@ public void getVersionFiles() throws IOException { createActiveFileEmbargoResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenPublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3461,7 +3461,7 @@ public void getVersionFiles() throws IOException { fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3471,7 +3471,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, 
null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, apiToken); + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, false, apiToken); getVersionFilesResponsePublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3483,7 +3483,7 @@ public void getVersionFiles() throws IOException { assertEquals(3, fileMetadatasCount); // Test Search Text - Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, "test_1", null, apiToken); + Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, "test_1", null, false, apiToken); getVersionFilesResponseSearchText.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3491,6 +3491,33 @@ public void getVersionFiles() throws IOException { fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String latestPublishedVersion = ":latest-published"; + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, false, apiToken); + 
getVersionFilesResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, true, apiToken); + getVersionFilesResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + getVersionFilesResponseDeaccessioned.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName4)) + .body("data[4].label", equalTo(testFileName5)); } @Test @@ -3533,7 +3560,7 @@ public void getVersionFileCounts() throws IOException { createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", false, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -3548,5 +3575,27 @@ public void getVersionFileCounts() throws IOException { assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + 
publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String latestPublishedVersion = ":latest-published"; + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, false, apiToken); + getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, true, apiToken); + getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); + assertEquals(4, (Integer) responseJsonPath.get("data.total")); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e32a813a4d3..086fef5f18a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3276,10 +3276,21 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String contentType, String accessStatus, String categoryName, String searchText, String orderCriteria, String apiToken) { + static Response getVersionFiles(Integer datasetId, + String version, + Integer limit, + Integer offset, + String contentType, + String accessStatus, + String categoryName, + String searchText, + String orderCriteria, + boolean includeDeaccessioned, + String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, 
apiToken) - .contentType("application/json"); + .contentType("application/json") + .queryParam("includeDeaccessioned", includeDeaccessioned); if (limit != null) { requestSpecification = requestSpecification.queryParam("limit", limit); } @@ -3355,9 +3366,10 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); } - static Response getVersionFileCounts(Integer datasetId, String version, String apiToken) { + static Response getVersionFileCounts(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("includeDeaccessioned", includeDeaccessioned) .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } From b19fb8267d08978b530d3be19cec7edddd72b566 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 20 Sep 2023 09:21:53 +0100 Subject: [PATCH 047/252] Added: deaccessionDataset API endpoint IT --- .../harvard/iq/dataverse/api/DatasetsIT.java | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 1b77e6c09e5..7c0099ef34c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3598,4 +3598,41 @@ public void getVersionFileCounts() throws IOException { responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); assertEquals(4, (Integer) responseJsonPath.get("data.total")); } + + @Test + public void deaccessionDataset() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + 
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Test that :draft and :latest are not allowed + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":draft", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + // Test that a not found error occurs when there is no published version available + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Test that the dataset is successfully deaccessioned when published + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Test that a not found error occurs when the only published version has already been deaccessioned + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + 
deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } } From b6ce32b030dded2e2dd3ebf8d2e3b8b65583ea12 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 20 Sep 2023 10:02:07 +0100 Subject: [PATCH 048/252] Refactor: dataset version string identifiers extracted to constants --- .../iq/dataverse/api/ApiConstants.java | 5 ++ .../harvard/iq/dataverse/api/Datasets.java | 32 ++++----- .../iq/dataverse/dataset/DatasetUtil.java | 6 +- .../externaltools/ExternalToolHandler.java | 4 +- .../harvard/iq/dataverse/util/FileUtil.java | 4 +- .../iq/dataverse/util/URLTokenUtil.java | 5 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 71 +++++++++---------- .../iq/dataverse/api/DownloadFilesIT.java | 9 +-- .../edu/harvard/iq/dataverse/api/FilesIT.java | 12 ++-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 9 +-- 10 files changed, 84 insertions(+), 73 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java index 296869762da..347a8946a46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java @@ -12,4 +12,9 @@ private ApiConstants() { // Authentication public static final String CONTAINER_REQUEST_CONTEXT_USER = "user"; + + // Dataset + public static final String DS_VERSION_LATEST = ":latest"; + public static final String DS_VERSION_DRAFT = ":draft"; + public static final String DS_VERSION_LATEST_PUBLISHED = ":latest-published"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b7d09cd5d98..62d87b198fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -98,6 +98,7 @@ import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.search.IndexServiceBean; +import static 
edu.harvard.iq.dataverse.api.ApiConstants.*; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -391,8 +392,8 @@ public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam( @AuthRequired @Path("{id}/versions/{versionId}") public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId ){ - if ( ! ":draft".equals(versionId) ) { - return badRequest("Only the :draft version can be deleted"); + if (!DS_VERSION_DRAFT.equals(versionId)) { + return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted"); } return response( req -> { @@ -545,7 +546,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc, @Path public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { folderName = folderName == null ? "" : folderName; - versionId = versionId == null ? ":latest-published" : versionId; + versionId = versionId == null ? 
DS_VERSION_LATEST_PUBLISHED : versionId; DatasetVersion version; try { @@ -620,8 +621,8 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc, @AuthRequired @Path("{id}/versions/{versionId}/linkset") public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if ( ":draft".equals(versionId) ) { - return badRequest("Signposting is not supported on the :draft version"); + if (DS_VERSION_DRAFT.equals(versionId)) { + return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version"); } User user = getRequestUser(crc); return response(req -> { @@ -706,10 +707,9 @@ public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc @AuthRequired @Path("{id}/versions/{versionId}") @Consumes(MediaType.APPLICATION_JSON) - public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId){ - - if ( ! 
":draft".equals(versionId) ) { - return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); + public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) { + if (!DS_VERSION_DRAFT.equals(versionId)) { + return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated"); } try ( StringReader rdr = new StringReader(jsonBody) ) { @@ -792,7 +792,7 @@ public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @ @Path("{id}/metadata") @Produces("application/ld+json, application/json-ld") public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return getVersionJsonLDMetadata(crc, id, ":draft", uriInfo, headers); + return getVersionJsonLDMetadata(crc, id, DS_VERSION_DRAFT, uriInfo, headers); } @PUT @@ -1726,7 +1726,7 @@ public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versio return error(Status.NOT_FOUND, "This Dataset has no custom license"); } persistentId = getRequestParameter(":persistentId".substring(1)); - if (versionId.equals(":draft")) { + if (versionId.equals(DS_VERSION_DRAFT)) { versionId = "DRAFT"; } } catch (WrappedResponse wrappedResponse) { @@ -2687,11 +2687,11 @@ private void msgt(String m) { public static T handleVersion(String versionId, DsVersionHandler hdl) throws WrappedResponse { switch (versionId) { - case ":latest": + case DS_VERSION_LATEST: return hdl.handleLatest(); - case ":draft": + case DS_VERSION_DRAFT: return hdl.handleDraft(); - case ":latest-published": + case DS_VERSION_LATEST_PUBLISHED: return hdl.handleLatestPublished(); default: try { @@ -3928,8 +3928,8 @@ public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, @AuthRequired @Path("{id}/versions/{versionId}/deaccession") public Response 
deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if (":draft".equals(versionId) || ":latest".equals(versionId)) { - return badRequest("Only :latest-published or a specific version can be deaccessioned"); + if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) { + return badRequest("Only " + DS_VERSION_LATEST_PUBLISHED + " or a specific version can be deaccessioned"); } return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index adbd132bce8..ac1567b24e5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.DataAccess; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; @@ -580,10 +582,10 @@ public static String getLicenseURI(DatasetVersion dsv) { // Return the URI // For standard licenses, just return the stored URI return (license != null) ? license.getUri().toString() - // For custom terms, construct a URI with :draft or the version number in the URI + // For custom terms, construct a URI with draft version constant or the version number in the URI : (dsv.getVersionState().name().equals("DRAFT") ? 
dsv.getDataverseSiteUrl() - + "/api/datasets/:persistentId/versions/:draft/customlicense?persistentId=" + + "/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "/customlicense?persistentId=" + dsv.getDataset().getGlobalId().asString() : dsv.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" + dsv.getVersionNumber() + "." + dsv.getMinorVersionNumber() + "/customlicense?persistentId=" diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index a52679deebc..570ef7d4194 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -34,6 +34,8 @@ import org.apache.commons.codec.binary.StringUtils; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST; + /** * Handles an operation on a specific file. Requires a file id in order to be * instantiated. 
Applies logic based on an {@link ExternalTool} specification, @@ -110,7 +112,7 @@ public String handleRequest(boolean preview) { switch (externalTool.getScope()) { case DATASET: callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" - + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId(); + + dataset.getId() + "/versions/" + DS_VERSION_LATEST + "/toolparams/" + externalTool.getId(); break; case FILE: callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/" diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 5f7643b3115..327609d5e47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -34,6 +34,8 @@ import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.ingest.IngestReport; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; @@ -2152,7 +2154,7 @@ private static String getFileAccessUrl(FileMetadata fileMetadata, String apiLoca private static String getFolderAccessUrl(DatasetVersion version, String currentFolder, String subFolder, String apiLocation, boolean originals) { String datasetId = version.getDataset().getId().toString(); String versionTag = version.getFriendlyVersionNumber(); - versionTag = versionTag.replace("DRAFT", ":draft"); + versionTag = versionTag.replace("DRAFT", DS_VERSION_DRAFT); if (!"".equals(currentFolder)) { subFolder = currentFolder + "/" + subFolder; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java index 
4ae76a7b8db..c864823176e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java @@ -14,6 +14,8 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; + public class URLTokenUtil { protected static final Logger logger = Logger.getLogger(URLTokenUtil.class.getCanonicalName()); @@ -177,8 +179,7 @@ private String getTokenValue(String value) { } } if (("DRAFT").equals(versionString)) { - versionString = ":draft"; // send the token needed in api calls that can be substituted for a numeric - // version. + versionString = DS_VERSION_DRAFT; // send the token needed in api calls that can be substituted for a numeric version. } return versionString; case FILE_METADATA_ID: diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 7c0099ef34c..5c1eb66b63d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; import io.restassured.RestAssured; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.RestAssured.given; import io.restassured.path.json.JsonPath; @@ -500,7 +501,7 @@ public void testCreatePublishDestroyDataset() { assertTrue(datasetContactFromExport.toString().contains("finch@mailinator.com")); assertTrue(firstValue.toString().contains("finch@mailinator.com")); - Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, ":latest-published", apiToken); + Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken); getDatasetVersion.prettyPrint(); getDatasetVersion.then().assertThat() .body("data.datasetId", 
equalTo(datasetId)) @@ -1159,7 +1160,7 @@ public void testPrivateUrl() { assertEquals(OK.getStatusCode(), createPrivateUrlForPostVersionOneDraft.getStatusCode()); // A Contributor has DeleteDatasetDraft - Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, ":draft", contributorApiToken); + Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, DS_VERSION_DRAFT, contributorApiToken); deleteDraftVersionAsContributor.prettyPrint(); deleteDraftVersionAsContributor.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3257,7 +3258,7 @@ public void getDatasetVersionCitation() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, ":draft", apiToken); + Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, apiToken); getDatasetVersionCitationResponse.prettyPrint(); getDatasetVersionCitationResponse.then().assertThat() @@ -3293,13 +3294,11 @@ public void getVersionFiles() throws IOException { UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName5, new byte[300], apiToken); UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName4, new byte[400], apiToken); - String testDatasetVersion = ":latest"; - // Test pagination and NameAZ order criteria (the default criteria) int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, false, apiToken); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, null, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ 
-3313,7 +3312,7 @@ public void getVersionFiles() throws IOException { String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, false, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3324,7 +3323,7 @@ public void getVersionFiles() throws IOException { assertEquals(testPageSize, fileMetadatasCount); // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, false, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize * 2, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3334,7 +3333,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), false, apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), false, apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3344,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", 
equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), false, apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), false, apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3355,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), false, apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), false, apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3366,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), false, apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), false, apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() 
.statusCode(OK.getStatusCode()) @@ -3378,7 +3377,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), false, apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), false, apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3390,13 +3389,13 @@ public void getVersionFiles() throws IOException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, invalidOrderCriteria, false, apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, invalidOrderCriteria, false, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); // Test Content Type - Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, false, apiToken); + Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, "image/png", null, null, null, null, false, apiToken); getVersionFilesResponseContentType.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3412,7 +3411,7 @@ public void getVersionFiles() throws IOException { setFileCategoriesResponse = 
UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, false, apiToken); + Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, testCategory, null, null, false, apiToken); getVersionFilesResponseCategoryName.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3427,7 +3426,7 @@ public void getVersionFiles() throws IOException { restrictFileResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3452,7 +3451,7 @@ public void getVersionFiles() throws IOException { createActiveFileEmbargoResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenPublic.then().assertThat() 
.statusCode(OK.getStatusCode()) @@ -3461,7 +3460,7 @@ public void getVersionFiles() throws IOException { fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3471,7 +3470,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, false, apiToken); getVersionFilesResponsePublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3483,7 +3482,7 @@ public void getVersionFiles() throws IOException { assertEquals(3, fileMetadatasCount); // Test Search Text - Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, "test_1", null, false, apiToken); + Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, "test_1", null, false, apiToken); 
getVersionFilesResponseSearchText.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3498,17 +3497,15 @@ public void getVersionFiles() throws IOException { Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - String latestPublishedVersion = ":latest-published"; - - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // includeDeaccessioned false - Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, false, apiToken); + Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, false, apiToken); getVersionFilesResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); // includeDeaccessioned true - Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, true, apiToken); + Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, true, apiToken); getVersionFilesResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); getVersionFilesResponseDeaccessioned.then().assertThat() @@ -3560,7 +3557,7 @@ public void getVersionFileCounts() throws IOException { createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", false, 
apiToken); + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, false, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -3582,17 +3579,15 @@ public void getVersionFileCounts() throws IOException { Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - String latestPublishedVersion = ":latest-published"; - - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // includeDeaccessioned false - Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, false, apiToken); + Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); // includeDeaccessioned true - Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, true, apiToken); + Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, true, apiToken); getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); @@ -3613,14 +3608,14 @@ public void deaccessionDataset() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - // Test that :draft and :latest are not allowed - 
Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":draft", apiToken); + // Test that draft and latest version constants are not allowed + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_DRAFT, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); // Test that a not found error occurs when there is no published version available - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); // Test that the dataset is successfully deaccessioned when published @@ -3628,11 +3623,11 @@ public void deaccessionDataset() { publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // Test that a not found error occurs when the only published version has already been deaccessioned - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); 
deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java index 598ba36c1e1..927efb0b142 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java @@ -16,6 +16,9 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.util.zip.ZipOutputStream; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST_PUBLISHED; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static jakarta.ws.rs.core.Response.Status.OK; @@ -188,8 +191,7 @@ public void downloadAllFilesByVersion() throws IOException { HashSet expectedFiles6 = new HashSet<>(Arrays.asList("CODE_OF_CONDUCT.md", "LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md")); assertEquals(expectedFiles6, filenamesFound6); - String datasetVersionLatestPublished = ":latest-published"; - Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, datasetVersionLatestPublished, apiToken); + Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken); downloadFiles9.then().assertThat() .statusCode(OK.getStatusCode()); @@ -200,8 +202,7 @@ public void downloadAllFilesByVersion() throws IOException { assertEquals(expectedFiles7, filenamesFound7); // Guests cannot download draft versions. 
- String datasetVersionDraft = ":draft"; - Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, datasetVersionDraft, null); + Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, DS_VERSION_DRAFT, null); downloadFiles10.prettyPrint(); downloadFiles10.then().assertThat() .statusCode(UNAUTHORIZED.getStatusCode()) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 7f1ca4c8d70..94e895a7b7b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -10,6 +10,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; import io.restassured.path.json.JsonPath; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static io.restassured.path.json.JsonPath.with; import io.restassured.path.xml.XmlPath; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -1354,7 +1356,7 @@ public void testDataSizeInDataverse() throws InterruptedException { .statusCode(OK.getStatusCode()); String apiTokenRando = createUserGetToken(); - Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), ":draft", apiTokenRando); + Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), DS_VERSION_DRAFT, apiTokenRando); datasetStorageSizeResponseDraft.prettyPrint(); assertEquals(UNAUTHORIZED.getStatusCode(), datasetStorageSizeResponseDraft.getStatusCode()); Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); @@ -1607,7 +1609,7 @@ public void test_CrawlableAccessToDatasetFiles() { // Expected values in the output: String expectedTitleTopFolder = "Index of folder /"; String expectedLinkTopFolder = folderName + "/"; - String expectedLinkAhrefTopFolder = "/api/datasets/"+datasetId+"/dirindex/?version=:draft&folder=subfolder"; + String expectedLinkAhrefTopFolder 
= "/api/datasets/"+datasetId+"/dirindex/?version=" + DS_VERSION_DRAFT + "&folder=subfolder"; String expectedTitleSubFolder = "Index of folder /" + folderName; String expectedLinkAhrefSubFolder = "/api/access/datafile/" + folderName + "/" + dataFileId; @@ -1987,7 +1989,7 @@ public void testDeleteFile() { deleteResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 2 deleted from post v1.0 draft - Response postv1draft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft.prettyPrint(); postv1draft.then().assertThat() .body("data.files.size()", equalTo(1)) @@ -2009,7 +2011,7 @@ public void testDeleteFile() { downloadResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 still in post v1.0 draft - Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) @@ -2024,7 +2026,7 @@ public void testDeleteFile() { deleteResponse3.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 deleted from post v1.0 draft - Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft3.prettyPrint(); postv1draft3.then().assertThat() .body("data.files[0]", equalTo(null)) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 086fef5f18a..8c6a2d6e75d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -38,6 +38,7 @@ import org.hamcrest.Description; import org.hamcrest.Matcher; +import static 
edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.path.xml.XmlPath.from; import static io.restassured.RestAssured.given; import edu.harvard.iq.dataverse.DatasetField; @@ -515,7 +516,7 @@ static Response updateDatasetMetadataViaNative(String persistentId, String pathT .header(API_TOKEN_HTTP_HEADER, apiToken) .body(jsonIn) .contentType("application/json") - .put("/api/datasets/:persistentId/versions/:draft?persistentId=" + persistentId); + .put("/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "?persistentId=" + persistentId); return response; } @@ -791,7 +792,7 @@ static Response deleteAuxFile(Long fileId, String formatTag, String formatVersio static Response getCrawlableFileAccess(String datasetId, String folderName, String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken); - String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=:draft"; + String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=" + DS_VERSION_DRAFT; if (StringUtil.nonEmpty(folderName)) { apiPath = apiPath.concat("&folder="+folderName); } @@ -1407,7 +1408,7 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/:persistentId/versions/:latest-published/metadata/citation?persistentId=" + persistentId); + .get("/api/datasets/:persistentId/versions/" + DS_VERSION_LATEST_PUBLISHED + "/metadata/citation?persistentId=" + persistentId); } static Response makeSuperUser(String username) { @@ -2922,7 +2923,7 @@ static Response findDatasetStorageSize(String datasetId, String apiToken) { static Response findDatasetDownloadSize(String datasetId) { return given() - .get("/api/datasets/" + datasetId + "/versions/:latest/downloadsize"); + .get("/api/datasets/" + 
datasetId + "/versions/" + DS_VERSION_LATEST + "/downloadsize"); } static Response findDatasetDownloadSize(String datasetId, String version, String apiToken) { From 887d26f2fe5c41ca71f0031a9eae6dbfa13e8559 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Sep 2023 10:11:33 +0100 Subject: [PATCH 049/252] Added: docs for deaccessioning API endpoints --- doc/sphinx-guides/source/api/native-api.rst | 42 ++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 90f4ad4e800..f46bd0dd17c 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1020,7 +1020,17 @@ Usage example: Please note that both filtering and ordering criteria values are case sensitive and must be correctly typed for the endpoint to recognize them. -Keep in mind that you can combine all of the above query params depending on the results you are looking for. +By default, deaccessioned dataset versions are not supported by this endpoint and will be ignored in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a not found error if the version is deaccessioned and you do not enable the option described below. + +If you want to consider deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?includeDeaccessioned=true" + +.. note:: Keep in mind that you can combine all of the above query params depending on the results you are looking for. 
Get File Counts in a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1048,6 +1058,16 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts" +By default, deaccessioned dataset versions are not supported by this endpoint and will be ignored in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a not found error if the version is deaccessioned and you do not enable the option described below. + +If you want to consider deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?includeDeaccessioned=true" + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1344,6 +1364,26 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/versions/:draft" +Deaccession Dataset +~~~~~~~~~~~~~~~~~~~ + +Given a version of a dataset, updates its status to deaccessioned. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSIONID=1.0 + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" + Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 1d661e74f2671405143023e22018c5ca197b9c5c Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Sep 2023 10:37:24 +0100 Subject: [PATCH 050/252] Added: release notes for #9852 --- .../9852-files-api-extension-deaccession.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 doc/release-notes/9852-files-api-extension-deaccession.md diff --git a/doc/release-notes/9852-files-api-extension-deaccession.md b/doc/release-notes/9852-files-api-extension-deaccession.md new file mode 100644 index 00000000000..c5f6741932a --- /dev/null +++ b/doc/release-notes/9852-files-api-extension-deaccession.md @@ -0,0 +1,10 @@ +Extended the existing endpoints: + +- getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts) + +The above endpoints now accept a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files or file counts. + +Additionally, a new endpoint has been developed to support version deaccessioning through API (Given a dataset and a version). 
+ +- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession) From 3c7fa8f0eeb34db7d2ca12d4f7eae8e4e02df1d8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Sep 2023 11:04:58 +0100 Subject: [PATCH 051/252] Added: friendlyType field to DataFile API json payload --- doc/release-notes/9852-files-api-extension-deaccession.md | 2 ++ .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 + 2 files changed, 3 insertions(+) diff --git a/doc/release-notes/9852-files-api-extension-deaccession.md b/doc/release-notes/9852-files-api-extension-deaccession.md index c5f6741932a..55698580e3c 100644 --- a/doc/release-notes/9852-files-api-extension-deaccession.md +++ b/doc/release-notes/9852-files-api-extension-deaccession.md @@ -8,3 +8,5 @@ The above endpoints now accept a new boolean optional query parameter "includeDe Additionally, a new endpoint has been developed to support version deaccessioning through API (Given a dataset and a version). - deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession) + +Finally, the DataFile API payload has been extended to add the field "friendlyType" diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index e5cd72ff5fc..c4f9e47accf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -665,6 +665,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo .add("pidURL", pidURL) .add("filename", fileName) .add("contentType", df.getContentType()) + .add("friendlyType", df.getFriendlyType()) .add("filesize", df.getFilesize()) .add("description", fileMetadata.getDescription()) .add("categories", getFileCategories(fileMetadata)) From b3808c5f5d4b058a5be359c27a35254936a37266 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 22 Sep 2023 12:20:31 -0400 Subject: [PATCH 
052/252] We talked about this on the Containerization Working Group meeting on 2023-09-21. Deleting the duplicated chmod and moving the bootstrap script copy to be executed before the chmod. --- modules/container-configbaker/Dockerfile | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 44f3806a591..2975b043213 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -26,8 +26,12 @@ RUN true && \ # Make our working directories mkdir -p ${SCRIPT_DIR} ${SECRETS_DIR} ${SOLR_TEMPLATE} -# Get in the scripts and make them executable (just in case...) +# Get in the scripts COPY maven/scripts maven/solr/update-fields.sh ${SCRIPT_DIR}/ +# Copy the data from scripts/api that provide the common base setup you'd get from the installer. +# ".dockerignore" will take care of taking only the bare necessities +COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ +# Make the scripts executable RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh # Copy the Solr config bits @@ -35,10 +39,8 @@ COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml -# Copy the data from scripts/api that provide the common base setup you'd get from the installer. 
-# ".dockerignore" will take care of taking only the bare necessities -COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ -RUN chmod +x ${BOOTSTRAP_DIR}/*/*.sh + + # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] From cdd3a721deefe659151ab89b2c75cd6f3af016a5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:23:07 -0400 Subject: [PATCH 053/252] 9952 - add missing <> chars for license --- .../edu/harvard/iq/dataverse/util/SignpostingResources.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 2c9b7167059..21abd2d7034 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -78,7 +78,7 @@ public String getLinks() { type = ";rel=\"type\",<" + defaultFileTypeValue + ">;rel=\"type\""; valueList.add(type); - String licenseString = DatasetUtil.getLicenseURI(workingDatasetVersion) + ";rel=\"license\""; + String licenseString = "<" + DatasetUtil.getLicenseURI(workingDatasetVersion) + ">;rel=\"license\""; valueList.add(licenseString); String linkset = "<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" From 3a4d8f98053ff726c617a45c5ad15d2f3059c138 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:25:28 -0400 Subject: [PATCH 054/252] 9953 - don't wrap linkset in a data element also remove @AuthRequired per #9466 --- .../harvard/iq/dataverse/api/Datasets.java | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 98bc42f75b0..3b0bc3e0fcf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -580,26 +580,27 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc, * @return */ @GET - @AuthRequired @Path("{id}/versions/{versionId}/linkset") - public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if ( ":draft".equals(versionId) ) { + public Response getLinkset(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + if (":draft".equals(versionId)) { return badRequest("Signposting is not supported on the :draft version"); } - User user = getRequestUser(crc); - return response(req -> { + DataverseRequest req = createDataverseRequest(null); + try { DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - return ok(Json.createObjectBuilder().add( - "linkset", - new SignpostingResources( - systemConfig, - dsv, - JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), - JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("") - ).getJsonLinkset() - ) - ); - }, user); + return Response + .ok(Json.createObjectBuilder() + .add("linkset", + new SignpostingResources(systemConfig, dsv, + JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), + JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("")) + .getJsonLinkset()) + .build()) + .type(MediaType.APPLICATION_JSON).build(); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } } @GET From 869d24266bc305d4b008975d8ebe0dd5911063a2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:26:50 -0400 Subject: [PATCH 055/252] add null check to avoid any remaining cases of 9954 --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index d20175b6e1a..7cb5bfa3850 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -6144,7 +6144,7 @@ public String getWebloaderUrlForDataset(Dataset d) { String signpostingLinkHeader = null; public String getSignpostingLinkHeader() { - if (!workingVersion.isReleased()) { + if ((workingVersion==null) || (!workingVersion.isReleased())) { return null; } if (signpostingLinkHeader == null) { From 2332c1c5b815e737f7e2471d40d31b0fac179c82 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:38:05 -0400 Subject: [PATCH 056/252] release note --- doc/release-notes/9955-Signposting-updates.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/9955-Signposting-updates.md diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md new file mode 100644 index 00000000000..bf0c7bc646b --- /dev/null +++ b/doc/release-notes/9955-Signposting-updates.md @@ -0,0 +1 @@ +This release fixes two issues (#9952, #9953) where the Signposting output did not match the Signposting specification. 
\ No newline at end of file From 5978d71337fd1bfbecd42b7bd88b7b4193dbc6ad Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 26 Sep 2023 07:07:00 -0400 Subject: [PATCH 057/252] 9957- use ld+json --- doc/release-notes/9955-Signposting-updates.md | 2 +- doc/sphinx-guides/source/api/native-api.rst | 2 +- .../edu/harvard/iq/dataverse/DatasetFieldServiceBean.java | 3 ++- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- .../edu/harvard/iq/dataverse/util/SignpostingResources.java | 4 ++-- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md index bf0c7bc646b..92168231895 100644 --- a/doc/release-notes/9955-Signposting-updates.md +++ b/doc/release-notes/9955-Signposting-updates.md @@ -1 +1 @@ -This release fixes two issues (#9952, #9953) where the Signposting output did not match the Signposting specification. \ No newline at end of file +This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. 
\ No newline at end of file diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 56d245f97c0..e181a2a5546 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2196,7 +2196,7 @@ Signposting involves the addition of a `Link ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/json+ld", ;rel="type",;rel="type", https://demo.dataverse.org/api/datasets/:persistentId/versions/1.0/customlicense?persistentId=doi:10.5072/FK2/YD5QDG;rel="license", ; rel="linkset";type="application/linkset+json"`` +``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/ld+json", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"`` The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 620d4bf3e09..ce2b00086ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -500,7 +500,8 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep .setRetryHandler(new DefaultHttpRequestRetryHandler(3, false)) .build()) { HttpGet httpGet = new HttpGet(retrievalUri); - httpGet.addHeader("Accept", "application/json+ld, application/json"); + //application/json+ld is for backward compatibility + httpGet.addHeader("Accept", "application/ld+json, application/json+ld, application/json"); HttpResponse response = httpClient.execute(httpGet); String data = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 3b0bc3e0fcf..b9a104d8eaa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -272,7 +272,7 @@ public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id" @GET @Path("/export") - @Produces({"application/xml", "application/json", "application/html" }) + @Produces({"application/xml", "application/json", "application/html", "application/ld+json" }) public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { try { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 21abd2d7034..1826689b892 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -71,7 +71,7 @@ public String getLinks() { String describedby = "<" + ds.getGlobalId().asURL().toString() + ">;rel=\"describedby\"" + ";type=\"" + "application/vnd.citationstyles.csl+json\""; describedby += ",<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" - + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/json+ld\""; + + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/ld+json\""; valueList.add(describedby); String type = ";rel=\"type\""; @@ -116,7 +116,7 @@ public JsonArrayBuilder getJsonLinkset() { systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() ).add( "type", - "application/json+ld" + "application/ld+json" ) ); JsonArrayBuilder linksetJsonObj = Json.createArrayBuilder(); From e3fbd0287392aa6652cf23f32f849c17812a4fd8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 26 Sep 2023 12:29:01 -0400 Subject: [PATCH 058/252] Update test --- src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java index 17eba4770f1..b41f62ae28f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java @@ -92,7 +92,7 @@ public void testSignposting() { String responseString = linksetResponse.getBody().asString(); - JsonObject data = JsonUtil.getJsonObject(responseString).getJsonObject("data"); + JsonObject data = JsonUtil.getJsonObject(responseString); JsonObject lso = data.getJsonArray("linkset").getJsonObject(0); 
System.out.println("Linkset: " + lso.toString()); From 3dd4564dc56a1132fcda7301a358e8f1f802752b Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 26 Sep 2023 18:29:42 +0100 Subject: [PATCH 059/252] Added: ignoreOriginalTabularSize optional query parameter to getDownloadSize datasets API endpoint --- .../iq/dataverse/DatasetServiceBean.java | 13 +++-- .../harvard/iq/dataverse/api/Datasets.java | 29 ++++++---- .../impl/GetDatasetStorageSizeCommand.java | 18 +++--- .../impl/GetDataverseStorageSizeCommand.java | 2 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 55 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 7 +++ 6 files changed, 98 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 52eb5868c35..4799502a6e3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -788,13 +788,13 @@ public void exportDataset(Dataset dataset, boolean forceReExport) { } } } - + } //get a string to add to save success message //depends on page (dataset/file) and user privleges public String getReminderString(Dataset dataset, boolean canPublishDataset, boolean filePage, boolean isValid) { - + String reminderString; if (canPublishDataset) { @@ -1015,12 +1015,12 @@ public void obtainPersistentIdentifiersForDatafiles(Dataset dataset) { } public long findStorageSize(Dataset dataset) throws IOException { - return findStorageSize(dataset, false, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, false, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws IOException { - return findStorageSize(dataset, countCachedExtras, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, countCachedExtras, true, 
GetDatasetStorageSizeCommand.Mode.STORAGE, null); } /** @@ -1028,6 +1028,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * * @param dataset * @param countCachedExtras boolean indicating if the cached disposable extras should also be counted + * @param countOriginalTabularSize boolean indicating if the size of the stored original tabular files should also be counted, in addition to the main tab-delimited file size * @param mode String indicating whether we are getting the result for storage (entire dataset) or download version based * @param version optional param for dataset version * @return total size @@ -1036,7 +1037,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * default mode, the method doesn't need to access the storage system, as the * sizes of the main files are recorded in the database) */ - public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { + public long findStorageSize(Dataset dataset, boolean countCachedExtras, boolean countOriginalTabularSize, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { long total = 0L; if (dataset.isHarvested()) { @@ -1062,7 +1063,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatas total += datafile.getFilesize(); if (!countCachedExtras) { - if (datafile.isTabularData()) { + if (datafile.isTabularData() && countOriginalTabularSize) { // count the size of the stored original, in addition to the main tab-delimited file: Long originalFileSize = datafile.getDataTable().getOriginalFileSize(); if (originalFileSize != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 62d87b198fe..a39347ef64e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2947,25 +2947,32 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup String nullCurrentMonth = null; return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country); } - + @GET @AuthRequired @Path("{identifier}/storagesize") - public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) { return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); } - + @GET @AuthRequired @Path("{identifier}/versions/{versionId}/downloadsize") - public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - - return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))))), getRequestUser(crc)); + public Response getDownloadSize(@Context ContainerRequestContext crc, + @PathParam("identifier") String dvIdtf, + 
@PathParam("versionId") String version, + @QueryParam("ignoreOriginalTabularSize") boolean ignoreOriginalTabularSize, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response(req -> { + Long datasetStorageSize = execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, !ignoreOriginalTabularSize, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))); + String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("message", message); + jsonObjectBuilder.add("storageSize", datasetStorageSize); + return ok(jsonObjectBuilder); + }, getRequestUser(crc)); } @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java index f1f27fdcee2..eebb8dd9e00 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java @@ -7,7 +7,6 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -15,6 +14,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.util.BundleUtil; + import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -32,47 +32,49 @@ public class GetDatasetStorageSizeCommand extends AbstractCommand { private 
final Dataset dataset; private final Boolean countCachedFiles; + private final Boolean countOriginalTabularSize; private final Mode mode; private final DatasetVersion version; public enum Mode { STORAGE, DOWNLOAD - }; + } public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target) { super(aRequest, target); dataset = target; countCachedFiles = false; + countOriginalTabularSize = true; mode = Mode.DOWNLOAD; version = null; } - public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean countCachedFiles, Mode mode, DatasetVersion version) { + public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean countCachedFiles, boolean countOriginalTabularSize, Mode mode, DatasetVersion version) { super(aRequest, target); dataset = target; this.countCachedFiles = countCachedFiles; + this.countOriginalTabularSize = countOriginalTabularSize; this.mode = mode; this.version = version; } @Override public Long execute(CommandContext ctxt) throws CommandException { - logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); - if (dataset == null) { // should never happen - must indicate some data corruption in the database throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.listing.error"), this); } + logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); + try { - return ctxt.datasets().findStorageSize(dataset, countCachedFiles, mode, version); + return ctxt.datasets().findStorageSize(dataset, countCachedFiles, countOriginalTabularSize, mode, version); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.datasize.ioerror"), this); } - } - + @Override public Map> getRequiredPermissions() { // for data file check permission on owning dataset diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java index 57912a6b4bd..9f93f6747ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java @@ -59,7 +59,7 @@ public Long execute(CommandContext ctxt) throws CommandException { } try { - total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("dataverse.datasize.ioerror"), this); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5c1eb66b63d..929882fe95a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3630,4 +3630,59 @@ public void deaccessionDataset() { deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } + + @Test + public void getDownloadSize() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + 
createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Creating test text files + String testFileName1 = "test_1.txt"; + String testFileName2 = "test_2.txt"; + + int testFileSize1 = 50; + int testFileSize2 = 200; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[testFileSize1], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[testFileSize2], apiToken); + + int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; + + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); + + // Upload test tabular file + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get the original tabular file size + int tabularOriginalSize = Integer.parseInt(uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.filesize")); + + // Get the size ignoring the original tabular file sizes + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, true, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); + + int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); + // Assert that the size has been incremented with the last uploaded file + 
assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); + + // Get the size including the original tabular file sizes + int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + actualSizeIgnoringOriginalTabularSizes; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOriginalTabularSizes)); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8c6a2d6e75d..ecf26bd26ae 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3409,4 +3409,11 @@ static Response deaccessionDataset(Integer datasetId, String version, String api .body(jsonString) .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } + + static Response getDownloadSize(Integer datasetId, String version, boolean ignoreOriginalTabularSize, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("ignoreOriginalTabularSize", ignoreOriginalTabularSize) + .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); + } } From f653c219d0ce6d7b1b1b3774b4820a05391c82d0 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:33:21 +0100 Subject: [PATCH 060/252] Changed: dataset version download size calculation when ignoring original tab file sizes --- .../edu/harvard/iq/dataverse/DatasetServiceBean.java | 9 ++++----- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 ++++-- .../command/impl/GetDatasetStorageSizeCommand.java | 7 ++----- .../command/impl/GetDataverseStorageSizeCommand.java | 2 +- .../java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 ++- 5 files changed, 13 insertions(+), 14 deletions(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 4799502a6e3..30274efb384 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1015,12 +1015,12 @@ public void obtainPersistentIdentifiersForDatafiles(Dataset dataset) { } public long findStorageSize(Dataset dataset) throws IOException { - return findStorageSize(dataset, false, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, false, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws IOException { - return findStorageSize(dataset, countCachedExtras, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, countCachedExtras, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } /** @@ -1028,7 +1028,6 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * * @param dataset * @param countCachedExtras boolean indicating if the cached disposable extras should also be counted - * @param countOriginalTabularSize boolean indicating if the size of the stored original tabular files should also be counted, in addition to the main tab-delimited file size * @param mode String indicating whether we are getting the result for storage (entire dataset) or download version based * @param version optional param for dataset version * @return total size @@ -1037,7 +1036,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * default mode, the method doesn't need to access the storage system, as the * sizes of the main files are recorded in the database) */ - public long findStorageSize(Dataset dataset, boolean countCachedExtras, boolean countOriginalTabularSize, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { + 
public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { long total = 0L; if (dataset.isHarvested()) { @@ -1063,7 +1062,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, boolean total += datafile.getFilesize(); if (!countCachedExtras) { - if (datafile.isTabularData() && countOriginalTabularSize) { + if (datafile.isTabularData()) { // count the size of the stored original, in addition to the main tab-delimited file: Long originalFileSize = datafile.getDataTable().getOriginalFileSize(); if (originalFileSize != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index a39347ef64e..981cbced11e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2953,7 +2953,7 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup @Path("{identifier}/storagesize") public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) { return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); } @GET @@ -2966,7 +2966,9 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - Long datasetStorageSize = execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, 
!ignoreOriginalTabularSize, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); + Long datasetStorageSize = ignoreOriginalTabularSize ? DatasetUtil.getDownloadSizeNumeric(datasetVersion, false) + : execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, datasetVersion)); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("message", message); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java index eebb8dd9e00..09b33c4efc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java @@ -32,7 +32,6 @@ public class GetDatasetStorageSizeCommand extends AbstractCommand { private final Dataset dataset; private final Boolean countCachedFiles; - private final Boolean countOriginalTabularSize; private final Mode mode; private final DatasetVersion version; @@ -45,16 +44,14 @@ public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target) { super(aRequest, target); dataset = target; countCachedFiles = false; - countOriginalTabularSize = true; mode = Mode.DOWNLOAD; version = null; } - public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean countCachedFiles, boolean countOriginalTabularSize, Mode mode, DatasetVersion version) { + public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean 
countCachedFiles, Mode mode, DatasetVersion version) { super(aRequest, target); dataset = target; this.countCachedFiles = countCachedFiles; - this.countOriginalTabularSize = countOriginalTabularSize; this.mode = mode; this.version = version; } @@ -69,7 +66,7 @@ public Long execute(CommandContext ctxt) throws CommandException { logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); try { - return ctxt.datasets().findStorageSize(dataset, countCachedFiles, countOriginalTabularSize, mode, version); + return ctxt.datasets().findStorageSize(dataset, countCachedFiles, mode, version); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.datasize.ioerror"), this); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java index 9f93f6747ea..57912a6b4bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java @@ -59,7 +59,7 @@ public Long execute(CommandContext ctxt) throws CommandException { } try { - total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("dataverse.datasize.ioerror"), this); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 929882fe95a..580a1edb6f2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3679,7 +3679,8 @@ public void getDownloadSize() 
throws IOException { assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); // Get the size including the original tabular file sizes - int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + actualSizeIgnoringOriginalTabularSizes; + int tabularProcessedSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; + int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + tabularProcessedSize + expectedTextFilesStorageSize; getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) From 9d10b99cdbb3487e08a308e0e6f1de7ff69cf913 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:40:23 +0100 Subject: [PATCH 061/252] Added: #9958 release notes --- .../9958-dataset-api-downloadsize-ignore-tabular-size.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md diff --git a/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md new file mode 100644 index 00000000000..73b27a1a581 --- /dev/null +++ b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md @@ -0,0 +1,3 @@ +Added a new optional query parameter "ignoreOriginalTabularSize" to the "getDownloadSize" API endpoint ("api/datasets/{identifier}/versions/{versionId}/downloadsize"). + +If set to true, the endpoint will return the download size ignoring the original tabular file sizes. 
From 9710c79432cbc30a1f3222a2df2e423f6040ed0a Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:46:05 +0100 Subject: [PATCH 062/252] Added: mentioned ignoreOriginalTabularSize query parameter in the docs for /downloadsize API endpoint --- doc/sphinx-guides/source/api/native-api.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 663051c0884..169b950dc74 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1797,6 +1797,8 @@ The fully expanded example above (without environment variables) looks like this The size of all files available for download will be returned. If :draft is passed as versionId the token supplied must have permission to view unpublished drafts. A token is not required for published datasets. Also restricted files will be included in this total regardless of whether the user has access to download the restricted file(s). +There is an optional query parameter ``ignoreOriginalTabularSize`` which, if set to true, the endpoint will return the download size ignoring the sizes of the original tabular files. Otherwise, both the original and the processed size will be included in the count for tabular files. 
+ Submit a Dataset for Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 4aa34ffb417039b8132070270a246f8e4b4fedd3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:50:42 +0100 Subject: [PATCH 063/252] Added: ignoreOriginalTabularSize query param usage example to the docs --- doc/sphinx-guides/source/api/native-api.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 169b950dc74..0f77aeba580 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1799,6 +1799,12 @@ If :draft is passed as versionId the token supplied must have permission to view There is an optional query parameter ``ignoreOriginalTabularSize`` which, if set to true, the endpoint will return the download size ignoring the sizes of the original tabular files. Otherwise, both the original and the processed size will be included in the count for tabular files. +Usage example: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?ignoreOriginalTabularSize=true" + Submit a Dataset for Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~ From c9c6cf26a1764bb5c409c4d25571984d0e5fbf80 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 27 Sep 2023 10:32:44 -0400 Subject: [PATCH 064/252] Add null check to avoid future issues --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 7cb5bfa3850..7dba8af3fdc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2863,6 +2863,12 @@ public void sort() { public String refresh() { logger.fine("refreshing"); + //In v5.14, versionId was null here. 
In 6.0, it appears not to be. + //This check is to handle the null if it reappears/occurs under other circumstances + if(versionId==null) { + logger.fine("versionId was null in refresh"); + versionId = workingVersion.getId(); + } //dataset = datasetService.find(dataset.getId()); dataset = null; workingVersion = null; @@ -2872,10 +2878,9 @@ public String refresh() { DatasetVersionServiceBean.RetrieveDatasetVersionResponse retrieveDatasetVersionResponse = null; if (versionId != null) { - // versionId must have been set by now, in the init() method, - // regardless of how the page was originally called - by the dataset - // database id, by the persistent identifier, or by the db id of - // the version. + // versionId must have been set by now (see null check above), in the init() + // method, regardless of how the page was originally called - by the dataset + // database id, by the persistent identifier, or by the db id of the version. this.workingVersion = datasetVersionService.findDeep(versionId); dataset = workingVersion.getDataset(); } From ba2dd8400c128cefd22f7d83d52771760d477905 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 27 Sep 2023 10:39:26 -0400 Subject: [PATCH 065/252] warn if null cases still occur --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 7dba8af3fdc..74064f20893 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2866,7 +2866,7 @@ public String refresh() { //In v5.14, versionId was null here. In 6.0, it appears not to be. 
//This check is to handle the null if it reappears/occurs under other circumstances if(versionId==null) { - logger.fine("versionId was null in refresh"); + logger.warning("versionId was null in refresh"); versionId = workingVersion.getId(); } //dataset = datasetService.find(dataset.getId()); @@ -6150,6 +6150,9 @@ public String getWebloaderUrlForDataset(Dataset d) { public String getSignpostingLinkHeader() { if ((workingVersion==null) || (!workingVersion.isReleased())) { + if(workingVersion==null) { + logger.warning("workingVersion was null in getSignpostingLinkHeader"); + } return null; } if (signpostingLinkHeader == null) { From 448ae448ff1cb36f10b30449694126a866c28643 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 28 Sep 2023 11:13:52 +0100 Subject: [PATCH 066/252] Added: JSON payload to curl examples for Deaccession Dataset docs --- doc/sphinx-guides/source/api/native-api.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 663051c0884..01a681cfb6a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1375,14 +1375,15 @@ Given a version of a dataset, updates its status to deaccessioned. export SERVER_URL=https://demo.dataverse.org export ID=24 export VERSIONID=1.0 + export JSON='{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -d "$JSON" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 907fd4024c8df2218764fd0902d1242a37726f7e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 10:48:36 +0100 Subject: [PATCH 067/252] Changed: using query-based implementation for files download size --- .../DatasetVersionFilesServiceBean.java | 57 +++++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 12 +++- .../harvard/iq/dataverse/api/DatasetsIT.java | 30 +++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 4 +- 4 files changed, 89 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index a547a216ad5..66e0ec5b5fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.QDataFileCategory; +import edu.harvard.iq.dataverse.QDataTable; import edu.harvard.iq.dataverse.QDvObject; import edu.harvard.iq.dataverse.QEmbargo; import edu.harvard.iq.dataverse.QFileMetadata; @@ -36,6 +37,7 @@ public class DatasetVersionFilesServiceBean implements Serializable { private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; private final QDvObject dvObject = QDvObject.dvObject; private final QDataFileCategory dataFileCategory = QDataFileCategory.dataFileCategory; + private final QDataTable dataTable = 
QDataTable.dataTable; /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} @@ -51,6 +53,19 @@ public enum DataFileAccessStatus { Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic } + /** + * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileDownloadSizeMode)} + *

    + * All: Includes both archival and original sizes for tabular files + * Archival: Includes only the archival size for tabular files + * Original: Includes only the original size for tabular files + *

    + * All the modes include archival sizes for non-tabular files + */ + public enum FileDownloadSizeMode { + All, Original, Archival + } + /** * Given a DatasetVersion, returns its total file metadata count * @@ -159,6 +174,23 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege return baseQuery.fetch(); } + /** + * Returns the total download size of all files for a particular DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param mode a FileDownloadSizeMode to base the search on + * @return long value of total file download size + */ + public long getFilesDownloadSize(DatasetVersion datasetVersion, FileDownloadSizeMode mode) { + return switch (mode) { + case All -> + Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, false)); + case Original -> + Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, true)); + case Archival -> getArchivalFilesSize(datasetVersion, false); + }; + } + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); if (fileMetadataCount > 0) { @@ -230,4 +262,29 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu break; } } + + private long getOriginalTabularFilesSize(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + Long result = queryFactory + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) + .from(dataTable) + .where(fileMetadata.dataFile.dataTables.isNotEmpty().and(dataTable.dataFile.eq(fileMetadata.dataFile))) + .select(dataTable.originalFileSize.sum()).fetchFirst(); + return (result == null) ? 
0 : result; + } + + private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + JPAQuery baseQuery = queryFactory + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + Long result; + if (ignoreTabular) { + result = baseQuery.where(fileMetadata.dataFile.dataTables.isEmpty()).select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + } else { + result = baseQuery.select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + } + return (result == null) ? 0 : result; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 981cbced11e..80a2dac9568 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2962,13 +2962,19 @@ public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam( public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @QueryParam("ignoreOriginalTabularSize") boolean ignoreOriginalTabularSize, + @QueryParam("mode") String mode, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> { + DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode; + try { + fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); + } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); - Long datasetStorageSize = ignoreOriginalTabularSize ? 
DatasetUtil.getDownloadSizeNumeric(datasetVersion, false) - : execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, datasetVersion)); + long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileDownloadSizeMode); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("message", message); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 580a1edb6f2..189cf3a6f5a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3632,7 +3632,7 @@ public void deaccessionDataset() { } @Test - public void getDownloadSize() throws IOException { + public void getDownloadSize() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3658,7 +3658,8 @@ public void getDownloadSize() throws IOException { int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; - Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + // Get the total size when there are no tabular files + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); @@ -3670,20 +3671,31 @@ public void getDownloadSize() throws IOException { // Get the original tabular file size int tabularOriginalSize = 
Integer.parseInt(uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.filesize")); - // Get the size ignoring the original tabular file sizes - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, true, apiToken); + // Ensure tabular file is ingested + Thread.sleep(2000); + + // Get the total size ignoring the original tabular file sizes + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); + // Assert that the size has been incremented with the last uploaded file assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); - // Get the size including the original tabular file sizes - int tabularProcessedSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; - int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + tabularProcessedSize + expectedTextFilesStorageSize; + // Get the total size including only original sizes and ignoring archival sizes for tabular files + int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; + int expectedSizeIncludingAllSizes = tabularArchivalSize + 
tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) - .body("data.storageSize", equalTo(expectedSizeIncludingOriginalTabularSizes)); + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ecf26bd26ae..f9f3dc9be8d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3410,10 +3410,10 @@ static Response deaccessionDataset(Integer datasetId, String version, String api .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } - static Response getDownloadSize(Integer datasetId, String version, boolean ignoreOriginalTabularSize, String apiToken) { + static Response getDownloadSize(Integer datasetId, String version, String mode, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("ignoreOriginalTabularSize", ignoreOriginalTabularSize) + .queryParam("mode", mode) .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); } } From a5c32bd1b11f4385926f9abc53578e6b48c05adc Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 10:53:45 +0100 Subject: [PATCH 068/252] Added: error case to getDownloadSize IT --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 189cf3a6f5a..ee3355096b8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java 
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3697,5 +3697,11 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); + + // Get the total size sending invalid file download size mode + String invalidMode = "invalidMode"; + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Invalid mode: " + invalidMode)); } } From 131cd8f83473e9919e871723551eb441b6f27c3e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 11:22:44 +0100 Subject: [PATCH 069/252] Added: multiple tab files test case for getDownloadSize IT --- .../harvard/iq/dataverse/api/DatasetsIT.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index ee3355096b8..829c19c6440 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3703,5 +3703,23 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid mode: " + invalidMode)); + + // Upload second test tabular file (same source as before) + uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + 
uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get the total size including only original sizes and ignoring archival sizes for tabular files + expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } } From 725bbb7c6c7c76a87f9892501b8050c24e704f8d Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 12:20:45 +0100 Subject: [PATCH 070/252] Changed: updated docs for /downloadsize endpoint --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 0f77aeba580..0cea70c04f1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1797,13 +1797,17 @@ The fully expanded example above (without environment variables) looks like this The size of all files available for download will be returned. If :draft is passed as versionId the token supplied must have permission to view unpublished drafts. A token is not required for published datasets. 
Also restricted files will be included in this total regardless of whether the user has access to download the restricted file(s). -There is an optional query parameter ``ignoreOriginalTabularSize`` which, if set to true, the endpoint will return the download size ignoring the sizes of the original tabular files. Otherwise, both the original and the processed size will be included in the count for tabular files. +There is an optional query parameter ``mode`` which applies a filter criteria to the operation. This parameter supports the following values: + +* ``All`` (Default): Includes both archival and original sizes for tabular files +* ``Archival``: Includes only the archival size for tabular files +* ``Original``: Includes only the original size for tabular files Usage example: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?ignoreOriginalTabularSize=true" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?mode=Archival" Submit a Dataset for Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~ From cbf00d788ef27fdbe846328223d3fed9b00125bc Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 12:22:38 +0100 Subject: [PATCH 071/252] Changed: updated release notes for #9958 --- ...958-dataset-api-downloadsize-ignore-tabular-size.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md index 73b27a1a581..2ede679b361 100644 --- a/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md +++ b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md @@ -1,3 +1,9 @@ -Added a new optional query parameter "ignoreOriginalTabularSize" to the "getDownloadSize" API endpoint 
("api/datasets/{identifier}/versions/{versionId}/downloadsize"). +Added a new optional query parameter "mode" to the "getDownloadSize" API endpoint ("api/datasets/{identifier}/versions/{versionId}/downloadsize"). -If set to true, the endpoint will return the download size ignoring the original tabular file sizes. +This parameter applies a filter criteria to the operation and supports the following values: + +- All (Default): Includes both archival and original sizes for tabular files + +- Archival: Includes only the archival size for tabular files + +- Original: Includes only the original size for tabular files From 87c6515e3c22b25c66850714dfe17167b1202433 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 12:24:43 +0100 Subject: [PATCH 072/252] Added: sleep call to getDownloadSize IT to ensure tab file is ingested --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 829c19c6440..cab468fb1e9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3708,6 +3708,9 @@ public void getDownloadSize() throws IOException, InterruptedException { uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + // Ensure tabular file is ingested + Thread.sleep(2000); + // Get the total size including only original sizes and ignoring archival sizes for tabular files expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; From 38cabc1767fa61e59c16aefcabc03d5514006d7e Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Mon, 2 Oct 2023 12:58:36 +0100 Subject: [PATCH 073/252] 
Changed: getVersionFiles docs suggestions applied Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 01a681cfb6a..4a84cc17d16 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1020,7 +1020,7 @@ Usage example: Please note that both filtering and ordering criteria values are case sensitive and must be correctly typed for the endpoint to recognize them. -By default, deaccessioned dataset versions are not supported by this endpoint and will be ignored in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a not found error if the version is deaccessioned and you do not enable the option described below. +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. If you want to consider deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter. 
From 5c7830c9022d72cb2bdd248981f23fc4d29fd1d0 Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Mon, 2 Oct 2023 12:59:48 +0100 Subject: [PATCH 074/252] Changed: getVersionFiles docs suggestions applied (2) Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4a84cc17d16..9459440608b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1022,7 +1022,7 @@ Please note that both filtering and ordering criteria values are case sensitive By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. -If you want to consider deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter. +If you want to consider deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. 
Usage example: From 02003f12853ed441c30c1e3a1e51e38824b3defb Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 13:14:57 +0100 Subject: [PATCH 075/252] Added: clarification to Deaccession Dataset API docs about calling the endpoint multiple times for the same dataset version --- doc/sphinx-guides/source/api/native-api.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 01a681cfb6a..4bca19d078d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1385,6 +1385,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' +.. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be public since it is already deaccessioned. 
+ Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 244ade84624970c0040b4ec29c7250ab550cdeda Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 2 Oct 2023 11:30:53 -0400 Subject: [PATCH 076/252] tiny doc fixes #9852 --- doc/sphinx-guides/source/api/native-api.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 78576e8bbe1..377ca4017f6 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1022,7 +1022,7 @@ Please note that both filtering and ordering criteria values are case sensitive By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. -If you want to consider deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. Usage example: @@ -1060,7 +1060,7 @@ The fully expanded example above (without environment variables) looks like this By default, deaccessioned dataset versions are not supported by this endpoint and will be ignored in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a not found error if the version is deaccessioned and you do not enable the option described below. -If you want to consider deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter. 
+If you want to include deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter. Usage example: @@ -1385,7 +1385,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' -.. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be public since it is already deaccessioned. +.. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be published since it is already deaccessioned. 
Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 649ea13e9dcf80b941de0cca209d389fd810e352 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 18:24:33 +0100 Subject: [PATCH 077/252] Changed: using POST method for deaccessionDataset API endpoint and string bundle for error messages (IT extended) --- doc/sphinx-guides/source/api/native-api.rst | 4 +- .../harvard/iq/dataverse/api/Datasets.java | 6 +-- src/main/java/propertyFiles/Bundle.properties | 3 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 48 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 9 ++-- 5 files changed, 52 insertions(+), 18 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 78576e8bbe1..f494415e731 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1377,13 +1377,13 @@ Given a version of a dataset, updates its status to deaccessioned. export VERSIONID=1.0 export JSON='{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -d "$JSON" + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -d "$JSON" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' .. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be public since it is already deaccessioned. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 62d87b198fe..e334116958d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3924,12 +3924,12 @@ public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getCitation(true, false)), getRequestUser(crc)); } - @PUT + @POST @AuthRequired @Path("{id}/versions/{versionId}/deaccession") public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) { if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) { - return badRequest("Only " + DS_VERSION_LATEST_PUBLISHED + " or a specific version can be deaccessioned"); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED))); 
} return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); @@ -3941,7 +3941,7 @@ public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathPa try { datasetVersion.setArchiveNote(deaccessionForwardURL); } catch (IllegalArgumentException iae) { - return error(Response.Status.BAD_REQUEST, "Invalid deaccession forward URL: " + iae.getMessage()); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage()))); } } execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false)); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 997f0470cc3..6e7ed55a768 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2623,7 +2623,8 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date datasets.api.curationstatus=Curation Status - +datasets.api.deaccessionDataset.invalid.version.identifier.error=Only {0} or a specific version can be deaccessioned +datasets.api.deaccessionDataset.invalid.forward.url=Invalid deaccession forward URL: {0} #Dataverses.java dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found. 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5c1eb66b63d..cfb430e6995 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; import io.restassured.RestAssured; +import static edu.harvard.iq.dataverse.DatasetVersion.ARCHIVE_NOTE_MAX_LENGTH; import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.RestAssured.given; @@ -15,6 +16,7 @@ import java.util.*; import java.util.logging.Logger; +import org.apache.commons.lang3.RandomStringUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -3608,26 +3610,54 @@ public void deaccessionDataset() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - // Test that draft and latest version constants are not allowed - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_DRAFT, apiToken); - deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST, apiToken); - deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + String testDeaccessionReason = "Test deaccession reason."; + String testDeaccessionForwardURL = "http://demo.dataverse.org"; + + // Test that draft and latest version constants are not allowed and a bad request error is received + String expectedInvalidVersionIdentifierError = BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)); + + Response deaccessionDatasetResponse = 
UtilIT.deaccessionDataset(datasetId, DS_VERSION_DRAFT, testDeaccessionReason, testDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(expectedInvalidVersionIdentifierError)); + + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST, testDeaccessionReason, testDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(expectedInvalidVersionIdentifierError)); // Test that a not found error occurs when there is no published version available - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); - // Test that the dataset is successfully deaccessioned when published + // Publish test dataset Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); + + // Test that a bad request error is received when the forward URL exceeds ARCHIVE_NOTE_MAX_LENGTH + String testInvalidDeaccessionForwardURL = RandomStringUtils.randomAlphabetic(ARCHIVE_NOTE_MAX_LENGTH + 1); + + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testInvalidDeaccessionForwardURL, apiToken); + 
deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", containsString(testInvalidDeaccessionForwardURL)); + + // Test that the dataset is successfully deaccessioned when published and valid deaccession params are sent + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // Test that a not found error occurs when the only published version has already been deaccessioned - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Test that a dataset can be deaccessioned without forward URL + createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8c6a2d6e75d..9a5ef76a5ff 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3400,13 +3400,16 @@ static 
Response getHasBeenDeleted(String dataFileId, String apiToken) { .get("/api/files/" + dataFileId + "/hasBeenDeleted"); } - static Response deaccessionDataset(Integer datasetId, String version, String apiToken) { + static Response deaccessionDataset(Integer datasetId, String version, String deaccessionReason, String deaccessionForwardURL, String apiToken) { JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("deaccessionReason", "Test deaccession."); + jsonObjectBuilder.add("deaccessionReason", deaccessionReason); + if (deaccessionForwardURL != null) { + jsonObjectBuilder.add("deaccessionForwardURL", deaccessionForwardURL); + } String jsonString = jsonObjectBuilder.build().toString(); return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .body(jsonString) - .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); + .post("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } } From cb1f3e65fc06302d37dae280714ffb0bce764b39 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 2 Oct 2023 13:26:46 -0400 Subject: [PATCH 078/252] #9507 prelim check in --- .../edu/harvard/iq/dataverse/DatasetPage.java | 19 ++++++++++++++++++- src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/dataset.xhtml | 8 ++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index f99c10b2b79..3e5b2d78f68 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -61,6 +61,7 @@ import edu.harvard.iq.dataverse.util.WebloaderUtil; import edu.harvard.iq.dataverse.validation.URLValidator; import edu.harvard.iq.dataverse.workflows.WorkflowComment; +import edu.harvard.iq.dataverse.Dataverse; import java.io.File; import java.io.FileOutputStream; @@ -3423,7 +3424,23 @@ private Boolean 
saveLink(Dataverse dataverse){ } return retVal; } - + + private String alreadyLinkedDataverses = null; + + public String getAlreadyLinkedDataverses(){ + if (alreadyLinkedDataverses != null) { + return alreadyLinkedDataverses; + } + List dataverseList = dataverseService.findDataversesThatLinkToThisDatasetId(dataset.getId()); + for (Dataverse dv: dataverseList){ + if (alreadyLinkedDataverses == null){ + alreadyLinkedDataverses = dv.getCurrentName(); + } else { + alreadyLinkedDataverses = alreadyLinkedDataverses + ", " + dv.getCurrentName(); + } + } + return alreadyLinkedDataverses; + } public List completeLinkingDataverse(String query) { dataset = datasetService.find(dataset.getId()); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 55ffcd2f57d..2f5b0f64e6c 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2494,6 +2494,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: +dataset.linking.popop.already.linked.note=Note: the dataset is already linked to the following dataverses: datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index a9f1ff339f0..6cc1f5cc45c 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1598,6 +1598,14 @@ +

    + + #{bundle.dataset.linking.popop.already.linked.note} + + + #{DatasetPage.alreadyLinkedDataverses} + +
    Date: Mon, 2 Oct 2023 14:07:19 -0400 Subject: [PATCH 079/252] #9507 fix bundle/format --- src/main/java/propertyFiles/Bundle.properties | 2 +- src/main/webapp/dataset.xhtml | 8 ++------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 2f5b0f64e6c..e14374780c8 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2494,7 +2494,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: -dataset.linking.popop.already.linked.note=Note: the dataset is already linked to the following dataverses: +dataset.linking.popop.already.linked.note=Note: the dataset is already linked to the following dataverses: datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 6cc1f5cc45c..eccec7bf6a3 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1599,12 +1599,8 @@
    - - #{bundle.dataset.linking.popop.already.linked.note} - - - #{DatasetPage.alreadyLinkedDataverses} - + +
    Date: Mon, 2 Oct 2023 16:40:39 -0400 Subject: [PATCH 080/252] #9507 update ui; add refresh logic --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- src/main/java/propertyFiles/Bundle.properties | 2 +- src/main/webapp/dataset.xhtml | 4 +++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 3e5b2d78f68..d53db206afa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3388,7 +3388,7 @@ public void saveLinkingDataverses(ActionEvent evt) { FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), linkingDataverseErrorMessage); FacesContext.getCurrentInstance().addMessage(null, message); } - + alreadyLinkedDataverses = null; //force update to list of linked dataverses } private String linkingDataverseErrorMessage = ""; diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index b5992a35a3f..0c96192ee48 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2497,7 +2497,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: -dataset.linking.popop.already.linked.note=Note: the dataset is already linked to the following dataverses: +dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s): datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. 
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index eccec7bf6a3..58ee9b6dbdc 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1599,8 +1599,10 @@
    - + +   +
    Date: Tue, 3 Oct 2023 09:49:13 +0100 Subject: [PATCH 081/252] Refactor: simpler where condition for getOriginalTabularFilesSize query --- .../harvard/iq/dataverse/DatasetVersionFilesServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 66e0ec5b5fe..f957f7473dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -269,7 +269,7 @@ private long getOriginalTabularFilesSize(DatasetVersion datasetVersion) { .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) .from(dataTable) - .where(fileMetadata.dataFile.dataTables.isNotEmpty().and(dataTable.dataFile.eq(fileMetadata.dataFile))) + .where(dataTable.dataFile.eq(fileMetadata.dataFile)) .select(dataTable.originalFileSize.sum()).fetchFirst(); return (result == null) ? 
0 : result; } From 47da5a4508b2c8d2d38531ad370da81fac1bce7b Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 3 Oct 2023 09:56:04 +0100 Subject: [PATCH 082/252] Changed: using the known size of the tab file in IT instead of obtaining it from response --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index cab468fb1e9..b3c6535b493 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3668,8 +3668,7 @@ public void getDownloadSize() throws IOException, InterruptedException { Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Get the original tabular file size - int tabularOriginalSize = Integer.parseInt(uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.filesize")); + int tabularOriginalSize = 157; // Ensure tabular file is ingested Thread.sleep(2000); From 91db30212d7f3b7c384ceeb1f04c9fcb9f12b808 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 3 Oct 2023 10:12:56 +0100 Subject: [PATCH 083/252] Fixed: failing IT due to missing params when calling deaccessionDataset --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index cfb430e6995..b23852e8221 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3499,7 +3499,7 @@ public void getVersionFiles() throws IOException { Response 
publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // includeDeaccessioned false @@ -3581,7 +3581,7 @@ public void getVersionFileCounts() throws IOException { Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // includeDeaccessioned false From 982582bfc6e9f15d19cc13ecb22dc0e1dcfd93c8 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 3 Oct 2023 09:59:45 -0400 Subject: [PATCH 084/252] #9507 update bundle dataverse to collection --- src/main/java/propertyFiles/Bundle.properties | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 0c96192ee48..c9b212ffcf1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -874,23 +874,23 @@ dataverse.publish.header=Publish Dataverse dataverse.nopublished=No Published Dataverses dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. 
dataverse.contact=Email Dataverse Contact -dataverse.link=Link Dataverse -dataverse.link.btn.tip=Link to Your Dataverse -dataverse.link.yourDataverses=Your Dataverse -dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name -dataverse.link.save=Save Linked Dataverse -dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. -dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. -dataverse.link.dataset.none=No linkable dataverses available. -dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. -dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started. -dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. +dataverse.link=Link Collection +dataverse.link.btn.tip=Link to Your Collection +dataverse.link.yourDataverses=Your Collection +dataverse.link.yourDataverses.inputPlaceholder=Enter Collection Name +dataverse.link.save=Save Linked collection +dataverse.link.dataverse.choose=Choose which of your collection you would like to link this collection to. +dataverse.link.dataset.choose=Enter the name of the collection you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. +dataverse.link.dataset.none=No linkable collections available. +dataverse.link.no.choice=You have one collection you can add linked collection and datasets in. +dataverse.link.no.linkable=To be able to link a collection or dataset, you need to have your own collection. Create a collection to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible collections. 
dataverse.savedsearch.link=Link Search dataverse.savedsearch.searchquery=Search dataverse.savedsearch.filterQueries=Facets dataverse.savedsearch.save=Save Linked Search -dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. -dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. +dataverse.savedsearch.dataverse.choose=Choose which of your collection you would like to link this search to. +dataverse.savedsearch.no.choice=You have one collection to which you may add a saved search. # Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test dataverse.saved.search.success=The saved search has been successfully linked to {0}. dataverse.saved.search.failure=The saved search was not able to be linked. @@ -2497,7 +2497,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: -dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s): +dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following collection(s): datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. 
From 47369ffe8b97d423a8ac63e3b8623d45fc82e000 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 3 Oct 2023 10:31:48 -0400 Subject: [PATCH 085/252] #9507 unneeded import --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 7713558852c..e9109c2b82b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -61,7 +61,6 @@ import edu.harvard.iq.dataverse.util.WebloaderUtil; import edu.harvard.iq.dataverse.validation.URLValidator; import edu.harvard.iq.dataverse.workflows.WorkflowComment; -import edu.harvard.iq.dataverse.Dataverse; import java.io.File; import java.io.FileOutputStream; From 6f957a79a5744c83216e40b83370bcffc47be418 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 3 Oct 2023 17:39:38 -0400 Subject: [PATCH 086/252] user is required --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b9a104d8eaa..52ca9cd748f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -580,13 +580,14 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc, * @return */ @GET + @AuthRequired @Path("{id}/versions/{versionId}/linkset") - public Response getLinkset(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, + public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { if (":draft".equals(versionId)) { return badRequest("Signposting is not supported on the 
:draft version"); } - DataverseRequest req = createDataverseRequest(null); + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); try { DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); return Response From 9f0b8102904bb663dce8c50203d32663550e2095 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Oct 2023 10:47:54 -0400 Subject: [PATCH 087/252] more tests and docs #9952, #9953, #9957 --- doc/release-notes/9955-Signposting-updates.md | 8 +++++++- doc/sphinx-guides/source/api/native-api.rst | 2 +- .../edu/harvard/iq/dataverse/api/SignpostingIT.java | 11 +++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md index 92168231895..db0e27e51c5 100644 --- a/doc/release-notes/9955-Signposting-updates.md +++ b/doc/release-notes/9955-Signposting-updates.md @@ -1 +1,7 @@ -This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. \ No newline at end of file +This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec. + +To fix #9952, we surround the license info with `<` and `>`. + +To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information + +To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. 
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index e181a2a5546..bc186720252 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2200,7 +2200,7 @@ Here is an example of a "Link" header: The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. -The reponse includes a JSON object conforming to the `Signposting `__ specification. +The reponse includes a JSON object conforming to the `Signposting `__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object. Signposting is not supported for draft dataset versions. .. code-block:: bash diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java index b41f62ae28f..75f514f3398 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java @@ -80,6 +80,7 @@ public void testSignposting() { assertTrue(linkHeader.contains(datasetPid)); assertTrue(linkHeader.contains("cite-as")); assertTrue(linkHeader.contains("describedby")); + assertTrue(linkHeader.contains(";rel=\"license\"")); Pattern pattern = Pattern.compile("<([^<]*)> ; rel=\"linkset\";type=\"application\\/linkset\\+json\""); Matcher matcher = pattern.matcher(linkHeader); @@ -101,6 +102,16 @@ public void testSignposting() { assertTrue(lso.getString("anchor").indexOf("/dataset.xhtml?persistentId=" + datasetPid) > 0); assertTrue(lso.containsKey("describedby")); + // Test export URL from link header + // regex inspired by https://stackoverflow.com/questions/68860255/how-to-match-the-closest-opening-and-closing-brackets + Pattern exporterPattern = 
Pattern.compile("[<\\[][^()\\[\\]]*?exporter=schema.org[^()\\[\\]]*[>\\]]"); + Matcher exporterMatcher = exporterPattern.matcher(linkHeader); + exporterMatcher.find(); + + Response exportDataset = UtilIT.exportDataset(datasetPid, "schema.org"); + exportDataset.prettyPrint(); + exportDataset.then().assertThat().statusCode(OK.getStatusCode()); + } } From eb56c502dc2179754fa144bf0f354da444612ea9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Sep 2023 16:26:21 -0400 Subject: [PATCH 088/252] allow fast xhtml edits in Docker #9590 --- docker-compose-dev.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 694f2046ca8..930bb1230f5 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -18,6 +18,7 @@ services: - DATAVERSE_AUTH_OIDC_CLIENT_ID=test - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test + - DATAVERSE_JSF_REFRESH_PERIOD=1 ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) @@ -31,6 +32,9 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets + # Uncomment for changes to xhtml in Netbeans to be deployed immediately. + # Replace 6.0 with the current version. 
+ # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse tmpfs: - /dumps:mode=770,size=2052M,uid=1000,gid=1000 - /tmp:mode=770,size=2052M,uid=1000,gid=1000 From e2fa09680472fdcda617efb430d0b14ccce70ab9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Oct 2023 12:15:18 -0400 Subject: [PATCH 089/252] instead of Netbeans, be generic about IDEs/tools #9590 --- docker-compose-dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 930bb1230f5..d4564ab1335 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -32,7 +32,7 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets - # Uncomment for changes to xhtml in Netbeans to be deployed immediately. + # Uncomment for changes to xhtml to be deployed immediately (if supported your IDE or toolchain). # Replace 6.0 with the current version. # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse tmpfs: From 9d7108b7ed2759dade7f6d1b67a1ec1ab541cc95 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 4 Oct 2023 23:59:11 +0200 Subject: [PATCH 090/252] chore(build): use stable Rewrite releases for Jakarta EE 10 #8305 --- modules/dataverse-parent/pom.xml | 3 ++- pom.xml | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index bfa11af6c70..b6d846b49bc 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -426,6 +426,7 @@ https://artifacts.unidata.ucar.edu/repository/unidata-all/ + + --> diff --git a/pom.xml b/pom.xml index e70b723cad5..5536bcccb05 100644 --- a/pom.xml +++ b/pom.xml @@ -359,12 +359,12 @@ org.ocpsoft.rewrite rewrite-servlet - 6.0.0-SNAPSHOT + 10.0.0.Final org.ocpsoft.rewrite rewrite-config-prettyfaces - 6.0.0-SNAPSHOT + 10.0.0.Final edu.ucsb.nceas From 365b18cf2ba70d2ac192a04475ede920eb11aa6c Mon Sep 17 00:00:00 2001 From: Lehebax Date: Thu, 
5 Oct 2023 12:07:41 +0530 Subject: [PATCH 091/252] Fixed the equals() method check --- .../java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java | 2 +- src/main/java/edu/harvard/iq/dataverse/DataverseContact.java | 2 +- src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java | 2 +- .../edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java | 2 +- src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java index 7746099818e..8ac98500890 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java @@ -113,7 +113,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetField)) { + if (!(object instanceof DatasetFieldDefaultValue)) { return false; } DatasetFieldDefaultValue other = (DatasetFieldDefaultValue) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java index d77767985eb..9f86a03639a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java @@ -99,7 +99,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseContact)) { return false; } DataverseContact other = (DataverseContact) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java index 768c2308e50..83a2d8fdb8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java @@ -93,7 +93,7 @@ public int hashCode() { 
@Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseFacet)) { return false; } DataverseFacet other = (DataverseFacet) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java index 39ad6ca9520..d30d94cd034 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java @@ -85,7 +85,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseFeaturedDataverse)) { return false; } DataverseFeaturedDataverse other = (DataverseFeaturedDataverse) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java index 539669328a7..7f57d16b95a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java @@ -181,7 +181,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseTheme)) { return false; } DataverseTheme other = (DataverseTheme) object; From f42587e1e33af7170185cc24ce382cb402d72533 Mon Sep 17 00:00:00 2001 From: Lehebax Date: Thu, 5 Oct 2023 19:18:37 +0530 Subject: [PATCH 092/252] Added unit tests for the fixed equals() methods --- .../DatasetFieldDefaultValueTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseContactTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseFacetTest.java | 47 +++++++++++++++++++ .../DataverseFeaturedDataverseTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseThemeTest.java | 47 +++++++++++++++++++ 5 files changed, 235 insertions(+) create mode 100644 
src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java new file mode 100644 index 00000000000..999fadaae06 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DatasetFieldDefaultValueTest { + private DatasetFieldDefaultValue dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DatasetFieldDefaultValue(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetField datasetField = new DatasetField(); + + assertFalse(this.dataverseContact.equals(datasetField)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(2L); + + 
assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java new file mode 100644 index 00000000000..2abb10a485d --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseContactTest { + private DataverseContact dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DataverseContact(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseContact.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(2L); + + assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java new file mode 100644 index 00000000000..7ae50439c10 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java @@ -0,0 +1,47 @@ +package 
edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFacetTest { + private DataverseFacet dataverseFacet; + + @BeforeEach + public void before() { + this.dataverseFacet = new DataverseFacet(); + this.dataverseFacet.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFacet.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFacet.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseFacet dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(1L); + + assertTrue(this.dataverseFacet.equals(dataverseFacet1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFacet dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(2L); + + assertFalse(this.dataverseFacet.equals(dataverseFacet1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java new file mode 100644 index 00000000000..b024dc3bfd3 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFeaturedDataverseTest { + private DataverseFeaturedDataverse dataverseFeaturedDataverse; + + @BeforeEach + 
public void before() { + this.dataverseFeaturedDataverse = new DataverseFeaturedDataverse(); + this.dataverseFeaturedDataverse.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFeaturedDataverse.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFeaturedDataverse.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(1L); + + assertTrue(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(2L); + + assertFalse(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java new file mode 100644 index 00000000000..e6721e34c6f --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseThemeTest { + private DataverseTheme dataverseTheme; + + @BeforeEach + public void before() { + this.dataverseTheme = new DataverseTheme(); + this.dataverseTheme.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseTheme.equals(null)); + } + + @Test + public void 
testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseTheme.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseTheme dataverseTheme1 = new DataverseTheme(); + dataverseTheme1.setId(1L); + + assertTrue(this.dataverseTheme.equals(dataverseTheme1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseTheme dataverseTheme1 = new DataverseTheme(); + dataverseTheme1.setId(2L); + + assertFalse(this.dataverseTheme.equals(dataverseTheme1)); + } +} \ No newline at end of file From 56994b16b9a8dd92d550d491b5df00b542450e64 Mon Sep 17 00:00:00 2001 From: bencomp Date: Fri, 6 Oct 2023 00:55:09 +0200 Subject: [PATCH 093/252] Use JsonUtil.getJsonObject to prevent resource leaks --- .../harvard/iq/dataverse/api/Datasets.java | 28 ++++++++----------- .../harvard/iq/dataverse/api/Dataverses.java | 11 ++++---- .../iq/dataverse/api/HarvestingClients.java | 9 +++--- .../edu/harvard/iq/dataverse/api/Prov.java | 6 ++-- .../api/imports/ImportServiceBean.java | 15 ++++------ .../dataverse/globus/GlobusServiceBean.java | 21 ++++++++------ .../DatasetMetricsServiceBean.java | 9 ++---- 7 files changed, 45 insertions(+), 54 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 704ec8f1989..aacfb78cc2e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -93,8 +93,8 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.search.IndexServiceBean; import static 
edu.harvard.iq.dataverse.util.json.JsonPrinter.*; @@ -109,7 +109,6 @@ import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.net.URI; import java.sql.Timestamp; import java.text.MessageFormat; @@ -675,10 +674,10 @@ public Response updateDraftVersion(@Context ContainerRequestContext crc, String return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); } - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json); // clear possibly stale fields from the incoming dataset version. @@ -834,10 +833,10 @@ public Response deleteVersionMetadata(@Context ContainerRequestContext crc, Stri } private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) { - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -991,10 +990,10 @@ public Response editVersionMetadata(@Context ContainerRequestContext crc, String private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){ - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); 
dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -1441,8 +1440,7 @@ public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); Embargo embargo = new Embargo(); @@ -1585,8 +1583,7 @@ public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); List datasetFiles = dataset.getFiles(); List embargoFilesToUnset = new LinkedList<>(); @@ -2101,8 +2098,7 @@ public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam( if (jsonBody == null || jsonBody.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn)."); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); try { Dataset dataset = findDatasetOrDie(idSupplied); String reasonForReturn = null; @@ -2354,9 +2350,7 @@ public Response completeMPUpload(@Context ContainerRequestContext crc, String pa List eTagList = new ArrayList(); logger.info("Etags: " + partETagBody); try { - JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); + JsonObject object = JsonUtil.getJsonObject(partETagBody); for (String partNo : object.keySet()) { eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a60775cbd38..c64ba42999c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -78,8 +78,9 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import java.io.StringReader; import java.util.Collections; import java.util.LinkedList; import java.util.List; @@ -178,8 +179,8 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, Dataverse d; JsonObject dvJson; - try (StringReader rdr = new StringReader(body)) { - dvJson = Json.createReader(rdr).readObject(); + try { + dvJson = JsonUtil.getJsonObject(body); d = jsonParser().parseDataverse(dvJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); @@ -559,8 +560,8 @@ public Response recreateDataset(@Context ContainerRequestContext crc, String jso } private Dataset parseDataset(String datasetJson) throws WrappedResponse { - try (StringReader rdr = new StringReader(datasetJson)) { - return jsonParser().parseDataset(Json.createReader(rdr).readObject()); + try { + return jsonParser().parseDataset(JsonUtil.getJsonObject(datasetJson)); } catch (JsonParsingException | JsonParseException jpe) { logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}", datasetJson); throw new WrappedResponse(error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage())); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java index d7eec9f5757..dfc9f48dd1a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.json.JsonObjectBuilder; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import java.io.IOException; @@ -164,8 +165,8 @@ public Response createHarvestingClient(@Context ContainerRequestContext crc, Str return wr.getResponse(); } - try ( StringReader rdr = new StringReader(jsonBody) ) { - JsonObject json = Json.createReader(rdr).readObject(); + try { + JsonObject json = JsonUtil.getJsonObject(jsonBody); // Check that the client with this name doesn't exist yet: // (we could simply let the command fail, but that does not result @@ -261,9 +262,9 @@ public Response modifyHarvestingClient(@Context ContainerRequestContext crc, Str String ownerDataverseAlias = harvestingClient.getDataverse().getAlias(); - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); HarvestingClient newHarvestingClient = new HarvestingClient(); String newDataverseAlias = jsonParser().parseHarvestingClient(json, newHarvestingClient); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java index 37b4792920f..7f81ca20988 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java @@ -12,7 +12,8 @@ import edu.harvard.iq.dataverse.engine.command.impl.PersistProvJsonCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.util.BundleUtil; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.util.HashMap; import java.util.logging.Logger; import jakarta.inject.Inject; @@ -109,11 +110,10 @@ public Response addProvFreeForm(@Context ContainerRequestContext crc, String bod if(!systemConfig.isProvCollectionEnabled()) { return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled")); } - StringReader rdr = new StringReader(body); JsonObject jsonObj = null; try { - jsonObj = Json.createReader(rdr).readObject(); + jsonObj = JsonUtil.getJsonObject(body); } catch (JsonException ex) { return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.freeformInvalidJson")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index bcb67b180c8..c17ba909230 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -36,12 +36,12 @@ import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; -import java.io.StringReader; import java.nio.file.Files; import java.util.ArrayList; import java.util.Date; @@ -60,7 +60,6 @@ import jakarta.json.Json; 
import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.validation.ConstraintViolation; @@ -259,9 +258,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve throw new ImportException("Failed to transform XML metadata format "+metadataFormat+" into a DatasetDTO"); } } - - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, harvestingClient); @@ -396,10 +394,8 @@ public JsonObject ddiToJson(String xmlToParse) throws ImportException, XMLStream // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); - return obj; + return JsonUtil.getJsonObject(json); } public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { @@ -416,8 +412,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService); diff --git 
a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d2613422be9..d6943ec3511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -596,13 +596,14 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin Thread.sleep(5000); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: I think an (parsing) exception should stop the process, shouldn't it? } - logger.info("json: " + JsonUtil.prettyPrint(jsonObject)); + logger.log(Level.INFO, "json: {0}", JsonUtil.prettyPrint(jsonObject)); String taskIdentifier = jsonObject.getString("taskIdentifier"); @@ -808,11 +809,12 @@ private String addFiles(String curlCommand, Logger globusLogger) { sb.append(line); globusLogger.info(" API Output : " + sb.toString()); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(sb.toString())) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(sb.toString()); } catch (Exception jpe) { jpe.printStackTrace(); globusLogger.log(Level.SEVERE, "Error parsing dataset json."); + // TODO: a parsing exception should cause the process to stop. 
} status = jsonObject.getString("status"); @@ -853,11 +855,12 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro globusLogger.info("Starting an globusDownload "); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: stop the process after this parsing exception. } String taskIdentifier = jsonObject.getString("taskIdentifier"); diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java index 0925c164bf4..0fb7e9f1e6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java @@ -3,7 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; @@ -14,10 +15,8 @@ import jakarta.ejb.EJBException; import jakarta.ejb.Stateless; import jakarta.inject.Named; -import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; import jakarta.json.JsonValue; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -125,9 +124,7 @@ public List parseSushiReport(JsonObject report, Dataset dataset) List datasetMetricsDataset = new ArrayList<>(); String globalId = null; Dataset ds = null; - StringReader rdr = new StringReader(reportDataset.toString()); - 
JsonReader jrdr = Json.createReader(rdr); - JsonObject obj = jrdr.readObject(); + JsonObject obj = JsonUtil.getJsonObject(reportDataset.toString()); String jsonGlobalId = ""; String globalIdType = ""; if (obj.containsKey("dataset-id")) { From 7d72db1778802a72022cd625ddd545290d96405b Mon Sep 17 00:00:00 2001 From: bencomp Date: Fri, 6 Oct 2023 00:55:09 +0200 Subject: [PATCH 094/252] Add --- .../harvard/iq/dataverse/api/Datasets.java | 28 ++++++++----------- .../harvard/iq/dataverse/api/Dataverses.java | 11 ++++---- .../iq/dataverse/api/HarvestingClients.java | 9 +++--- .../edu/harvard/iq/dataverse/api/Prov.java | 6 ++-- .../api/imports/ImportServiceBean.java | 15 ++++------ .../dataverse/globus/GlobusServiceBean.java | 21 ++++++++------ .../DatasetMetricsServiceBean.java | 9 ++---- .../settings/SettingsServiceBean.java | 16 ++++------- .../iq/dataverse/workflows/WorkflowUtil.java | 7 ++--- 9 files changed, 54 insertions(+), 68 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 704ec8f1989..aacfb78cc2e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -93,8 +93,8 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.search.IndexServiceBean; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; @@ -109,7 +109,6 @@ import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.net.URI; import java.sql.Timestamp; import java.text.MessageFormat; @@ -675,10 +674,10 @@ public Response updateDraftVersion(@Context 
ContainerRequestContext crc, String return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); } - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json); // clear possibly stale fields from the incoming dataset version. @@ -834,10 +833,10 @@ public Response deleteVersionMetadata(@Context ContainerRequestContext crc, Stri } private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) { - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -991,10 +990,10 @@ public Response editVersionMetadata(@Context ContainerRequestContext crc, String private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){ - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -1441,8 +1440,7 @@ public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = 
JsonUtil.getJsonObject(jsonBody); Embargo embargo = new Embargo(); @@ -1585,8 +1583,7 @@ public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); List datasetFiles = dataset.getFiles(); List embargoFilesToUnset = new LinkedList<>(); @@ -2101,8 +2098,7 @@ public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam( if (jsonBody == null || jsonBody.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn)."); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); try { Dataset dataset = findDatasetOrDie(idSupplied); String reasonForReturn = null; @@ -2354,9 +2350,7 @@ public Response completeMPUpload(@Context ContainerRequestContext crc, String pa List eTagList = new ArrayList(); logger.info("Etags: " + partETagBody); try { - JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); + JsonObject object = JsonUtil.getJsonObject(partETagBody); for (String partNo : object.keySet()) { eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a60775cbd38..c64ba42999c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -78,8 +78,9 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import 
edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import java.io.StringReader; import java.util.Collections; import java.util.LinkedList; import java.util.List; @@ -178,8 +179,8 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, Dataverse d; JsonObject dvJson; - try (StringReader rdr = new StringReader(body)) { - dvJson = Json.createReader(rdr).readObject(); + try { + dvJson = JsonUtil.getJsonObject(body); d = jsonParser().parseDataverse(dvJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); @@ -559,8 +560,8 @@ public Response recreateDataset(@Context ContainerRequestContext crc, String jso } private Dataset parseDataset(String datasetJson) throws WrappedResponse { - try (StringReader rdr = new StringReader(datasetJson)) { - return jsonParser().parseDataset(Json.createReader(rdr).readObject()); + try { + return jsonParser().parseDataset(JsonUtil.getJsonObject(datasetJson)); } catch (JsonParsingException | JsonParseException jpe) { logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}", datasetJson); throw new WrappedResponse(error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage())); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java index d7eec9f5757..dfc9f48dd1a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.json.JsonObjectBuilder; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import java.io.IOException; @@ -164,8 +165,8 @@ public Response createHarvestingClient(@Context ContainerRequestContext crc, Str return wr.getResponse(); } - try ( StringReader rdr = new StringReader(jsonBody) ) { - JsonObject json = Json.createReader(rdr).readObject(); + try { + JsonObject json = JsonUtil.getJsonObject(jsonBody); // Check that the client with this name doesn't exist yet: // (we could simply let the command fail, but that does not result @@ -261,9 +262,9 @@ public Response modifyHarvestingClient(@Context ContainerRequestContext crc, Str String ownerDataverseAlias = harvestingClient.getDataverse().getAlias(); - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); HarvestingClient newHarvestingClient = new HarvestingClient(); String newDataverseAlias = jsonParser().parseHarvestingClient(json, newHarvestingClient); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java index 37b4792920f..7f81ca20988 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java @@ -12,7 +12,8 @@ import edu.harvard.iq.dataverse.engine.command.impl.PersistProvJsonCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.util.BundleUtil; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.util.HashMap; import java.util.logging.Logger; import jakarta.inject.Inject; @@ -109,11 +110,10 @@ public Response addProvFreeForm(@Context ContainerRequestContext crc, String bod if(!systemConfig.isProvCollectionEnabled()) { return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled")); } - StringReader rdr = new StringReader(body); JsonObject jsonObj = null; try { - jsonObj = Json.createReader(rdr).readObject(); + jsonObj = JsonUtil.getJsonObject(body); } catch (JsonException ex) { return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.freeformInvalidJson")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index bcb67b180c8..c17ba909230 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -36,12 +36,12 @@ import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; -import java.io.StringReader; import java.nio.file.Files; import java.util.ArrayList; import java.util.Date; @@ -60,7 +60,6 @@ import jakarta.json.Json; 
import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.validation.ConstraintViolation; @@ -259,9 +258,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve throw new ImportException("Failed to transform XML metadata format "+metadataFormat+" into a DatasetDTO"); } } - - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, harvestingClient); @@ -396,10 +394,8 @@ public JsonObject ddiToJson(String xmlToParse) throws ImportException, XMLStream // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); - return obj; + return JsonUtil.getJsonObject(json); } public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { @@ -416,8 +412,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService); diff --git 
a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d2613422be9..d6943ec3511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -596,13 +596,14 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin Thread.sleep(5000); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: I think an (parsing) exception should stop the process, shouldn't it? } - logger.info("json: " + JsonUtil.prettyPrint(jsonObject)); + logger.log(Level.INFO, "json: {0}", JsonUtil.prettyPrint(jsonObject)); String taskIdentifier = jsonObject.getString("taskIdentifier"); @@ -808,11 +809,12 @@ private String addFiles(String curlCommand, Logger globusLogger) { sb.append(line); globusLogger.info(" API Output : " + sb.toString()); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(sb.toString())) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(sb.toString()); } catch (Exception jpe) { jpe.printStackTrace(); globusLogger.log(Level.SEVERE, "Error parsing dataset json."); + // TODO: a parsing exception should cause the process to stop. 
} status = jsonObject.getString("status"); @@ -853,11 +855,12 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro globusLogger.info("Starting an globusDownload "); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: stop the process after this parsing exception. } String taskIdentifier = jsonObject.getString("taskIdentifier"); diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java index 0925c164bf4..0fb7e9f1e6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java @@ -3,7 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; @@ -14,10 +15,8 @@ import jakarta.ejb.EJBException; import jakarta.ejb.Stateless; import jakarta.inject.Named; -import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; import jakarta.json.JsonValue; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -125,9 +124,7 @@ public List parseSushiReport(JsonObject report, Dataset dataset) List datasetMetricsDataset = new ArrayList<>(); String globalId = null; Dataset ds = null; - StringReader rdr = new StringReader(reportDataset.toString()); - 
JsonReader jrdr = Json.createReader(rdr); - JsonObject obj = jrdr.readObject(); + JsonObject obj = JsonUtil.getJsonObject(reportDataset.toString()); String jsonGlobalId = ""; String globalIdType = ""; if (obj.containsKey("dataset-id")) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 0aa403a5116..6b74810eb53 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -4,14 +4,12 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.api.ApiBlockingFilter; import edu.harvard.iq.dataverse.util.StringUtil; - +import edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.ejb.EJB; import jakarta.ejb.Stateless; import jakarta.inject.Named; -import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; import jakarta.json.JsonValue; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -20,7 +18,6 @@ import org.json.JSONException; import org.json.JSONObject; -import java.io.StringReader; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -696,8 +693,8 @@ public Long getValueForCompoundKeyAsLong(Key key, String param){ try { return Long.parseLong(val); } catch (NumberFormatException ex) { - try ( StringReader rdr = new StringReader(val) ) { - JsonObject settings = Json.createReader(rdr).readObject(); + try { + JsonObject settings = JsonUtil.getJsonObject(val); if(settings.containsKey(param)) { return Long.parseLong(settings.getString(param)); } else if(settings.containsKey("default")) { @@ -730,8 +727,8 @@ public Boolean getValueForCompoundKeyAsBoolean(Key key, String param) { return null; } - try (StringReader rdr = new StringReader(val)) { - JsonObject settings = 
Json.createReader(rdr).readObject(); + try { + JsonObject settings = JsonUtil.getJsonObject(val); if (settings.containsKey(param)) { return Boolean.parseBoolean(settings.getString(param)); } else if (settings.containsKey("default")) { @@ -897,8 +894,7 @@ public Map getBaseMetadataLanguageMap(Map languag if(mlString.isEmpty()) { mlString="[]"; } - JsonReader jsonReader = Json.createReader(new StringReader(mlString)); - JsonArray languages = jsonReader.readArray(); + JsonArray languages = JsonUtil.getJsonArray(mlString); for(JsonValue jv: languages) { JsonObject lang = (JsonObject) jv; languageMap.put(lang.getString("locale"), lang.getString("title")); diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java index 456b829ba61..b104f113db2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java @@ -3,7 +3,6 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.api.Util; -import java.io.StringReader; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; @@ -11,7 +10,7 @@ import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; - +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.workflow.step.Failure; import edu.harvard.iq.dataverse.workflow.step.Success; @@ -42,8 +41,8 @@ public static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVers } public static WorkflowStepResult parseResponse(String externalData) { - try (StringReader reader = new StringReader(externalData)) { - JsonObject response = Json.createReader(reader).readObject(); + try { + JsonObject response = JsonUtil.getJsonObject(externalData); String status = null; //Lower case is documented, upper case is 
deprecated if(response.containsKey("status")) { From 1440e653b8480c754f0669bb15f1b2cd92442522 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 14:48:30 +0100 Subject: [PATCH 095/252] Refactor: FileSearchCriteria to encapsulate all criteria options --- .../DatasetVersionFilesServiceBean.java | 54 +++++++++---------- .../iq/dataverse/FileSearchCriteria.java | 45 ++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 17 ++++-- .../iq/dataverse/util/json/JsonPrinter.java | 5 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 24 +++++---- 5 files changed, 97 insertions(+), 48 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 6006d937100..a436b10d340 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -29,6 +29,8 @@ import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; +import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; + @Stateless @Named public class DatasetVersionFilesServiceBean implements Serializable { @@ -44,17 +46,10 @@ public class DatasetVersionFilesServiceBean implements Serializable { /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} */ - public enum FileMetadatasOrderCriteria { + public enum FileOrderCriteria { NameAZ, NameZA, Newest, Oldest, Size, Type } - /** - * Status of the particular DataFile based on active embargoes and restriction state used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} - */ - public enum DataFileAccessStatus { - Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic - } - /** * Given a DatasetVersion, returns its total file metadata count * @@ 
-107,17 +102,17 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data } /** - * Given a DatasetVersion, returns its file metadata count per DataFileAccessStatus + * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per DataFileAccessStatus + * @return Map of file metadata counts per FileAccessStatus */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { - Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenRestricted); + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + Map allCounts = new HashMap<>(); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted); return allCounts; } @@ -127,29 +122,30 @@ public Map getFileMetadataCountPerAccessStatus(Datas * @param datasetVersion the DatasetVersion to access * @param limit for pagination, can be null * @param offset for pagination, can be null - * @param contentType for retrieving only files with this content type - * @param accessStatus for retrieving only files with this DataFileAccessStatus - * @param categoryName for retrieving only files categorized with this category name - * @param tabularTagName for retrieving only files categorized with this tabular tag name - * @param 
searchText for retrieving only files that contain the specified text within their labels or descriptions - * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @param searchCriteria for retrieving only files matching this criteria + * @param orderCriteria a FileOrderCriteria to order the results * @return a FileMetadata list from the specified DatasetVersion */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, String contentType, DataFileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText, FileMetadatasOrderCriteria orderCriteria) { + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); + String contentType = searchCriteria.getContentType(); if (contentType != null) { baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); if (accessStatus != null) { baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); } + String categoryName = searchCriteria.getCategoryName(); if (categoryName != null) { baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); } + String tabularTagName = searchCriteria.getTabularTagName(); if (tabularTagName != null) { baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); } + String searchText = searchCriteria.getSearchText(); if (searchText != null && !searchText.isEmpty()) { searchText = searchText.trim().toLowerCase(); baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); @@ -167,14 +163,14 @@ public 
List getFileMetadatas(DatasetVersion datasetVersion, Intege return baseQuery.fetch(); } - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus) { long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); if (fileMetadataCount > 0) { totalCounts.put(dataFileAccessStatus, fileMetadataCount); } } - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, DataFileAccessStatus accessStatus) { + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); return queryFactory .selectFrom(fileMetadata) @@ -182,16 +178,16 @@ private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, D .stream().count(); } - private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileMetadatasOrderCriteria orderCriteria) { + private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - if (orderCriteria == FileMetadatasOrderCriteria.Newest || orderCriteria == FileMetadatasOrderCriteria.Oldest) { + if (orderCriteria == FileOrderCriteria.Newest || orderCriteria == FileOrderCriteria.Oldest) { baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); } return baseQuery; } - private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileAccessStatus accessStatus) { + private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAccessStatus accessStatus) { QEmbargo embargo = fileMetadata.dataFile.embargo; 
BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); @@ -215,7 +211,7 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileA return accessStatusExpression; } - private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileMetadatasOrderCriteria orderCriteria) { + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileOrderCriteria orderCriteria) { DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); switch (orderCriteria) { case NameZA: diff --git a/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java new file mode 100644 index 00000000000..62f10c18bdf --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java @@ -0,0 +1,45 @@ +package edu.harvard.iq.dataverse; + +public class FileSearchCriteria { + + private final String contentType; + private final FileAccessStatus accessStatus; + private final String categoryName; + private final String tabularTagName; + private final String searchText; + + /** + * Status of the particular DataFile based on active embargoes and restriction state + */ + public enum FileAccessStatus { + Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic + } + + public FileSearchCriteria(String contentType, FileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText) { + this.contentType = contentType; + this.accessStatus = accessStatus; + this.categoryName = categoryName; + this.tabularTagName = tabularTagName; + this.searchText = searchText; + } + + public String getContentType() { + return contentType; + } + + public FileAccessStatus getAccessStatus() { + return accessStatus; + } + + public 
String getCategoryName() { + return categoryName; + } + + public String getTabularTagName() { + return tabularTagName; + } + + public String getSearchText() { + return searchText; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b3be55399d8..14fd1b2453c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -506,19 +506,26 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @Context HttpHeaders headers) { return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - DatasetVersionFilesServiceBean.DataFileAccessStatus dataFileAccessStatus; + FileSearchCriteria.FileAccessStatus dataFileAccessStatus; try { - dataFileAccessStatus = accessStatus != null ? DatasetVersionFilesServiceBean.DataFileAccessStatus.valueOf(accessStatus) : null; + dataFileAccessStatus = accessStatus != null ? 
FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); } - return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, contentType, dataFileAccessStatus, categoryName, tabularTagName, searchText, fileMetadatasOrderCriteria))); + FileSearchCriteria fileSearchCriteria = new FileSearchCriteria( + contentType, + dataFileAccessStatus, + categoryName, + tabularTagName, + searchText + ); + return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1fed0b233e4..70840c7502f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -41,7 +41,6 @@ import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonValue; import java.util.function.BiConsumer; import java.util.function.BinaryOperator; @@ -1108,9 +1107,9 @@ public static JsonObjectBuilder json(Map map) { return jsonObjectBuilder; } - public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { + public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - for (Map.Entry mapEntry : map.entrySet()) { + for (Map.Entry mapEntry : map.entrySet()) { jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); } return jsonObjectBuilder; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b9f09cc7c07..5d1a89aa555 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.FileSearchCriteria; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -3267,6 +3268,7 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } + @Test public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); @@ -3334,7 +3336,7 @@ public void getVersionFiles() throws IOException, InterruptedException { assertEquals(1, fileMetadatasCount); // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.NameZA.toString(), apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3347,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, 
DatasetVersionFilesServiceBean.FileOrderCriteria.Newest.toString(), apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3358,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Oldest.toString(), apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3369,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Size.toString(), apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3380,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), 
apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Type.toString(), apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3427,7 +3429,7 @@ public void getVersionFiles() throws IOException, InterruptedException { restrictFileResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); getVersionFilesResponseRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3452,7 +3454,7 @@ public void getVersionFiles() throws IOException, InterruptedException { createActiveFileEmbargoResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); getVersionFilesResponseEmbargoedThenPublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3461,7 +3463,7 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); assertEquals(1, 
fileMetadatasCount); - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3471,7 +3473,7 @@ public void getVersionFiles() throws IOException, InterruptedException { assertEquals(1, fileMetadatasCount); // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, null, apiToken); getVersionFilesResponsePublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3569,7 +3571,7 @@ public void getVersionFileCounts() throws IOException { assertEquals(2, responseCountPerContentTypeMap.get("image/png")); assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); - assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); - assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, 
responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); } } From 690ac1e96a2717774e04aefb11603ae126005559 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 15:29:45 +0100 Subject: [PATCH 096/252] Added: file search criteria params to getVersionFileCounts API endpoint (Pending IT to be added) --- .../DatasetVersionFilesServiceBean.java | 99 ++++++++++--------- .../harvard/iq/dataverse/api/Datasets.java | 48 ++++++--- 2 files changed, 89 insertions(+), 58 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index a436b10d340..9afd0513b62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -54,26 +54,32 @@ public enum FileOrderCriteria { * Given a DatasetVersion, returns its total file metadata count * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return long value of total file metadata count */ - public long getFileMetadataCount(DatasetVersion datasetVersion) { + public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())).stream().count(); + JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + return baseQuery.stream().count(); } /** * Given a DatasetVersion, returns its file metadata count per content type * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map 
of file metadata counts per content type */ - public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List contentTypeOccurrences = queryFactory + JPAQuery baseQuery = queryFactory .select(fileMetadata.dataFile.contentType, fileMetadata.count()) .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) - .groupBy(fileMetadata.dataFile.contentType).fetch(); + .groupBy(fileMetadata.dataFile.contentType); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List contentTypeOccurrences = baseQuery.fetch(); Map result = new HashMap<>(); for (Tuple occurrence : contentTypeOccurrences) { result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); @@ -85,15 +91,18 @@ public Map getFileMetadataCountPerContentType(DatasetVersion datas * Given a DatasetVersion, returns its file metadata count per category name * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per category name */ - public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List categoryNameOccurrences = queryFactory + JPAQuery baseQuery = queryFactory .select(dataFileCategory.name, fileMetadata.count()) .from(dataFileCategory, fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) - .groupBy(dataFileCategory.name).fetch(); + .groupBy(dataFileCategory.name); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List categoryNameOccurrences = 
baseQuery.fetch(); Map result = new HashMap<>(); for (Tuple occurrence : categoryNameOccurrences) { result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); @@ -105,14 +114,15 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per FileAccessStatus */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted, searchCriteria); return allCounts; } @@ -128,54 +138,31 @@ public Map getFileMetadataCountPerAccessStatus(DatasetVe */ public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); - - String contentType = searchCriteria.getContentType(); - if (contentType 
!= null) { - baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); - } - FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); - if (accessStatus != null) { - baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); - } - String categoryName = searchCriteria.getCategoryName(); - if (categoryName != null) { - baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); - } - String tabularTagName = searchCriteria.getTabularTagName(); - if (tabularTagName != null) { - baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); - } - String searchText = searchCriteria.getSearchText(); - if (searchText != null && !searchText.isEmpty()) { - searchText = searchText.trim().toLowerCase(); - baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); - } - + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); - if (limit != null) { baseQuery.limit(limit); } if (offset != null) { baseQuery.offset(offset); } - return baseQuery.fetch(); } - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus) { - long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus, FileSearchCriteria searchCriteria) { + long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus, searchCriteria); if (fileMetadataCount > 0) { totalCounts.put(dataFileAccessStatus, fileMetadataCount); } } - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, 
FileAccessStatus accessStatus) { + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory + JPAQuery baseQuery = queryFactory .selectFrom(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))) - .stream().count(); + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + return baseQuery.stream().count(); } private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { @@ -211,6 +198,30 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAcces return accessStatusExpression; } + private void applyFileSearchCriteriaToQuery(JPAQuery baseQuery, FileSearchCriteria searchCriteria) { + String contentType = searchCriteria.getContentType(); + if (contentType != null) { + baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); + } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); + if (accessStatus != null) { + baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); + } + String categoryName = searchCriteria.getCategoryName(); + if (categoryName != null) { + baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); + } + String tabularTagName = searchCriteria.getTabularTagName(); + if (tabularTagName != null) { + baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); + } + String searchText = searchCriteria.getSearchText(); + if (searchText != null && !searchText.isEmpty()) { + 
searchText = searchText.trim().toLowerCase(); + baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); + } + } + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileOrderCriteria orderCriteria) { DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); switch (orderCriteria) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 14fd1b2453c..ac32454c950 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -512,19 +512,18 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - FileSearchCriteria.FileAccessStatus dataFileAccessStatus; + FileSearchCriteria fileSearchCriteria; try { - dataFileAccessStatus = accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null; + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? 
FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); } - FileSearchCriteria fileSearchCriteria = new FileSearchCriteria( - contentType, - dataFileAccessStatus, - categoryName, - tabularTagName, - searchText - ); return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); } @@ -532,14 +531,35 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{versionId}/files/counts") - public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersionFileCounts(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response(req -> { + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? 
FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); - jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); - jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion))); - jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion))); + jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); + jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria))); return ok(jsonObjectBuilder); }, getRequestUser(crc)); } From a0870b8554c709f25fb3bc47e04f58e08e951f2f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 15:35:17 +0100 Subject: [PATCH 097/252] Refactor: using Bundle.properties string for bad request errors in getVersionFiles and getVersionFileCounts API endpoints --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 +++--- 
src/main/java/propertyFiles/Bundle.properties | 2 ++ 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index ac32454c950..f7a4b1d0d25 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -510,7 +510,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, try { fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria))); } FileSearchCriteria fileSearchCriteria; try { @@ -522,7 +522,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, searchText ); } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); @@ -552,7 +552,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc, searchText ); } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); 
JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 7b4befcca36..3128106d38f 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2646,6 +2646,8 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date datasets.api.curationstatus=Curation Status +datasets.api.version.files.invalid.order.criteria=Invalid order criteria: {0} +datasets.api.version.files.invalid.access.status=Invalid access status: {0} #Dataverses.java From 2abb36fc2f24e78ca75ebe0cbfc0a84a1345af26 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 17:00:56 +0100 Subject: [PATCH 098/252] Added: IT for getVersionFileCounts with criteria --- .../harvard/iq/dataverse/api/DatasetsIT.java | 127 +++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 22 ++- 2 files changed, 143 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5d1a89aa555..433628685b2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3484,6 +3484,13 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); assertEquals(3, fileMetadatasCount); + // Test invalid access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, invalidStatus, null, null, null, null, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", 
equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); + // Test Search Text Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, "test_1", null, apiToken); @@ -3519,7 +3526,7 @@ public void getVersionFiles() throws IOException, InterruptedException { } @Test - public void getVersionFileCounts() throws IOException { + public void getVersionFileCounts() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3557,8 +3564,10 @@ public void getVersionFileCounts() throws IOException { Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken); createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); + String testDatasetVersion = ":latest"; + + // Getting the file counts without criteria and assert each count is correct + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, null, null, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -3570,8 +3579,120 @@ public void getVersionFileCounts() throws IOException { assertEquals(4, (Integer) responseJsonPath.get("data.total")); assertEquals(2, responseCountPerContentTypeMap.get("image/png")); assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(3, 
responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test content type criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, "image/png", null, null, null, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(2, (Integer) responseJsonPath.get("data.total")); + assertEquals(2, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test access status criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = 
responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test invalid access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, invalidStatus, null, null, null, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); + + // Test category name criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, "testCategory", null, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, 
responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test search text criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, null, "test", apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test tabular tag name criteria + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, 
testDatasetVersion, null, null, null, tabularTagName, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8e333451c8d..6d0f0bfa752 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3358,10 +3358,26 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); } - static Response getVersionFileCounts(Integer datasetId, String version, String apiToken) { - return given() + static Response getVersionFileCounts(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String apiToken) { + RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + .contentType("application/json"); + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", 
contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } static Response setFileCategories(String dataFileId, String apiToken, List categories) { From 65df3d0f4bca41598dcc5cad741779d7d8fd5716 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 09:36:40 +0100 Subject: [PATCH 099/252] Added: count per tabular tag name to getVersionFileCounts API endpoint --- .../DatasetVersionFilesServiceBean.java | 23 +++++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 1 + .../iq/dataverse/util/json/JsonPrinter.java | 8 +++++++ .../harvard/iq/dataverse/api/DatasetsIT.java | 13 +++++++++++ 4 files changed, 45 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 9afd0513b62..b6b095f58dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -110,6 +110,29 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data return result; } + /** + * Given a DatasetVersion, returns its file metadata count per DataFileTag.TagType + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria + * @return Map of file metadata counts per DataFileTag.TagType + */ + public Map 
getFileMetadataCountPerTabularTagName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + JPAQuery baseQuery = queryFactory + .select(dataFileTag.type, fileMetadata.count()) + .from(dataFileTag, fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))) + .groupBy(dataFileTag.type); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List tagNameOccurrences = baseQuery.fetch(); + Map result = new HashMap<>(); + for (Tuple occurrence : tagNameOccurrences) { + result.put(occurrence.get(dataFileTag.type), occurrence.get(fileMetadata.count())); + } + return result; + } + /** * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f7a4b1d0d25..26d4dd01cf5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -559,6 +559,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc, jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria))); jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria))); jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, 
fileSearchCriteria))); return ok(jsonObjectBuilder); }, getRequestUser(crc)); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 70840c7502f..6fe1ca87028 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -1115,6 +1115,14 @@ public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + for (Map.Entry mapEntry : map.entrySet()) { + jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); + } + return jsonObjectBuilder; + } + public static Collector, JsonArrayBuilder> toJsonArray() { return new Collector, JsonArrayBuilder>() { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 433628685b2..53546133b27 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3574,6 +3574,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath(); LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); LinkedHashMap responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + LinkedHashMap responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(4, (Integer) responseJsonPath.get("data.total")); @@ -3581,6 +3582,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(1, 
responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(2, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); @@ -3593,6 +3595,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(2, (Integer) responseJsonPath.get("data.total")); @@ -3600,6 +3603,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(2, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); @@ -3612,6 +3616,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); 
responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(3, (Integer) responseJsonPath.get("data.total")); @@ -3619,6 +3624,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); @@ -3637,6 +3643,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(1, (Integer) responseJsonPath.get("data.total")); @@ -3644,6 +3651,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); @@ -3655,6 +3663,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = 
responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(3, (Integer) responseJsonPath.get("data.total")); @@ -3662,6 +3671,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); @@ -3686,12 +3696,15 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(1, (Integer) responseJsonPath.get("data.total")); assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values")); assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerTabularTagNameMap.size()); + assertEquals(1, responseCountPerTabularTagNameMap.get(tabularTagName)); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); } From 98a444c2108395fc562e0159d554ce1f9968686e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 09:45:15 +0100 Subject: [PATCH 100/252] Added: docs for 
extended getVersionFileCounts endpoint --- doc/sphinx-guides/source/api/native-api.rst | 52 +++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 97b41ffa98a..f05c4d42073 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1046,6 +1046,7 @@ The returned file counts are based on different criteria: - Total (The total file count) - Per content type - Per category name +- Per tabular tag name - Per access status (Possible values: Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic) .. code-block:: bash @@ -1062,6 +1063,57 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts" +Category name filtering is optionally supported. To return counts only for files to which the requested category has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?categoryName=Data" + +Tabular tag name filtering is also optionally supported. To return counts only for files to which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?tabularTagName=Survey" + +Content type filtering is also optionally supported. To return counts only for files matching the requested content type. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?contentType=image/png" + +Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return counts only for files that contain the text searched in one of such fields. + +Usage example: + +.. 
code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?searchText=word" + +File access filtering is also optionally supported. In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?accessStatus=Public" + +Please note that filtering values are case sensitive and must be correctly typed for the endpoint to recognize them. + +Keep in mind that you can combine all of the above query params depending on the results you are looking for. + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 7d0501cdc2982e591d99eab29b9569d2880ebf30 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 09:50:30 +0100 Subject: [PATCH 101/252] Added: #9907 release notes --- .../9907-files-api-counts-with-criteria.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 doc/release-notes/9907-files-api-counts-with-criteria.md diff --git a/doc/release-notes/9907-files-api-counts-with-criteria.md b/doc/release-notes/9907-files-api-counts-with-criteria.md new file mode 100644 index 00000000000..07cd23daad0 --- /dev/null +++ b/doc/release-notes/9907-files-api-counts-with-criteria.md @@ -0,0 +1,11 @@ +Extended the getVersionFileCounts endpoint (/api/datasets/{id}/versions/{versionId}/files/counts) to support filtering by criteria. + +In particular, the endpoint now accepts the following optional criteria query parameters: + +- contentType +- accessStatus +- categoryName +- tabularTagName +- searchText + +This filtering criteria is the same as the one for the getVersionFiles endpoint. 
From 35eeed53cefe427df8684ca8c20046be2b2a45f2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 10:07:53 +0100 Subject: [PATCH 102/252] Refactor: using variable instead of repeated string in IT --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 53546133b27..06d0bed14c0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3636,7 +3636,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); // Test category name criteria - getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, "testCategory", null, null, apiToken); + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, testCategory, null, null, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); From 1e8b735ca1baba7c12bac0737cfc88eedc084ec3 Mon Sep 17 00:00:00 2001 From: mr-loop-1 Date: Mon, 9 Oct 2023 14:47:26 +0530 Subject: [PATCH 103/252] #9412 added markdown in external tools guide --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 8543300dd2c..f8bf5fc73d9 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables 
in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown (md), text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. 
The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. From b32d51fab1b78ff9316476f76c549163c4b3e7ba Mon Sep 17 00:00:00 2001 From: mr-loop-1 Date: Mon, 9 Oct 2023 14:50:28 +0530 Subject: [PATCH 104/252] #9412 added markdown to gile previews list --- doc/sphinx-guides/source/user/dataset-management.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 3b5b4ec6ba8..c41ca40dd36 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -200,6 +200,7 @@ Previewers are available for the following file types: - Text - PDF +- Markdown (MD) - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`) - Code (R, etc.) 
- Images (PNG, GIF, JPG) From dea8bf7636bc396d51773bc481135dab5f1a7679 Mon Sep 17 00:00:00 2001 From: mr-loop-1 Date: Mon, 9 Oct 2023 14:51:13 +0530 Subject: [PATCH 105/252] #9412 removed file extension markdown --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index f8bf5fc73d9..a13dea923e4 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown (md), text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. 
The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. 
From 5a3b7853607d0a995ad9cbdbbcf402114f2a70b8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 9 Oct 2023 08:53:43 -0400 Subject: [PATCH 106/252] add release note for markdown previewer #9412 --- doc/release-notes/9412-markdown-previewer.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/9412-markdown-previewer.md diff --git a/doc/release-notes/9412-markdown-previewer.md b/doc/release-notes/9412-markdown-previewer.md new file mode 100644 index 00000000000..8faa2679fb0 --- /dev/null +++ b/doc/release-notes/9412-markdown-previewer.md @@ -0,0 +1 @@ +There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews From cc117bd4396e18f5680f34488928bb7a009b8bf0 Mon Sep 17 00:00:00 2001 From: Abdul Samad <62374784+mr-loop-1@users.noreply.github.com> Date: Mon, 9 Oct 2023 19:03:00 +0530 Subject: [PATCH 107/252] remove extension after markdown Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/user/dataset-management.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index c41ca40dd36..1e8ea897032 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -200,7 +200,7 @@ Previewers are available for the following file types: - Text - PDF -- Markdown (MD) +- Markdown - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`) - Code (R, etc.) 
- Images (PNG, GIF, JPG) From 44b015f375fc92505def8ef2e2475950a3818d4e Mon Sep 17 00:00:00 2001 From: Abdul Samad <62374784+mr-loop-1@users.noreply.github.com> Date: Mon, 9 Oct 2023 19:03:32 +0530 Subject: [PATCH 108/252] Capitalise Markdown Co-authored-by: Philip Durbin --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index a13dea923e4..4f4c29d0670 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. 
The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. 
From e1acdd328fa4a6ca6624522e21806c7d2a779ef9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 14:49:09 +0200 Subject: [PATCH 109/252] test(oidc): deactivate test when no Docker available #9974 As many of IQSS and external devs might not have Docker available, let's deactivate any Testcontainers tests in these cases. --- .../oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 5968cf3eaeb..ee6823ef98a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -54,7 +54,7 @@ @Tag(Tags.INTEGRATION_TEST) @Tag(Tags.USES_TESTCONTAINERS) -@Testcontainers +@Testcontainers(disabledWithoutDocker = true) @ExtendWith(MockitoExtension.class) // NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl() @LocalJvmSettings From 2aa7a471249cb129aeef13d6301f10ddb43506b7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 14:52:43 +0200 Subject: [PATCH 110/252] doc(testing): change docs for TC ITs to disable when no Docker #9974 --- doc/sphinx-guides/source/developers/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4691aca3aad..dab8110b20b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -316,7 +316,7 @@ Please make sure to: .. 
code:: java /** A very minimal example for a Testcontainers integration test class. */ - @Testcontainers + @Testcontainers(disabledWithoutDocker = true) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.INTEGRATION_TEST) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.USES_TESTCONTAINERS) class MyExampleIT { /* ... */ } From ed291936810a46e260df9809def80b2d2c5b50dc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 20:04:11 +0200 Subject: [PATCH 111/252] style(ct): remove empty lines from configbaker Dockerfile --- modules/container-configbaker/Dockerfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 2975b043213..9b98334d72b 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -40,8 +40,6 @@ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml - - # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] # By default run a script that will print a help message and terminate From e89e2aaeb32f983462ea11b64eceab6ddc926eb7 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Oct 2023 10:47:00 -0400 Subject: [PATCH 112/252] #9507 revert to use dataverse in bundle --- src/main/java/propertyFiles/Bundle.properties | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 4964dac78a4..e3dbdc144f1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -875,22 +875,22 @@ dataverse.nopublished=No Published Dataverses dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. 
dataverse.contact=Email Dataverse Contact dataverse.link=Link Collection -dataverse.link.btn.tip=Link to Your Collection -dataverse.link.yourDataverses=Your Collection -dataverse.link.yourDataverses.inputPlaceholder=Enter Collection Name -dataverse.link.save=Save Linked collection -dataverse.link.dataverse.choose=Choose which of your collection you would like to link this collection to. -dataverse.link.dataset.choose=Enter the name of the collection you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. -dataverse.link.dataset.none=No linkable collections available. -dataverse.link.no.choice=You have one collection you can add linked collection and datasets in. -dataverse.link.no.linkable=To be able to link a collection or dataset, you need to have your own collection. Create a collection to get started. -dataverse.link.no.linkable.remaining=You have already linked all of your eligible collections. +dataverse.link.btn.tip=Link to Your Dataverse +dataverse.link.yourDataverses=Your Dataverse +dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name +dataverse.link.save=Save Linked Dataverse +dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. +dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. +dataverse.link.dataset.none=No linkable dataverses available. +dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. +dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. 
dataverse.savedsearch.link=Link Search dataverse.savedsearch.searchquery=Search dataverse.savedsearch.filterQueries=Facets dataverse.savedsearch.save=Save Linked Search -dataverse.savedsearch.dataverse.choose=Choose which of your collection you would like to link this search to. -dataverse.savedsearch.no.choice=You have one collection to which you may add a saved search. +dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. +dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. # Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test dataverse.saved.search.success=The saved search has been successfully linked to {0}. dataverse.saved.search.failure=The saved search was not able to be linked. @@ -2498,7 +2498,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: -dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following collection(s): +dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s): datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. 
From 617f36cd98b267bc99d53a7b69c21d96974ff4dc Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Oct 2023 10:48:11 -0400 Subject: [PATCH 113/252] #9507 missed one --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index e3dbdc144f1..89eabaeb0bf 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -874,7 +874,7 @@ dataverse.publish.header=Publish Dataverse dataverse.nopublished=No Published Dataverses dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. dataverse.contact=Email Dataverse Contact -dataverse.link=Link Collection +dataverse.link=Link Dataverse dataverse.link.btn.tip=Link to Your Dataverse dataverse.link.yourDataverses=Your Dataverse dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name From 18cdf133f49d597da6aea9d21385e45b77844ceb Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 11 Oct 2023 14:48:51 -0400 Subject: [PATCH 114/252] stripping more dead code in the version service bean (my experimental filemetadatas retrieval method, not directly used in the PR). 
(#9763) --- .../dataverse/DatasetVersionServiceBean.java | 88 ------------------- 1 file changed, 88 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 476a306e081..c2f9027a38a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -49,22 +49,6 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" - + " WHERE fm.datasetVersion.id=:datasetVersionId" - + " ORDER BY fm.label"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = dvo.id" - + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.filesize"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.contentType"; - @EJB DatasetServiceBean datasetService; @@ -166,18 +150,6 @@ public DatasetVersion getDatasetVersion(){ } } // end RetrieveDatasetVersionResponse - /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} - */ - public enum FileMetadatasOrderCriteria { - NameAZ, - 
NameZA, - Newest, - Oldest, - Size, - Type - } - public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1287,64 +1259,4 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions - - /** - * Returns a FileMetadata list of files in the specified DatasetVersion - * - * @param datasetVersion the DatasetVersion to access - * @param limit for pagination, can be null - * @param offset for pagination, can be null - * @param orderCriteria a FileMetadatasOrderCriteria to order the results - * @return a FileMetadata list of the specified DatasetVersion - */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { - TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) - .setParameter("datasetVersionId", datasetVersion.getId()); - - if (limit == null && offset == null) { - query = query.setHint("eclipselink.left-join-fetch", "fm.dataFile.ingestRequest") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.thumbnailForDataset") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataTables") - .setHint("eclipselink.left-join-fetch", "fm.fileCategories") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.embargo") - .setHint("eclipselink.left-join-fetch", "fm.datasetVersion") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.releaseUser") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataFileTags") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.creator"); - } else { - // @todo: is there really no way to use offset-limit with left join hints? 
- if (limit != null) { - query = query.setMaxResults(limit); - } - if (offset != null) { - query = query.setFirstResult(offset); - } - } - return query.getResultList(); - } - - private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { - String queryString; - switch (orderCriteria) { - case NameZA: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; - break; - case Newest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; - break; - case Oldest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; - break; - case Size: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; - break; - case Type: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; - break; - default: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; - break; - } - return queryString; - } } // end class From 381ddf59088808a536d58498e60514e1ea8557b8 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 11 Oct 2023 15:22:52 -0400 Subject: [PATCH 115/252] more commented-out code that needed to be removed before finalizing the pr. (#9763) --- .../edu/harvard/iq/dataverse/Dataset.java | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 692a2ba0245..245bdf0efd2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -693,31 +693,12 @@ public Timestamp getCitationDate() { Timestamp citationDate = null; //Only calculate if this dataset doesn't use an alternate date field for publication date if (citationDateDatasetFieldType == null) { - // @todo: remove this commented-out code once/if the PR passes review - L.A. 
- //List versions = this.versions; - // TODo - is this ever not version 1.0 (or draft if not published yet) - //DatasetVersion oldest = versions.get(versions.size() - 1); - // - I believe the answer is yes, the oldest versions will always be - // either 1.0 or draft - L.A. citationDate = super.getPublicationDate(); if (embargoCitationDate != null) { if (citationDate.compareTo(embargoCitationDate) < 0) { return embargoCitationDate; } } - // @todo: remove this commented-out code once/if the PR passes review - L.A. - /*if (oldest.isPublished()) { - List fms = oldest.getFileMetadatas(); - for (FileMetadata fm : fms) { - Embargo embargo = fm.getDataFile().getEmbargo(); - if (embargo != null) { - Timestamp embDate = Timestamp.valueOf(embargo.getDateAvailable().atStartOfDay()); - if (citationDate.compareTo(embDate) < 0) { - citationDate = embDate; - } - } - } - }*/ } return citationDate; } From ada8cc7a713c8074378c7732d4cf30688d50f9cf Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 10:44:14 +0100 Subject: [PATCH 116/252] Fixed: curl examples in docs for deaccession dataset --- doc/sphinx-guides/source/api/native-api.rst | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index e51ca0055b6..1dc1ab13d9f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1383,21 +1383,31 @@ Deaccession Dataset Given a version of a dataset, updates its status to deaccessioned. +The JSON body required to deaccession a dataset (``deaccession.json``) looks like this:: + + { + "deaccessionReason": "Description of the deaccession reason.", + "deaccessionForwardURL": "https://demo.dataverse.org" + } + + +Note that the field ``deaccessionForwardURL`` is optional. + .. 
code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=24 export VERSIONID=1.0 - export JSON='{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' + export FILE_PATH=deaccession.json - curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -d "$JSON" + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -H "Content-type:application/json" --upload-file deaccession.json .. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be published since it is already deaccessioned. 
From 1f0efddbd6cb4e10b7f5924dbd338105f18add81 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 11:35:44 +0100 Subject: [PATCH 117/252] Fixed: permission checks in GetSpecificPublishedDatasetVersionCommand --- ...etSpecificPublishedDatasetVersionCommand.java | 3 ++- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 16 ++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 14 +++++++++----- 3 files changed, 27 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java index 879a694ef57..a87eb8a99a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -40,7 +41,7 @@ public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Data @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { for (DatasetVersion dsv : ds.getVersions()) { - if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) { if (dsv.getVersionNumber().equals(majorVersion) && dsv.getMinorVersionNumber().equals(minorVersion)) { return dsv; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java 
b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 2d52a6c6e15..ee81d3f67f4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3543,6 +3543,14 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); + + // Test that the dataset files for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, "1.0", null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test @@ -3620,6 +3628,14 @@ public void getVersionFileCounts() throws IOException { responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); assertEquals(4, (Integer) responseJsonPath.get("data.total")); + + // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, "1.0", true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 
93a7cc64082..434dc6d26f1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3290,9 +3290,11 @@ static Response getVersionFiles(Integer datasetId, boolean includeDeaccessioned, String apiToken) { RequestSpecification requestSpecification = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json") .queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } if (limit != null) { requestSpecification = requestSpecification.queryParam("limit", limit); } @@ -3372,10 +3374,12 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date } static Response getVersionFileCounts(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("includeDeaccessioned", includeDeaccessioned) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + RequestSpecification requestSpecification = given() + .queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } static Response setFileCategories(String dataFileId, String apiToken, List categories) { From 4b5ad8fac1c1733c73ad0e2f5d7e1e47155895bc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 13 Oct 2023 10:04:25 -0400 Subject: [PATCH 118/252] rename sql script #9763 avoid conflict with V6.0.0.1__9599-guestbook-at-request.sql --- ...rgocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.1__9763-embargocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} (100%) diff --git 
a/src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql rename to src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql From 12ba35e9b9c4f0396ed942ea30a832e6a57c22c9 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 17:14:17 +0100 Subject: [PATCH 119/252] Fixed: failing tests after develop merge --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6626b18219c..34eccd3172a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3622,8 +3622,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); // Test content type criteria - getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, "image/png", null, null, null, null, false, apiToken); - + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, "image/png", null, null, null, null, false, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponse.jsonPath(); @@ -3760,7 +3759,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); - assertEquals(4, (Integer) responseJsonPath.get("data.total")); + assertEquals(5, (Integer) 
responseJsonPath.get("data.total")); // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest // By latest published version From beed44473f5a51d2a9d69fd31353c4900f8391ae Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 13 Oct 2023 13:44:23 -0400 Subject: [PATCH 120/252] re-apply #9892 --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 69cc9fae55c..34df0c79390 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -7,7 +7,8 @@ xmlns:o="http://omnifaces.org/ui" xmlns:jsf="http://xmlns.jcp.org/jsf" xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> - + + @@ -321,4 +322,5 @@
    + From 53b73e09f28751c3e17150e17908df5fa3f308b4 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 13 Oct 2023 14:07:37 -0400 Subject: [PATCH 121/252] remove file with "TODO - remove!" --- .../webapp/file-download-popup-fragment.xhtml | 305 ------------------ 1 file changed, 305 deletions(-) delete mode 100644 src/main/webapp/file-download-popup-fragment.xhtml diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/file-download-popup-fragment.xhtml deleted file mode 100644 index 3a64ca4a3a2..00000000000 --- a/src/main/webapp/file-download-popup-fragment.xhtml +++ /dev/null @@ -1,305 +0,0 @@ - - - - - - - -

    - #{bundle['file.downloadDialog.tip']} -

    - -
    - -
    -
    - -
    - -
    - - -
    - - - -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    -
    - - -
    - - - -
    - -
    - - - -
    -
    -
    - -
    - - - -
    -
    -
    - -
    - - - -
    -
    -
    - -
    - - - -
    -
    -
    - -
    - -
    - - - - - - - - - - - -
    -
    -
    -
    -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - -
    -
    -
    \ No newline at end of file From f47867ee34e93e14efaca2fba414e202d234c1c6 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 15 Oct 2023 19:24:09 -0400 Subject: [PATCH 122/252] renaming the flyway script since 6.0.0.1 has already been merged. (#9763) --- ...rgocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.1__9763-embargocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} (100%) diff --git a/src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql rename to src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql From 35f69517ea2139c2e742b7d7b28e1b88dcdd9ef5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 15 Oct 2023 20:22:09 -0400 Subject: [PATCH 123/252] Switching to the new version of gdcc/xoai, v5.2.0 (#9910) --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 19f78415280..1d99c1cd3d8 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -165,7 +165,7 @@ 4.4.14 - 5.1.0 + 5.2.0 1.19.0 From 4182b036f24ba8402ffe7f2c304ed4026fa7874d Mon Sep 17 00:00:00 2001 From: Abhinav Rana <142827270+AR-2910@users.noreply.github.com> Date: Mon, 16 Oct 2023 07:50:09 +0530 Subject: [PATCH 124/252] Update config.rst Adding link to "Dataverse General User Interface Translation Guide for Weblate" in the "Tools For Translators" section. Issue #9512. 
--- doc/sphinx-guides/source/installation/config.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 086b0a80895..ce8876b012c 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1276,6 +1276,8 @@ The list below depicts a set of tools that can be used to ease the amount of wor - `easyTranslationHelper `_, a tool developed by `University of Aveiro `_. +- `Dataverse General User Interface Translation Guide for Weblate `_, a guide produced as part of the `SSHOC Dataverse Translation `_ event. + .. _Web-Analytics-Code: Web Analytics Code From cea36d6aadcc3a21d70c3029b498a279256d6c07 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 12:19:15 +0100 Subject: [PATCH 125/252] Added: file search criteria to getDownloadSize datasets API endpoint --- .../DatasetVersionFilesServiceBean.java | 23 +++++++++++-------- .../harvard/iq/dataverse/api/Datasets.java | 19 ++++++++++++++- .../harvard/iq/dataverse/api/DatasetsIT.java | 21 +++++++++++------ .../edu/harvard/iq/dataverse/api/UtilIT.java | 22 +++++++++++++++--- 4 files changed, 64 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 2c14498caa9..6ea9262bbc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -53,7 +53,7 @@ public enum FileOrderCriteria { } /** - * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileDownloadSizeMode)} + * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileSearchCriteria, FileDownloadSizeMode)} *

    * All: Includes both archival and original sizes for tabular files * Archival: Includes only the archival size for tabular files @@ -191,16 +191,17 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege * Returns the total download size of all files for a particular DatasetVersion * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for retrieving only files matching this criteria * @param mode a FileDownloadSizeMode to base the search on * @return long value of total file download size */ - public long getFilesDownloadSize(DatasetVersion datasetVersion, FileDownloadSizeMode mode) { + public long getFilesDownloadSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria, FileDownloadSizeMode mode) { return switch (mode) { case All -> - Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, false)); + Long.sum(getOriginalTabularFilesSize(datasetVersion, searchCriteria), getArchivalFilesSize(datasetVersion, false, searchCriteria)); case Original -> - Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, true)); - case Archival -> getArchivalFilesSize(datasetVersion, false); + Long.sum(getOriginalTabularFilesSize(datasetVersion, searchCriteria), getArchivalFilesSize(datasetVersion, true, searchCriteria)); + case Archival -> getArchivalFilesSize(datasetVersion, false, searchCriteria); }; } @@ -301,22 +302,24 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu } } - private long getOriginalTabularFilesSize(DatasetVersion datasetVersion) { + private long getOriginalTabularFilesSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - Long result = queryFactory + JPAQuery baseQuery = queryFactory .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) .from(dataTable) - .where(dataTable.dataFile.eq(fileMetadata.dataFile)) - 
.select(dataTable.originalFileSize.sum()).fetchFirst(); + .where(dataTable.dataFile.eq(fileMetadata.dataFile)); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + Long result = baseQuery.select(dataTable.originalFileSize.sum()).fetchFirst(); return (result == null) ? 0 : result; } - private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular) { + private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); JPAQuery baseQuery = queryFactory .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); Long result; if (ignoreTabular) { result = baseQuery.where(fileMetadata.dataFile.dataTables.isEmpty()).select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f8929c5e8d8..8605b4772f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2992,11 +2992,28 @@ public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam( public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, @QueryParam("mode") String mode, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? 
FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); + } DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode; try { fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All; @@ -3004,7 +3021,7 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); - long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileDownloadSizeMode); + long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("message", message); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 34eccd3172a..66a67887405 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3863,7 +3863,7 @@ public void getDownloadSize() throws IOException, InterruptedException { int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; // Get the total size when there are no tabular files - Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + Response 
getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); @@ -3878,7 +3878,7 @@ public void getDownloadSize() throws IOException, InterruptedException { Thread.sleep(2000); // Get the total size ignoring the original tabular file sizes - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); @@ -3889,7 +3889,7 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); @@ -3897,13 +3897,13 @@ public void getDownloadSize() throws IOException, InterruptedException { int 
tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; int expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); // Get the total size sending invalid file download size mode String invalidMode = "invalidMode"; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid mode: " + invalidMode)); @@ -3917,15 +3917,22 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); 
// Get the total size including both the original and archival tabular file sizes expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); + + // Get the total size including both the original and archival tabular file sizes with search criteria + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + // We exclude tabular sizes from the expected result since the search criteria filters by content type "text/plain" and search text "test_" + int expectedSizeIncludingAllSizesAndApplyingCriteria = testFileSize1 + testFileSize2; + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizesAndApplyingCriteria)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4421e9280b3..38cc44c8c0d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3450,10 +3450,26 @@ static Response deaccessionDataset(Integer datasetId, String version, String dea .post("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } - static Response getDownloadSize(Integer datasetId, String version, String mode, String 
apiToken) { - return given() + static Response getDownloadSize(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String mode, String apiToken) { + RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("mode", mode) + .queryParam("mode", mode); + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } + return requestSpecification .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); } } From b6bcbf7cadcf8e7b2f05825836d155f6a589b710 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 12:47:35 +0100 Subject: [PATCH 126/252] Added: getDownloadSize API endpoint deaccessioned dataset support --- .../harvard/iq/dataverse/api/Datasets.java | 3 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 41 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 26 ++++++++++-- 3 files changed, 58 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 8605b4772f4..852dd18ee84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2998,6 +2998,7 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, @QueryParam("tabularTagName") String tabularTagName, 
@QueryParam("searchText") String searchText, @QueryParam("mode") String mode, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { @@ -3020,7 +3021,7 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); } - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned); long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 66a67887405..e12de1e23cc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3863,7 +3863,7 @@ public void getDownloadSize() throws IOException, InterruptedException { int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; // Get the total size when there are no tabular files - Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) 
.body("data.storageSize", equalTo(expectedTextFilesStorageSize)); @@ -3878,7 +3878,7 @@ public void getDownloadSize() throws IOException, InterruptedException { Thread.sleep(2000); // Get the total size ignoring the original tabular file sizes - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); @@ -3889,7 +3889,7 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); @@ -3897,13 +3897,13 @@ public void getDownloadSize() throws IOException, InterruptedException { int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; int expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + 
expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); // Get the total size sending invalid file download size mode String invalidMode = "invalidMode"; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid mode: " + invalidMode)); @@ -3917,22 +3917,47 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); // Get the total size including both the original and 
archival tabular file sizes expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); // Get the total size including both the original and archival tabular file sizes with search criteria - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); // We exclude tabular sizes from the expected result since the search criteria filters by content type "text/plain" and search text "test_" int expectedSizeIncludingAllSizesAndApplyingCriteria = testFileSize1 + testFileSize2; getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizesAndApplyingCriteria)); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + 
publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); + getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFileCountsResponseDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, apiToken); + getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getVersionFileCountsGuestUserResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null); + getVersionFileCountsGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getVersionFileCountsGuestUserResponse = UtilIT.getDownloadSize(datasetId, "1.0", null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null); + getVersionFileCountsGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 38cc44c8c0d..15350782fa1 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3373,7 +3373,15 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); } - static Response getVersionFileCounts(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, boolean includeDeaccessioned, String apiToken) { + static Response getVersionFileCounts(Integer datasetId, + String version, + String contentType, + String accessStatus, + String categoryName, + String tabularTagName, + String searchText, + boolean includeDeaccessioned, + String apiToken) { RequestSpecification requestSpecification = given() .queryParam("includeDeaccessioned", includeDeaccessioned); if (apiToken != null) { @@ -3450,10 +3458,22 @@ static Response deaccessionDataset(Integer datasetId, String version, String dea .post("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } - static Response getDownloadSize(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String mode, String apiToken) { + static Response getDownloadSize(Integer datasetId, + String version, + String contentType, + String accessStatus, + String categoryName, + String tabularTagName, + String searchText, + String mode, + boolean includeDeaccessioned, + String apiToken) { RequestSpecification requestSpecification = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("includeDeaccessioned", includeDeaccessioned) .queryParam("mode", mode); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } if (contentType != null) { requestSpecification = requestSpecification.queryParam("contentType", contentType); } From 794c5b64e6daa52e1d84fb2cb58468b5104d3161 Mon Sep 17 00:00:00 
2001 From: GPortas Date: Mon, 16 Oct 2023 13:03:08 +0100 Subject: [PATCH 127/252] Added: extended docs for getDownloadSize API endpoint --- doc/sphinx-guides/source/api/native-api.rst | 61 ++++++++++++++++++++- 1 file changed, 58 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2e1a878dce8..98e2722ac5e 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -964,7 +964,7 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files" -This endpoint supports optional pagination, through the ``limit`` and ``offset`` query params: +This endpoint supports optional pagination, through the ``limit`` and ``offset`` query parameters: .. code-block:: bash @@ -1044,7 +1044,7 @@ Usage example: curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?includeDeaccessioned=true" -.. note:: Keep in mind that you can combine all of the above query params depending on the results you are looking for. +.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for. Get File Counts in a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1132,7 +1132,7 @@ Usage example: Please note that filtering values are case sensitive and must be correctly typed for the endpoint to recognize them. -Keep in mind that you can combine all of the above query params depending on the results you are looking for. +Keep in mind that you can combine all of the above query parameters depending on the results you are looking for. 
View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1888,6 +1888,61 @@ Usage example: curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?mode=Archival" +Category name filtering is also optionally supported. To return the size of all files available for download matching the requested category name. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?categoryName=Data" + +Tabular tag name filtering is also optionally supported. To return the size of all files available for download for which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?tabularTagName=Survey" + +Content type filtering is also optionally supported. To return the size of all files available for download matching the requested content type. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?contentType=image/png" + +Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return the size of all files available for download that contain the text searched in one of such fields. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?searchText=word" + +File access filtering is also optionally supported. In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Please note that filtering query parameters are case sensitive and must be correctly typed for the endpoint to recognize them. 
+ +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?includeDeaccessioned=true" + +.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for. + Submit a Dataset for Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 28cd109303ec22cbb898f32f3141cc281f4c7c62 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 13:10:38 +0100 Subject: [PATCH 128/252] Added: release notes for #9995 --- ...adsize-with-criteria-and-deaccessioned-support.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md diff --git a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md new file mode 100644 index 00000000000..71c7aa3b516 --- /dev/null +++ b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md @@ -0,0 +1,12 @@ +Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/files/downloadsize), including the following new features: + +- The endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned dataset versions when searching for versions to obtain the file total download size. + + +- The endpoint now supports filtering by criteria. 
In particular, it accepts the following optional criteria query parameters: + + - contentType + - accessStatus + - categoryName + - tabularTagName + - searchText From ab237777309b90e299e584cff6995618bc378ebd Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 13:22:30 +0100 Subject: [PATCH 129/252] Fixed: release notes --- ...-api-downloadsize-with-criteria-and-deaccessioned-support.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md index 71c7aa3b516..020224b2094 100644 --- a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md +++ b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md @@ -1,4 +1,4 @@ -Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/files/downloadsize), including the following new features: +Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/downloadsize), including the following new features: - The endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned dataset versions when searching for versions to obtain the file total download size. 
From e847ed04e87f16d5423bcfade38453fd1d959343 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 16:53:58 +0100 Subject: [PATCH 130/252] Fixed: set label as second ordering column when ordering by content type --- .../harvard/iq/dataverse/DatasetVersionFilesServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 2c14498caa9..701ff4474ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -293,7 +293,7 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu query.orderBy(fileMetadata.dataFile.filesize.asc()); break; case Type: - query.orderBy(fileMetadata.dataFile.contentType.asc()); + query.orderBy(fileMetadata.dataFile.contentType.asc(), fileMetadata.label.asc()); break; default: query.orderBy(fileMetadata.label.asc()); From 635d345df3b71484b827668946b48b017420eedd Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 16 Oct 2023 16:33:02 -0400 Subject: [PATCH 131/252] check driver type not id --- .../java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 00db98e894e..d33f8f5e5bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { } public static boolean isDirectUploadEnabled(String driverId) { - return (DataAccess.S3.equals(driverId) && Boolean.parseBoolean(System.getProperty("dataverse.files." 
+ DataAccess.S3 + ".upload-redirect"))) || + return (System.getProperty("dataverse.files." + driverId + ".type").equals(DataAccess.S3) && Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))) || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } From 6cda2fcec8fd2ebfef8b9a11a907628ec7d4b1b2 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 10:00:06 +0200 Subject: [PATCH 132/252] Remove StringReader in deaccession endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7d50d5e0e70..f292f63c1ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3974,8 +3974,8 @@ public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathPa } return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); - try (StringReader stringReader = new StringReader(jsonBody)) { - JsonObject jsonObject = Json.createReader(stringReader).readObject(); + try { + JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody); datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason")); String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null); if (deaccessionForwardURL != null) { From 5802bf61fe92aae50d8764ee8df4dd8011292f22 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 11:02:12 +0200 Subject: [PATCH 133/252] Lookup dataverse by alias or ID --- .../java/edu/harvard/iq/dataverse/api/Dataverses.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 30c14535251..47f6468bfac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1194,10 +1194,15 @@ public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContex public void write(OutputStream os) throws IOException, WebApplicationException { - Dataverse dv = dataverseService.findByAlias(dvIdtf); + Dataverse dv; + try { + dv = findDataverseOrDie(dvIdtf); + } catch (WrappedResponse wr) { + throw new WebApplicationException(wr.getResponse()); + } Map customQandAs = guestbookResponseService.mapCustomQuestionAnswersAsStrings(dv.getId(), gbId); Map datasetTitles = guestbookResponseService.mapDatasetTitles(dv.getId()); - + List guestbookResults = guestbookResponseService.getGuestbookResults(dv.getId(), gbId); os.write("Guestbook, Dataset, Dataset PID, Date, Type, File Name, File Id, File PID, User Name, Email, Institution, Position, Custom Questions\n".getBytes()); for (Object[] result : guestbookResults) { From f2770fb2f107b4e75701d2baf5f4f52b51abf709 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 16:10:02 +0200 Subject: [PATCH 134/252] Do not look up the Dataverse twice Co-authored-by: Philip Durbin --- .../java/edu/harvard/iq/dataverse/api/Dataverses.java | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 47f6468bfac..76cfa8ef764 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1173,8 +1173,9 @@ public Response getGroupByOwnerAndAliasInOwner(@Context ContainerRequestContext public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("guestbookId") Long gbId, 
@Context HttpServletResponse response) { + Dataverse dv; try { - Dataverse dv = findDataverseOrDie(dvIdtf); + dv = findDataverseOrDie(dvIdtf); User u = getRequestUser(crc); DataverseRequest req = createDataverseRequest(u); if (permissionSvc.request(req) @@ -1194,12 +1195,6 @@ public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContex public void write(OutputStream os) throws IOException, WebApplicationException { - Dataverse dv; - try { - dv = findDataverseOrDie(dvIdtf); - } catch (WrappedResponse wr) { - throw new WebApplicationException(wr.getResponse()); - } Map customQandAs = guestbookResponseService.mapCustomQuestionAnswersAsStrings(dv.getId(), gbId); Map datasetTitles = guestbookResponseService.mapDatasetTitles(dv.getId()); @@ -1208,7 +1203,6 @@ public void write(OutputStream os) throws IOException, for (Object[] result : guestbookResults) { StringBuilder sb = guestbookResponseService.convertGuestbookResponsesToCSV(customQandAs, datasetTitles, result); os.write(sb.toString().getBytes()); - } } }; From 21eb153a63227fde85604b8d504c18813254496a Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 20:47:00 +0200 Subject: [PATCH 135/252] Add API test for Dataverses GuestbookResponses --- .../iq/dataverse/api/DataversesIT.java | 19 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 11 ++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 09052f9e4ea..171a35ac04f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -144,6 +144,25 @@ public void testMinimalDataverse() throws FileNotFoundException { deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); } + @Test + public void testGetGuestbookResponses() { + Response createUser = UtilIT.createRandomUser(); + 
createUser.prettyPrint(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + // Create a Dataverse + Response create = UtilIT.createRandomDataverse(apiToken); + create.prettyPrint(); + create.then().assertThat().statusCode(CREATED.getStatusCode()); + String alias = UtilIT.getAliasFromResponse(create); + Integer dvId = UtilIT.getDataverseIdFromResponse(create); + // Get GuestbookResponses by Dataverse alias + Response getResponsesByAlias = UtilIT.getGuestbookResponses(alias, null, apiToken); + getResponsesByAlias.then().assertThat().statusCode(OK.getStatusCode()); + // Get GuestbookResponses by Dataverse ID + Response getResponsesById = UtilIT.getGuestbookResponses(dvId.toString(), null, apiToken); + getResponsesById.then().assertThat().statusCode(OK.getStatusCode()); + } + @Test public void testNotEnoughJson() { Response createUser = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4421e9280b3..94a0f33a83e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -372,7 +372,16 @@ static Response showDataverseContents(String alias, String apiToken) { .header(API_TOKEN_HTTP_HEADER, apiToken) .when().get("/api/dataverses/" + alias + "/contents"); } - + + static Response getGuestbookResponses(String dataverseAlias, Long guestbookId, String apiToken) { + RequestSpecification requestSpec = given() + .auth().basic(apiToken, EMPTY_STRING); + if (guestbookId != null) { + requestSpec.queryParam("guestbookId", guestbookId); + } + return requestSpec.get("/api/dataverses/" + dataverseAlias + "/guestbookResponses/"); + } + static Response createRandomDatasetViaNativeApi(String dataverseAlias, String apiToken) { return createRandomDatasetViaNativeApi(dataverseAlias, apiToken, false); } From d0a858f5c817df7f626033063ec1afa4dbd69831 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 
Oct 2023 07:27:09 +0100 Subject: [PATCH 136/252] Added: ManageFilePermissions permission check to getUserPermissionsOnFile API endpoint --- .../harvard/iq/dataverse/FileDownloadServiceBean.java | 11 ----------- .../java/edu/harvard/iq/dataverse/api/Access.java | 3 ++- .../java/edu/harvard/iq/dataverse/api/AccessIT.java | 2 ++ 3 files changed, 4 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index de947ee9058..55817d4a746 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -645,15 +645,4 @@ public String getDirectStorageLocatrion(String storageLocation) { return null; } - - /** - * Checks if the DataverseRequest, which contains IP Groups, has permission to download the file - * - * @param dataverseRequest the DataverseRequest - * @param dataFile the DataFile to check permissions - * @return boolean - */ - public boolean canDownloadFile(DataverseRequest dataverseRequest, DataFile dataFile) { - return permissionService.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 1aaa7e60816..696fcb34920 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1709,7 +1709,8 @@ public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @ } JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canDownloadFile", permissionService.userOn(requestUser, dataFile).has(Permission.DownloadFile)); + 
jsonObjectBuilder.add("canManageFilePermissions", permissionService.userOn(requestUser, dataFile).has(Permission.ManageFilePermissions)); jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); return ok(jsonObjectBuilder); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 416caa68566..42e21e53101 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -666,6 +666,8 @@ public void testGetUserPermissionsOnFile() { assertTrue(canDownloadFile); boolean canEditOwnerDataset = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canEditOwnerDataset"); assertTrue(canEditOwnerDataset); + boolean canManageFilePermissions = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canManageFilePermissions"); + assertTrue(canManageFilePermissions); // Call with invalid file id Response getUserPermissionsOnFileInvalidIdResponse = UtilIT.getUserPermissionsOnFile("testInvalidId", apiToken); From 5d8ac32754ea2c13c2dbd883d627b583a6cb1b43 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 07:34:58 +0100 Subject: [PATCH 137/252] Added: getUserPermissionsOnDataset API endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 20 +++++++++++ .../harvard/iq/dataverse/api/DatasetsIT.java | 33 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 ++++ 3 files changed, 59 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index c3032495f27..7cfe587d8dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4083,4 +4083,24 @@ public Response resetGuestbookEntryAtRequest(@Context 
ContainerRequestContext cr datasetService.merge(dataset); return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest()); } + + @GET + @AuthRequired + @Path("{id}/userPermissions") + public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) { + Dataset dataset; + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + User requestUser = getRequestUser(crc); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset)); + jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset)); + jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset)); + jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions)); + jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft)); + return ok(jsonObjectBuilder); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 34eccd3172a..4258773a0b3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3928,4 +3928,37 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } + + @Test + public void testGetUserPermissionsOnDataset() { + Response createUser = UtilIT.createRandomUser(); + 
createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Call with valid dataset id + Response getUserPermissionsOnDatasetResponse = UtilIT.getUserPermissionsOnDataset(Integer.toString(datasetId), apiToken); + getUserPermissionsOnDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean canViewUnpublishedDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canViewUnpublishedDataset"); + assertTrue(canViewUnpublishedDataset); + boolean canEditDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canEditDataset"); + assertTrue(canEditDataset); + boolean canPublishDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canPublishDataset"); + assertTrue(canPublishDataset); + boolean canManageDatasetPermissions = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canManageDatasetPermissions"); + assertTrue(canManageDatasetPermissions); + boolean canDeleteDatasetDraft = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canDeleteDatasetDraft"); + assertTrue(canDeleteDatasetDraft); + + // Call with invalid dataset id + Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getUserPermissionsOnDataset("testInvalidId", apiToken); + 
getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4421e9280b3..be23df5ec63 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3359,6 +3359,12 @@ static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { .get("/api/access/datafile/" + dataFileId + "/userPermissions"); } + static Response getUserPermissionsOnDataset(String datasetId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/userPermissions"); + } + static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); jsonBuilder.add("dateAvailable", dateAvailable); From 38681bb113da3b9ea6359cf2da4e324e550ea463 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 07:40:24 +0100 Subject: [PATCH 138/252] Added: includeDeaccessioned optional query param to getVersion Datasets API endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7cfe587d8dc..5e9d02c4af3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -482,9 +482,14 @@ public Response listVersions(@Context ContainerRequestContext crc, @PathParam("i @GET @AuthRequired @Path("{id}/versions/{versionId}") - public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { 
+ public Response getVersion(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response( req -> { - DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") : ok(json(dsv)); }, getRequestUser(crc)); From 835fb44325935a4509ce3139b96306b0370d290d Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 08:27:27 +0100 Subject: [PATCH 139/252] Added: docs for API endpoints getUserPermissionsOnDataset, getUserPermissionsOnFile and getVersion --- doc/sphinx-guides/source/api/dataaccess.rst | 1 + doc/sphinx-guides/source/api/native-api.rst | 30 +++++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 6edd413b7a5..f7aaa8f4ee4 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -426,6 +426,7 @@ This method returns the permissions that the calling user has on a particular fi In particular, the user permissions that this method checks, returned as booleans, are the following: * Can download the file +* Can manage the file permissions * Can edit the file owner dataset A curl example using an ``id``:: diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 3ac145b2f8e..f735079b334 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -909,6 +909,16 @@ The fully expanded example above (without environment variables) looks like this curl 
"https://demo.dataverse.org/api/datasets/24/versions/1.0" +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeDeaccessioned=true" + .. _export-dataset-metadata-api: Export Metadata of a Dataset in Various Formats @@ -2496,6 +2506,26 @@ The API can also be used to reset the dataset to use the default/inherited value curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" +Get User Permissions on a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call returns the permissions that the calling user has on a particular dataset. + +In particular, the user permissions that this method checks, returned as booleans, are the following: + +* Can view the unpublished dataset +* Can edit the dataset +* Can publish the dataset +* Can manage the dataset permissions +* Can delete the dataset draft + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/datasets/$ID/userPermissions" Files From 56b291f4bf3a1ed4e48740ed50666a1709d4febf Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Wed, 18 Oct 2023 09:42:45 +0200 Subject: [PATCH 140/252] Log Dataverse ID instead of JSON path --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 94a0f33a83e..2f10e623047 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -287,7 +287,7 @@ static String getAliasFromResponse(Response createDataverseResponse) { static Integer getDataverseIdFromResponse(Response createDataverseResponse) { JsonPath createdDataverse = JsonPath.from(createDataverseResponse.body().asString()); int dataverseId = createdDataverse.getInt("data.id"); - logger.info("Id found in create dataverse response: " + createdDataverse); + logger.info("Id found in create dataverse response: " + dataverseId); return dataverseId; } From e886c1adcd2cfe06e1b01a514350ba1f7f586cc1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 09:04:27 +0100 Subject: [PATCH 141/252] Added: includeDeaccessioned IT test case for getDatasetVersion --- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 14 +++++++++++++- .../java/edu/harvard/iq/dataverse/api/FilesIT.java | 8 ++++---- .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 3 ++- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 4258773a0b3..569ebe0894b 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -505,7 +505,7 @@ public void testCreatePublishDestroyDataset() { assertTrue(datasetContactFromExport.toString().contains("finch@mailinator.com")); assertTrue(firstValue.toString().contains("finch@mailinator.com")); - Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken); + Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); getDatasetVersion.prettyPrint(); getDatasetVersion.then().assertThat() .body("data.datasetId", equalTo(datasetId)) @@ -549,6 +549,18 @@ public void testCreatePublishDestroyDataset() { } assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo")); + // Test includeDeaccessioned option + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); + getDatasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, true, apiToken); + getDatasetVersion.then().assertThat().statusCode(OK.getStatusCode()); + Response deleteDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 16726485dee..1f1321bad79 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1989,14 +1989,14 @@ public void testDeleteFile() { deleteResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 2 deleted from post v1.0 draft - Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, false, apiToken); postv1draft.prettyPrint(); postv1draft.then().assertThat() .body("data.files.size()", equalTo(1)) .statusCode(OK.getStatusCode()); // Check file 2 still in v1.0 - Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken); + Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", false, apiToken); v1.prettyPrint(); v1.then().assertThat() .body("data.files[0].dataFile.filename", equalTo("cc0.png")) @@ -2011,7 +2011,7 @@ public void testDeleteFile() { downloadResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 still in post v1.0 draft - Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, false, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) @@ -2026,7 +2026,7 @@ public void testDeleteFile() { deleteResponse3.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 deleted from post v1.0 draft - Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, false, apiToken); postv1draft3.prettyPrint(); postv1draft3.then().assertThat() .body("data.files[0]", equalTo(null)) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index be23df5ec63..0a1061c30ea 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1399,9 +1399,10 @@ static Response nativeGetUsingPersistentId(String persistentId, String apiToken) return response; } - static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken) { + static Response getDatasetVersion(String persistentId, String versionNumber, boolean includeDeaccessioned, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("includeDeaccessioned", includeDeaccessioned) .get("/api/datasets/:persistentId/versions/" + versionNumber + "?persistentId=" + persistentId); } From 52d439d3284cf91064dbabcd3dbe401faeb3ba4d Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 09:10:53 +0100 Subject: [PATCH 142/252] Fixed: minor docs tweak --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f735079b334..6f1c3072a55 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2511,7 +2511,7 @@ Get User Permissions on a Dataset This API call returns the permissions that the calling user has on a particular dataset. 
-In particular, the user permissions that this method checks, returned as booleans, are the following: +In particular, the user permissions that this API call checks, returned as booleans, are the following: * Can view the unpublished dataset * Can edit the dataset From fa1b37bca410e903c9474ebf9aa6f38fd0b59c70 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 09:12:37 +0100 Subject: [PATCH 143/252] Added: release notes for #10001 --- .../10001-datasets-files-api-user-permissions.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 doc/release-notes/10001-datasets-files-api-user-permissions.md diff --git a/doc/release-notes/10001-datasets-files-api-user-permissions.md b/doc/release-notes/10001-datasets-files-api-user-permissions.md new file mode 100644 index 00000000000..0aa75f9218a --- /dev/null +++ b/doc/release-notes/10001-datasets-files-api-user-permissions.md @@ -0,0 +1,13 @@ +- New query parameter `includeDeaccessioned` added to the getVersion endpoint (/api/datasets/{id}/versions/{versionId}) to consider deaccessioned versions when searching for versions. + + +- New endpoint to get user permissions on a dataset (/api/datasets/{id}/userPermissions). In particular, the user permissions that this API call checks, returned as booleans, are the following: + + - Can view the unpublished dataset + - Can edit the dataset + - Can publish the dataset + - Can manage the dataset permissions + - Can delete the dataset draft + + +- New permission check "canManageFilePermissions" added to the existing endpoint for getting user permissions on a file (/api/access/datafile/{id}/userPermissions). 
\ No newline at end of file From 836d4a7006ea222f30f34d816c7e388a44d44142 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Wed, 18 Oct 2023 13:15:51 +0200 Subject: [PATCH 144/252] Authenticate using API token in header Co-authored-by: Philip Durbin --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 2f10e623047..b4d77252615 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -375,7 +375,7 @@ static Response showDataverseContents(String alias, String apiToken) { static Response getGuestbookResponses(String dataverseAlias, Long guestbookId, String apiToken) { RequestSpecification requestSpec = given() - .auth().basic(apiToken, EMPTY_STRING); + .header(API_TOKEN_HTTP_HEADER, apiToken); if (guestbookId != null) { requestSpec.queryParam("guestbookId", guestbookId); } From 7d7d73165e88c7791f1271bd55a3977134c978c9 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Wed, 18 Oct 2023 13:20:23 +0200 Subject: [PATCH 145/252] Adjust expectations for getGuestbookResponses --- .../iq/dataverse/api/DataversesIT.java | 33 +++++++++++++++---- 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 171a35ac04f..78ece6ecc42 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -17,11 +17,13 @@ import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import jakarta.ws.rs.core.Response.Status; -import static 
jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.OK; +import static jakarta.ws.rs.core.Response.Status.CREATED; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; +import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; +import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -144,23 +146,42 @@ public void testMinimalDataverse() throws FileNotFoundException { deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); } + /** + * A regular user can create a Dataverse Collection and access its + * GuestbookResponses by DV alias or ID. + * A request for a non-existent Dataverse's GuestbookResponses returns + * Not Found. + * A regular user cannot access the guestbook responses for a Dataverse + * that they do not have permissions for, like the root Dataverse. + */ @Test public void testGetGuestbookResponses() { Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); String apiToken = UtilIT.getApiTokenFromResponse(createUser); - // Create a Dataverse + Response create = UtilIT.createRandomDataverse(apiToken); create.prettyPrint(); create.then().assertThat().statusCode(CREATED.getStatusCode()); String alias = UtilIT.getAliasFromResponse(create); Integer dvId = UtilIT.getDataverseIdFromResponse(create); - // Get GuestbookResponses by Dataverse alias + + logger.info("Request guestbook responses for non-existent Dataverse"); + Response getResponsesByBadAlias = UtilIT.getGuestbookResponses("-1", null, apiToken); + getResponsesByBadAlias.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + logger.info("Request guestbook responses for existent Dataverse by alias"); Response getResponsesByAlias = UtilIT.getGuestbookResponses(alias, null, apiToken); getResponsesByAlias.then().assertThat().statusCode(OK.getStatusCode()); 
- // Get GuestbookResponses by Dataverse ID + + logger.info("Request guestbook responses for existent Dataverse by ID"); Response getResponsesById = UtilIT.getGuestbookResponses(dvId.toString(), null, apiToken); getResponsesById.then().assertThat().statusCode(OK.getStatusCode()); + + logger.info("Request guestbook responses for root Dataverse by alias"); + getResponsesById = UtilIT.getGuestbookResponses("root", null, apiToken); + getResponsesById.prettyPrint(); + getResponsesById.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); } @Test From 3c98c7d00e8e24be44e40b818d2ad2ff61a8ab29 Mon Sep 17 00:00:00 2001 From: bencomp Date: Thu, 19 Oct 2023 00:22:16 +0200 Subject: [PATCH 146/252] Fix identifer typo in docs --- .../source/developers/s3-direct-upload-api.rst | 8 ++++---- doc/sphinx-guides/source/user/dataset-management.rst | 2 +- doc/sphinx-guides/source/user/find-use-data.rst | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst index 4bf2bbdcc79..423fb02d385 100644 --- a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst +++ b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst @@ -116,7 +116,7 @@ The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.Data curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA" Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. 
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. To add multiple Uploaded Files to the Dataset --------------------------------------------- @@ -147,7 +147,7 @@ The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.Data curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/addFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA" Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. -With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. 
Replacing an existing file in the Dataset @@ -177,7 +177,7 @@ Note that the API call does not validate that the file matches the hash value su curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA" Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. -With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. Replacing multiple existing files in the Dataset ------------------------------------------------ @@ -275,4 +275,4 @@ The JSON object returned as a response from this API call includes a "data" that Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. 
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 1e8ea897032..bac0192bdd6 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -784,7 +784,7 @@ The "Compute" button on dataset and file pages will allow you to compute on a si Cloud Storage Access -------------------- -If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifer can then be used to allow direct access to the dataset. +If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifier can then be used to allow direct access to the dataset. .. 
_deaccession: diff --git a/doc/sphinx-guides/source/user/find-use-data.rst b/doc/sphinx-guides/source/user/find-use-data.rst index 2e82a1482b4..bea23cbcd0e 100755 --- a/doc/sphinx-guides/source/user/find-use-data.rst +++ b/doc/sphinx-guides/source/user/find-use-data.rst @@ -71,7 +71,7 @@ View Files Files in a Dataverse installation each have their own landing page that can be reached through the search results or through the Files table on their parent dataset's page. The dataset page and file page offer much the same functionality in terms of viewing and editing files, with a few small exceptions. -- In installations that have enabled support for persistent identifers (PIDs) at the file level, the file page includes the file's DOI or handle, which can be found in the file citation and also under the Metadata tab. +- In installations that have enabled support for persistent identifiers (PIDs) at the file level, the file page includes the file's DOI or handle, which can be found in the file citation and also under the Metadata tab. - Previewers for several common file types are available and can be added by installation administrators. - The file page's Versions tab gives you a version history that is more focused on the individual file rather than the dataset as a whole. From d76e494f7c6889d9dfc23406e06b25f1a80d3507 Mon Sep 17 00:00:00 2001 From: bencomp Date: Thu, 19 Oct 2023 01:04:50 +0200 Subject: [PATCH 147/252] Fix identifer typo in code Just in case code outside this project is referencing the old methods, I created deprecated versions. 
--- .../dataverse/DatasetVersionServiceBean.java | 26 ++++++++++++++----- .../java/edu/harvard/iq/dataverse/Shib.java | 11 ++++---- .../CollectionListManagerImpl.java | 2 +- .../datadeposit/MediaResourceManagerImpl.java | 2 +- .../AuthenticationServiceBean.java | 10 +++---- .../authorization/UserIdentifier.java | 21 ++++++++++++--- .../providers/shib/ShibUtil.java | 17 ++++++++++++ .../filesystem/FileRecordJobListener.java | 2 +- .../importer/filesystem/FileRecordReader.java | 2 +- .../impl/ImportFromFileSystemCommand.java | 2 +- .../edu/harvard/iq/dataverse/api/AdminIT.java | 12 ++++----- .../providers/shib/ShibUtilTest.java | 18 ++++++------- 12 files changed, 85 insertions(+), 40 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index c2f9027a38a..cd3291e6222 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -495,10 +495,24 @@ private DatasetVersion getDatasetVersionByQuery(String queryString){ } } // end getDatasetVersionByQuery - - - - public DatasetVersion retrieveDatasetVersionByIdentiferClause(String identifierClause, String version){ + /** + * @deprecated because of a typo; use {@link #retrieveDatasetVersionByIdentifierClause(String, String) retrieveDatasetVersionByIdentifierClause} instead + * @see #retrieveDatasetVersionByIdentifierClause(String, String) + * @param identifierClause + * @param version + * @return a DatasetVersion if found, or {@code null} otherwise + */ + @Deprecated + public DatasetVersion retrieveDatasetVersionByIdentiferClause(String identifierClause, String version) { + return retrieveDatasetVersionByIdentifierClause(identifierClause, version); + } + + /** + * @param identifierClause + * @param version + * @return a DatasetVersion if found, or {@code null} otherwise + */ + public DatasetVersion 
retrieveDatasetVersionByIdentifierClause(String identifierClause, String version) { if (identifierClause == null){ return null; @@ -620,7 +634,7 @@ public RetrieveDatasetVersionResponse retrieveDatasetVersionByPersistentId(Strin identifierClause += " AND ds.identifier = '" + parsedId.getIdentifier() + "'"; - DatasetVersion ds = retrieveDatasetVersionByIdentiferClause(identifierClause, version); + DatasetVersion ds = retrieveDatasetVersionByIdentifierClause(identifierClause, version); if (ds != null){ msg("retrieved dataset: " + ds.getId() + " semantic: " + ds.getSemanticVersion()); @@ -718,7 +732,7 @@ public DatasetVersion getDatasetVersionById(Long datasetId, String version){ String identifierClause = this.getIdClause(datasetId); - DatasetVersion ds = retrieveDatasetVersionByIdentiferClause(identifierClause, version); + DatasetVersion ds = retrieveDatasetVersionByIdentifierClause(identifierClause, version); return ds; diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index bee1182e248..24c0f9d7926 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.Date; import java.util.List; +import java.util.logging.Level; import java.util.logging.Logger; import jakarta.ejb.EJB; import jakarta.ejb.EJBException; @@ -62,7 +63,7 @@ public class Shib implements java.io.Serializable { HttpServletRequest request; private String userPersistentId; - private String internalUserIdentifer; + private String internalUserIdentifier; AuthenticatedUserDisplayInfo displayInfo; /** * @todo Remove this boolean some day? Now the mockups show a popup. 
Should @@ -210,8 +211,8 @@ public void init() { } String usernameAssertion = getValueFromAssertion(ShibUtil.usernameAttribute); - internalUserIdentifer = ShibUtil.generateFriendlyLookingUserIdentifer(usernameAssertion, emailAddress); - logger.fine("friendly looking identifer (backend will enforce uniqueness):" + internalUserIdentifer); + internalUserIdentifier = ShibUtil.generateFriendlyLookingUserIdentifier(usernameAssertion, emailAddress); + logger.log(Level.FINE, "friendly looking identifier (backend will enforce uniqueness): {0}", internalUserIdentifier); String shibAffiliationAttribute = settingsService.getValueForKey(SettingsServiceBean.Key.ShibAffiliationAttribute); String affiliation = (StringUtils.isNotBlank(shibAffiliationAttribute)) @@ -326,7 +327,7 @@ public String confirmAndCreateAccount() { AuthenticatedUser au = null; try { au = authSvc.createAuthenticatedUser( - new UserRecordIdentifier(shibAuthProvider.getId(), lookupStringPerAuthProvider), internalUserIdentifer, displayInfo, true); + new UserRecordIdentifier(shibAuthProvider.getId(), lookupStringPerAuthProvider), internalUserIdentifier, displayInfo, true); } catch (EJBException ex) { /** * @todo Show the ConstraintViolationException, if any. 
@@ -354,7 +355,7 @@ public String confirmAndConvertAccount() { visibleTermsOfUse = false; ShibAuthenticationProvider shibAuthProvider = new ShibAuthenticationProvider(); String lookupStringPerAuthProvider = userPersistentId; - UserIdentifier userIdentifier = new UserIdentifier(lookupStringPerAuthProvider, internalUserIdentifer); + UserIdentifier userIdentifier = new UserIdentifier(lookupStringPerAuthProvider, internalUserIdentifier); logger.fine("builtin username: " + builtinUsername); AuthenticatedUser builtInUserToConvert = authSvc.canLogInAsBuiltinUser(builtinUsername, builtinPassword); if (builtInUserToConvert != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java index 084136f2b5d..541fa144e80 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java @@ -94,7 +94,7 @@ public Feed listCollectionContents(IRI iri, AuthCredentials authCredentials, Swo throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse: " + dvAlias); } } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't determine target type or identifer from URL: " + iri); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't determine target type or identifier from URL: " + iri); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 15838a09456..a878720cc39 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -219,7 +219,7 @@ public void deleteMediaResource(String uri, AuthCredentials authCredentials, Swo throw 
new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported file type found in URL: " + uri); } } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Target or identifer not specified in URL: " + uri); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Target or identifier not specified in URL: " + uri); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 106a83a4ad1..496620cd6e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -580,7 +580,7 @@ public boolean updateProvider( AuthenticatedUser authenticatedUser, String authe * {@code userDisplayInfo}, a lookup entry for them based * UserIdentifier.getLookupStringPerAuthProvider (within the supplied * authentication provider), and internal user identifier (used for role - * assignments, etc.) based on UserIdentifier.getInternalUserIdentifer. + * assignments, etc.) based on UserIdentifier.getInternalUserIdentifier. * * @param userRecordId * @param proposedAuthenticatedUserIdentifier @@ -605,20 +605,20 @@ public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecord proposedAuthenticatedUserIdentifier = proposedAuthenticatedUserIdentifier.trim(); } // we now select a username for the generated AuthenticatedUser, or give up - String internalUserIdentifer = proposedAuthenticatedUserIdentifier; + String internalUserIdentifier = proposedAuthenticatedUserIdentifier; // TODO should lock table authenticated users for write here - if ( identifierExists(internalUserIdentifer) ) { + if ( identifierExists(internalUserIdentifier) ) { if ( ! 
generateUniqueIdentifier ) { return null; } int i=1; - String identifier = internalUserIdentifer + i; + String identifier = internalUserIdentifier + i; while ( identifierExists(identifier) ) { i += 1; } authenticatedUser.setUserIdentifier(identifier); } else { - authenticatedUser.setUserIdentifier(internalUserIdentifer); + authenticatedUser.setUserIdentifier(internalUserIdentifier); } authenticatedUser = save( authenticatedUser ); // TODO should unlock table authenticated users for write here diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java index 1ac2c7583d6..312910e52c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java @@ -25,18 +25,31 @@ public class UserIdentifier { /** * The String used in the permission system to assign roles, for example. */ - String internalUserIdentifer; + String internalUserIdentifier; - public UserIdentifier(String lookupStringPerAuthProvider, String internalUserIdentifer) { + public UserIdentifier(String lookupStringPerAuthProvider, String internalUserIdentifier) { this.lookupStringPerAuthProvider = lookupStringPerAuthProvider; - this.internalUserIdentifer = internalUserIdentifer; + this.internalUserIdentifier = internalUserIdentifier; } public String getLookupStringPerAuthProvider() { return lookupStringPerAuthProvider; } + /** + * @deprecated because of a typo; use {@link #getInternalUserIdentifier()} instead + * @see #getInternalUserIdentifier() + * @return the internal user identifier + */ + @Deprecated public String getInternalUserIdentifer() { - return internalUserIdentifer; + return getInternalUserIdentifier(); + } + + /** + * @return the internal user identifier + */ + public String getInternalUserIdentifier() { + return internalUserIdentifier; } } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java index fff135e0dec..4cf41903405 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java @@ -133,7 +133,24 @@ public static String findSingleValue(String mayHaveMultipleValues) { return singleValue; } + /** + * @deprecated because of a typo; use {@link #generateFriendlyLookingUserIdentifier(String, String)} instead + * @see #generateFriendlyLookingUserIdentifier(String, String) + * @param usernameAssertion + * @param email + * @return a friendly-looking user identifier based on the asserted username or email, or a UUID as fallback + */ + @Deprecated public static String generateFriendlyLookingUserIdentifer(String usernameAssertion, String email) { + return generateFriendlyLookingUserIdentifier(usernameAssertion, email); + } + + /** + * @param usernameAssertion + * @param email + * @return a friendly-looking user identifier based on the asserted username or email, or a UUID as fallback + */ + public static String generateFriendlyLookingUserIdentifier(String usernameAssertion, String email) { if (usernameAssertion != null && !usernameAssertion.isEmpty()) { return usernameAssertion; } diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java index 593a5cbfdc3..a2f76150d7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java @@ -450,7 +450,7 @@ private void loadChecksumManifest() { // We probably want package files to be able to use specific stores instead. 
// More importantly perhaps, the approach above does not take into account // if the dataset may have an AlternativePersistentIdentifier, that may be - // designated isStorageLocationDesignator() - i.e., if a different identifer + // designated isStorageLocationDesignator() - i.e., if a different identifier // needs to be used to name the storage directory, instead of the main/current // persistent identifier above. getJobLogger().log(Level.INFO, "Reading checksum manifest: " + manifestAbsolutePath); diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java index fb702c21df2..9ce30683a87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java @@ -109,7 +109,7 @@ public void open(Serializable checkpoint) throws Exception { // We probably want package files to be able to use specific stores instead. // More importantly perhaps, the approach above does not take into account // if the dataset may have an AlternativePersistentIdentifier, that may be - // designated isStorageLocationDesignator() - i.e., if a different identifer + // designated isStorageLocationDesignator() - i.e., if a different identifier // needs to be used to name the storage directory, instead of the main/current // persistent identifier above. 
getJobLogger().log(Level.INFO, "Reading dataset directory: " + directory.getAbsolutePath() diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java index c03c77d42fd..9a75f437b66 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java @@ -83,7 +83,7 @@ public JsonObject execute(CommandContext ctxt) throws CommandException { // We probably want package files to be able to use specific stores instead. // More importantly perhaps, the approach above does not take into account // if the dataset may have an AlternativePersistentIdentifier, that may be - // designated isStorageLocationDesignator() - i.e., if a different identifer + // designated isStorageLocationDesignator() - i.e., if a different identifier // needs to be used to name the storage directory, instead of the main/current // persistent identifier above. 
if (!isValidDirectory(directory)) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index a5a4924ad77..0c5de662e8a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -153,10 +153,10 @@ public void testFilterAuthenticatedUsers() throws Exception { .body("data.pagination.pageCount", equalTo(1)) .body("data.pagination.numResults", equalTo(numResults)); - String userIdentifer; + String userIdentifier; for (int i=0; i < numResults; i++){ - userIdentifer = JsonPath.from(filterReponse01.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); - assertEquals(randomUsernames.contains(userIdentifer), true); + userIdentifier = JsonPath.from(filterReponse01.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); + assertTrue(randomUsernames.contains(userIdentifier)); } List userList1 = JsonPath.from(filterReponse01.body().asString()).getList("data.users"); @@ -177,10 +177,10 @@ public void testFilterAuthenticatedUsers() throws Exception { .body("data.pagination.pageCount", equalTo(3)) .body("data.pagination.numResults", equalTo(numResults)); - String userIdentifer2; + String userIdentifier2; for (int i=0; i < numUsersReturned; i++){ - userIdentifer2 = JsonPath.from(filterReponse02.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); - assertEquals(randomUsernames.contains(userIdentifer2), true); + userIdentifier2 = JsonPath.from(filterReponse02.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); + assertTrue(randomUsernames.contains(userIdentifier2)); } List userList2 = JsonPath.from(filterReponse02.body().asString()).getList("data.users"); diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java index 
c644a4e2b2a..9ace90ac496 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java @@ -102,16 +102,16 @@ public void testFindSingleValue() { } @Test - public void testGenerateFriendlyLookingUserIdentifer() { + public void testGenerateFriendlyLookingUserIdentifier() { int lengthOfUuid = UUID.randomUUID().toString().length(); - assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifer("uid1", null)); - assertEquals(" leadingWhiteSpace", ShibUtil.generateFriendlyLookingUserIdentifer(" leadingWhiteSpace", null)); - assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifer("uid1", "email1@example.com")); - assertEquals("email1", ShibUtil.generateFriendlyLookingUserIdentifer(null, "email1@example.com")); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, null).length()); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, "").length()); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer("", null).length()); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, "junkEmailAddress").length()); + assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifier("uid1", null)); + assertEquals(" leadingWhiteSpace", ShibUtil.generateFriendlyLookingUserIdentifier(" leadingWhiteSpace", null)); + assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifier("uid1", "email1@example.com")); + assertEquals("email1", ShibUtil.generateFriendlyLookingUserIdentifier(null, "email1@example.com")); + assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, null).length()); + assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, "").length()); + assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier("", null).length()); + assertEquals(lengthOfUuid, 
ShibUtil.generateFriendlyLookingUserIdentifier(null, "junkEmailAddress").length()); } @Test From 5bca73896f0b482a9c8f838d3a01d37d235b57ac Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Thu, 19 Oct 2023 18:41:02 +0530 Subject: [PATCH 148/252] Added tabulartags in files metadata for files metadata --- doc/sphinx-guides/source/api/native-api.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index eedf23fd04e..1e0804ce7d8 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,9 +3136,11 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}', "dataFileTags":["Survey"]} \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" +Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. + Also note that dataFileTags are not versioned and changes to these will update the published version of the file. .. 
_EditingVariableMetadata: From 520d5d6bdbea31ad56d465b7a2b4a1fdfaf40bb6 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 00:24:13 +0530 Subject: [PATCH 149/252] correcting the Invalid JSON error --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 1e0804ce7d8..73a10f2b409 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}', "dataFileTags":["Survey"]} \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false, "dataFileTags":["Survey"]} \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
From df7a4cfb9597b3c224a8d753b3a84d57d643af34 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 00:36:20 +0530 Subject: [PATCH 150/252] minor change --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 73a10f2b409..fcd2594ac6a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false, "dataFileTags":["Survey"]} \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false, "dataFileTags":["Survey"]} \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
From 8c9828a3fa0c2d534ebcfc57f4cc17427e75f0b8 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 00:39:49 +0530 Subject: [PATCH 151/252] another small change --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index fcd2594ac6a..ffb15b41fd1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false, "dataFileTags":["Survey"]} \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false, "dataFileTags":["Survey"]}' \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
From e22c5b45cc6167fd3cb7f67cf817a7aa3841d321 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 00:52:49 +0530 Subject: [PATCH 152/252] remove extra white spaces --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index ffb15b41fd1..43d0b75a618 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false, "dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
From 7fa9ce239ce838829bad9d8fd8cfa916390aad60 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 06:56:42 +0530 Subject: [PATCH 153/252] added back "categories" tag in codes --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 43d0b75a618..24475dfaeb1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","dataFileTags":["Survey"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
From 77c606fcabb4ae1434e1fde07557ed3736bb28eb Mon Sep 17 00:00:00 2001 From: Abhinav Rana <142827270+AR-2910@users.noreply.github.com> Date: Sun, 22 Oct 2023 23:02:41 +0530 Subject: [PATCH 154/252] Update S3AccessIO.java Issue: Problem with long file stores ID in JVM options #8312 Change made in line 194. --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 822ada0b83e..e3c6bfede7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -191,7 +191,7 @@ public void open(DataAccessOption... options) throws IOException { } } // else we're OK (assumes bucket name in storageidentifier matches the driver's bucketname) } else { - if(!storageIdentifier.substring((this.driverId + DataAccess.SEPARATOR).length()).contains(":")) { + if(!storageIdentifier.contains(":")) { //No driver id or bucket newStorageIdentifier= this.driverId + DataAccess.SEPARATOR + bucketName + ":" + storageIdentifier; } else { @@ -1385,4 +1385,4 @@ public List cleanUp(Predicate filter, boolean dryRun) throws IOE } return toDelete; } -} \ No newline at end of file +} From ea6644b3d6dd2fa2eb1cf7a17f620f7a25a5b871 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 24 Oct 2023 13:11:42 +0100 Subject: [PATCH 155/252] Changed: rewritten DatasetVersionFilesServiceBean getFileMetadataCount using JPA Criteria (pending accessStatus search criteria filtering) --- .../DatasetVersionFilesServiceBean.java | 44 +++++++++++++++++-- 1 file changed, 40 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index fc662ee80bb..d65014e62a3 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -24,6 +24,7 @@ import java.io.Serializable; import java.sql.Timestamp; import java.time.LocalDate; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -31,6 +32,7 @@ import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; +import jakarta.persistence.criteria.*; @Stateless @Named @@ -73,10 +75,13 @@ public enum FileDownloadSizeMode { * @return long value of total file metadata count */ public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - return baseQuery.stream().count(); + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); + Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); + criteriaQuery.select(criteriaBuilder.count(fileMetadataRoot)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate)); + return em.createQuery(criteriaQuery).getSingleResult(); } /** @@ -254,6 +259,37 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAcces return accessStatusExpression; } + private Predicate createSearchCriteriaPredicate(FileSearchCriteria searchCriteria, CriteriaBuilder criteriaBuilder, CriteriaQuery 
criteriaQuery, Root fileMetadataRoot) { + List predicates = new ArrayList<>(); + String contentType = searchCriteria.getContentType(); + if (contentType != null) { + predicates.add(criteriaBuilder.equal(fileMetadataRoot.get("dataFile").get("contentType"), contentType)); + } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); + if (accessStatus != null) { + // TODO + } + String categoryName = searchCriteria.getCategoryName(); + if (categoryName != null) { + Root dataFileCategoryRoot = criteriaQuery.from(DataFileCategory.class); + predicates.add(criteriaBuilder.equal(dataFileCategoryRoot.get("name"), categoryName)); + predicates.add(dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories"))); + } + String tabularTagName = searchCriteria.getTabularTagName(); + if (tabularTagName != null) { + Root dataFileTagRoot = criteriaQuery.from(DataFileTag.class); + predicates.add(criteriaBuilder.equal(dataFileTagRoot.get("type"), TagLabelToTypes.get(tabularTagName))); + predicates.add(dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags"))); + } + String searchText = searchCriteria.getSearchText(); + if (searchText != null && !searchText.isEmpty()) { + searchText = searchText.trim().toLowerCase(); + predicates.add(criteriaBuilder.like(fileMetadataRoot.get("label"), "%" + searchText + "%")); + } + return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); + } + + @Deprecated private void applyFileSearchCriteriaToQuery(JPAQuery baseQuery, FileSearchCriteria searchCriteria) { String contentType = searchCriteria.getContentType(); if (contentType != null) { From 65bed77a39eb99a97e9cbdc01e79c3310664898c Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 24 Oct 2023 18:05:50 +0100 Subject: [PATCH 156/252] Added: accessStatus search criteria filtering for JPA Criteria queries --- .../DatasetVersionFilesServiceBean.java | 30 +++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index d65014e62a3..c2efa72579f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -24,10 +24,7 @@ import java.io.Serializable; import java.sql.Timestamp; import java.time.LocalDate; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; @@ -235,6 +232,29 @@ private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion da return baseQuery; } + private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus accessStatus, CriteriaBuilder criteriaBuilder, Root fileMetadataRoot) { + Path dataFile = fileMetadataRoot.get("dataFile"); + + Path embargo = dataFile.get("embargo"); + Predicate activelyEmbargoedPredicate = criteriaBuilder.greaterThanOrEqualTo(embargo.get("dateAvailable"), criteriaBuilder.currentDate()); + Predicate inactivelyEmbargoedPredicate = criteriaBuilder.isNull(embargo); + + Path isRestricted = dataFile.get("restricted"); + Predicate isRestrictedPredicate = criteriaBuilder.isTrue(isRestricted); + Predicate isUnrestrictedPredicate = criteriaBuilder.isFalse(isRestricted); + + return switch (accessStatus) { + case EmbargoedThenRestricted -> + criteriaBuilder.and(activelyEmbargoedPredicate, isRestrictedPredicate); + case EmbargoedThenPublic -> + criteriaBuilder.and(activelyEmbargoedPredicate, isUnrestrictedPredicate); + case Restricted -> + criteriaBuilder.and(inactivelyEmbargoedPredicate, isRestrictedPredicate); + case Public -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isUnrestrictedPredicate); + }; + } + + @Deprecated private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAccessStatus accessStatus) { QEmbargo 
embargo = fileMetadata.dataFile.embargo; BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); @@ -267,7 +287,7 @@ private Predicate createSearchCriteriaPredicate(FileSearchCriteria searchCriteri } FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); if (accessStatus != null) { - // TODO + predicates.add(createSearchCriteriaAccessStatusPredicate(accessStatus, criteriaBuilder, fileMetadataRoot)); } String categoryName = searchCriteria.getCategoryName(); if (categoryName != null) { From a8afef3bf8519b60fe70c84bc071143031b73196 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 24 Oct 2023 13:25:02 -0400 Subject: [PATCH 157/252] #10030 document localhost-only behavior of Solr-9.3.0 and later --- .../source/_static/installation/files/etc/init.d/solr | 2 +- .../_static/installation/files/etc/systemd/solr.service | 2 +- doc/sphinx-guides/source/installation/prerequisites.rst | 6 ++---- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr index 9cf8902eb14..f7dba504e70 100755 --- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr +++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr @@ -7,7 +7,7 @@ SOLR_DIR="/usr/local/solr/solr-9.3.0" SOLR_COMMAND="bin/solr" -SOLR_ARGS="-m 1g -j jetty.host=127.0.0.1" +SOLR_ARGS="-m 1g" SOLR_USER=solr case $1 in diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service index 0b8a8528490..2ceeb0016d6 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service +++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service @@ -6,7 +6,7 @@ After = syslog.target network.target remote-fs.target nss-lookup.target User = 
solr Type = forking WorkingDirectory = /usr/local/solr/solr-9.3.0 -ExecStart = /usr/local/solr/solr-9.3.0/bin/solr start -m 1g -j "jetty.host=127.0.0.1" +ExecStart = /usr/local/solr/solr-9.3.0/bin/solr start -m 1g ExecStop = /usr/local/solr/solr-9.3.0/bin/solr stop LimitNOFILE=65000 LimitNPROC=65000 diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 1847f1b8f63..cff20ccab1d 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -233,11 +233,9 @@ For systems using init.d (like CentOS 6), download this :download:`Solr init scr Securing Solr ============= -Our sample init script and systemd service file linked above tell Solr to only listen on localhost (127.0.0.1). We strongly recommend that you also use a firewall to block access to the Solr port (8983) from outside networks, for added redundancy. +As of version 9.3.0, Solr listens solely on localhost for security reasons. If your installation will run Solr on its own host, you will need to edit ``bin/solr.in.sh``, setting ``JETTY_HOST`` to the external IP address of your Solr server to tell Solr to accept external connections. -It is **very important** not to allow direct access to the Solr API from outside networks! Otherwise, any host that can reach the Solr port (8983 by default) can add or delete data, search unpublished data, and even reconfigure Solr. For more information, please see https://lucene.apache.org/solr/guide/7_3/securing-solr.html. A particularly serious security issue that has been identified recently allows a potential intruder to remotely execute arbitrary code on the system. See `RCE in Solr via Velocity Template `_ for more information. 
- -If you're running your Dataverse installation across multiple service hosts you'll want to remove the jetty.host argument (``-j jetty.host=127.0.0.1``) from the startup command line, but make sure Solr is behind a firewall and only accessible by the Dataverse installation host(s), by specific ip address(es). +We strongly recommend that you also use a firewall to block access to the Solr port (8983) from outside networks. It is **very important** not to allow direct access to the Solr API from outside networks! Otherwise, any host that can reach Solr can add or delete data, search unpublished data, and even reconfigure Solr. For more information, please see https://solr.apache.org/guide/solr/latest/deployment-guide/securing-solr.html We additionally recommend that the Solr service account's shell be disabled, as it isn't necessary for daily operation:: From 68081a936fb927cbd20f0e9eb9630a3752121a13 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 24 Oct 2023 18:33:45 +0100 Subject: [PATCH 158/252] Changed: rewritten DatasetVersionFilesServiceBean getFileMetadataCountPerContentType using JPA Criteria --- .../DatasetVersionFilesServiceBean.java | 34 ++++++++----------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index c2efa72579f..aa92f44c302 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -89,17 +89,17 @@ public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriter * @return Map of file metadata counts per content type */ public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory - 
.select(fileMetadata.dataFile.contentType, fileMetadata.count()) - .from(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) - .groupBy(fileMetadata.dataFile.contentType); - applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - List contentTypeOccurrences = baseQuery.fetch(); + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Path contentType = fileMetadataRoot.get("dataFile").get("contentType"); + Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); + Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); + criteriaQuery.multiselect(contentType, criteriaBuilder.count(contentType)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate)).groupBy(contentType); + List contentTypeOccurrences = em.createQuery(criteriaQuery).getResultList(); Map result = new HashMap<>(); - for (Tuple occurrence : contentTypeOccurrences) { - result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); + for (jakarta.persistence.Tuple occurrence : contentTypeOccurrences) { + result.put(occurrence.get(0, String.class), occurrence.get(1, Long.class)); } return result; } @@ -234,22 +234,16 @@ private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion da private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus accessStatus, CriteriaBuilder criteriaBuilder, Root fileMetadataRoot) { Path dataFile = fileMetadataRoot.get("dataFile"); - Path embargo = dataFile.get("embargo"); Predicate activelyEmbargoedPredicate = criteriaBuilder.greaterThanOrEqualTo(embargo.get("dateAvailable"), criteriaBuilder.currentDate()); Predicate inactivelyEmbargoedPredicate = criteriaBuilder.isNull(embargo); - Path 
isRestricted = dataFile.get("restricted"); Predicate isRestrictedPredicate = criteriaBuilder.isTrue(isRestricted); Predicate isUnrestrictedPredicate = criteriaBuilder.isFalse(isRestricted); - return switch (accessStatus) { - case EmbargoedThenRestricted -> - criteriaBuilder.and(activelyEmbargoedPredicate, isRestrictedPredicate); - case EmbargoedThenPublic -> - criteriaBuilder.and(activelyEmbargoedPredicate, isUnrestrictedPredicate); - case Restricted -> - criteriaBuilder.and(inactivelyEmbargoedPredicate, isRestrictedPredicate); + case EmbargoedThenRestricted -> criteriaBuilder.and(activelyEmbargoedPredicate, isRestrictedPredicate); + case EmbargoedThenPublic -> criteriaBuilder.and(activelyEmbargoedPredicate, isUnrestrictedPredicate); + case Restricted -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isRestrictedPredicate); case Public -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isUnrestrictedPredicate); }; } @@ -279,7 +273,7 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAcces return accessStatusExpression; } - private Predicate createSearchCriteriaPredicate(FileSearchCriteria searchCriteria, CriteriaBuilder criteriaBuilder, CriteriaQuery criteriaQuery, Root fileMetadataRoot) { + private Predicate createSearchCriteriaPredicate(FileSearchCriteria searchCriteria, CriteriaBuilder criteriaBuilder, CriteriaQuery criteriaQuery, Root fileMetadataRoot) { List predicates = new ArrayList<>(); String contentType = searchCriteria.getContentType(); if (contentType != null) { From 8fbbaf78aa48ccff5c9f10f9078758ac6c06069f Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 24 Oct 2023 18:44:52 +0100 Subject: [PATCH 159/252] Changed: rewritten DatasetVersionFilesServiceBean getFileMetadataCountPerCategoryName using JPA Criteria --- .../DatasetVersionFilesServiceBean.java | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index aa92f44c302..622f7883d42 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -92,9 +92,9 @@ public Map getFileMetadataCountPerContentType(DatasetVersion datas CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); - Path contentType = fileMetadataRoot.get("dataFile").get("contentType"); Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); + Path contentType = fileMetadataRoot.get("dataFile").get("contentType"); criteriaQuery.multiselect(contentType, criteriaBuilder.count(contentType)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate)).groupBy(contentType); List contentTypeOccurrences = em.createQuery(criteriaQuery).getResultList(); Map result = new HashMap<>(); @@ -111,18 +111,20 @@ public Map getFileMetadataCountPerContentType(DatasetVersion datas * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per category name */ + // TODO: Refactor remove duplication with getFileMetadataCountPerContentType public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory - .select(dataFileCategory.name, fileMetadata.count()) - .from(dataFileCategory, fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) - .groupBy(dataFileCategory.name); - 
applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - List categoryNameOccurrences = baseQuery.fetch(); + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); + Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); + Root dataFileCategoryRoot = criteriaQuery.from(DataFileCategory.class); + Path categoryName = dataFileCategoryRoot.get("name"); + criteriaQuery.multiselect(categoryName, criteriaBuilder.count(fileMetadataRoot)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate, dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories")))).groupBy(categoryName); + List categoryNameOccurrences = em.createQuery(criteriaQuery).getResultList(); Map result = new HashMap<>(); - for (Tuple occurrence : categoryNameOccurrences) { - result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); + for (jakarta.persistence.Tuple occurrence : categoryNameOccurrences) { + result.put(occurrence.get(0, String.class), occurrence.get(1, Long.class)); } return result; } From f9a2f2a4038a727b1487b8ddcf385278600ee0af Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 24 Oct 2023 18:57:13 +0100 Subject: [PATCH 160/252] Changed: rewritten DatasetVersionFilesServiceBean getFileMetadataCountPerTabularTagName using JPA Criteria --- .../DatasetVersionFilesServiceBean.java | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 622f7883d42..e57af63b234 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -136,18 +136,20 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per DataFileTag.TagType */ + // TODO: Refactor remove duplication with getFileMetadataCountPerContentType public Map getFileMetadataCountPerTabularTagName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory - .select(dataFileTag.type, fileMetadata.count()) - .from(dataFileTag, fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))) - .groupBy(dataFileTag.type); - applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - List tagNameOccurrences = baseQuery.fetch(); + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); + Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); + Root dataFileTagRoot = criteriaQuery.from(DataFileTag.class); + Path dataFileTagType = dataFileTagRoot.get("type"); + criteriaQuery.multiselect(dataFileTagType, criteriaBuilder.count(fileMetadataRoot)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate, dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags")))).groupBy(dataFileTagType); + List tagNameOccurrences = em.createQuery(criteriaQuery).getResultList(); Map result = new HashMap<>(); - for (Tuple occurrence : 
tagNameOccurrences) { - result.put(occurrence.get(dataFileTag.type), occurrence.get(fileMetadata.count())); + for (jakarta.persistence.Tuple occurrence : tagNameOccurrences) { + result.put(occurrence.get(0, DataFileTag.TagType.class), occurrence.get(1, Long.class)); } return result; } From 09472413bf23c3ba97eacb79114dd67ba84d19af Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 24 Oct 2023 15:37:13 -0400 Subject: [PATCH 161/252] #10038 document preserving access logging and other configuration in 6.0 upgrade steps --- doc/release-notes/6.0-release-notes.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md index df916216f5b..858cd604bda 100644 --- a/doc/release-notes/6.0-release-notes.md +++ b/doc/release-notes/6.0-release-notes.md @@ -166,6 +166,22 @@ If you are running Payara as a non-root user (and you should be!), **remember no Your `:MDCLogPath` database setting might be pointing to a Payara 5 directory such as `/usr/local/payara5/glassfish/domains/domain1/logs`. If so, edit this to be Payara 6. You'll probably want to copy your logs over as well. +1. If you've enabled access logging or any other site-specific configuration, be sure to preserve them. For instance, the default domain.xml includes + + ``` + + + ``` + + but you may wish to include + + ``` + + + ``` + + Be sure to keep a previous copy of your domain.xml for reference. + 1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable. 1. Start Payara. 
From 7d8135e24fd94b960de8c70025c11074cdfdefdb Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 25 Oct 2023 09:57:17 +0100 Subject: [PATCH 162/252] Refactor: rewritten JPA Criteria queries refactored to avoid duplication and improve legibility --- .../DatasetVersionFilesServiceBean.java | 78 +++++++++++-------- 1 file changed, 46 insertions(+), 32 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index e57af63b234..134f873aa4b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -7,7 +7,6 @@ import edu.harvard.iq.dataverse.QEmbargo; import edu.harvard.iq.dataverse.QFileMetadata; -import com.querydsl.core.Tuple; import com.querydsl.core.types.dsl.BooleanExpression; import com.querydsl.core.types.dsl.CaseBuilder; import com.querydsl.core.types.dsl.DateExpression; @@ -29,6 +28,7 @@ import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; +import jakarta.persistence.Tuple; import jakarta.persistence.criteria.*; @Stateless @@ -75,9 +75,11 @@ public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriter CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); - Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); - Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); - criteriaQuery.select(criteriaBuilder.count(fileMetadataRoot)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate)); + criteriaQuery + 
.select(criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), + createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))); return em.createQuery(criteriaQuery).getSingleResult(); } @@ -90,18 +92,16 @@ public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriter */ public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); - CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); - Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); - Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); Path contentType = fileMetadataRoot.get("dataFile").get("contentType"); - criteriaQuery.multiselect(contentType, criteriaBuilder.count(contentType)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate)).groupBy(contentType); - List contentTypeOccurrences = em.createQuery(criteriaQuery).getResultList(); - Map result = new HashMap<>(); - for (jakarta.persistence.Tuple occurrence : contentTypeOccurrences) { - result.put(occurrence.get(0, String.class), occurrence.get(1, Long.class)); - } - return result; + criteriaQuery + .multiselect(contentType, criteriaBuilder.count(contentType)) + .where(criteriaBuilder.and( + createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), + createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))) + .groupBy(contentType); + return getStringLongMapResultFromQuery(criteriaQuery); } /** @@ 
-111,22 +111,20 @@ public Map getFileMetadataCountPerContentType(DatasetVersion datas * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per category name */ - // TODO: Refactor remove duplication with getFileMetadataCountPerContentType public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); - CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); - Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); - Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); Root dataFileCategoryRoot = criteriaQuery.from(DataFileCategory.class); Path categoryName = dataFileCategoryRoot.get("name"); - criteriaQuery.multiselect(categoryName, criteriaBuilder.count(fileMetadataRoot)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate, dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories")))).groupBy(categoryName); - List categoryNameOccurrences = em.createQuery(criteriaQuery).getResultList(); - Map result = new HashMap<>(); - for (jakarta.persistence.Tuple occurrence : categoryNameOccurrences) { - result.put(occurrence.get(0, String.class), occurrence.get(1, Long.class)); - } - return result; + criteriaQuery + .multiselect(categoryName, criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), + createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)), + dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories"))) + .groupBy(categoryName); + return 
getStringLongMapResultFromQuery(criteriaQuery); } /** @@ -136,19 +134,22 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per DataFileTag.TagType */ - // TODO: Refactor remove duplication with getFileMetadataCountPerContentType public Map getFileMetadataCountPerTabularTagName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); - CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); - Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); - Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); Root dataFileTagRoot = criteriaQuery.from(DataFileTag.class); Path dataFileTagType = dataFileTagRoot.get("type"); - criteriaQuery.multiselect(dataFileTagType, criteriaBuilder.count(fileMetadataRoot)).where(criteriaBuilder.and(basePredicate, searchCriteriaPredicate, dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags")))).groupBy(dataFileTagType); - List tagNameOccurrences = em.createQuery(criteriaQuery).getResultList(); + criteriaQuery + .multiselect(dataFileTagType, criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), + createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot), + dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags")))) + .groupBy(dataFileTagType); + List tagNameOccurrences = em.createQuery(criteriaQuery).getResultList(); Map result = new HashMap<>(); - for (jakarta.persistence.Tuple 
occurrence : tagNameOccurrences) { + for (Tuple occurrence : tagNameOccurrences) { result.put(occurrence.get(0, DataFileTag.TagType.class), occurrence.get(1, Long.class)); } return result; @@ -382,4 +383,17 @@ private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreT } return (result == null) ? 0 : result; } + + private Predicate createFileMetadataFromDatasetVersionPredicate(DatasetVersion datasetVersion, CriteriaBuilder criteriaBuilder, Root fileMetadataRoot) { + return criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); + } + + private Map getStringLongMapResultFromQuery(CriteriaQuery criteriaQuery) { + List categoryNameOccurrences = em.createQuery(criteriaQuery).getResultList(); + Map result = new HashMap<>(); + for (Tuple occurrence : categoryNameOccurrences) { + result.put(occurrence.get(0, String.class), occurrence.get(1, Long.class)); + } + return result; + } } From 2ffec93586f1afd4fcc51314b05cdef7d4301b83 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 25 Oct 2023 10:28:23 +0100 Subject: [PATCH 163/252] Changed: rewritten DatasetVersionFilesServiceBean getFileMetadataCountByAccessStatus using JPA Criteria --- .../DatasetVersionFilesServiceBean.java | 43 +++++++++---------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 134f873aa4b..e8dde2fd73a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -77,9 +77,7 @@ public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriter Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); criteriaQuery .select(criteriaBuilder.count(fileMetadataRoot)) - .where(criteriaBuilder.and( - 
createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), - createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))); + .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)); return em.createQuery(criteriaQuery).getSingleResult(); } @@ -97,9 +95,7 @@ public Map getFileMetadataCountPerContentType(DatasetVersion datas Path contentType = fileMetadataRoot.get("dataFile").get("contentType"); criteriaQuery .multiselect(contentType, criteriaBuilder.count(contentType)) - .where(criteriaBuilder.and( - createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), - createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))) + .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)) .groupBy(contentType); return getStringLongMapResultFromQuery(criteriaQuery); } @@ -120,9 +116,8 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data criteriaQuery .multiselect(categoryName, criteriaBuilder.count(fileMetadataRoot)) .where(criteriaBuilder.and( - createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), - createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)), - dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories"))) + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot), + dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories")))) .groupBy(categoryName); return getStringLongMapResultFromQuery(criteriaQuery); } @@ -143,8 +138,7 @@ public Map getFileMetadataCountPerTabularTagName(Data criteriaQuery .multiselect(dataFileTagType, criteriaBuilder.count(fileMetadataRoot)) .where(criteriaBuilder.and( - 
createFileMetadataFromDatasetVersionPredicate(datasetVersion, criteriaBuilder, fileMetadataRoot), - createSearchCriteriaPredicate(searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot), + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot), dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags")))) .groupBy(dataFileTagType); List tagNameOccurrences = em.createQuery(criteriaQuery).getResultList(); @@ -220,12 +214,15 @@ private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map baseQuery = queryFactory - .selectFrom(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))); - applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - return baseQuery.stream().count(); + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + criteriaQuery + .select(criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + createSearchCriteriaAccessStatusPredicate(accessStatus, criteriaBuilder, fileMetadataRoot), + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))); + return em.createQuery(criteriaQuery).getSingleResult(); } private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { @@ -278,8 +275,14 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAcces return accessStatusExpression; } - private Predicate createSearchCriteriaPredicate(FileSearchCriteria searchCriteria, CriteriaBuilder criteriaBuilder, CriteriaQuery criteriaQuery, Root fileMetadataRoot) { + private Predicate createSearchCriteriaPredicate(DatasetVersion datasetVersion, + FileSearchCriteria searchCriteria, + CriteriaBuilder 
criteriaBuilder, + CriteriaQuery criteriaQuery, + Root fileMetadataRoot) { List predicates = new ArrayList<>(); + Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); + predicates.add(basePredicate); String contentType = searchCriteria.getContentType(); if (contentType != null) { predicates.add(criteriaBuilder.equal(fileMetadataRoot.get("dataFile").get("contentType"), contentType)); @@ -384,10 +387,6 @@ private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreT return (result == null) ? 0 : result; } - private Predicate createFileMetadataFromDatasetVersionPredicate(DatasetVersion datasetVersion, CriteriaBuilder criteriaBuilder, Root fileMetadataRoot) { - return criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); - } - private Map getStringLongMapResultFromQuery(CriteriaQuery criteriaQuery) { List categoryNameOccurrences = em.createQuery(criteriaQuery).getResultList(); Map result = new HashMap<>(); From a760807293537c2a0868ea928416b9755ce2485e Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 25 Oct 2023 10:54:56 +0100 Subject: [PATCH 164/252] Changed: rewritten DatasetVersionFilesServiceBean getFilesDownloadSize using JPA Criteria --- .../DatasetVersionFilesServiceBean.java | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index e8dde2fd73a..661af5c210e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.QDataFileCategory; import edu.harvard.iq.dataverse.QDataFileTag; -import edu.harvard.iq.dataverse.QDataTable; import edu.harvard.iq.dataverse.QDvObject; import 
edu.harvard.iq.dataverse.QEmbargo; import edu.harvard.iq.dataverse.QFileMetadata; @@ -42,7 +41,6 @@ public class DatasetVersionFilesServiceBean implements Serializable { private final QDvObject dvObject = QDvObject.dvObject; private final QDataFileCategory dataFileCategory = QDataFileCategory.dataFileCategory; private final QDataFileTag dataFileTag = QDataFileTag.dataFileTag; - private final QDataTable dataTable = QDataTable.dataTable; /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} @@ -361,29 +359,34 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu } private long getOriginalTabularFilesSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory - .from(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) - .from(dataTable) - .where(dataTable.dataFile.eq(fileMetadata.dataFile)); - applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - Long result = baseQuery.select(dataTable.originalFileSize.sum()).fetchFirst(); + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Root dataTableRoot = criteriaQuery.from(DataTable.class); + criteriaQuery + .select(criteriaBuilder.sum(dataTableRoot.get("originalFileSize"))) + .where(criteriaBuilder.and( + criteriaBuilder.equal(dataTableRoot.get("dataFile"), fileMetadataRoot.get("dataFile")), + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))); + Long result = em.createQuery(criteriaQuery).getSingleResult(); return (result == null) ? 
0 : result; } private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular, FileSearchCriteria searchCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory - .from(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - Long result; + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); + Predicate wherePredicate; if (ignoreTabular) { - result = baseQuery.where(fileMetadata.dataFile.dataTables.isEmpty()).select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + wherePredicate = criteriaBuilder.and(searchCriteriaPredicate, criteriaBuilder.isEmpty(fileMetadataRoot.get("dataFile").get("dataTables"))); } else { - result = baseQuery.select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + wherePredicate = searchCriteriaPredicate; } + criteriaQuery + .select(criteriaBuilder.sum(fileMetadataRoot.get("dataFile").get("filesize"))) + .where(wherePredicate); + Long result = em.createQuery(criteriaQuery).getSingleResult(); return (result == null) ? 
0 : result; } From 653d390f3bf28354174fd12e04aed0fa009548f8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 25 Oct 2023 13:34:09 +0100 Subject: [PATCH 165/252] Changed: rewritten DatasetVersionFilesServiceBean getFileMetadatas using JPA Criteria and removed QueryDSL references from the class --- .../DatasetVersionFilesServiceBean.java | 142 ++++-------------- 1 file changed, 31 insertions(+), 111 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 661af5c210e..78fd896c897 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -1,35 +1,20 @@ package edu.harvard.iq.dataverse; -import edu.harvard.iq.dataverse.QDataFileCategory; -import edu.harvard.iq.dataverse.QDataFileTag; -import edu.harvard.iq.dataverse.QDvObject; -import edu.harvard.iq.dataverse.QEmbargo; -import edu.harvard.iq.dataverse.QFileMetadata; - -import com.querydsl.core.types.dsl.BooleanExpression; -import com.querydsl.core.types.dsl.CaseBuilder; -import com.querydsl.core.types.dsl.DateExpression; -import com.querydsl.core.types.dsl.DateTimeExpression; - -import com.querydsl.jpa.impl.JPAQuery; -import com.querydsl.jpa.impl.JPAQueryFactory; - +import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; import jakarta.ejb.Stateless; import jakarta.inject.Named; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Tuple; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.*; import java.io.Serializable; import java.sql.Timestamp; -import java.time.LocalDate; import java.util.*; import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; -import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; -import jakarta.persistence.Tuple; -import 
jakarta.persistence.criteria.*; - @Stateless @Named public class DatasetVersionFilesServiceBean implements Serializable { @@ -37,11 +22,6 @@ public class DatasetVersionFilesServiceBean implements Serializable { @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; - private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; - private final QDvObject dvObject = QDvObject.dvObject; - private final QDataFileCategory dataFileCategory = QDataFileCategory.dataFileCategory; - private final QDataFileTag dataFileTag = QDataFileTag.dataFileTag; - /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} */ @@ -174,16 +154,21 @@ public Map getFileMetadataCountPerAccessStatus(DatasetVe * @return a FileMetadata list from the specified DatasetVersion */ public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { - JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); - applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); - applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(FileMetadata.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + criteriaQuery + .select(fileMetadataRoot) + .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)) + .orderBy(createGetFileMetadatasOrder(criteriaBuilder, orderCriteria, fileMetadataRoot)); + TypedQuery typedQuery = em.createQuery(criteriaQuery); if (limit != null) { - baseQuery.limit(limit); + typedQuery.setMaxResults(limit); } if (offset != null) { - baseQuery.offset(offset); + typedQuery.setFirstResult(offset); } - return baseQuery.fetch(); + return typedQuery.getResultList(); } 
/** @@ -223,15 +208,6 @@ private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, F return em.createQuery(criteriaQuery).getSingleResult(); } - private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - if (orderCriteria == FileOrderCriteria.Newest || orderCriteria == FileOrderCriteria.Oldest) { - baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); - } - return baseQuery; - } - private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus accessStatus, CriteriaBuilder criteriaBuilder, Root fileMetadataRoot) { Path dataFile = fileMetadataRoot.get("dataFile"); Path embargo = dataFile.get("embargo"); @@ -248,31 +224,6 @@ private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus acc }; } - @Deprecated - private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAccessStatus accessStatus) { - QEmbargo embargo = fileMetadata.dataFile.embargo; - BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); - BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); - BooleanExpression accessStatusExpression; - switch (accessStatus) { - case EmbargoedThenRestricted: - accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); - break; - case EmbargoedThenPublic: - accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); - break; - case Restricted: - accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); - break; - case Public: - accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); - break; - 
default: - throw new IllegalStateException("Unexpected value: " + accessStatus); - } - return accessStatusExpression; - } - private Predicate createSearchCriteriaPredicate(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria, CriteriaBuilder criteriaBuilder, @@ -309,53 +260,22 @@ private Predicate createSearchCriteriaPredicate(DatasetVersion datasetVersion, return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); } - @Deprecated - private void applyFileSearchCriteriaToQuery(JPAQuery baseQuery, FileSearchCriteria searchCriteria) { - String contentType = searchCriteria.getContentType(); - if (contentType != null) { - baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); - } - FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); - if (accessStatus != null) { - baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); - } - String categoryName = searchCriteria.getCategoryName(); - if (categoryName != null) { - baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); - } - String tabularTagName = searchCriteria.getTabularTagName(); - if (tabularTagName != null) { - baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); - } - String searchText = searchCriteria.getSearchText(); - if (searchText != null && !searchText.isEmpty()) { - searchText = searchText.trim().toLowerCase(); - baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); - } - } - - private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileOrderCriteria orderCriteria) { - DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); - switch (orderCriteria) { - case 
NameZA: - query.orderBy(fileMetadata.label.desc()); - break; - case Newest: - query.orderBy(orderByLifetimeExpression.desc()); - break; - case Oldest: - query.orderBy(orderByLifetimeExpression.asc()); - break; - case Size: - query.orderBy(fileMetadata.dataFile.filesize.asc()); - break; - case Type: - query.orderBy(fileMetadata.dataFile.contentType.asc(), fileMetadata.label.asc()); - break; - default: - query.orderBy(fileMetadata.label.asc()); - break; - } + private Order createGetFileMetadatasOrder(CriteriaBuilder criteriaBuilder, + FileOrderCriteria orderCriteria, + Root fileMetadataRoot) { + Path label = fileMetadataRoot.get("label"); + Path dataFile = fileMetadataRoot.get("dataFile"); + Path publicationDate = dataFile.get("publicationDate"); + Path createDate = dataFile.get("createDate"); + Expression orderByLifetimeExpression = criteriaBuilder.selectCase().when(publicationDate.isNotNull(), publicationDate).otherwise(createDate); + return switch (orderCriteria) { + case NameZA -> criteriaBuilder.desc(label); + case Newest -> criteriaBuilder.desc(orderByLifetimeExpression); + case Oldest -> criteriaBuilder.asc(orderByLifetimeExpression); + case Size -> criteriaBuilder.asc(dataFile.get("filesize")); + case Type -> criteriaBuilder.asc(dataFile.get("contentType")); + default -> criteriaBuilder.asc(label); + }; } private long getOriginalTabularFilesSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { From 99f32fc370d615568bcd45017fdfd1a5b2ec1dcc Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 25 Oct 2023 14:51:10 +0100 Subject: [PATCH 166/252] Removed: QueryDSL library from the application dependencies --- modules/dataverse-parent/pom.xml | 3 --- pom.xml | 14 -------------- 2 files changed, 17 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 1d99c1cd3d8..db0fa46a952 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -200,9 +200,6 @@ 0.43.4 - - - 5.0.0 diff 
--git a/pom.xml b/pom.xml index bcca9884d50..5536bcccb05 100644 --- a/pom.xml +++ b/pom.xml @@ -252,20 +252,6 @@ expressly provided - - - com.querydsl - querydsl-apt - ${querydsl.version} - jakarta - provided - - - com.querydsl - querydsl-jpa - ${querydsl.version} - jakarta - commons-io From 368382974cc641b46eb71026352a968b72e138be Mon Sep 17 00:00:00 2001 From: gwen Date: Thu, 26 Oct 2023 11:18:00 +0200 Subject: [PATCH 167/252] Fixed bad url Fixed url for "editing files in another user repository" --- doc/sphinx-guides/source/developers/documentation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index f0729c59dcf..78b0970aaa5 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -8,7 +8,7 @@ Writing Documentation Quick Fix ----------- -If you find a typo or a small error in the documentation you can fix it using GitHub's online web editor. Generally speaking, we will be following https://help.github.com/en/articles/editing-files-in-another-users-repository +If you find a typo or a small error in the documentation you can fix it using GitHub's online web editor. Generally speaking, we will be following https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files#editing-files-in-another-users-repository - Navigate to https://github.com/IQSS/dataverse/tree/develop/doc/sphinx-guides/source where you will see folders for each of the guides: `admin`_, `api`_, `developers`_, `installation`_, `style`_, `user`_. - Find the file you want to edit under one of the folders above. 
From 2dfcea47dd26e120b45485f9db156e242747242f Mon Sep 17 00:00:00 2001 From: gwen Date: Thu, 26 Oct 2023 11:38:48 +0200 Subject: [PATCH 168/252] specify the extension case for XLSX files --- doc/sphinx-guides/source/installation/config.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index ce8876b012c..1cc26debc64 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3065,12 +3065,18 @@ You can override this global setting on a per-format basis for the following for - SAV - Rdata - CSV -- XLSX +- XLSX (in lower-case) -For example, if you want your Dataverse installation to not attempt to ingest Rdata files larger than 1 MB, use this setting: +For example : + +* if you want your Dataverse installation to not attempt to ingest Rdata files larger than 1 MB, use this setting: ``curl -X PUT -d 1000000 http://localhost:8080/api/admin/settings/:TabularIngestSizeLimit:Rdata`` +* if you want your Dataverse installation to not attempt to ingest XLSX files at all, use this setting: + +``curl -X PUT -d 0 http://localhost:8080/api/admin/settings/:TabularIngestSizeLimit:xlsx`` + :ZipUploadFilesLimit ++++++++++++++++++++ From b9f77096eb378074bd58e0244e40194984097d5f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 18 Oct 2023 16:15:54 -0400 Subject: [PATCH 169/252] disable ConfigCheckServiceTest #10024 Jenkins is failing. Maybe disabling this will help. 
It was added in this PR: #9819 --- .../harvard/iq/dataverse/settings/ConfigCheckServiceTest.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java index b031b9429c6..dad86e73d19 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java @@ -15,7 +15,9 @@ import static java.nio.file.attribute.PosixFilePermission.GROUP_READ; import static java.nio.file.attribute.PosixFilePermission.OWNER_READ; +import org.junit.jupiter.api.Disabled; +@Disabled class ConfigCheckServiceTest { @TempDir From 1902dbd541cebca2d5ec4441582fe03cb6eaf010 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 26 Oct 2023 14:21:53 -0400 Subject: [PATCH 170/252] replace 404 with accurate list of team members #9267 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d40e5f228f7..f81a8f86fd1 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Dataverse® =============== -Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Data Science and Products team](http://www.iq.harvard.edu/people/people/data-science-products) at the [Institute for Quantitative Social Science](http://iq.harvard.edu/) and the [Dataverse community][]). +Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Dataverse team](https://dataverse.org/about) at the [Institute for Quantitative Social Science](http://iq.harvard.edu/) and the [Dataverse community][]). 
[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our development [roadmap][], and more. From 60ed3a5ea24e4d091f1550ef0f10861b30df0b88 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 27 Oct 2023 12:07:47 -0400 Subject: [PATCH 171/252] label is optional --- .../harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 1b61a9c9970..9bbc445ea9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1818,10 +1818,12 @@ private static void createVarDDI(XMLStreamWriter xmlw, JsonObject dvar, String f // labl if ((vm == null || !vm.containsKey("label"))) { - xmlw.writeStartElement("labl"); - writeAttribute(xmlw, "level", "variable"); - xmlw.writeCharacters(dvar.getString("label")); - xmlw.writeEndElement(); //labl + if(dvar.containsKey("label")) { + xmlw.writeStartElement("labl"); + writeAttribute(xmlw, "level", "variable"); + xmlw.writeCharacters(dvar.getString("label")); + xmlw.writeEndElement(); //labl + } } else if (vm != null && vm.containsKey("label")) { xmlw.writeStartElement("labl"); writeAttribute(xmlw, "level", "variable"); From 22bdb303d4e669b7fd5dad2623d430743f8dc320 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 25 Oct 2023 14:51:05 -0400 Subject: [PATCH 172/252] handle missing frequency (QDR has 2 examples in the prod db) --- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 
65fe749e554..cfc266f2ba7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -796,9 +796,10 @@ private static JsonArrayBuilder jsonCatStat(Collection catStat JsonObjectBuilder catStatObj = Json.createObjectBuilder(); catStatObj.add("label", stat.getLabel()) .add("value", stat.getValue()) - .add("isMissing", stat.isMissing()) - .add("frequency", stat.getFrequency()) - ; + .add("isMissing", stat.isMissing()); + if(stat.getFrequency()!=null){ + catStatObj.add("frequency", stat.getFrequency()); + } catArr.add(catStatObj); } return catArr; From 8b1ab1d462cb143d77e1f1825c7b9386cc2552ce Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 27 Oct 2023 14:39:46 -0400 Subject: [PATCH 173/252] delete redundant test --- .../java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 9bbc445ea9c..9a689f7a4ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1824,7 +1824,7 @@ private static void createVarDDI(XMLStreamWriter xmlw, JsonObject dvar, String f xmlw.writeCharacters(dvar.getString("label")); xmlw.writeEndElement(); //labl } - } else if (vm != null && vm.containsKey("label")) { + } else { xmlw.writeStartElement("labl"); writeAttribute(xmlw, "level", "variable"); xmlw.writeCharacters(vm.getString("label")); From a5bc9a1933cc25f3394f57ba93e7f6330d48023a Mon Sep 17 00:00:00 2001 From: bencomp Date: Fri, 27 Oct 2023 23:50:26 +0200 Subject: [PATCH 174/252] Use JsonUtil.getJsonObject in AbstractApiBean This fixes #10054. Like before, JsonException may still be thrown. Since this is a RuntimeException, I only mention it in the Javadoc. 
--- .../iq/dataverse/api/AbstractApiBean.java | 22 +++++++++---------- 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 0a0861fa1c9..027f9e0fcb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -47,9 +47,9 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; -import java.io.StringReader; import java.net.URI; import java.util.Arrays; import java.util.Collections; @@ -62,9 +62,9 @@ import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonException; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonReader; import jakarta.json.JsonValue; import jakarta.json.JsonValue.ValueType; import jakarta.persistence.EntityManager; @@ -132,23 +132,21 @@ public Response refineResponse( String message ) { * In the common case of the wrapped response being of type JSON, * return the message field it has (if any). * @return the content of a message field, or {@code null}. + * @throws JsonException when JSON parsing fails. 
*/ String getWrappedMessageWhenJson() { if ( response.getMediaType().equals(MediaType.APPLICATION_JSON_TYPE) ) { Object entity = response.getEntity(); if ( entity == null ) return null; - String json = entity.toString(); - try ( StringReader rdr = new StringReader(json) ){ - JsonReader jrdr = Json.createReader(rdr); - JsonObject obj = jrdr.readObject(); - if ( obj.containsKey("message") ) { - JsonValue message = obj.get("message"); - return message.getValueType() == ValueType.STRING ? obj.getString("message") : message.toString(); - } else { - return null; - } + JsonObject obj = JsonUtil.getJsonObject(entity.toString()); + if ( obj.containsKey("message") ) { + JsonValue message = obj.get("message"); + return message.getValueType() == ValueType.STRING ? obj.getString("message") : message.toString(); + } else { + return null; } + } else { return null; } From 0fabe3ead61aa47bddb385f92d6b69474dd82668 Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 00:23:59 +0200 Subject: [PATCH 175/252] Remove Gson from JsonUtil --- .../iq/dataverse/util/json/JsonUtil.java | 22 +++++++++---------- 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index cf8b64520de..2c780fa9417 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -1,9 +1,9 @@ package edu.harvard.iq.dataverse.util.json; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonObject; - +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; import java.io.StringReader; import java.io.StringWriter; import java.util.HashMap; @@ -11,6 +11,7 @@ import java.util.logging.Logger; import jakarta.json.Json; import jakarta.json.JsonArray; +import jakarta.json.JsonObject; import 
jakarta.json.JsonReader; import jakarta.json.JsonWriter; import jakarta.json.JsonWriterFactory; @@ -26,11 +27,8 @@ public class JsonUtil { */ public static String prettyPrint(String jsonString) { try { - com.google.gson.JsonParser jsonParser = new com.google.gson.JsonParser(); - JsonObject jsonObject = jsonParser.parse(jsonString).getAsJsonObject(); - Gson gson = new GsonBuilder().setPrettyPrinting().create(); - String prettyJson = gson.toJson(jsonObject); - return prettyJson; + JsonObject jsonObject = getJsonObject(jsonString); + return prettyPrint(jsonObject); } catch (Exception ex) { logger.info("Returning original string due to exception: " + ex); return jsonString; @@ -48,7 +46,7 @@ public static String prettyPrint(JsonArray jsonArray) { return stringWriter.toString(); } - public static String prettyPrint(jakarta.json.JsonObject jsonObject) { + public static String prettyPrint(JsonObject jsonObject) { Map config = new HashMap<>(); config.put(JsonGenerator.PRETTY_PRINTING, true); JsonWriterFactory jsonWriterFactory = Json.createWriterFactory(config); @@ -59,7 +57,7 @@ public static String prettyPrint(jakarta.json.JsonObject jsonObject) { return stringWriter.toString(); } - public static jakarta.json.JsonObject getJsonObject(String serializedJson) { + public static JsonObject getJsonObject(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { try (JsonReader jsonReader = Json.createReader(rdr)) { return jsonReader.readObject(); @@ -67,7 +65,7 @@ public static jakarta.json.JsonObject getJsonObject(String serializedJson) { } } - public static jakarta.json.JsonArray getJsonArray(String serializedJson) { + public static JsonArray getJsonArray(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { try (JsonReader jsonReader = Json.createReader(rdr)) { return jsonReader.readArray(); From 37725d99a0612c20a6af2210107e148c6b28398e Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 00:25:56 +0200 
Subject: [PATCH 176/252] Get JSON objects from files and inputstreams --- .../iq/dataverse/util/json/JsonUtil.java | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index 2c780fa9417..371eb7e543e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -65,6 +65,31 @@ public static JsonObject getJsonObject(String serializedJson) { } } + /** + * Return the contents of the {@link InputStream} as a JSON object. + * @param stream the input stream to read from + * @throws JsonException when parsing fails. + */ + public static JsonObject getJsonObject(InputStream stream) { + try (JsonReader jsonReader = Json.createReader(stream)) { + return jsonReader.readObject(); + } + } + + /** + * Return the contents of the file as a JSON object. + * @param fileName the name of the file to read from + * @throws FileNotFoundException when the file cannot be opened for reading + * @throws JsonException when parsing fails. 
+ */ + public static JsonObject getJsonObjectFromFile(String fileName) throws IOException { + try (FileReader rdr = new FileReader(fileName)) { + try (JsonReader jsonReader = Json.createReader(rdr)) { + return jsonReader.readObject(); + } + } + } + public static JsonArray getJsonArray(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { try (JsonReader jsonReader = Json.createReader(rdr)) { From 82b5edf72d249d1dcd48209dcf19c0fad6bb375d Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 00:36:17 +0200 Subject: [PATCH 177/252] Remove superfluous exception declaration --- .../java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 6b48dbf8415..5f30de8e932 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -12,7 +12,6 @@ import java.io.FileReader; import java.io.IOException; import java.net.HttpURLConnection; -import java.net.MalformedURLException; import java.net.URL; import java.util.Iterator; import java.util.List; @@ -135,7 +134,7 @@ public Response addUsageMetricsFromSushiReportAll(@PathParam("id") String id, @Q @POST @Path("{id}/updateCitationsForDataset") - public Response updateCitationsForDataset(@PathParam("id") String id) throws MalformedURLException, IOException { + public Response updateCitationsForDataset(@PathParam("id") String id) throws IOException { try { Dataset dataset = findDatasetOrDie(id); String persistentId = dataset.getGlobalId().toString(); From 69aa6735712532b87833a471ecb52f2f3ff33144 Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 00:36:57 +0200 Subject: [PATCH 178/252] Use logger to log --- .../java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 5f30de8e932..e65d8da4b76 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -101,7 +101,7 @@ public Response addUsageMetricsFromSushiReport(@PathParam("id") String id, @Quer } } catch (IOException ex) { - System.out.print(ex.getMessage()); + logger.log(Level.WARNING, ex.getMessage()); return error(Status.BAD_REQUEST, "IOException: " + ex.getLocalizedMessage()); } String msg = "Dummy Data has been added to dataset " + id; @@ -125,7 +125,7 @@ public Response addUsageMetricsFromSushiReportAll(@PathParam("id") String id, @Q } } catch (IOException ex) { - System.out.print(ex.getMessage()); + logger.log(Level.WARNING, ex.getMessage()); return error(Status.BAD_REQUEST, "IOException: " + ex.getLocalizedMessage()); } String msg = "Usage Metrics Data has been added to all datasets from file " + reportOnDisk; From 8cca38944664a4c8bd3b868f2dc1f36ab2841018 Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 00:37:44 +0200 Subject: [PATCH 179/252] Delegate JSON parsing from input streams and files See #10056 --- .../iq/dataverse/api/MakeDataCountApi.java | 35 ++++++++----------- 1 file changed, 14 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index e65d8da4b76..25f3696fe6b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -8,8 +8,8 @@ import edu.harvard.iq.dataverse.makedatacount.DatasetMetricsServiceBean; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.JsonUtil; 
-import java.io.FileReader; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; @@ -82,23 +82,18 @@ public Response sendDataToHub() { @Path("{id}/addUsageMetricsFromSushiReport") public Response addUsageMetricsFromSushiReport(@PathParam("id") String id, @QueryParam("reportOnDisk") String reportOnDisk) { - JsonObject report; - - try (FileReader reader = new FileReader(reportOnDisk)) { - report = Json.createReader(reader).readObject(); - Dataset dataset; - try { - dataset = findDatasetOrDie(id); - List datasetMetrics = datasetMetricsService.parseSushiReport(report, dataset); - if (!datasetMetrics.isEmpty()) { - for (DatasetMetrics dm : datasetMetrics) { - datasetMetricsService.save(dm); - } + try { + JsonObject report = JsonUtil.getJsonObjectFromFile(reportOnDisk); + Dataset dataset = findDatasetOrDie(id); + List datasetMetrics = datasetMetricsService.parseSushiReport(report, dataset); + if (!datasetMetrics.isEmpty()) { + for (DatasetMetrics dm : datasetMetrics) { + datasetMetricsService.save(dm); } - } catch (WrappedResponse ex) { - Logger.getLogger(MakeDataCountApi.class.getName()).log(Level.SEVERE, null, ex); - return error(Status.BAD_REQUEST, "Wrapped response: " + ex.getLocalizedMessage()); } + } catch (WrappedResponse ex) { + logger.log(Level.SEVERE, null, ex); + return error(Status.BAD_REQUEST, "Wrapped response: " + ex.getLocalizedMessage()); } catch (IOException ex) { logger.log(Level.WARNING, ex.getMessage()); @@ -112,10 +107,8 @@ public Response addUsageMetricsFromSushiReport(@PathParam("id") String id, @Quer @Path("/addUsageMetricsFromSushiReport") public Response addUsageMetricsFromSushiReportAll(@PathParam("id") String id, @QueryParam("reportOnDisk") String reportOnDisk) { - JsonObject report; - - try (FileReader reader = new FileReader(reportOnDisk)) { - report = Json.createReader(reader).readObject(); + try { + JsonObject report = JsonUtil.getJsonObjectFromFile(reportOnDisk); List datasetMetrics = 
datasetMetricsService.parseSushiReport(report, null); if (!datasetMetrics.isEmpty()) { @@ -157,7 +150,7 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE logger.warning("Failed to get citations from " + url.toString()); return error(Status.fromStatusCode(status), "Failed to get citations from " + url.toString()); } - JsonObject report = Json.createReader(connection.getInputStream()).readObject(); + JsonObject report = JsonUtil.getJsonObject(connection.getInputStream()); JsonObject links = report.getJsonObject("links"); JsonArray data = report.getJsonArray("data"); Iterator iter = data.iterator(); From 1d1163b1be2fd35e66ec64cf5b91c2ee6d5d7bee Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 00:41:41 +0200 Subject: [PATCH 180/252] Get JSON object from stream using JsonUtil --- .../engine/command/impl/GetProvJsonCommand.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java index 2de2adff099..b068c0126dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java @@ -9,12 +9,12 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.io.IOException; import java.io.InputStream; import java.util.logging.Logger; -import jakarta.json.Json; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; @RequiredPermissions(Permission.EditDataset) public class GetProvJsonCommand extends AbstractCommand { @@ -37,9 +37,8 @@ public JsonObject execute(CommandContext ctxt) throws 
CommandException { StorageIO dataAccess = dataFile.getStorageIO(); InputStream inputStream = dataAccess.getAuxFileAsInputStream(provJsonExtension); JsonObject jsonObject = null; - if(null != inputStream) { - JsonReader jsonReader = Json.createReader(inputStream); - jsonObject = jsonReader.readObject(); + if (null != inputStream) { + jsonObject = JsonUtil.getJsonObject(inputStream); } return jsonObject; } catch (IOException ex) { From ff79e205fcb681e1a6487dd3136bb415a0992487 Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 01:23:48 +0200 Subject: [PATCH 181/252] Expect 4 space indentation This may become flaky if the indentation is dependent on implementation. GSON apparently keeps empty objects in one line and uses two spaces for indentation, whereas I see slightly different outputs. --- .../harvard/iq/dataverse/util/json/JsonUtilTest.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java index 725862db7ba..3e4f9a690d2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java @@ -3,17 +3,17 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.Test; -public class JsonUtilTest { +class JsonUtilTest { @Test - public void testPrettyPrint() { - JsonUtil jsonUtil = new JsonUtil(); + void testPrettyPrint() { String nullString = null; assertEquals(null, JsonUtil.prettyPrint(nullString)); assertEquals("", JsonUtil.prettyPrint("")); assertEquals("junk", JsonUtil.prettyPrint("junk")); - assertEquals("{}", JsonUtil.prettyPrint("{}")); - assertEquals("{\n" + " \"foo\": \"bar\"\n" + "}", JsonUtil.prettyPrint("{\"foo\": \"bar\"}")); + assertEquals("{\n}", JsonUtil.prettyPrint("{}")); + assertEquals("[\n \"junk\"\n]", JsonUtil.prettyPrint("[\"junk\"]")); + assertEquals("{\n" 
+ " \"foo\": \"bar\"\n" + "}", JsonUtil.prettyPrint("{\"foo\": \"bar\"}")); } } From 91e572623e4020c72744f1cb4c10f78aaef16518 Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 01:24:47 +0200 Subject: [PATCH 182/252] Add private constructor to JsonUtil It only has static methods and should not be instantiated. --- src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index 371eb7e543e..d50cf6f124e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -21,6 +21,8 @@ public class JsonUtil { private static final Logger logger = Logger.getLogger(JsonUtil.class.getCanonicalName()); + private JsonUtil() {} + /** * Make an attempt at pretty printing a String but will return the original * string if it isn't JSON or if there is any exception. 
From 785964df44317df84618bf217d479c41fdb6f46a Mon Sep 17 00:00:00 2001 From: bencomp Date: Sat, 28 Oct 2023 01:25:25 +0200 Subject: [PATCH 183/252] Pretty-print JSON arrays from strings too --- .../java/edu/harvard/iq/dataverse/util/json/JsonUtil.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index d50cf6f124e..8ea0e6f0ace 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -29,8 +29,11 @@ private JsonUtil() {} */ public static String prettyPrint(String jsonString) { try { - JsonObject jsonObject = getJsonObject(jsonString); - return prettyPrint(jsonObject); + if (jsonString.trim().startsWith("{")) { + return prettyPrint(getJsonObject(jsonString)); + } else { + return prettyPrint(getJsonArray(jsonString)); + } } catch (Exception ex) { logger.info("Returning original string due to exception: " + ex); return jsonString; From a4097a32998acfba5755f6103de86281c971b9a4 Mon Sep 17 00:00:00 2001 From: vidyaa18 <2010030534@klh.edu.in> Date: Mon, 30 Oct 2023 00:33:30 +0530 Subject: [PATCH 184/252] made logo available --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d40e5f228f7..c3b8d9b9631 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ We love contributors! Please see our [Contributing Guide][] for ways you can hel Dataverse is a trademark of President and Fellows of Harvard College and is registered in the United States. 
-[![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg?raw=true "Dataverse Project")](http://dataverse.org) +[![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg "Dataverse Project")](http://dataverse.org) [![API Test Status](https://jenkins.dataverse.org/buildStatus/icon?job=IQSS-dataverse-develop&subject=API%20Test%20Status)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/) [![API Test Coverage](https://img.shields.io/jenkins/coverage/jacoco?jobUrl=https%3A%2F%2Fjenkins.dataverse.org%2Fjob%2FIQSS-dataverse-develop&label=API%20Test%20Coverage)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/ws/target/coverage-it/index.html) From 47ea303562bf143d36edd66d035286ab74df6b9d Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Mon, 30 Oct 2023 08:48:05 +0100 Subject: [PATCH 185/252] Create InputStreams in try-with-resources JsonUtil.getJsonObject closes the Readers, but not the InputStream. It is the caller's responsibility to close the InputStream properly. 
--- .../harvard/iq/dataverse/api/MakeDataCountApi.java | 6 +++++- .../engine/command/impl/GetProvJsonCommand.java | 11 ++++++----- .../edu/harvard/iq/dataverse/util/json/JsonUtil.java | 2 ++ 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 25f3696fe6b..b2696757220 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.io.IOException; +import java.io.InputStream; import java.net.HttpURLConnection; import java.net.URL; import java.util.Iterator; @@ -150,7 +151,10 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE logger.warning("Failed to get citations from " + url.toString()); return error(Status.fromStatusCode(status), "Failed to get citations from " + url.toString()); } - JsonObject report = JsonUtil.getJsonObject(connection.getInputStream()); + JsonObject report; + try (InputStream inStream = connection.getInputStream()) { + report = JsonUtil.getJsonObject(inStream); + } JsonObject links = report.getJsonObject("links"); JsonArray data = report.getJsonArray("data"); Iterator iter = data.iterator(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java index b068c0126dd..b98cd70a4da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java @@ -35,12 +35,13 @@ public JsonObject execute(CommandContext ctxt) throws CommandException { try { StorageIO dataAccess = dataFile.getStorageIO(); - InputStream inputStream = 
dataAccess.getAuxFileAsInputStream(provJsonExtension); - JsonObject jsonObject = null; - if (null != inputStream) { - jsonObject = JsonUtil.getJsonObject(inputStream); + try (InputStream inputStream = dataAccess.getAuxFileAsInputStream(provJsonExtension)) { + JsonObject jsonObject = null; + if (null != inputStream) { + jsonObject = JsonUtil.getJsonObject(inputStream); + } + return jsonObject; } - return jsonObject; } catch (IOException ex) { String error = "Exception caught in DataAccess.getStorageIO(dataFile) getting file. Error: " + ex; throw new IllegalCommandException(error, this); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index 8ea0e6f0ace..c1a20bf4c87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -72,6 +72,8 @@ public static JsonObject getJsonObject(String serializedJson) { /** * Return the contents of the {@link InputStream} as a JSON object. + * + * The caller of this method is responsible for closing the provided stream. * @param stream the input stream to read from * @throws JsonException when parsing fails. 
*/ From 235c0387921a79d1f919b97567666341119c5cae Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Mon, 30 Oct 2023 09:52:27 +0100 Subject: [PATCH 186/252] Complete the Javadoc docs for JsonUtil.getJsonX --- .../iq/dataverse/util/json/JsonUtil.java | 35 ++++++++++++++++--- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index c1a20bf4c87..72a1cd2e1eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -11,6 +11,7 @@ import java.util.logging.Logger; import jakarta.json.Json; import jakarta.json.JsonArray; +import jakarta.json.JsonException; import jakarta.json.JsonObject; import jakarta.json.JsonReader; import jakarta.json.JsonWriter; @@ -61,7 +62,17 @@ public static String prettyPrint(JsonObject jsonObject) { } return stringWriter.toString(); } - + + /** + * Return the contents of the string as a JSON object. + * This method closes its resources when an exception occurs, but does + * not catch any exceptions. + * @param serializedJson the JSON object serialized as a {@code String} + * @throws JsonException when parsing fails. + * @see #getJsonObject(InputStream) + * @see #getJsonObjectFromFile(String) + * @see #getJsonArray(String) + */ public static JsonObject getJsonObject(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { try (JsonReader jsonReader = Json.createReader(rdr)) { @@ -69,25 +80,33 @@ public static JsonObject getJsonObject(String serializedJson) { } } } - + /** * Return the contents of the {@link InputStream} as a JSON object. * + * This method closes its resources when an exception occurs, but does + * not catch any exceptions. * The caller of this method is responsible for closing the provided stream. 
* @param stream the input stream to read from * @throws JsonException when parsing fails. + * @see #getJsonObject(String) + * @see #getJsonObjectFromFile(String) */ public static JsonObject getJsonObject(InputStream stream) { try (JsonReader jsonReader = Json.createReader(stream)) { return jsonReader.readObject(); } } - + /** * Return the contents of the file as a JSON object. + * This method closes its resources when an exception occurs, but does + * not catch any exceptions. * @param fileName the name of the file to read from * @throws FileNotFoundException when the file cannot be opened for reading * @throws JsonException when parsing fails. + * @see #getJsonObject(String) + * @see #getJsonObject(InputStream) */ public static JsonObject getJsonObjectFromFile(String fileName) throws IOException { try (FileReader rdr = new FileReader(fileName)) { @@ -96,7 +115,15 @@ public static JsonObject getJsonObjectFromFile(String fileName) throws IOExcepti } } } - + + /** + * Return the contents of the string as a JSON array. + * This method closes its resources when an exception occurs, but does + * not catch any exceptions. + * @param serializedJson the JSON array serialized as a {@code String} + * @throws JsonException when parsing fails. 
+ * @see #getJsonObject(String) + */ public static JsonArray getJsonArray(String serializedJson) { try (StringReader rdr = new StringReader(serializedJson)) { try (JsonReader jsonReader = Json.createReader(rdr)) { From c43ca37235adfb038af7dbf9beace8e64868035a Mon Sep 17 00:00:00 2001 From: Yamil Suarez Date: Mon, 30 Oct 2023 08:50:25 -0400 Subject: [PATCH 187/252] Update issue template to set a default label --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index b297dfc4ee8..7e6995d76d9 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -3,7 +3,7 @@ name: Bug report about: Did you encounter something unexpected or incorrect in the Dataverse software? We'd like to hear about it! title: '' -labels: '' +labels: 'Type: Bug' assignees: '' --- From 904898979b7601d2f6a6e916fa489bf5c9a60316 Mon Sep 17 00:00:00 2001 From: Gustavo Durand Date: Mon, 30 Oct 2023 16:21:25 -0400 Subject: [PATCH 188/252] Update appendix.rst --- doc/sphinx-guides/source/user/appendix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst index 7d60054ae17..e36b40a4110 100755 --- a/doc/sphinx-guides/source/user/appendix.rst +++ b/doc/sphinx-guides/source/user/appendix.rst @@ -37,7 +37,7 @@ Experimental Metadata Unlike supported metadata, experimental metadata is not enabled by default in a new Dataverse installation. Feedback via any `channel `_ is welcome! - `CodeMeta Software Metadata `__: based on the `CodeMeta Software Metadata Schema, version 2.0 `__ (`see .tsv version `__) -- `Computational Workflow Metadata `__ (`see .tsv version `__): adapted from `Bioschemas Computational Workflow Profile, version 1.0 `__ and `Codemeta `__. 
+- `Computational Workflow Metadata `__ (`see .tsv version `__): adapted from `Bioschemas Computational Workflow Profile, version 1.0 `__ and `Codemeta `__. Please note: these custom metadata schemas are not included in the Solr schema for indexing by default, you will need to add them as necessary for your custom metadata blocks. See "Update the Solr Schema" in :doc:`../admin/metadatacustomization`. From e02092c2b15a5a5de8565fa2ffe98e0ca2daa431 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 1 Nov 2023 11:50:04 -0400 Subject: [PATCH 189/252] Disabling quotas checks for direct uploads via API in the Create New Files command. Longer term solution coming as part of #8549. (#10080) --- .../engine/command/impl/CreateNewDataFilesCommand.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index ac701da1be9..0470f59b861 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -657,7 +657,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (newFileSize != null) { fileSize = newFileSize; } else { - throw new CommandExecutionException("File size must be explicitly specified when creating DataFiles with Direct Upload", this); + // This is a direct upload via the API (DVUploader, etc.) + //throw new CommandExecutionException("File size must be explicitly specified when creating DataFiles with Direct Upload", this); } } @@ -696,7 +697,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException datafiles.add(datafile); // Update quota (may not be necessary in the context of direct upload - ?) 
- if (quota != null) { + if (fileSize > 0 && quota != null) { quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); From 8388a814cc46c9b1dcaeb93e4c482315260a2e8e Mon Sep 17 00:00:00 2001 From: Yamil Suarez Date: Wed, 1 Nov 2023 11:51:08 -0400 Subject: [PATCH 190/252] Add default label to request issue template --- .github/ISSUE_TEMPLATE/feature_request.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 7d5e0deea05..d6248537418 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -2,7 +2,7 @@ name: Feature request about: Suggest an idea or new feature for the Dataverse software! title: 'Feature Request/Idea:' -labels: '' +labels: 'Type: Feature' assignees: '' --- From 3b19aebf7f913fb8a244ae8d8d6d9e151eb8fc2c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 1 Nov 2023 15:31:38 -0400 Subject: [PATCH 191/252] replace link to 404 with link to team #9267 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2333787d4b9..831dbfed5ff 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Dataverse® =============== -Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Data Science and Products team](https://www.iq.harvard.edu/people/people/data-science-products) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]). +Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Dataverse team](https://dataverse.org/about) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]). 
[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our development [roadmap][], and more. From 2efdece6784533170733490928137e4c60c98d2d Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Thu, 2 Nov 2023 16:16:51 +0100 Subject: [PATCH 192/252] Add gdcc/nginx:unstable image and dev_nginx container Added a nginx container to the docker setup. This could be used for various proxying stuff, but it is primarily added to work around a problem with the IntelliJ Payara plugin. It doesn't allow remote redeployment in case the Payara admin is served via HTTPS using a self-signed certificate, which is the case of the default dataverse container installation. This nginx configuration provides an HTTP endpoint at port 4849, and proxies requests to the Payara admin console's HTTPS 4848 endpoint. From the IntelliJ Payara plugin one has to specify the localhost 4849 port (without SSL). 
--- docker-compose-dev.yml | 9 +++++++++ modules/nginx/Dockerfile | 9 +++++++++ modules/nginx/README.md | 7 +++++++ modules/nginx/default.conf | 12 ++++++++++++ modules/nginx/img.png | Bin 0 -> 71929 bytes pom.xml | 8 ++++++++ 6 files changed, 45 insertions(+) create mode 100644 modules/nginx/Dockerfile create mode 100644 modules/nginx/README.md create mode 100644 modules/nginx/default.conf create mode 100644 modules/nginx/img.png diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index ab44dbc1806..743b5d698d9 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -127,6 +127,15 @@ services: volumes: - './conf/keycloak/oidc-realm.json:/opt/keycloak/data/import/oidc-realm.json' + dev_nginx: + container_name: dev_nginx + image: gdcc/dev_nginx:unstable + ports: + - "4849:4849" + restart: always + networks: + - dataverse + networks: dataverse: driver: bridge diff --git a/modules/nginx/Dockerfile b/modules/nginx/Dockerfile new file mode 100644 index 00000000000..3900076599f --- /dev/null +++ b/modules/nginx/Dockerfile @@ -0,0 +1,9 @@ +FROM nginx:latest + +# Remove the default NGINX configuration file +RUN rm /etc/nginx/conf.d/default.conf + +# Copy the contents of the local default.conf to the container +COPY default.conf /etc/nginx/conf.d/ + +EXPOSE 4849 \ No newline at end of file diff --git a/modules/nginx/README.md b/modules/nginx/README.md new file mode 100644 index 00000000000..9d2ff785577 --- /dev/null +++ b/modules/nginx/README.md @@ -0,0 +1,7 @@ +# nginx proxy + +nginx can be used to proxy various services at other ports/protocols from docker. + +Currently, this is used to work around a problem with the IntelliJ Payara plugin, which doesn't allow remote redeployment in case the Payara admin is served via HTTPS using a self-signed certificate, which is the case of the default dataverse container installation. This configuration provides an HTTP endpoint at port 4849, and proxies requests to the Payara admin console's HTTPS 4848 endpoint. 
From the IntelliJ Payara plugin one has to specify the localhost 4849 port (without SSL). + +![img.png](img.png) diff --git a/modules/nginx/default.conf b/modules/nginx/default.conf new file mode 100644 index 00000000000..8381a66c19a --- /dev/null +++ b/modules/nginx/default.conf @@ -0,0 +1,12 @@ +server { + listen 4849; + + # Make it big, so that .war files can be submitted + client_max_body_size 300M; + + location / { + proxy_pass https://dataverse:4848; + proxy_ssl_verify off; + proxy_ssl_server_name on; + } +} diff --git a/modules/nginx/img.png b/modules/nginx/img.png new file mode 100644 index 0000000000000000000000000000000000000000..278ee490b2d6d7c61b641cae4133b022c66098e9 GIT binary patch literal 71929 zcmeF3Wmr_-7w=I}P+C-aXb`1429O335a}E`q@-s^B?rj?5hVr@l$7q2kcOcrun*2WA$$?`;>k?vhj)xKEK%-W ziSAw$0&sWX+k?K}1P;!VPolTYQ|nJ)>brh@6sUuM^`9ZiGBVrA7b3WGC6-MW;XX3s7ozFm=_uq$<2=JK2I^F*L)d7Y$Fx%gAfKzE1 z|Mvq-6uXT#{%5OFH=@6eND0M`-_nF67N20y@NCmRRt)u-CnKCh?q-IFH2=TIRk zvYjjI*HtkTDp;1Yy{0bir$k!uW^$A6=2Q{3TYqx?P~&@dvU7v}tpCNfuau%B4NZKu%h9_5 zH?>TBPf>0t30*bu?K0Qp1YIKG*OYR{oos0wUUPn4)hDw)c~@%^Ws@-t#?bkYD^Ytt*%QUk)S8{7DQRyPG+3y z#~dYmYDV*GYY#gPHu4HTWyDC|$1NKB+mVMm1;pF-y}ORIlgC6xF25fw4{NG( za@Xz2une@_&zZAIH148yuH${)IJwAM_Y*h20&_ zhSc+%g{_vfQQ2#mcVmC}MXvzrRjjU@_0BU{+&@YkBaM}xzr)>li6-xeL0#Z zDlhS@V#+CR>1}DR(6ca*w=K)2w9?7AMGX3UvoPTO^Zih!HyeX@(U+BtS3mPh`^1*L z8ZV4{Tigg$n9?|p zx}W^rVb^D?_QnmBj#->(M-l$g0tsg$h5p*akHCHDm8e69$=_Z_7qeT5#b{&ExshF; z!)IH6?mZvQcuDh|=7CIM|IFvDs>Nt4;6df#=ItFHG*b^1Rf`SBn6`uA4T4i1sx@a1 zaAS&SB@&3ISi;pXqZA+;w~Ba(M}Xo)`_B2DB&bVWl|4wF@tUiT%apoWS0CA|UjAhG zNmEVqdxX(FwsGrZ$1XB;u!i!;tk15o%c#E1NL}OgQDL_-TdLMQ{a9WTEvw<1#Kz0- zaj#-7uGm0FjFw6no`uybN$3GZx&&Pl*moMwakB(Vl_}@Ot2dv+1eRbk9%Dai(4p5W z!i_nzn^Kqa#Kx9{ymP>`>Kam)GVZnw@^7XiF~&LBYauQ?2mV~KTzAlyw{!(edOz|e zZQ|P}{j$W`-|Cv~Qh%)7smb%b+;1=MQ{k%4%&%JbpdCZMZj4@7oDv5HBeHH8z zM(dgFecnA3&@YXvO+Datv0YhsHAqyr^Rsf+0OY&ZWDw(Dl2CW9;5>$sy15jKE+{Un 
znOHytT)T}RdZA&&lBa)inNT;QhR(*(qAQ;b+Qy66nzzIaORB|!o*aa3O3fT@_VAiT<#>-bLvs@SmNRf`?Z%o~q~+!&QoW2D*SL)2(>^A8Hr*k2r@j3*I<0X7 z(JAhmDpKlfDgFf&EF*-O-pSl45#oRLB9@Vrk~BX072{VW5@*4|5Z?k|_Li$H8K&Zi z>~g%`5Puy8#;2Uu#$i5RidW})_Ot9Y%jb@VuftznJTsD^1uug0H{^H_qT5Ukl7;1? z27&@O2wA!HCi^_&tARvWBGJsa>cwdBf^O`-AkL3+{+Wc71n8H-!AK{fdj%o0br~{B zWJmKBcs-a44FmG{jYj6`c!d78swdqQLe(@8jAU8aAXoRA{5s zi`5Lip0qFg$$YclPFHC04FWH6L8M7c^nD(HOt}w5%XTN(7wF#umqb_L7Y>l0O<+&_ z$Z9BQ(Nvev-qh@C>d?zU*;H6!Z+n~56DkWif7F;IyLT3qEE|4Jp+1&8djV`4O3deC zap0ItJapvNR{5Ao0uKX+(j#JqLHBlAFD=)w)4MPQEd&zSR;uD`GAS8k7)e!U6_%pe zPJjTE$4s=pM0!LEvyZp2)P2c0s7O3J+K*klnDCv+Ri4f9Aeb?F5MhRN-nNDK0pVJc z1rj51tjB^1xLl;OuN_1PA~iDk>UIu?xrJ=wY&?wJ2jsPdL7HM2wSCDepd-tv>jusL_{5 zU=``(lo-_v0`Il`g4*dy(KM{F)3o1uS0C(ehGYA_TttVKF zolL&K;?Xq@q{m(eh3|VazJ(jt@xb%Xj_|!1vSqMhmCq0|-g33gYJsQgl~sn@5p(i< zFu5tD4bCH8hYb$Gb-A4-MLywK=py%UcqC)kLdbGDb*hmK{uRNl*n{p+X(ZD$LkB;K zN2W75sz?mWRbB)hr*Wc$lY*{LZvRs+sRtvuA0aOpcu0u(F5IBt_mDESLd) z&0)xU5dl%2enZNu4_PGh`kv4ME@KkAML09l(8{OFHq!l3a!+{i-^}Bm9pi)b_+96M z#1U|y;ttdfrm(mynfcTli6|0+tw_uH$Nm7W8b_kY+^u5TVH7(KhecNzG2HHW` zlpv!ND~z*W7;t6J@~7kJ?RI+_vpaQlCX<@ZBpOQ-P48^J?6rp4Q0)(o?SN6ucaxU% z>6WC?u`lG*rJi33m4uI@7sQ})koAYHFwuxVEm9(;E{kYGd@+)L18j$y`)B1m#jG4);x65t-$zrHM8X+_l?VgFwUiG^ zXk|j=2qVGxQ*Q~I28o=D+?ObM75ZYCk#@5NRz|W^w|EsEAPnR*L~w6+TznvTAsbDq zKC5=N&IQIPmKQ<@a@R8Sfx8;=!a#LNXr?^UgM7$#5#iTb@3m|Y18bs*x81gQAe-f_ z&Xq4ZS^al1$weJN%&nrtT(HMtOzN#~gpYKxcx9uR9?C_Q`uhyLh9TE?I!eW0S+Flr z8RQqiuTGt|5H@FwOTnwNW^K2c#!JKp3%J{4X-#equR?R1kZtvB==_Gf!iJjYMpq)& zl@J~x%R0)Z@qZsWMDgziNZK4S&G53$r^Ap$AEXLWWX89X>40LBiZ2P9P#kXJ;~B-; zG{*k<4Yo`sCpD7^9wx4v>_`*4N{CKj%Brh@Kl+x#;rcY@m_#l(Q4O5Y;8PaUGpH*8 z{}y}C68{UCLOM9`vE1XFAGpn{Eb>TeJ0;;=dvOQswN~-y%RxhluO>J7EdCo&!a^q5 zAGx?0Iaxo?5|N(E+*s=o4AL%%`d^53xE+isFKLjA_Z#|c{Y)czeE8=UF}Eaz1up37 z1NmK2e@i2}_2rsGK7{cxA8zacTsOM^(PG)OYq$1ohq}d(rqfQ*8rx@E76l@o*EcyU zr(aJArUinT4#7hmZkLmIsS^oYbu$a;{0>Z*%<<|TCaI;*lX(zp%6wmWK19w421U^9 z=Qp#&n#7#^CV58I$g;&N*{VL&nE_^mzhuw=f87`FkRc8>g*Np=4;UH_!pdNF<%N#h^bv4(uN&7S$!4;$ 
zmV+Mgyt#DEB{P-ty@og3TZ|IT>VYYxA>czTVq#(A5$eXU>7Jma`*~k0!G{VjNME%| zI&?hiY1w*}StkaIT_-szmOUJRcn=b$$G9BNh$u=nX1^z3syVa}4|}rRh22pB0S`3n zXT)rp1$fB$wFykdqE5evcXfBG^@{$@o#g_a19O7{t{gjgTu#q92n{rjXus^`8%&a7 zVU2MggT})kEg3XN^4|1#I_l^0*Hs`fW#@{q<5vo2N)i6=U*VhRn7Dz|8ScwGYE=`S zdh^XLqC)M~wj3y+w$w4cOvsVY(MRYY+mIp%``)hmC6@p(&jZ@!Ng=z@3FGALhTkG5 znE@XiH>t*&C3}Tx?1j4&sp}`W{@&YlATuhw={9%Hsm(m~J_LITrtom>!G5F6i%WE{ zo*`&bC}&IMt1x$A7M6M1`Zme0iz(J(OmU^etUv%1O?Rp1<+oH?Mf;B#E9<}cJ-X7~ z{pa8#X4v1v5c5qjxwzH8f5oKJvj6|U|7}P>dq9H7|M&U$M;teNQeqAziUWILNAv-8 zkiky<$*e1oW3yngya8v^UGKH@ur8n7df{K!n6J_(BWrJ=eNhr8A2C@nm*n5abx2-! zeZIN8Q@dM`Hg#dhh@fy~hV9(#D2=a0vH$&CDNA1NwQOL$K&L1XP-6~CBRRqUId=Ld z$X^w?-2{}v*I2yM5)9~yO=afLN5G8*G5>RkEA4ZBSj9~Prh-)uIM=M+Z=>!8DnEf& z(?Hp}93?!fbUXF*_JZl1?J@@WxK*dvB~cdO^S$v!01N7P7E?hzSO8M&in_CvWZL=b zi(PK7X_xlrod=lO0F(G@pH8dMCA>E+=`NyAaa9(7SLb>TTWO;cXJmmO_wX#lm^^f%wd0|e)O&uQ~Oy}&UogT+%T~8Z%vZXX`R>? z*6Tz|p0AaRnfQ$CAD;!{b7g0qVG3_WS`RJQ(TFatN(}wXK-OkqV*{wIjPi((&|Fe! zNk0gvkKEmv&qiCesB?*J-^p> z7OyU8XG84(+bVAF0`P#S*5*rPWo60)D=j9}`2;nrB4)ydHO&{zcQmZ2`%-K@b2&3! 
zEw@4@S50I-*mb*n%v0qnZkzq&`u1*pdSzYx_j2ARu&Fb|%(H^3Z#_IM1S1i2CVx7F zpLldf5X`xDbIwf0wVw$$o~csW=NESYY(eY#`lK;2?dEcI8K_YmX;m!tQg?l`T!VyA z4r3#su)RL<-QoR1oR%THgWG!*Yu!L)HlyKJZx-88s@VjLvYz!9t4}XzT7DG(oTSMr zoaNJL;DFhyhg+P^&UUiYlN(Xs!kUfT<$&wefG&s9ULETcmo~m$fuWC%l`|D;zZ#xC zE39*!00_ZUBJR@w04H<-SVYe`7Z{J~mx3c&RzMxe$%cfQpKX<*CMR(9)g+FF`IbLw zj%u2}06tx@Dsihfw{IOJk3j%7)O|eV4557vRO+rQP6f(L2ff0O372--&DH+j$Y^*@ z-IZjggeM#YP^8=sS8LX>sS4uHFaL*c)d!*!T; zPtK!b1mA6cNC!8ydeFGsT%GKeSZS_e>fs)VlW8j;+s|H~TpgG79%<2xbbH%YB0bLQ zFiFc0Q$xA~3|KuFpgjq-pA~Nrp)iwv8DKPqy$3M?ml=6dK|#{2DEOjnMwCvc9!0mZ zIk3n%fU3i%GCp0jir^78RXV8mR`l+G_0ZdEA;MMu)Jbn5_O$_O7zTiK10=-+>9bS4 z5hbx(cd)^}0o1|HHni60v(>ap?xUv>Ts5|1oldV>)B7~(o34JU%ytktBk&c{CO-q=j%pk5y`vZ3?Wk& zLkOndrg;xw6_>L%;RcO{i<#X|-#;1Tddp7qgWB~iV+;T?Vn4397maxy2@Z>L_*_+{ zg#3G`5&z@TG;N!B`^gc1L$>MF%L5em0ZJ<1rw;%U@@jGnC-rSowO3NTX8DF*@+Sz6 z8I?ZhWPSgGy6|ZXr{br&jR^oTylE_BPvyc87?x7>{2N@Fx6iM*ZUEi_AtozO^f#lP zybW8Y-z5^?dUa3a>N!Kzg3P5?NfBjgADk3;0QgZQ1HiS;cZ6!>7cgxaAn82 z>Wz&ZD7`RK-*)fBhBb%dq&)1sqM!!eYuv4{4^VyrAtj>SD9teNm|q5W2} zv5<834Jzll{f%Ijw(j0#>u)Ho+O5~CD)4yFg%ZSb!m9RhF+J)rp$r>-IQ56m)cD+W zZ#gR$K0F(xPX4U2^6mF_yVSLdF%B?;UcZh53%|!=m$yfH=g zwEbyb{_A^*d>HI?Hd@)BO(eGVwMOb`L=b&eU6v%%`;^zmTD-$1Pi^N4$;Bh^+%(5nF;kW&PxV0Sx4U}7|XDCBWi$BjJ!Ra$%a#W zUVLMv%wcl4u8EQcD^MWs0tjL`3{=p^?T}1{i4K@g0m1#dU%*TaYWMDrK6eR-aL6+Ilsj9=Np=TfLn1{e8QTxpFvY$^`WG$u0>E~4 zoNr%wlqLzZ%Gn6F4%toEsZkl1Et0}LQ-0LM1Es6pNXT=;w- zbQ{k{YSu`y-r|Q)D_@-N5tJ^z7t|q@Xo`9YNuui>3%0w)-ELEY$=FYEhCG z?l7dPfBbWQyW9H+Mi)+aTGRb+o?>kj>hlNX5#8O`qZ@ z5%v(938ki|mZ-)9gb~$LIo6zZo>r!`s`TMytY3F=sy&hppkX3dB9SE9zg7Ie9csQ# zk{%2bU82$oabvjJ4VqMJJKP-LH?9CRJOX#l`_2Gz8cL&<;rc+E{hY;6JSSWLM=kWS&dzPUJnMoVmjT=xO?ox z{4siiP7?VUyL%+fGx8EC_zmuTmus?_Ab;>X#n!IM-wb!%Ke@C*S)MobB)D zg++gNHX_W4*-uD+WVg-J^iQ~T21$&5?1Hs;Z*1%jflmH~+^Fns^LbW}B0ClJrHR&I zWKExh`3*h2osL#iG#=7iduw~cmW66Ff_~cc}3%|)SFIV?@tY)f(5GfD(E(c4gR_*4dR{SWe zMbl!hq2%inM;eC>twyn}A$V?GU4IwjPvqqyD;^J<)uH0)Pxm8eSC7tolgEq*4s2!o 
zjrX*LFK!4JfA=>zCS%N%9oI1!`tj8fPbJz}K8UM7y4H{4RX?3ErJx3|^u97~P%rr+cMG$!?*-7pYtK7vQ zuK*7ZnnNp)Z~^gLbNY$XZ4*rok*0NSZdW*Jp$hRlzK5B$nQTe0%86#b4vdmU(kdJ# zW|J2$jYN6AToZXz`CS;D?~fm%sOK0?-5)jfP@O6zH!34M9|ejJhH5Q+%u695chOdL zdDSmTt%ub^dS%z(eii|_{MJBkI#?Pey3lHC)I+;{GkW-v_`}8`@2dNQ(Wk2ZxL7`{S0ZD9 z;7#sS@VY|l_C!ZjJ*9I|OsOb3Kj3_KhAH~lV!8x3jrWTJu6b@G>n4-WJeT>ekzBNGifc}b7?agEG}xtW**I?_U0`Z1q4JT9(0!w zXr2D3ZHhRT%)+8Bim<-vvteY)TT&w-9uucGWV($UFFH!u$d(R=)C>LkS$TCyHlWr0 zs(-#KyUEupBQZJc-H#ek;Ac~0Fe0ZvA)YuMfKpnh?jd>wW~hWr;BVW*sTkXzg}W~Z zz-e}~8kK1JEKsr>+d1>UWbfBkwjeA67J16jm@;}vZk=t}$UGps_e@CV^nHy4P< z)V}dOOjrsJ4K12+UcjdoQs7Yq_wCX%o*4^FnH|y_ym+Vwycj5X- z$d!-p)agF9V;~)ngh4vkh|yEN$|$BgWC@j&lvb7WC=jAZI@TthT3@@;v(+@&R04)f zmeLM0W!h0iN5VbHD@dIWYebjKoEM*(JacDI^K|0L()(JOMI?8HROlzx7*&jj-UKnM z7g8tp4-h)vN>AH0QxWb{pmx-9er3hU)tkujH;X(MhP9EAq@LdSp%y%bXvsB(l$snB zho3k=Q%N~5)ZXF-e4jcHxf&-KBQf}3>N*>Ot8c2M4Y~h7y7d#D{1G+%D)4N&UYqm=Z<~7}=LxUV@ z`;wzFvnXh1{58a}g-p~{s)M>(pgAwij^;~eIdvd$Lty@DRaIsS~DGZO+U zM1?hyc~Wq`KkkFDbr+qpu-0`k020W8dMoF&1k9j!DCIz*R<=RwIKL>&Ej$diI1249 zO-wUG((6dbrA%qbro2Ke@V4cva25&HosT@ifPE&&;hpz`%$UBrJ*{T=S9Tv_abiR# zr8u)p@NpM96{MffXe~q~ogsANUy7sAM8c3K4j+=i{wuvpyrI1{k$LyV9v|}*O!9sr z5^l>CA87xaz0YT^$szPO_*BFD0RW0Qjyf;#dQb zDq`p%sn1P(O-+sH!5O4)2Ksgl;&B$J5RGR;wunmc2jq^~S;hfmq%RoKJd@5AeY~~< z%l7&iYdZ?-RQ)>5+Gz=2y~<(O z7B2{W=`H%!_VT_wjO--PJPQheS zaeN$Bmh`Cb)*>wbvFH{M4Ti`46O~?{=V1`4cUvqp1sh;}z@kdSM61`Y&*Py|PXO}@ z?^Y~uDSA|352R;(wFeu>ZFQDA@l%h5+_>5ftKSEWz*}kB)gJ7WxvSl*J;J z{>9rFdAriZ(U*WV#s{nH;J>^%($zNJM(zyI=AWvA_vNaAi**MPOX`{U?*D%ADfxeL z1et%7jR!Ne-<-BfbpeP$|N1aXDnaVz40Xf}K3~g7;9>g@-Ht2)h9kI316joTvfmS-mzj^z(`#=#mj(l%8ApOi}2GYzSPrJ2dKJ z>XuVTKU;(n6hKgj4(xW8s{Z+8=)?}lvXHZxU4PvIzmsV1Y5o$}U72}Dr0`8U*7X&puTco?uI|3y+5^)Br zt+dd3O>2BIa_tmLRIKm-q>Mv1v_p`@PSs);V9uE-*(&M&1Xxk3Ujm5LBR#5D7q`WJ zK+AxQqgjl(ckVcJ+h+)S&{zVU-7O;p7S?bCSh%}YlHCvX2+j(A-r8;fDlo%_ zvtH*<<%eXRgb14o+wx!s5}ASrNWL{-xWJuOh=fdUc2F$ZGHgQWvZk7E@pS-P=MOvMY@?F_zi-v`EYQ% zPh>H28N4AI*4TcP%XoVU^-WIc&JHiV(_cEPY#}200-m* 
z#?)6=0xZ0--!fKUmuBXWIXFBvEfC5=&`LbqjG}U_}XBj}=yCtQ79&zn`)udsOq znY6YlODrLPH9ejI1{7*Ni1J;}g4r0+PPfqo zTzc=_cM14wKT2hNe+#!UgxsVm+N@{sk&a%vw}*upU)YEZQM>weKelx7lP~Qnj~HiP z@;gu3P3mO9tR*ps(c}7#GFn&kBtewsBp6LZI{Nsazv?$c?3oRx=s^$vW;$cnz>I{& zc6#G}46k!cQxO)mBp|vhi_e{@)$78sqqE!rZdI7;-{u174LQeOvl=Lfg>1s0nkjud ziYIAVek*yrl|%p(HAo09y_0OOIkh*|cwOEtTUaNkI130fXZT^vM}~%ge6x`>SLJuL z2`Susx#qW1z51!RjVf=_K=ddtXeLU-L2aj8FX-d`$3BTsNpp#{ z#3f2-uF&+`mYQ3Pk#!c2{qac{TgbZO%YWU*{|Hp&^;}RP-+3bNXu(13goyQLv(Os& zK9*hzf|>gLowq<>%};Ix$U3CHPUx0n4_`Nj6>!f5An|1B&}T}A`02&G z=+J(|tXr?3?snp}&r3;Q=Sc+2tnGqP!X9aU$2sT1ou!1Em|80He&-j*iis}3rezGc zBvVWJW$;I$$eX@lqvlzY9bRuUN||T+n<#;pgA?l=`GoF!R_sITL^tzGb@;#-CLI+Y z4SpG~Dx`llf%4aR4D1dDYE|zDod=cKegAvfnoE{+ehmxbr?ltZa1En7@40Mhp-*=isGS4SRT z#O;Io)sIJmj(D)xDP9E_X2(Py%&MGxgOP6kHrtq*yY{RG6iNGwg5)tk zaxtpjso8wvA7*sWb7h50V#7;@b&^HeGL(VmRwU32j)1X{n!cAITGGL(K%baoLH@CR zW(7OpfbdCq4hoOec!v9YIXu!6A?wx;S?$E$JHxZ6P)lJ zb%`Ub@={4#Vep@}cIhbNb{e;y+%)WuqH2&6G4bD;Pdq(hHnaA5Ai*n<6363KKzYgy z{MUw#&KQYgaTg>c^>IFgWt8ZMt7h$+M}whP?2C%qZoNj|l8fCS*|HGx-o*C0*Z^SJ z=bn((8aC42KO{sPx5`SH6D~05woTI7?daO^_N}Ce_0qaMZ_F2#9+r4NfD|~%Ii?|+ zo`OrJmOG}Fdrl;lkyi-J{i^%Q7N|{=m>jCLyLy*4;vMJD7qq2XZgJY5{ z<*kfy5y!NQ7)NVEJVtAIAi|iYm{|8)NGhT{>I-fV4ZwL4u=J~905@b(C>BX@uX#I3 zGb@LuSw}^5%Y8@m7L|`)_EC4B|FDLlcF78l*`guvEc0|@#+4e6%L}y^WE@N%yO;=H z6-i?Z(h2WM3n}D;%Ga1?I+$wdS2h96q7kM{`|x>IZrZoeK-;KZmB<|P3AjcgcrHl{aw)Od=wE9ywUdSQbwnFbLDO4 zU9|YGYPw!ri=|%{zR3}Hr)HpwQ3-mCGQB?SHb>dS_dNRXY8?U3^HEVqtTXUjwQ*-( zxV1AtF&1Z`Y46RS_DKy&ZW1da7#FvJA~;pLV9AT;c?pcT*JF`E3+wFAcyx0DNaOii2N$OBx!3DkU3II|;8I3>#j+QV%R7Eun&Y3u z1;Q6(a#OzUUw<@$SGZ~oTO4#og%sWD~?y8EHz9hp^tl(^uom&13o}vmTjUMhGj)d1Wj4% z)%i8l7Cks|k5q`^@KNzwWB;YZFiN}P;=PAV@$d{;czCccyGksxiDYs-;VIx9{)y9L zgA2kX+0SraN{~N23elZXV1m$uMpDuq-uq(boZs@G9$|CL&s{ex$7V-lea@N^vP~?$ zRg`31H?6hRi%(@Dgrm?1y_~3-xh@K$iuXKWC1^>de^7g37qor}M>6dK+3%M8m* zjy_iQs3=I1Q4!?GBm)l*$}8KeC_!NiS)F)=zRBP`d{UQp+QJg!s(3iHUo4b(>fxfz z_*7wh2kb<;gRufaqvW9Hyrbv0R(^W1lKlMK;x2gi@Ov1l!wdTA2xYL?u=nuJ0KahH 
zsbLTeh&Gx;?98~*ntrC`z{<$aOo|!uB=|ysBCJ>-ddKag_(Idf+h7kC_cLddQhjgX z-8UMu zFgAG?yggc2h0W)wQ6stJ*{xn_izv;<46C+0X)|PP9hbIkgFt6TLl_W-*!%d+5#J(1 z!uRfrbWWwTUK_l0+!GQB#zhR>qUK}#k#f!fHJ{*k(^1s!@R%n;3KgMRryt`Pj4ub3 zB^jR&p-0u#oPSlvftA`GK*zTnxgL9ZGsz#0>QX;M%kI((!jWKU*mWF(vIl@Eu0P{yQ*1dPwDogTQ2%Aja(ILEU z7u4S5Ao~vgw&dn{)CFAVvS}=bauc6j)4}3euswWm$#_P6?;L- z22cm0$E*>g>e$>W;87MK%LogSVpiDQ)Y7vLkkUUY*dX;;U23}GS5KWwX$F?QI7u3w z!i;EWV&UWTFbRhjLQKKE5vD={LKhpljRthWCG8kv0v)-xDBO|Bq2))@P#C@8sl@cw58Y5gw5$*_5-k$L`@pBu5OV zZN%(Eh75*X45vFRt=W{byo?>CLV;fgx}jBVcelEJoUgxSx|=FerEeI>%$O~Kuz}I! zf)PU}Z-wX^9ITxvAq+xtRL1%I?(@G#b?Ph~W{Ulv*r(EByd(1}vv0k9Hx1`O#o9f%~N$v9K!4B=2pB~Ul{V2ul8FJy2iSxCi#T{hF<>{7W`pY zi}OVb{x0uKW&6T09OXu~kp*Qcy=+yaKb3;uiC_$5j3WPq&(@JTB*pzzu~R82mcXQp z6?RNHf6Q-GM>wxv_6K%2p5Kli+65g3aWj@5QGengffuVLnc&gZU%vbt)j`jY2^t8U z6)L8$mtkw=VtNWM^fs~(#?dg|Z~m$(14>Tm?PWT(ZCUws-GTf)*IGonR|9PYElrFA zH0Y>;Vaj%Sx&BASb@m)HN3w2NRu=!@P) z=zcfDr|9!dZboNSICKpdNk1rJb#HtFms-4)%3fXFJjxA%4z7uj`0|ZExhRH&yy7|5 zMEm?LQftklXS@Ci=ECEqL6dA10xMl#cDE>&Cd|STXVZgI*DS9H2A#koq)21j*RW|7 zhdeGzgCt+@teMwAe9JH2qlW{h0-dGRR7;f7j&ZAd{_PBUsVm&#uy58xwnOnOK9LRU z85eq#C&l(?@%(X)bw@%4_yj^?#J(5(P(gG_zX5RXNMTr=AN5rdIEZlKTi8nm`7Th0s<65o`Ssy zA$$Zp(f+BsYR)i@^M!EENt3ehw26q4UjO2(-^i>1kFjA+slq(M_haHjjPS=O6)1kW z^cXZhiia{QTh^aBK3XW9g2~bQ@+n>GXZg9z^^?8xoEj|Zy*N=C9PrFrw>73z5;#0( zSWy@elnicV-~{iW*Mh`g7s=@~EmT`;4S3?hg{6%y__wVsgu{}~z~MZ*3kE&p%QQ96 zuZs=#m4sXAyE3@Y14Jd9IsxPsd6jvmvmelhJEPCccI2eIUt~ba?27rb7PHvOR|_Sp z*&^LYs@JGve)#-8fA{sbSUL^k&xTWdp)n>%kH$fZ*P>+Iio>CP0NC$=On-{SR?u@p z#qnm3lk5x90rxpdpR<}|>+ z-FXvf&S{^FJc|CXmMAtp0}Y$$W$=tn`)YAR5$0*&!!cr3&_4%NOhx=i8|;Z+F=Bx`C03vYh1f~GHck9UaL84YR=5M6rqaI%w3h6PRA+}9poO2butiWYI4ld z)G|{*X1U{BYV?4$v&D*=;qLtVA2ZmF=<&Lh+GM!Rm#`R{gv6N25@nok1v_mndfyyZ zkp=C&b3H>tb#BIUlPAVr@CL=}lf*nLo?SSpur z(o^|g;>IE8AtkzQf}J?w%(JnSqioDL8)epY zmR2w{IBxairsa=w&GxlxyDTa(4Czw}KHZ|ucxd9kOgD1+pHsreAOZE@lWDhQK&{w> zv!SNv0%~sxm+1-{UQ3>yTnJu!X$7`mQ~&FBn9o^b=X4={<-4&qA1<$U0}9io*8hF6 
zRya=dIe?7+s~<4m4gIfRAoqXNX4Z5&V1I|FA#gfgK^3^hA$1+VJeG5^kW_!dy2vp9 z3k7Kr+eGr@#mFCe7z&O(Mf~9^9?W)rrzeap&9`J4@Zfp`CPDw+PvjH&USjax^O@w( zkEPk0>se`B0P~x}!ja&0n3uZ4SP|4LfjO7(<=dMPHjLq$gQ*Vy}WK4KdQ+syx zBPkoej9$X_nsCJz{yE{_Dp?!>Av)6i-k1_)F&g7cc3BG!V8#4a;0FWFY2|elzOoMf zop?U;W#DdA(J2&X|MsJED+3K1iVEy#jya%k)B+$k;XU`Q7GkOVw2^JMEHRG{X4<>;Kx-1Czp8>ZLquPD(6KIn;ltS)>4 zpg>~`t@6&O_-*#`9OJ#g2w0x3oB<=+x+CbBE7x$o!TLHhU{0-!$0a!{AG5p0D3J)| z^9QhFds4vLt1zSKJ@pJ%aqsJP^;?-YZMG4wXK8{oBiD?r!fxnZ;_@yXZWPE)|fZ$AM5`VB0* zMZiPK4m5y!T{*UU)(5@zQeWeQINKp+8%C+UyXOoiznYpZonh>(lz_pb8xV3#Fp!dO zJv06jMgvQDbu2%SrLCj?{Uvz6)x%#Euv%IH&wf_}f?EUR+2zO1pY*NS0SB;ZJ%cRG zV}GEFGO?32Io}uX?Q_pv%%1#h5>MD^({=*-Yo7qZk=qx~NxRJ`kZT7+*l@`T^F_`k zFrwQpobM0*)5Y5@g)tP%lN*O!@AFbmrodL}YPcW)LH$Z80$F6~;Qu9~#sGRfn z5#KcJ1e6!ZrpcR}?@y&00aa>K65UFsURHYicO-AuU^ zh0bj28})!j^^}*#&V5+DQ%*niWTM;}s+f6;anZ(xkwZ2WV|Zl&Prg?3E}h+b`HcEp zfRM&Ek>y(KzCWSkz_bFI z+?%ZcSMAT>7{x;X!*c>~MF-$(7By&kzf^CfWk>&=pDAS+kNW1tFLXSxxGVdNrJ@di zxuxyg1T05GiAG0_IiS5kPsi9}gv*BmXl}Oxa?p)8UKm6B@>TmYPl$8<(FWsF?k@q? 
zXWanK92J~>JcU7@KL+NL=Ps>9Z!QluPxm;U)V2_byRY|u#B|X50<4@nE6v}#_of-2 z`y=q;fYR9*j5X~_HNUA|S$&-;32b-+gu$bl#66;^3zuwFd|tOdlrY(`9RE@ zSsu_;qOs=$--wo`w)1ooE}fBw7|?LhjgbQZZNvAw_hkX#BsD0ErwMu%VR;R_fTG~& zHsd$Hb0c7l3hrWFaZn!!@5Q)ZalhIrQWNW7i0AvH+VF~)AjQQE`jp+;eEv4CY!3|s%6xY2x2ns${R ztq#W~`xZ`5Rf~DSL*6u%eZEFFshk_25O5eemTRlJ1=+R_g>*CmEBzBj63g2mITj!$ zk~r)?Pjw96-{DG;sjuB_@PPq>aBV>`i@M8raF$tN{qe?&ljH=K2fga~A=Wlc#nIu3(vW?Y@2VEU$fW{)JC8jLIc-&=>Uceg_xQ!N5 z<*`(aEE61Q{lSwZMI^Y7LDPF2ds>D#?vHZS{e9GRD~>Q@?#)uw%7FFTz5 zJGsHlpUZ#Kkn6ad&WGvN)AINOyT+)u7^wyO%ioMkr4_xjVI?GPUGuM@uM7`I%#|xw zl+)Tx(l}GjH>`Kn%G&*R#1F>bH#KjVyufVN&I{}8gYEOpq$HSkk8Dtl4aK*T0v&0~ zKnJo~rCJh+PxaA+zXSK7WFlMCJP&E88F!0uY$G6zR-o~HZar{HTexho+PXsfbgh?W zMMK~Dz0#-Iq@Fuf_yU*?>}Mu_Q8r7KDBt0g{3z1{_b>&}y(Chm=z zaRL#=^Ml#p+~taJ`H&K6iT!~hK?)z;tS)YmmBBcGrxdNMTraKu)wZN_9Ea@fk>La|F z99O_MUN9?nr76W2jU^oZBo+E$<|S*5^tTvl{An!sPjrEgnF|mq@|KOpkdh)Uxru&8 z2fK>KZj%JbVfEu3&5M>QieW(*JLoZ6U9CflzuNA(SkeAA5nzs7!7h|qh^ZSKQVowy zDw<@LtkqcW`*i*z=6?|PoWUymL0 zUI4K)?BprsL!}y0oL%Hy*VoJLjOH`@ja>xmBFv4R02-sq%F@g)B||fkCBeX zrP=QIO{JOL{-8d3yyA3EURIKZ!be0rE3Jp;%7FapW0T`&>A{WW2oev2U(kYjTr}2& z$dD|O&~$opVp3AdZxHG0~y zm)B$1*Z0!ysx#-+;5vs_l(#Ozvd7D+B>72~`+*N|C_YGIoMF7#=R3tB&9SKfmkd(u zZJh9!@#{I(MN=22ISy|5Ecnuu2gcN|=}3@@`DsBVS65Vvo^JNbPk?R zqE3uNF^R-6ZT%)v9#<5ixkqDEE&O07KENfS-Dvv^hxo1$5-^zF@LQceh^DY*;=vro z*wBWAFRFAfrH?$@YP95C8pl>mKhIB>?E6wA1SEK~m-t~S@t8g;q4EdqLUKWtm~q+j_zXQ*b_?Hw_Cm?LSM~NwE?&Iynrk;8X~AAoObLew-bl9K zmY4i#FCE3=u9l=Lowz8om`wRnES>Yha)afX+d930$88R;FSkPWBc(SE18w26mqQNA zsP{jW2i|V3`I%G6kiz-AnIpS8@!=$KEAcma^B%4K@I^|;b3|3ckCzQ1^}z8{Sp>Sr z5~l8r<7P9q|HztUaPD97^b~y+P1ivsLakv- zvwL_jE04;mz1zFWS+|z0#gdiF$6zAm29}sHS&+Qq^88Rx;nAu!VlFx`k)8-@zCzPV`}#XO2dC zif!--SVY$eyD+&zaE%5iJGLCm>~A$iM`%vNYeUQ;GO??CjVqw(^UZpDQ)B(KW;y9A z)H|di_I0XreVms3xYTUeFkE4UNWl%GG_EkGIS?z6gjlO&LdkcVG-Dq7b1Yr)!X+i>y>F&YGsgQ;94-X(U|pn zy;fg`?@OK(xqEB{W+B=h8;X$PCcT0m=m%8pE1|}T6LD9Dj5XFDq@~EC9IsiVdY0E# zJVd27A3_wUn$Y<{<2fNKtsRRN(;Hm(kZwlAG{x81wk>n$7^Bimn796*c=d&)F0EVM 
zm*p~!XV90T{n#|1o*$wCZ-t4=pjC+-?nDX6lr;HANL0KF@Jmvo@mwdoYRcSseVxZR z7G~R|B(8Un$$}t&;i;8%Y&{(=myJ6;7-lS{bRm8E3r7$oBasXx0ZFzO;D8Wd-+mG> zer^01S-SF++NqVu<*s}U<4eCkwy`jOpUPx>QPXegKTq17Q^SL^bkNPi9HcroV{P1pfpX1fRO79C*P0UE>Nm!|-!se-5TB5eNPm+bH zrKqds7-mBTEd!+Hh(qGH}k&oDcLf{jT;p89euhtE-Cv%;%rZh{bg zB`Gt0)`mFyU!b2!{#8ck67DQnjVC&Bi!q(8y{amjp8F9q5a{RKJ=FQLMYs)QJ$@vP z()T;PpO3%G5vWg1m#2lZbK3s?nwd!{#D9SihSK!l!b4d+?_9t8t|};byfLLSm(=35 z)1Q*KJh@{I?p9MJk7jpb#V|UA`+{CZ%_|#0CSqba$D^kv2q*lr&~3+S?!P}riT(1T z(kO_P%yc+w=QCneI%&-@nr2bP^-!3vTr*nGv;Gvgn)!Ir*+s-5RD@p80`>Z_PcY3n zKO_}cJdXhz15)Fc=ePxohn-QYXNm@0%+r&)wSAV^knI4sHyzIF@g!1K6jo%skqo`_ zf%2YYkpaA81`>tq1|-7rD9q-fs^SJ#@7T{bU|H|WI~l=gzG5p+PBlc1cT_h1?*(z2 zvJ^6*9=TwFh?YrUII*Ms)^-0RU4c7(?g@V*6Zx0k^1;&C;>Z+!-1WMN*sS@wR88FB zhE%)IA)AeS4f1MC>P;2*FB!%YAs#DH?KWY8ovDm8T>NrT@YA|Ghxv77d2vt68=f(E zHghPZR2dRyd<`_hzyo@Q-%XYm6jv7Bzk?rE`RG|Kh<_f;T$|_2%gBhdT;A|m5(t2^ zDo?&#bx|F-W8g@T&mnzK@G5UHpG> z5{KNs8Fqo>+nWyq<8B^*26lh8E?Y`>5{}y^d}davw5$!Mx0xDd_SSM2E%{VkgRQFm zK|H}hJ21rVu{+ZGYap{8Zs3wAXE!lb;pzv%_6=9V-U>7D&Np~Z#{P9&uRb*4Kyy8U z8SOux8Pt*6Sw6Qy&~}85&Ab9@6#+rZsa;Cjl_%oZDo)mS8-gKS z46gTuRP?=T-U`gn+nc|LEnES37CQW;CI5`#!iuB?Y-k3gn>KF+oOi#c9)|?In`nXv zr8c==kI3Z?hoj}0nt;sc<8NsA*E*i~QsY{R*8`(a8vp$lyF^Ej%y)HyPWG?7Nb_$0 zERONs%XRQ6bTsrK>-GOJZvU?|;s4WA{r|ve{O`ZnajrvgumPk?Ft2DWY?PYWhWt>nkEX!EE8@TM;L7L0Sf2-etPwfKwj+be|}W+IYIBxW_$5g2$Xsb z7n`uQNQIYu`==i0yxI+$-kY8lV7v2!&Dyns?Y(jxPoWzgIN@)CB)4)Py5FsM9XLdd z3*1#YW0pZ8|D&4N_^aWh&JH05#+$1+y_EV1CpvHP^&FAP^D_=Zd`hI1^(P*cDH_;B;TK!*2RZn_4gd{CcvLHG(CjeSUFtgx^1oFSM2_bCuwSiUr< z{oB`IyxI?LOJzvaxQ2qbkJ@t+z5cs6F|KGxDerp3m-n3XR%V{P1saHf*S^2C<|=Wh zJF*OP673dN#3p$}uSG*8ebzRQxbzcZl)(a1A%*0hkN%wi`l$;UP9N3KyiyQ)WsE2P z22-d5?$dY^{HpFfDx5$%YxEJM0@0cuL;in@x}&8ZUnavos7m4ZuMm*`c`Q`>yaNXa zltaViK$p*l1l}As2}nkf8puVVh;Tsmy&VO^sUpGS{UfuBW} zGhTdf${C@c%2mMc7b?t!KH04IO$^qzzT{9+L?w6n=4ijX-l&Nm5pD{c^50fgzzWVuhaS~G0tW*BnmqOxEMtpkW- z3}g5*5SCx!RCJ_k3L5VqULy4fR<-F#sc`GO0NRIUFmidy1RmQTXLYtw?hGAPny#!s 
z**7w_oMnL&)T)}gv;3cp|5)_g8_iWRF)TNs+*aU@Dv_T6D%I_$`(2!7RrBt7i@y8g zR<>iLIH8hecT8Uk~01S%mheef3-o! zS{zr4&oZP%78Evga(y&zX9i7=*ONaP{vguo_wpR#fm~NktHaIm@PXJ@t{mf{!3JO* z5e^6clVl~*T+>)H0=R4|Afuw+kq7d=D`@{ypit(~+VQe9CuSc4sBPYYj;tv=NoC@N zhjIP9-U-1z;OS9@Yt;fh?#Z4e@zF><6th_6w`=N(*_B5jCyA9&|G8&q&mYutqu;Xp zBSW2kJ!t&#TjR9(+N)*M>B+YFm8(K6Gmf1 zjpMAyC8Q2p|JSgYv?ZqHmEnZLMG}mrG6u@8z;!Dw3l*B|zkKek8+0&v@m(Uz>|W-p zlG$ph0INUD%v(-er+|@YuI>v#qd<2?Pf?}WZE)(Mzpi1r3xt09`K6dx=@+y(aEtFL z+kM@ByYu=-S83l@J>59d`^UnoAOz9(AnmiU&^69qu(^EEeJHZfB+QwiNO;G0Io*9k z)|zWKQJJ_(^|Nn)#~&+q@Bit`h6C=@J6aNsyXT}Nngfp=gISzZBZ3pyUX_uiM-%l> zc3@8PH#R!O2j@0_t-dyCOL>AiFC(5c@ySGBfM{kFD(g)wbpMUUw{5u7zU$3<_%ymt{Th zs9yh_sb>Xkq=MMMR~`#M8w{w?6bD|Xyx=$lkXpOIN;$k%1#y|7i(XxTpZ zJArzbVHAbev8yA>)l>5Fxoy~rRa7581YP7}2EdrDGhfxM&qB3EWmQ|1MH@^SiF@dE z)-}9un(u+_V#d@^B4ZgN3-0u2IU^B@4|z-8ut@3zIYyaM%&puOp}IpeXot}Skx}yj zM%kDnss`GHkm$96ht=!_SBEpfU$;SGm+{RocD&2c_(P#Mi>h~Pk24Mxjykr{i56!& z?kjX>I|miKU0gzSMqTA=b_yAG(d45~oYk*20SGt|`4DJ!N&MQI4F$yCkC-|tL8%*P zcJJM%J)VGT5l=uV@o^orPZj|VS^#9kv~FCzb*o7q(A4O0v&P#BpHK4W5Kp&(HpeuV zcr`D%HjjBA&(^<)=JL?Djs3k(6j(-L#t(Z&>?_g|-qgeN>jFnL?f9_SL_q%XI8( zwp)i~3s(p$Kc_PAyJO(x0EHFhqI8_%3RcF!(rJzxhqV_AeStQ<+JFaL$sgvQ*!`>O zTi6PL`Wp|H+4|4-N^E|-dnenv`q7{Ay9R}a^z%Q15U=V9_y(GyL~dH|iw{xE_ffu{ z@|z3Dh9c$UY74idx(Eo7?;`uF3$uIRVWDZ zCn?7@|B2IjvNG`&Q2}@+4kKkH<11B-g3$7i4Biv*3WAG8ma z1V4SvE8?!LU9rz62Z+w5=X11JO?e5skC~~i9?AE70}7XvBacTK+(AU$dEDr8cF=)% zoW5l?el8bqBxkFF-1C(aWKKVSw5Q~*fD&+nQh!XK4(SB%D=qQv7mWPVP}NDX%^}>;&(HnMDf{hmyPgL%T0?&~FjVw`<4CBIbaFKoA5{Eg}@-(0o{Hx9(F42ce~f=H3rv()>s5<7Qy z6f!`}W6!EV!D|Kz=S1|y{_V5+k@DPx*fgCW>}0|Nx|u})cbEuH?#J{}6+OUZ>LM5I z*2FM>s8|C5&nz|YIJ-WH0qc4e#4FD;yam}ssK%C;!UB63C>pGgU-VQ?OB47(PtO9{ z-yL`(vOvuD^wG_{8zU`G#CY7G9d#!Z4m@E<&A3P56)830P2z1ds^h+BxIC`B4!T#% zDZ0skaa|e?y84TN6=#ms$eo4#!4ckyJbOIq>naYXSy|P5a%7zt*$HJLhqa~#ul+!c zZQ*Z+x#CY|&V7tN_l%CY=Y|!k9NI_e14GD~M6*7dt@n_6`I1mgK;5>{RNW8h{dZ3W z%>{ox+?8(qZc{#%Vdn6NJBX+I2XtNGxSH;VLORXM>StT8k@$>^xF|V%&AN9Jv&A>M 
zbCZc*?V^e@pyd@$o`IMP9d7ah+ti`UX~fR&F4NyM0ZAM_$@zC>9Q;M?oO>w|reEWA zzK8h*z63|{gZ`^y3&cn-U=GLtS?uA|_eoqH^1Y=qJZLd7|IluAkVPKG`~-n+-Bbf^ z27T*KP~9^eXgCe}uAe-Z_;Vn*m7ZN;OQ#sdPD`evbBCv`5zb)*l{!8k!G|=eDYa?b zswT$d(*y%`!KF5>0EhFO3a&-9X0nQRJl6hSkB0c-?ly zBh?;X+wTlatgixV^iFaMZSzWXjGR3%X@7o`M&BdtD5eBG_X$^{OH~yG5vjayf8-WD zD!t4oWSdLyqesj0sl7jiSPp#6zZA=7a`dPY)D7R&=e!wGZ$?QnA7LHVglkzl14nEW zXAmvmg^z(?l8I0cz7-I2eJkeOo4%3-I)5xLlX%gWERzQo5CF*o=s4|v9Y8bcw5%sT zj%8v>64rQ$+n%bhYVR$s7y~224|~cr(08}QCfe34h^4YZ3;(Vi&41$BiTOcbFt$T*gLMuzRu7C z2y#BFz64~W;C}Y4tfb7`&Dfs_7*H&*@*JexRlf zL){U(6qefNA99shtX2Y!v-_>{!ZdJIrf;7{h2?1-h>wmW`o(?j0^1A|2Cgd48k>vC z)Uco{9tsTS6mW1&ZUbMt^4`MhHli=A%tV>i7mK4GrCKEDtttn9Nl_EKPv#TdBmcl* zK~I86V~5_r?&&8g-gK8|&?pCt-j6=*b#9CcFQvplsu?O$v{%B_l&TV>&sT|>gT&Zo z+3a(`ga0`kph#U~&8ZqKAE z@40hJ(PWZ~l5e9+48hoQvOy{Q5Oq#wGiQ-U!E#CLdZhn(*R=OrJ)Jn)8C?;T`g6FaR1DQ!Y_IA)P;)KywR`7>P~015EkRBcH9J&862)V z*Nr?56lqR>V4hj{Y)X8e}x!sZ|m;XT}O zs`=+>{9>SxmnrK(&t<4AzB&$ex^8(?j__{IWA!oo+Q+jrwu#<~_HXxV>i#?s`hH|< zU_pm6O?##>AMSA{EPW*_HNTf2ps=u?B{Of=cQo|vu+>of3%3X(jj#*OsYT#^*K;Kw ztlOwtby$6LrCQ4tEWszkrBhE`yd|8z*z&vNN*%Ge`abxou3D~If&7uRzgHieUPHA?*HMvraiq?;FdWz5+{oY6fjT@oXPWx-6 z@|~7!rWeCg|4b6jy|8R~f}m{^u$%v3f`83=%Bt~P+;Q|^VZWvL6yZW1J#gn2wI^Xp z34$Y1VlISZ`xL?_cu>Uk+_$B+GiLHHa)#1r6ESLRM9w@WsH?H>U4|VYCGjL_ivYv3 z&(Yf7T-<=_lLK3|wF?fK@DW(B(cm{^*<>9{ubPsQ)(k>M%}0&DBuP1zwjd%fU`^30 zX-h4WM|JuR_BBllS8wbq#|Ia}wuej%ZRN@==^yfNTRMprR?2pEH9biSn`L`=+Qt)M z zrWI%WwwvcJmT)c4m!ouBd2hAn95&<7@#2-83$f-k0sh9u`aoqS;y7I?{EiOrnBD#d z^XkzST}-Q_AErXcC0zV@ZK%R5Yv*V0P_0Nhh2)<%DhxcY@$!>*$21qWR9q(FdJ$1f znD1uZ+35H&?|Age1ZL`SmhYpCY{PS&NAc>$OXP1pk~o-8jCo~l-Y&-hG|0eS`7cE1 zl#%t^5qS0FjAFc+kP4@-3r4cbfRZP*DWQ|zEfNVo!PkY@_Ogau-3cjk54WA6H$JX% zYt7ZR?t0N7mSRuBU74NNK3Tn8|Y| zDIPVllCTnl4U3ps^>M_7noKkLf2m@Qrl2=lNV+)8-SA_j>_LvCK6?YMnotxYz_ZIo6%jtb-owK86YY zzgskBT;ZR|;raBkdg-XcX~I%B5hCiAR^j@m#%A*|g^>uZq4fFk)$$(EHNz7lhgS0+ zY*?7ZI52FjAvM`XQGKF7$kAcU>vec1nE?YP|OwRxMfl?FgJnYO>&V3;@q~7?*gQvYgrnYg00nKg!UGZ=IHwHH^FRp)>@B%z 
zYxoyjsc8Z@qNV+n*BMWV&^raN!DwD@3o;U`c*RMlgqlzOZB&aI52_Ob6h zyRM5a)jf)Y`Z3-9I?`+W;@X#BR`OBe+Ofl>?4VY38JkOC2NF5bM7Mkc@XJE_)zoVOf z?kx1IjWeKmo->$?$7>yTzRILS!+(iduc|41k)lBpb?~5&g%nA#im3iz^wvK{^I*lX-*G=sY<*O5>{z{)O zb5w?+e z6u~%iJg0Z~!NawsSR@6HctaJ7nUclygV*vm{rzKmF9IotpT}IMp7_`^FP{6Hfsn z6Bx5r1Vc?vs|D87)c%x%rL;iWptY*(z5}CZ{1nTp^_LU9hdt6yU}SQG`LM}O0#`9z zYiSX*z{1zk?8{-*mt>jgpex`qWy$b@qI#7AsmS*3zW zTtQ1h<=3fnO)=N`UmPe5YmHAmndvi{?a5EB5aptTsuwpqp0q5?T5DsM#n zioH#!7Ed!){MX6GEn9-ZtF;gB*AY1^^0K8O#17qCit(L-?8b}=eXq>sv@B7K{yT5x zkWNUHtBxt-M5tu%T+6FQgFkLr!esuX7g6E?H}7pxSv9r5n14=9T;Xg)Tt^y{5Xo`E zhbVf$o42r|246F$@G>seDyZD>B;?5XM==u`Nw)&%S_o8jO=auG2sv(|F=RP=-hEm8 z5jm}T>jlQg;bE|NoM@0l)>Po^Hunn*!JB5fnDMJ<*hg=4vD?-p!Nt)f#uaMMPHVfn zf)d8Eb!+oP9rWl9b=yI_exyf$M{82wWNOnX~rNXp3w87C%OfxXUKT^lJtlc!Qj#j}i zdSo(toVw4(@4bU+<4&v;&A>#%X(y?YS9N5oOQzhMq?$b^N_xom!+@uD`q)y0&q0Po zI)MYbjzkm>isGlvI1!93ioWw>|J|yKnEN(;%RTqD?ryKWV0*oMt)w*gV)4{9?tF3{ zBD>`inEqH$FdwtwC9UokKNa^BuNr6l*c(Dgm7h%v^ z9OOz&>d%QBTy>TGqQomgtqNU>(*Z8XX&qe? 
zSMtO6qZ6*8raO_=8U#K&QXW5pllJ?@!&TO#k&PL9nXeCpM=UZ<2pTS_+gz!1t41N&G{?OXdUrBfjLW)Ld2$r>=st42dbd z+__ecRPv#hR7!P>vc7At*Ze)PumN!cPIY|ED)e_kkU7j@PJB)LyH98L_D8Q?1HeZA zfw-|BH|0~Wz36XF;_Ww-PPTaUAeUl2v7UI>P{nWTKy18G%vjqmMPfIVcR@hHj#p>6 zt#D#M!LX|AV5Y2p>H4&ddBXOC=?SIuBrDyx@jXqIxn%0m*5=}1TsKAU;w}1Me)th{ zaW~!S%QP1mL($RuIVtIXWMG%;?{fZ z?lV3YDXi{0OLsfMD2RIOYgx^yg*By2J{!cRTF)vlS+lHpO?S;sjY$$}%pd zln2}wv1Wdi`nC*h#aUYWp^halSMfBUF9Qt zFA>W!aaGkBAGUU}d}?d@3Vh8eQG_~uskM8cepM^y+z*-Sqw0!PI8k~&EsX4^om1ub zL|!9isS!zdnwZ$_f*pLMf#l*>S z;p6GVaXm-J@pmUA_s6Ge+LuUA;0}KGr;p$Iz*!QaoF|T;G)~b9@56rT_$pX46!;Y9`G>qjDGvn;p`~x5HB}6X(R^weOz>GJQ{y*pDlej z!>qJ6U!XeO!?W`I;-ar*in`{0`RJ(>#n|45T{PB{d?IjF?Cj0*!Qg7v;D9{=^4#or zy0#rV=_A#&D5K9*Jrn%cmS0U3CTe_*gKCbsg>C11iEocUccXxA<9L8vO6j3gY*;;4 zZM=Unxq1Lkv1LGMN9#i0l%>3>0#EcQb@g6Mc4&5O#1|wzCaP@BVuWpAV5#sM8a%s} zz0Q+wW=LP*O)F$i?OH^4#WSN0naOD!{7vnz;LlV`FedJ%!Dp=pP*w$cB?W8yo5D`dX28Sf3EHzEQtS zeX5(U$zu#cnZ)@{2-4j>(jCJpO-Xq|y0YODNoah$6k@ZuQEr2oF0dkrz2+@2SJ@5avoiM)+wbe!Nhd|~H= zDZYxPvyD}NH6hmBUtP5FW29)?x*)4-epnLe<~sW1=CrCibGBvp_;GOX-ytQ}?lg5x z25blH2Mgb$-iDE%AD+}J?aJX<-_}#bDkq(i-$tcwzpj{4$^BDPI2Kp5Er>EzV%UH+#RI4tn`yRLT5lW&^Y&nTf8(|cNwm1w7IuE- z1nUNS7KhiZs}oMMy{w-kI4_7uli z9|ch>FoI32>EvL53W*jUURIhGzk?FM?<$gTz&yhXXwV!1)CwdV~`u`lJ`u}p+JjuDb(BE%VV>_O(mEc6RkxtBicr17e^c9UOfBTXB z``z4rbyYw!AsWb_LC+In9f7)@0QB%|7=SVU{TZgTj=ex6Jbo0L4$o2Snx) zLDllTzS>w0ivxPWcW4+6ergy3KSFW2I4A(uWNhVndW7~h#vm#@qz)j4qQl?6)-I*E z{v6M~n7bo%+sb6iH@ITUA(2vAj?xVJa}*&|-J)p5ks9Qk$SCs$L?b@-h7({#{;YH@(WEjLEgp4VWI+3A-OpD%ANmYh4gN#f;tndXMhH*=i^^UC*N1OCV? 
zE>s&yObw806Eu8qv1n>El*-a`>bPOsgp)!8Lr*CUNhUNt=AS-j2t(xMD&Dh!(z8sz90nNb9YA{3#b$ zir{~tm;gp-Z~(@XE-q>pcxX$`g>b#DuiUIV=>+XFgApV5sVZn-ssM20dH*F&*6-NW zFWoEn=FXfl1saapUR29_|K5`NqI1bFW#@C9gC)R874VfgsmVruW2f0Z9fnU*kKoW%s^Wg)BqXG7mEF_yZ%y2+^A~eCO*_1DqX%-CEggDQ=YlCX_C~ zTGVR6yjTqT?_Qla%~;(UOCxdB+!564oYScy z6DZNV`niW>Rl{qJPFoYl_Jf$CGwvL+_aFyTr=O!;(*wkR<|9VVa7#CqX0?UpiUc_0 zd;!$+`<$`tL<7~=Aeoy~M9*M+fc!<2b+U^`C#XduSBjCC&G6uI{S;kFKPX}Z%O zP&GQXQh1jIyukt=Z^3B6{1y)z3cBByb$LX}2=9rkn9*&{fVaZ;t%W^1zb<#SUSKjP zfHIX9_`Ojf2V|GQck97VA*$1BmNQ4a?kJw$RWqcrb9%Mud`89|vs7oaiK|VrXWLPu z=Ao)^wNuQUvB-mAs=CHhwsE!>dtmEMTp5Xd5tc52A_jP~>?N(I(vqP=$*) zD0LZhloM-9?q_tAy8UF0_}?i+tc&zT99c(@Doa5@d(sU!mWjA`R9?Sr%2NSJv;YT8 zVzmkB(Dlj1^Zi{Xmi%rjq@!WkY!ythVEgzn5SyanqlwQ|E}CS z4Ix${z2ehOww23uCtZ3Br!!8yfeaedW+zLS^WqG{=A9QHKnakO$do^&=}C4X=;j?; z(Av62jP0Rpqz)NbGUcvNrbWLAS6*{q4(WU79dz z4vq;Wb}0L~s`w7pb^LB8Fz;@$x_4YeN0^oIxlJO*ph%peR6nE}3E zNKBX^o1OOlCH(iEvH=XJ^PMLM}64D@AgVdnrMJ zOXoRx0IB?g1rd(RN{>8;$AA-3zcw7u5q32hiMRPJ6H8rEUh;4+4x@dkiD=OFN8(&@ zft+`@*NdIR=tlaKtVJ7lp>x{jzQ_|Ib2=){Vega_dEvs_3lEj#pI;uZLmAIrtt)@A z$b(v}1p)Er4&I}PXMxwY{@H=irS-&Bk{ZoYDLg~CyV{Wv`k^RzQPor|v}gtC026Ao zfLZ{}_kbedhz%?=r*U|xCJP61KGQZv;pTCxmH=U3c8x9Dp@CJp>#9|wuBwV?;}mCZ ze4ANri@5k88x(P=HQ>-Ou3z)0Zm}yUn|Qrf9QZ-P3H$HHZ>M3y_jiNSd|r9{dELph z6-+{cwh?Aub2Q@fY#2I0{d_uyi$cE02+WO;n%Q4`V=(Jw0TRM%+v}gCEx5m@m5=D# zYEy9(aD7sRLEYI!5F#ru)}Fp!J~JHraaT%Gq}e=gCZJhV=!>}ZT`pAWa;Kxv-+bXr52d17P~svCBBh17kU z@|D@RiNj4Bs@ooep+AY?Jg@w>xGBe>KzEaTnu-hUz(gudx0!z`V!x6Pg~SZU-bxi_ zM%gtQk{6bw|M1z@<9;E$a*Cz6J)xXRU7v?tTx`%-KBgfV-s-zPn^zmyM$%yIe#z%} zFhi%1U*-fkM~Y9F29p|1PEamzEm_AYdak-3)tZ#<^1IS<3UhVC>f;lag#Pma3pF`A zkvr29JjGaOUpWYLV!w2UD1QI3O*Mfb=X6zyTf0+YX7XHDU=&Kyp6s$9|7TodK~GH>wY%XyBK1tp7uzqO zC>lnG{UUI(sWswO(~B!Fi2t$JkDu&8ALJ+Lm>@rr&g9z&?d_L49KRwq>|Vmow#6aKR_`9&D{uz6MZ<_@Q!@BsLvNju--QbrF<5 z9ZBFs%m;wO&LU*pp-=+wDE#ywWJfNocO{z+e4Pt8gMX;ESyBhYZZ+@7ot?=o?(%I4rn0#x9*tcbO{Cy`Yc7Ud9g{P1 z(>+?A$w;0?@HnfGm^#q`Z6Y{ic`UBXiVqd|a*NyH6vN#4$aDI+&dk9Gjh;n@LFaqV 
zsb@N#t2>W8r=E+!wvL@fwU!@!&aiKKDO92)2^HYk5k2@#^kp87#VtKOqV%M!prT;v zR^EAI7+;7y9+dw%RKLkRrr+dfG6xt;{9(5;IH)N6=oM?*w~^@UG@EJb-|;kfIpy!~ zc*ub|2i0Vk$BB`y&dlZrt{?BfoG`p!ooDBB=o_DY?&Yp`@(Z(8xF+k`n|=Tj&P43r z|6KkK7$?~lp?Qmv(9rhH8HZ-2La>b<4Q35y27!B=3>f*?36+n>S!vM3TCa6;c0OP} z+YhmL<`X-eShxGt>1Z)A3SmgX5#C&J6|Ej%3=D6JR&pnLN6~AF~Loa15mxiGv;Rg zU_wA0?g?@?aZoJ9I9t98j80dFRc}CedQiomJ%gsY9w!7qc4gnOUv9uG`M3PvyJvxJ zyTXdzf{QP|icXv`=sWwa&>GQUA{7MQLrAQh?ztjxd2(-6%9od3xpAb|PG zBrEy@Z&s%2)_C^Ep>jZ0DAWTQ%u#Q3A6I5O|-nrUF zEC?f43R*zMpc3zb#_|5EhyXBB%`An(O~~z82@3p^V9Hm=QrnKesJH7L(XrbES&>*= z`sDFOg^n-58+K?aDq>(Jult)sLC4SnX(@6nj*&!$CD3o6+tP6LdNI?$;KZJCLe_ zV>GE+xmIv03(`VM$D?EUUOvtc?n$89JdM}LIm*Q-EZ?~a*;~E|%A35S{VsL|#M#Sk zM)$d7P1yR+rcN^BEk9_Wh_2(e`vEHk)3}7&Z!s4}iC+1Cx;V5y3#%ifLWEYDZ5&=N8i5dNcF#B@4{Y&TH(yipx;kIAZ8;1F8b<0W26VNp|q zD?GJdoC2Pt*D@??hRAZ;pdP4AbL$AiCm-)7$5?JkV~U|8Mjfpj-7 zP!jVycnqcnHRtW56-+^_xgsF)jv5N~g$53A}{b)D6d<$@q?2f=p zQbS@7HbnwAxf1^uN09DW`<{n%CBI_sJp>~24MI!IGq~#?9KCDd8W^xCzfWF|$K4$p3P^$)8ud87g*$_Q6i#ZwG=RpJaI{q;7kYFVt zWEnJGb>tkHNV5D`s}2CEHiLR^>;p32FAtVN^*1SopsAK82#o#umUOHRax_wsYzR-R6RoPKZ@+%u^-tVbvdfT=<2bP2Q|wwd%>+GUmv9R zabw4i3*F4Ux4Q{cn(sCg0T0|r)x);Eq4E@f$Fi`1JiS2K>gg*RYfjc=%j|Thh>}*V zDZ>YnrN+%AizEI0&x=`(%89$&&s^N~wr|bWjDI@UK3i*0>7vtE#D5;#yv2vb3q6%d zLDBEjz z+bs+Ql$Vyu0oCW?uW*Zf=O-maNp_GM=?;ioj-IJE)9nxJrWU@q5;@z8lCYke-dut} zbl90zWApDK<70!(AQ%Ido1&2i9zMFzu!}BWhL`t;tLS&gpvv z5b$67rY&7;2x-GxjW@>{hyZ5O9_i2Fghv;NS+>HfzIA~R<8$C5Ep(c6Y#Z*7XQp=I zkz~L$Z}&L(6u-a4##nAy8ch6Jjh5$nY~=oka%T&HfW|VdTcj)RuS)vM6LC4tGW@Se z2phe?)RXV<`J}|!b&JyOiA>+T3^Jh6l|B`X-2?MedOh6zY*a>3iPPNn{g#>T*rCyM zazA~xV?S?52eQp5jdm0?T{E0jpf{MP2(+kpknaoHTKGRIKZ4bhjTEtVPn-Bh_@9x` z3m_) zVvKv7X#R;x&+-D_xp8ejY>ss`xD4vp2Aarcs%IX+QNep*o`n6 zD9xL?I&ZCMe8T@Kect^hYdz_rsSnxG^?5d{%PQeBS0h%-bxDJF#F~ zdCMY>@A~acz*Q^~Cr2X(R=;7Q1Y#=J>M2#PD!l4pF$AA-&75tx;WpHf_k=E(^r1oW z1+}rxucut0nwNT&=FGan+HZWf6K6_cNyosBcy1kyX(Z`!=VWwr=V?wqT1ipjSDg?r zvLRvZSDsM!UKm!J3PHGDii#HdlXy^R1|*w}z~dLx$?K&VRo1+gw9BTvL8N5fjj!PE 
zOqrvSf;CgOUse{sWJhxsI7uf4|49Ea^GAjLUeBpAqMtt}$@W#|vTH(AUH>}Bq~yD^ z458gFsm{F0KLO}FF&De_@#Hg&n+Cau6o)538#QZ|ZQ=Qf$S?SU*(`I~{d%r7SU2CG zVtZaWi&E(fsy{(jZThlvvKYbmik%~5nu|o*#+IJMUCwmu^6PzJI{7B|*oiwQ)_hmm zRJB#Vjq#l1DSik^AL7H?daw8YVD7!6;oARy{}f4x2;xLZbV8yA(Faiy5`;9O2N8tP z%Zz%WcgaEYQIe2E3&ALZV3;6`-bNQC7>v#+_r2wO@A}+(*ShPs*6;Vb>;896CS=dv z@BMzi%Jcbnog3eLI4#8=;A)7B;%`=qiQ_Yb6F`mG#_&PI^D|k zEE1^eS6vJ`Yd*<*)__h3lOrH&IGKv@Xscd4coRYu&}kl&S8u>`AMdBqBgIaAy7V=s zse}FwA8qz0?X$2j?59X)1$2pYxg&Agc=Or`>!toynt1)vc1w6iLv@NG*XE@>dcM(T5Q-)Pjc zPNfY4G;T9;xsu|w{F%bY#pcBkEu-OnOuHYgT1hRR_CYa=j{ADQn639_(#qN!fBWua z=kH0DlLr6^`gLow%l@B;xr4)ND5b46J<&*YdSvb?uqWKu&mFIb-AdKhs=b|kv#%;jxcw% zrCW92tn^FLKzHd5nsX__+S3Gqjm|}h7UQMrPS*_;&KVfUstP?%b_@3o7}{Az3#M-w zd19RLFb2FUC0?gF9T8IB2Djs)J8K@89>9#Dl1Bj=)eU+))`kJEHo-Y%zz&Axw!Bd< zmyEeJa!DE?ZZRSl8}^u-M0YOklykSr-IQ~onX_<++=1+mpUOX=0~(M%Pam$VUWlF)^SZ- z_8BFe?bfye9#g=_?ruj^m4Ol@)v3y0E17}poN#NlA0GZ>E5AaJ1Lfte#xHxKKbaWK zGS-|)^l(zxcQ+AMG3HmvzIxgh48%!*a7a-O{qwRc%AC5%ON~^sqn}zH7S8AVYo2UhQf0+5cu^~oS*LkC?4+fa8y=Wht_OUEElQE~7!aNT6 zhzs1MvKKa-r_sgwS4PEIio#8^UYRERmY#`LW2N-t5VmjRegeCd8$f!R6mll}%_UoD z{I~JgBDeTq3_5Z`vD2|iwIMGkwQuUGvWnODVE!f+aI`8XtzBmj2>MI; z9v%Je6&i}fX{1DNpBit-Am>k`JI~OBd9^PsZA5LFF^eq(jma%qPWqm;C7kT`qRP^` zOe(XM5T>7zo{yCwtUSKxUegCli!Q3tF36^)xZ9%Bzi5%IZ;F-;cVfsDU%M{v30M*N z4Dap)h^N9v^?)31;9Z!$2(b)w>z~rv`)B(=544)_y{+%3uvuKO)N_nQVXU8xbLP=1 zH??*b6Cy#u*d~C6;kW&;i{)Jeve*p1n#eD{o0&u$uhz4bj) z@h-MaU8u$*u(@O)FO?nHJ|Z;`SAwr z^fg+Ew+lt`e$Cbgx^;Fd_xo|-4@j-TT8%;VkxR-dQ*I!qV?94@6KOm4-0~mNP}2Nz ziddmHuBZ!xZ|1eVqKVj%Wf8g<-Q~yBtiMZF(=2{4>KHQb?s<0zE)mO|8fBGnv_F?8 zonoVBJ)O57b*?bFcw9!{LOb*4Wgzc>DX}C@v7D9Uz7u79b@f7r8tVLwG?h!DXXp7) z;U*}&)Zg1ZM2j`f=O5b}Pn|A*J*xN{bih!TPD>PVIP!`WMPa02eP^8lh4E3Z4J@ye zjtV^Sn+luP=P)&A_~P0`D?bpoz{l?{7EZ57l(e-V-?bC9k~B|xJ{-g&YGEkbr<(rZ z2AqL`_RW@aVTF=A)7IoOv?`T&T!P&7qV+4V)CC8X62$KiAG!gxti|_nm1i_JhQ>v= zUF$)nb=sdDv2aE7iT8;FgTLoR_f)(;l_{jndA;&J6*PxJ51q?)VP1O zs>*+wcy)8rz42@7m~M|wO?+n|F%?gIAR+9+jFV<%!f1i8?4r5g5GwnRsl_4 zeSsrGQauM^&Ea8v345qoPgvz 
z!}PtfeVS`j@7(Vju_tR2tlpHrV$zcx_I__9U+U&7%{4TxtuOoQh7zF{CpPM;E)aLi z$e6SD{ei@p)e~i&&va;_fX+O~q^^*@e7Hx97escd5VD)kLyo!pgyPsC=Q0=C_$~?f2x$ z=wjh^2PD~Qg`Lp-wS5OG+n_2kVhfnMg>~xet6c!UNS9+0MKi4jdrz};ws`XF=;YZS z#eY?ON`eveEp^eE)i z9aX-(McOxJdH6F@_^;Wal<`E}?5&Fg@O{973H-Zu8kNg9;Ykx>nckzr4@;L)q;hnX z{&Bh!HbmdS1@jjZiLwJfo_!J#9G*fI{2+)_-+LW}jL{xB=!^^(Lw^nKSiO(SxtRK7 ziUqs+Y(9e{1UL9VsM=1VOyL)+O46DdOiApfJs+1m0pd zP7A&b)3Imvj|?H9R+K1IXq~9nUa{ye4o{)u<8L^DMJ>7p!mvQ&HCf3I3by|5@uWkf zU6^#m>e-N+m0W4-wG*HMt^~wiomrdm+;TxTl)Yyx>qLQ+EJ^PF$0GQA=iVqZo^u%t z7N+BqAN2!8YV(DEFOgnC-`M%iF>ndk`ZyY1%m4uPbx4>1mAxcpgXtP%xc+<_0L6Yf z1C&bwz~28H5;&fDCRiJNCe8UQmvj=tS7PoPrj{TV9 z&z*EzdS-ur2pnJRkRon63@WPwaam639iWSA*x{Z+GNalsxgLD_1g%0h+K=CyLAPo5gGLE(tjM94wDQ{fBcT>u9HEJYB(CIiH5tf7h^!1y7!8D^igm% zpiwW#d;8V_5zm(sPj>uLx_5v=&}3{QM>+veLCPE?l5H=Lp?2s(-@6&N zKHl^QL8JMjbfx3>>}UhW2TOm3po5Znq_EB|RNTA{V#S3wh;lQCfNLVGD$Rit%yp%D zLN+9Ov^jw?*Z>iJ1QoFbnxW~vdw5m*xbmagT!k`qO`O@Le#qN3dy8#oj3&fwd?k-Q zD(N3CFiHgFz4Ali*vU69<+pND9llsCiZGX=o9@&doVB_|!t_4U1W5H6!7|uFmJi3_ zYUpCNwn8;u!R$SbmJLgHtGyYgq8|(~A8H{^bx(Pv9#m2@2`hz-NEW|&;y<8X?V(+R z0#K4Y1Mf0;H%Euj=y1x7JTKhca;d*xCRLIXkCG(rCsLfNCzd?Jn8tk zO-T1gb7R5^!aCR~n(cKbwt@uaX%9~?GJnP$ORxTfv<9XcSLKKSl}oSgrSb9A5b54& zDmSDZj7C)L1tc4T3fH?p-6+euSMBbxzg)dyia#ex1S5I7N}xK197z2bg6a-HhwabE zYP6kT*4Y&#-uo3x{30YOx~!-DMpNfd7#{G%8v;wI5}hHxk%t!;il9%Jgg_&X-UPyz zd3`3N@3Lz~-X_=BuOp!P$(#LL0E+UCy z0b8kqKRgyb=k`mv>)$bQ9|H8EHY)zKn{M#xKkry1RJQr_9oq~kECoJQkr63Wp?oTo zXHUiKxJIUGkUK*ap9CAXHJULnWs%;YF&_`BjR{mXV?-U_g~u~sd_g`+hJlys6*p#Y zAun$`@x;T6+1(^Q9rg)mI5h@{e}v5Tr19I0wD9Wfe{I_ zaxETQmEzn51hC z3CV-QKqK@D8P)u3(bev|r50BuInmG1iZ$GgG06&(wts)3rq zt;O21swFUupbQ^r&Lo9>U$9aLCL36VUwijSNyZoJ-pynp?P);yiZ~AIXJpwUfGNwH z3q;TK54mjV?{z2%Z^~Uw?0#3(7Ut=4-d$ScDZHv&(9v26_&uJH9E&XUOJ2_|P#wJ5 zamadME|d1>$g}_(XL3p>)zmm3oivVR^Z)b!HBo^}h`exj+fkR)v2xC~Q@q!ynC^ZBS5s6p6^yKS=oKXMb{y8#%_UBy zVv*okoS^cx#R_&p19zA>ZOx69BEQYN z;SNlswr-jKWv%?SMW(u)JDVS*L5FZSMn464B04vP-WU|0rw1KShoh2{cXh=h*{SK= 
z<3;H|k9G6!hd&9(?(6fUrsJcrrC$_=Dc3T})h_-lY}YujR?&(=lt<6KGvKV`>6ScI zu~PhU!J@v`BEBrNDPe0sX@siFqurQ)>{t0sCwrqOH>sI??<*uJ78HzJdp_JU`j;=k z*^jy%W^d}w0k&rfl)KtNM7w7c} zzj&&W{5|ayX{)kbJBf}8yN>riT$lU&n$r(VM=%r|62|yqR4tsfExNCDunCU!7I*VM zfV}+GVZ(9VqJDbKT zYRO@)RxSnc^$j0`SED{Y-b!XB{Bc+ld?q0OT|b&fOtehoU`qhYH$2)sG%c64@9OJ!A`fnWp-^RoqIScIiKQ1 zA5ct{ddLI2SJyYNx!xrNVoJJZCQxe(l;Px~vP7w8zekEuQKCsTCto9+E|3PXt_;|~ z2y65=Lh;(QAXc6i-=$6^+_nX>TYK8)^a5M-^4*@1X$Yx@mwu=nLKKe01)s6ID5E$y z+PHxgyLQpHr#gLHi4nd)OLVfuID0V>8C1+k)4<#NfJ7MxQP%arwmq5 z*kC_$_>v-5yv19Fq_v-?UWdQX3U-H3w(90MQLV;P(Ilz!==qe2m!7nHIUH3O%7jE4(IKUi^%@%GVS#j)gj&T5NBY!7O`Ag-eCdUz6a5ij$@BJ zO~bZ=;1?YBK8~l!^D+Rv&%d#v7VCA&uBk0?$D3XJvq{p4bB|7j)1y<7x~T}pR?b4C z3~9+jP{)=5A2Tz#3~p#CmBJ)tgDApVf%R^wTx~dfs$}%#N`!SlXl7YPV#54WYfzbj zwT`-c7NpHeGPihlefFA9SET8aesILg?OrmPdKE6lMXho@$Y|9xB|csAmz{pYQ_L-r zmfDTeCzoWH-touhR)-Jk*DO4J`a)8tXb;J$bZHy6J-QQw!YYda1~ya9%x{e^ zhpqAIJc9ixP7|++gq5%}Zj4r9k?Tuq;*qg%$Et~7UPQIysn;v8K#Hj8pqmKJ6-y5C zpsxMRE{#>XIZOoXL&sBxQ=e*NU++BP!^!I>QR~>+sXFqLI5{5LJ#4sYn{kde!Qf!H zQD?2}@2_>iou!L>9+fjU^3`gn5I9OD&fYYBwb!`X8dKdWirlJCf+U^bUCiXfV+xL4Kp8T{u;)8{or|3PTh^VDu%7SKBpX!F&&V>7e+VH zO_TZ#3nR#P)1q`@k(yfr5QZsz{=1Opmp4{4ApU;ptPK|mJa!5LGRUekpK*5d3 zu#w6GA`cs(+u`?Sn>a9|&do&{^sYXm{z%GvUwxLH+kTR(_@3RL=sr`TxeRg9{Lv-S zzUVrcp^yuMd)~SE2*+3OUs*NKkkR9dZ@~Zj{V>GEtkd*87GajfH_ZS0A5D(WyRtvp zSbmwIkk8cc>qCt@E_2xTCfKVRdNn}`esniu8qo9rl*c<6d0I7Tgju&{Ox)5GltgHu zk_ecT)EP~eL-8L)hGV_o|D__o|J}8%roCCb3h5e2KY$7h((WNaa?uJ_<}0EqrAMCLsdN14d&wvi6DLgRzT-WWSF*$Nuybe!}tn)#4Qfc!fF!s(WiKoLZyz64O3 z-p>HvoRHa015-$=H$Z$PXiE)!NCOrP_ai{tdRz|58j#`c328)Brw%iUVI8m`8!Emx z2|Ru(_f8gwi8QSP*@%JSR&gv%xCE*jUAtNE$6g0Skai~_R<8@BMFY69ya<&!X#7*3 z>S*`&_7=gh2>0oy)0}L`M2{|1uNs~E2Dm2cpUQoz-2Yhy^SAWwtV2bySgy5%ZSQfi z>E-yCgnl@BRBw^ncRaK$6*ug>Fbe12bjpV}e(Y0tAiIBUtlFwcgCe*@;XUMZKfW7VkY%tL7cVy?_v&@qpC z(jmh|v$1%q?7=668B2e3;jsiZ?jL7iloBair?O2#~8~4CK)9 zkYf-76hNYof$Fl~v9uy$FaHt+i}?=7M~csjPE`Y8PXl!#q~^=vzSEex8J>_8Z_ ziD2*b*yoNEderkfg!`8A9^^1QRso7(u{HbQ%fK~Rxa=8oGX&&2)j3TP0Bfwyv<0H5 
z+J5Eq8L0c#=LK|R7t0PQmH}njcnsJI^;QbkmO;$a=Pe|>Ij!CA(7JKBU00V6K%2Swu9}-fC1ODYe^{KTZ;4#vF zXYA4<4uk;-PjQ^+A;q zsvg8c`D&Rkm1E>FHEh`of~N!B?WL1!&GuNb=+6Sl#dtE#UddLq2`9Wna=niv^Uhfc=m!D3V>_J+JJg1r6r;aFVYFbnH zOq^&|4XEuVLNdhJ=_@j3UJWWxSxz8H_+{8{NWNxj#Kj3ck;P&Pj2DCh6SxX5eK=Yq zOl3Vcw30#u7DZL|HAO~Pp~>-Hr_o96OVh=S1}eju_L+oz$YFZthBEH4)*Jf9hO?9= z77^&*n&Ot#t`S-un;aiBEzv zEvt&_J9RO2ShlR)1^v3Jh{vbmc8(wy!s2`9vXfl05Fg%0*{4c(jjuddlMWkHQWCk% zXEAh~QGcF&o$!tt=ek&bKl@R|4=}LMFqUX`d$T0k)97m6x}E8IO3Co6ygsi>3&Uhc zT2%>Ta(OtT+=-`*jXz;FHeFl_YeO%gJ3#f%47_;0uS|qYDLtCKwdstO2609J zq1!e<)c&5A=W#55ZCwO1+65|oMXNc5mF?t^t@0CHe#bAi?|hI4x~7OHMeho@OV1&X zM=PXf7=K=WS}Tp-9U28db9FV70O^eueq(^T_R9`~_x=#xuiVQ-0qY4c_x1_U*I<59ztS@*g1cR&0Mx-|Ktst z@kT(qNBhK?GW*ewkj`@rhA>wP#DSz*?Zf4{TV zhph+54=4`2=7faPb_PK$rBW{Jq!q8~Lkdar4$UH8At|Ad(!i4K#j{rDq1%&02bNFY zebDx%CU9N~hn?Z7qEsaZxl&%`v=8+65PhkwfecIc@?^lfg*Y10HlW2Z-3#b}vBedM<7QP!T?s!yZ z20x*zQ4yzX>Yr#8e3|4l!v$pSzlWDE`w9DeNPSmgr$3eKM2xh?$uC2*-Dt`>a|-e; zqGs-VU4xPHV-7ZqD5>zyMlS{nO|anv(V|*}ULqeVxW7Vii!{qHR!GHgMI4-_1Lv-k z8F^RevV^^aYuv78f4`Y^e7c33j{sayJJ!|v`PXWEfgeg2^&*(>SJ?BZWX|c`YhJ

    !}_5PbMe1lZgl)(_CycJzro_W*k`AE)F z!BKP{FyjpmD9zF55vfhL7=kb*6pjECrKu}-*GD2tM}{GKDi;|NoTt&Y3;VY4;c(%4 zNQEdW;b}$W8(Ma&ES}c;8*6I}?X3cQY1T%cf;Vcu8Scect6VoFMd-&Lk0elkZs*Vb z;&)4K6Ab;8#un5(A@^tb$7z_Z?PR7FY#g=^+bUEbfMB2vjA1n)N6WZ-vMK^Bet+rR zMb+cCfuRw^MiksdMV*ngC!Sm<38*jzU7`D)K^~d=5{eEt1bIiFe(hSq=Aa&nn1~8} z8gBiDFG?#rVQ{7$Zhkd}X>N&ladevM#mq_wytK4w`O?pr?VFjQw5gnW{6|qiZ~kH! zo%)-~q9NU8DFl9d6ru+R3^tL=VIP~~?7TH26HQW86ewTeHVJx0rC*uM=?e1M_3U@$ zW2KGvf`qt)9vBRF%&{@zi9W?M!(_0ZxJMKC>}TGm$G=8+z=Pa0^nJr$q1>_wp6(0` zBDd7F$Us%uve$o6Wo*+3@C5Aa%Qy55tP-jU2#0?T_6VTFRmNu5u>lna$)$YzBzaM} zgwH3~H&c*PT@J6v-n*3(HW`C1O2s=_m89qPt9q!&?$OLM&&fC6;Sx%fuE>yHiu0$& z&Cd;Vuc5Cta|+vT^<4d&&&XEp-uA`5RmK>g2lkic%Z!j|J)(ic(KQ*IA9D2vq!d@B z)vNNpoli|;TXfa!Vv#Ggt1SUK1?l)CL7qZT8W!8S!8%-wP1;cS>xU*1^2Ea#cw+C@h zJE-x+{JJpWQF+ilL=(#9X-YYon;bn-PfXxqikHliXv%L>eyBcARGV*C^<% zfHA>}uo8b&cvmZF9n4b;vsVR|Y{XI>?+}+@D^LdzvJ5dmiOBJP{l7zM9RCWg3lMpO zL3z&_{K}ieo9~bcpfV-q0vZ5`;E+5Ma0gd`bh3ai~uUT}8vK zzme(R+0^{+!a{G}-T$%g3u05q?R1mo`}$8=sj$N+kd{T!;pP1QwT?GJz9Nt(LJZ7f zUlw3%14-qJ|MFqkwsE6!IT?&P!WBee+y7EGq~HJy_w9Bq=%+p_F!nd&z{Pw#SvBW= z6m!|?aWgbU2QqFjJPo-~4`nEA^P=p)kP}|$Iz5C^x)Jt=)nPvjsz%{zD2R8k3)(Y= z&}16JmP0Wb6(=A<7l1UJNZ^pjDE8-N^@ci{U}SLd2rwsDzWVNQ2+df9AQcICKvb{- zmXbUsr_vHdrt5ex{M8WBox6a6qVdpv?U4!1ip>me(STU{_(7?UEKgw7wZju&+Qh=j z7o`u7-D7ia6f`IRdjo1;xpd#-Pk?67iYQij0D(b&IrO1jb@Lwz2?8KTG!V|TvcKM} z1u$VUBQ6z*{N{My-M+4g@qOf zKqoX3>MOkjD34)N(7s&MH4J=rIcOW|n?i%j1^B9WweJC6@IoDE>h;xbfH>CErdsji z_)h>3umVux>(Ka`WBI@TGhculZETG+-N=UM2ni5z*@g^tiV5A75bzi|x@f8ojr#WN zh&tx1NHHGY0(WRmv^ba%lmK-yXo zC4@*bc#;nx_CMF_d6R%qxiRpbZq^M~&~>wS#4SA&0?vb{ZZ{Iw1n|)0qM9UZ9qfZU z@Iei@T-RwsCp=7E(hq_B?h)M%-h|mfJ?nAg6jH+%ou0H_Gvkh9^MnaU^P!ZsEAf3U zt&edKgS`#f!iQhRbTM{>mfqDglkX636S9I7V*_^;hs`^0rrvAfoI(P})0ctPY-oJ; zY^TQo8EOj2f>dD9bHE;=C=Ba&m_hD;@KAgg588)TfTR9}ODxLM z8%()o?QdlJgBfXncr>5rUduqexA!q}U%cF6K|abw%U>kAGNvWb{^U55f*ef1$I+^q z9h$fbcl0*8Tn=l5MM4vvH{6C6Q=0}VXU@;ueuWBMgasK+! 
zyN9WCtx%30YPz89LqD!oS{mkIHJuzk2W0@FpDbyCLcp;3slG|sPZ00TfcQ;y7m9$7 zY03v{)gV#J%Z>9)lsWE>w-p&fT}OpbswNxT&w{ZCX`heb$FPo^f~AF8;1!Dp(1LH2wDYBwI>Zy%h&{jyM* zGSCPXM}Zt1noT1Fa2*4oTj`+BI|DV=6mo_jQA3gHfFhKFx`r=F*ZkI2oiZT*yxDkR z)3aNPlaF zd)$sV8p$39QGwkq@~rO>lr7iffD1?GQT7h1sD`-`B!5B4bxl;zWqP4f@w(Iv1%@BX@m zF$YwYJlYc_P$n4>4`tuyQI0;-S7)HT<8qIH=#$$&Kc71qgodyOr`gB&~_^if2e9-P?*w54s$mNBe=!-&1G2>OPb+g~VLvf2iYj01XBs&Z``g zHrOpx;wTxV>ZeD?{J3~kDvzC8ay8%vu?a-3o}U!#e&mN+{j~ODog%XDvS}C7uCD0R z4%G37%g^k5Kc}dxFbHz@2PV9$QAY7}-3td$-N6eR-hA9RF%F+CzxgKd%^A0*}oUniu zTRf(MH7o;JOVQM${BM?2B2$Jo8fL(qcPwBi?YQPbB@pnL@O}6Lm3D)^DMhZE^w5jA zVGryr<-4D<`$DaUQrQr))!D{!TkCfjx+yB%eytbB`sPFGg1)#KpbpT3xJ`PbFHu2d zZlQ}53uRCa_$C(0K636vF?298s9d3RPQS;(S_DVD4UVD1t7Row{|=nzN5umAbS^TP znEKCw6)#X?)SP)1$v6!vwU7D49_Yi<{M`f=<#UVA_Is9XYt}&(n)xQ*Q>&A^|N$ zGkO3kL67Cpf~}9l@S&R2!IFW{*mbZri;$05S{+s@6tJY`(`~MThZW(P0WtjgEJ75& z`m_f;HZLn?#i(5D?efTeP*sc8$BrVVqc=rYDpq4AG1IzI=2wMZd_XRK{5bvW$tuj- zl|n2U$`QPDYob-31#CG?VZyCZ>mVL+nb*EvE{%7qyAT)o7Aw((|K@OV%5d6B)Z4(M z^O=v>0NzNZYF2%-fvNUuc{svjKvy9v8sn^51#lL63WuQ0>RQ9zz9Bm)$q$B6be12x z*)0N%uXE4}j9E(2Dg&DaC@-BniT-Ys`Ce zAu%tb`Hc;@6dV&f&q7^wJ7DwD^G8-bSomZz!*!wAb8egCYnpKsFN$EKAGUus3nGry zqHqWzB*{vIv_O3lzECiJ?v6>1o{sO2%I=b|9!JENkB)CAl^c$&YD6B!yDF+9XPo^0 zM3=?TJ>k66#2@+i8urQ0ExvxrRK73Bpm1qN!ZQ%)ES@$T`$xHx^4diB?C)!eHEp{l zD_;7Wy_cF6u3dP&I?KXnG%{Asg=ruN#y3?`h;{Wb<;1wlkE_~X;xMH_34{8$VI4~E z&MvYT3X~IofB}f!*}OT;4z3-wgy6!bIii_fimTTy19pb zNJ>qoD6iY8!!PMG3xc^R2{yjUDX7ETw|c=YJ6rBod+1HLhcPhzgJv9-RDzFkD*=|@B& zFAxVhiNXwOsOjO#t=+K(wyJS5Zci(yeC6Q4%-F)rXsBN#DAGdd^|k%_xpGcs z;ZX;ZDng$@=$ni>;^gfawi^W@8>Mi6Al=p583`h5{6=3K9$OzNe0WsR(((-JYDBl$ z9&ksYlx)-A;40KvHCrA0&te15zfs5(U@LGl1EFs<`ANm7>B`DtuGsMQ;apb?(K18a z?asOr*+>>Ng>Y!%d*WiQK;#&Q@+k9tOVe$} zZ^|3iAOo_*7Okr_Ds?0oKu$=j^E)|g5jxWXb#Jb#T`G51NowvU;bYLVkC|hz3R#ojy5NJ|h z&7MZ!2{zbyAH=wlyzhwBzz1Y!+JPRowyHVYTU_*H(nYcoL=*Q@m`uDD`eJAT#kffM z(GVlZE1f(m2Q^B{bmrBM0ePsO?ykV@BMe3Zk*ZAOAR#7^q22f~PJH0u_}VBEnibmV z{0C;j2N5J+(2T>vM~7;3Z-2eR9IT}PVcqd~uQTYo 
zpwrdV2GwmDP29S~p|Puy{mjGFgAA+^khyuIRRIhJoAA^%Q>5A;i} z5gzcxwY(5F=;kA_@)w6$+y8jO#b>cfF)a&$-;HD06|^{c?&s+jIk2n1BCJ~D#hin< zP(2^+O;@=uzWnW$W;XE6T4!-p%(A&cu&|;*J4104;2e2tyf#ZpStMRoM6pOz71bRM zFNChD*i06nl>BR{94Q!2H&$Vg>%+BSxn7$K$)EUdJ#UJ*%r_?Iv1GGpSmBgKwGPJH zbq`-zW};6AbRgfsFHZ#rip-Sc{leetWGL1g%<$9hJpKHrsUeWAjq>;T%K{ZZn>_++ zin=`AEQVz}(=2IH$bD zq50-3`@l*Z%%P9F2ET-No@$aX9sG*mEANf8rf>|oz%l~zW?L>g`@nNdVnNa*u*rEw zGKoq#2jl2zdDCxMU@6x*d%D05EERAW|EnBMuxBIQIl-PozHayF>Mh>&DWf48BooFD6A2C5S1<+(*>9|W3{d@ z1Zf2AXjYxMX3pMA6T1a^-*iLzf1Y#$^rTH;(~@d`?`(z*USyL?LcCPqUI)!a=gWe6 zrK^;f0N1&WvU7DnO)WtE{?o(vA@y)KOl^?i1rEg#3ej=jG_f&didZvsZ+gwZvos|x z)t)O3=m{zsXb_`Gj#jdkMzJWFhE=AsTovYelZ43fBOaqy3sso-kHFdIi>=Hi%0(U>BY;PaKAALklg zLD@7&%dlg_9J;U)xZt|)ji0WfY?q(?cmX{;&0${DGzCd3>`WHjt@9_(Q$B7GH7vKc z&F=HIwcQ^svl|7@04ov>5nDO)AWfV*?7>itH#+$Kn{BY~nwwg|{!u1HWo|8E5QkuD zRsrp*#-&ze1?J9%VCrhnfac^zRo+=?l5jlD<%5m17CE&FlX!zrJlZVynfAzSRU0FL z>EzOMJbi~1%ijZ2l z?E~2ju4pyVx7Nl8#m!EK8mvr*`(8P%Qd?h=U0|8K`Af6%0tND+NYU46uDS^iKc%tV z#h$<6^#ih5UJox44%mFt{{h$M{XUi|vc5utM7w4lX3-o$%@(DzA~x&2y6GZxZu^?( zk;B|Xx&8F|I%9|Ce}B<^F6xTV1O_d9_gWFYn2V%mdq6F!Zo~v1@^rc>JnS*Ku_{$- zTJPSqcIjSRJmMX6^;+*rw5ANg*2A#hj-S!zHo!WSiPeC$9e-GL{2}F3GRNP49$~LJ zERUBYghdAEcau6Z{5dM1Q16ixM(!E(YbcIJ=8u*Ei7ANg21myt#zKbi;08%ZEzQYJ z9s0x1v8%F5R{$Xq`X3c6_*ee~tzz6HAt=PXh7G;efg{7xFQ;yuSpxZ;7XkX5VI4Bf9{%`{n6n8xWF_~-aoIfrU@T(Thyiu zj+c6`)g}DYt$EE&>os8gJw9_6#KY&lO#u0X6E2p+Q$2=N>{(2%^VF2`fBcBEiY3@8 zXTV5xs%thF?T)%Rs_5Bly{_z9O@6ti%k@r$%2+3-$lJ zQBzH>;0Lu}-(r|vZZ8e>JpH6wPaLW283ibX+31c$(G0;yAHuFG`C-C!7luIyl&IwI zr=NNIq&_%trNKta@OknUb_eYCqd`;*#-ypH(f$G>?aoIR3HHOKEkS$0nl$qT1VWoC ze~p9My9jt-BKn2Kg+9A$QYR?g_QLl13XBFW3mf(-(vK*3Z8#Z}yl@7cf#_;LFEc4H zEHjSd)6Ulp`T#2R)O=r#S`{B?|Ho@3`v8OR`}I~m|1^E_tyM5e)u}yI8XYoS*%TL< z$n@5v+}?oTxH?hKXHVi?&zOhi4~LuEPkeu!`w@V~9xsFXl~by;qZGZOUB3{+V%Rib z7h$#2<@&Z%mmq);-v>~la?md@ltg>xY9|hhl~7`Dj)g}kckM4RD)VDN&8<*3OJ(*g zySzt!(|w+#T}L;LSIQ_J^_bF9(;BbzcOe7JcyZb_`p3}-htaAsZ=j#hdch=OJa_<# 
zgN*F|NLWAcM$pqr{!Eu2ppZQX^1M2^4*;Kp?>ty}&t?0m6gUg#LWc_o@8E3Nspb0# z2dTpv8ZcK_yuj1ekJ_pI(JT^t3tlma6JE=AA;_Dlaxi)-pi){ zMi%UGh!_S@)cS1uqMY0O^DzSz4rrGMh{NAc5U2$HuzoestV=--bRNq56W}ZIxZNXz zYtbJ3N80x3mnY#Lps*Md1=e>J)i8KZtHC>+e47tz(HeiF(Cv3OU;cGzqjG!3h1U}~ zJ@ka3Z`Pg?)*Yo@)5zwwpPsE4@Z^5BX|dG8K}aEnmxE`SO|#r`<^xF41_#Q-dvScx zAh`5=anKxn)nxW(GF*X4tE=B1^Z|1V@bNc#SQf(@w-?D<#H$9P$4AuCh}M+s1E0ZW zkk;C;g;6#|Mn0dBCv{Mo$uVBNW%H)#CJ(AIIz~DXMwvGOXTx6W5i4!XrLU^LD*7XI z23QH}DP)~nG3>OCH&3PAq^G#ab@f;5w)}jPnlS?z^QuoEk7J7E^#c?lnG~`9j&7k{ z?ra1d!wPaMa9@AR@f8mjDazRyi}-4SP{3)cEV9IAI-h*k=_T^6;eT914D^wqQu}=8 zyMazqjJhY26sAuh*yi>Z?S!)?JAF{*9ZsH1`Y#9;5;s0ctg$;xzwmKx%*?%|(mE?0 zjdQ3e@Q{-9od>^~>(OGx0@!1vXa1o@6X^%sV1Ehf{iI>O`F-t$_d=4G}}oHfA8O*4oEqvDg@#87%!Gs zGy(2Tht;cEf&(T0wX)*}7pW=SCv+DV2v|)6J>vs*#jF>>^Rt ztJsYH*fe&9I57b+)J5mUbvd$?r%5Awx88+6oTZ`i8>*X=HB7-L!mmpV>aEef)2>{? z9meB5SAX{g`UH?CUN?UuqG{*aw*1|XasVso_}zuhEFVVfBX+_Lv4L=|r-3)&-Jr`O zq#B4B$U(J{J8%C42}174;P{)P2c9J;3V4)w4@T|G{y%*PwuuwRAHVqAh+sdC8z}sH zIs#K*NjJbj8x~JHrGFeED4n3V#cuQG@RcblGOiTo`#%vQr18`p`E?%5jr*^yjN?b{ zQaU>~RQ_amz@YL$3yOI(=~_aMRy@&mD<{$BWJUC{|90gj|L*uGd6eU5cQnWi!NH}r z5W3F9j{|#Ub-KLg@ICi|LX)i(tNejo?XdW0?7Er8EBl_N)-3;w{mCW8iQ1&?y5;b$ zt0wR37mW{IFB&&0FOCHANLAD-Ik?)PiLRj!BId{6FZpM2(yzv2|!9`TNEw#eyQcRChMb_y4ESsN{V8=Dv( zG07Tx;k8o({3kI$FM6bJV=wFZ{bfu8P6p=Ti_0o3C6D1KaOh# zwZl13qf-S_Za4<#28%66LDFx*Fkaao3|l=`-1ZNV+zeIT;6TgVYLf^OXfoLRO~z|| ziSmogAL{P^%~ig&H00X6*Zbq>fb6;5P68d2tNe^{Wh_4jZ&{Ob0}x80ok7Wmr0<(A@9W8xbp2bqlz&)7@) zB$>Uc{%)rB9q)8^AePrWw9NcH{^e<26)=clXxB!1iL_3Yr(f&7KhKo~kvlEV^c$UeT30TKAs6Y%>(q^Fs zWw|QGG7=kH-_kLi-e>1U=|s$(rMXh!K^z`}i(P4IWMh-}a08LB`{UdP(TGf283snS zQt)Y*&7=n_>J>X3h`z#&zCM3Fqb$eGs3R4S*3zOQ1Jh{mSf9hA4{lfz%Hi+oic&c^ z`-Axnx_#0km_!}Ms21>Y!$G?Ij%TBY2hR%(zBQjs#(&gIG_2Kn6R+Sn220g2P)`oK zu5_=^u&j0X06^L94%b-XE<)!=W%iv&!)k+Q_8I}+)1F|_`J3m5q(d+21AAX{G7Aj`u^5XqHmJm-6&@Bb(*;<@a7NJ z{*<>@9V>0e)8Cf;Ntsf`3CK$e^LZy(nMJE2?Z^-OOG$P%>K09jlc1@nVmOT z3Z7D68Sp;t^Huh9m{ai_h4MEkO<`m&KXAvEfoDQ8_ROvVP*3m$43-;Lx_H>J$KC~t 
zupm9G6g;9a43W~j97k@!Y8(Io1DodiQBZetZw#gNgfg!TpSAF6V1_1g2W~kF{u7R( zyNnU|eDC(UM>J~OK~dQF^yL1|4$+}6MrovLnt!DHKz(#k!ZV#OlccC5;}b~EFd@IM zZSIJ)-b=RHyHK185{qq6`M)DFze?$v{o;bmSr*5bYt0E_cHJMsq4K)PJ0SxtpY4Ud zwtiRjkB>gy=ZhFZb<(X(3_jPo5g4i1ybc=i5;`(C`rX9q4;!Go;=x3+gmrl}TQ8|- z-}k(8Rv-dPZd;N1Z`HkLSW|2FHzGrGtVfRRn@`3HS|!F5>ylfq=YUt^zN+9IsbW{cdl!`%*=;(&L>ZlknFwhd#ztvH}A0G zCvL7CH5qz-%d%(D1t23{FF+LdSjjIFO z&j-P{+|(4as-X2gFr-}b*#Xzgb)VWCqhvRNeduTV1PamcQcH@2JHfun!Q+b$E5%WV zvTlEqdLT%w@y)rHXMfngTf#KpkKJphyW>R?QVrR{E$-*PNgbqo(epwMt5SFwKy5v* zws(JG9UPkBn9OG}hdP)--4eh>+d0==PB)tUKvq|M>!DQ#)X~-#13q7v!lYqxQ0C?E z`7~*Vh>wSotlwWY$|&M=D|Xncds;4im|;kD#-Sk3EX!lYIdowbkS~KQNqw{u;`yRG z%J(M3hz~l`Pd~Kp);c*}W5ml-_T{_Uu{)L09@3XzXOv9-i5S-@*&aNzsJV$|<`o=~ z;3j^SyfmS2iKX5C0R*!@#QO~-+hv_wuR4WU@?voX5ibIZ0e9 zDu{I@kO#RB&9cEtWF^zHv6HPMg^XXC0h@0Pe4pF<`udaBGHmQ6U(hE=1k7ix5EL)5 zbGk)X0r+J$auJi%9>=Kdvb*Dc+?l6rQQr7-@kdUGv+#=QM88i=Jj2L#nB4i$ok7Uq z>0{X|cLNT-V3{lmr)QK2y`B|=p@|HmHKhXy0JpNQ&rfBDrTfXf6w0M~IAJI5 zL=o8bT%p}7Wlet29!^XDs~-BFTelSrNIt{KZofj8I@9N?{ifF#7~znL;A<5U?`mt3 zDeIw*>||~om_-q1bDCNiyt(quq5e*ahEq!^hA%!fhT7yaGPl1V2%}v&2Qy z7IGfjSgm)fcWgmpMWg_s>>fpFNR|Df`kSa^VSl3_4)anTZR@zTb!M8+bE3S%CCwqI zPJ6;ul~=QuDZj2DcGX-|Y^VS?X6k5(UCbDLdIZzHPjOs1Wb^Svx)m|%k?MoT)oDVF zahy&*ShA+{xZG;0gw+o<=(aAj!vem(w`brG<7y20B~Q%wZH8DEQH)2ZkZ79l{NKY2 zHOFI9^Dn%;gwO|L=g00omS*^~Bp-q6d~wFFCAQmH_b&=3&3hcxkxd-8H%)lbSX4q( zizMmo6KzA1;=PQG9ASS~A_n9xb74K*$$-wpDC2qTuxoZOj=-QuWfNQ0#F|SkJE%Eu z7acOwg*N9+Rn;#*-1{=@F>BHCBssA?8Xi<`j$<**&lJkbZ!2tb-uFJsZ)Uq4I3G0e zV{59QL30uPWEH!}dv!-sw zFv98^-zbXOpPs7xlX6TnbKs-dvjZCBI}LIK7i~ErZY(DrHZgRapnKhAhWOQf{6?p2b3Nd$fo73vouWiLG&SJRKCEPn2qS-P2qWEXS zbj0>hb!kig4$l>Xzqi-~LPvSp*{ML1NcDDqDdAwZf^<@Pq=kK^itmjR*E*x)ST?=g zyxhJHM7!{%+{TG^MoTkE5ip-@D*Qf5vm`K4xXYAAEJsGT+7{z*V@f&VaLHMtl10g- zu=AX6=Nw`?*7BI%`>B^L#XpW?EO{B1M*U0ZZoLc{@3`iJX79}L8Na{&Xh7Nr#hi&~ zO5DODO+lL7`OzL3eg6;kFM2dQKw79(>6|9n?LLm$o!>+WE_mCBb_ldxM?&2uSvd`cB_H!os)3Rnhj*Yigrd?ZOe2esp&5)3DWDzmvYY(svWBXgbzwhEGjnAy5;~RsV>%3_8<4_OGVhl*A{)?K{ 
zH!6;hrF@?hhNyZ33W!rXWy=ORM@~M*nbV0!JY;~yB9n{a6g67Wc|05-szE1XF^jr7 zICD7X%S)80sGhb&UCH9Jp$3evL!Af^O8TYdu|js)C=}s5w(J&9g4$C9=Qjk34b(s$DLZ4>sHuB-nZ|Db|+`) zmydi&^BD@wiHgZ*`-K1K5@7?wI(`$#b~XuZk@p!#xfq(1%m=(b%{G;X~UGr zw40&+;is7zf5bX|riA~P{Z-MsPtd7B$%iY|pYUM-h~#UYiBEzS>>URk6NZIP*c@a? z;$Bnq;D72QLlAQaF`2M#x)t(gi@A(LU)sl3fG#$8U1HImZ^%MPX|3&stS8WrZJ5?t zBFm#Pnnao9#c{Ea&1f&viLI0}Ll>)+;aRqE?)HYQahfS5G0JI5t>M?`VCPvw_r(fy zp&)h3F3l3nV=Sod*TtAP=w~dSgVRZ)%Tv2)Xs`|82mzx?U;5#;e3R8-|XN{XL|6b zt`ea>|Ccm$^C_8|zu2BBriZC7m-M;ZbuM+O*}r-Ffe6=1sm>qMH5@Zv;1XBUxzayd zM(v4O^Q9;Nap`k4-S7Ftv`ESW!IugohvsU7ZntPho3Ak%&texeCH;e+a*iJ&NjZdX zF|(<5B{f>N?32|_Pd>3&=s%&UJ$`7+NyC$V(NS%A$tT=war{7arADaCz-hA;CIUHP zF&53v#Y(0*`i;%>l$$D^nT4K`yyJ>_L{jAY{1rw;+K1p}^M@gUgQqZron$MlBQ%3cQ z+^maFF>tJ&%lZ2BZEUe^bS!5GjOZRmWokC>>&N?49?AP<{EqZ-=V9_;TbMI|@n_r# z?i`vS9otz{b@J_K+q{yhrrT_#)D!x#R&DFM{_5wFj9;tAcSil@RK3n#%__@B3ocW} z-5l@7nCt5f$BX9Nb}`)2Sld4DQ-Ky-Pr{jsZq2-(-}pcyz;)_{{tX>2bSs)%W7|59 z9tz#ruGkL$Yc71hw(7isLEwiwXbL4vSst6HZzYB|4ETSdj{Qe#bD?Q+!Qxku(|>e7 zgH#o*G}JvZcLRteg78-9T9XV9qRi?3)}&m_;ZTB8YzEuuI~~5|nye%`6w3OH86z|l z&o|`X{H;k@ub$7p^=0RK;vGU_^~>BhAPbcbHzig8sc-4h_{cPmPd!NKo%&OrrUaf3 zTg|DX^9t-oisZWWcQ%4ReenLc86+LJ;)XaJ{3)au;o5F`8VQyn5bTl`l5pw_OzBGA zPcy)3;wt!X?t^pN4X36AlM9709Cm>7r+PlV1`M;=2XgiJk9<2m%v=sROGD$D3Qzm0 zfX*FDjRx60BPBN{&+cM%U~MY8>_UHSaMJMhuV_00apwUF-b$uI5RA}!2d)%RYbV#$ z^*uCX?>Z`H;!wC~+;BejT_o~pwhXaZbeI5eH!I)z++nz7 zXX%k*z|aawk0v!3AONR|<`Hhe*chmjgIA>q9eS~cFVYXu=%op~n79Xgi>^w7)slnO z_&&QBRuMsMo^F=#9r(_dlpA8%{lOxU9W2K4<^1!LhfX!+nq|%27;m4s@W#?xpHu6_ z+#Mz^>vz^=yBZiloeuA36TJLAWwt?NeaO||!H4N4`|q#L{~*A5stwG0=JttY7GC<` z9IvJsRT4B*7x}|rrzSgu+kRRRIGEmc#)~WCs-p-ex(!y1q#a=?NpyecpJ0T|=uHWs zR1BobcBiUrR@tjR|3sHll6{vAd?1gmxS0%%HL##3dBL<~w*S39!9mkmI}v4LCO>Jw-A+6<@KL5T(?-n!2a%NJ*Yb&6Ib9r3iv9wG^|X~Y%i9Q^Ul zrdMSV21fckJZ|Cl)~X}{ABaQ00!hth^6PWCGwU!-t`s=9bbjHsrWMf#rkRe1$ZoMH zeJ?@h+qWD1fA5EsQ+&n3=i|SkF+$VfSVy9qbL&K7BA@3K!MmSm$#{@4m4#`3s?hM-dW3 z^u2P&gn_Qq)QjURsw;B?0}sV7eocsHU=y{>Yyxbyd7s{wfDL%hlNL<{pICPT&C`#d 
zzbWWj$KBI#kR_EhZA!oR&toxfMg{VMB};$+y!&J{j#ts+aez0b=q5ViAl1llkK^Gv ziGCBjP~lh7F`--anRx#P3Bq&0;glJt%U;CZ@IsHZXI`I-*3%Vm+=8A%7Q>l{8xzo4 z$knJ1{wzBaj0l+{g5s4g8oy2J7@*0!tOwouwkEz%H$je}m~l?B6g|_7;N{zXiAPR+ zcJC|wa-}7uZAH)PtCV$ihx2a}zF!dHPLUdY@3H**dy>D9P#9*R^GmGD%JY6$rA298 ztx*nWogJW{E>^x^TVC?rfpe6NjfFyf?zj!~{MSv#ipWw=vYre#T%5vO1|w^9jLVO2 zme?mK6Vmaa1Gvb^doWV-Go54vGp0!$n1yIS2Uu73hzDCoQ?F3)@O0l|WT5sB+N*eu z9lh`-t@0Rc?0KwtDE>NJ@Hf1*Xx1vBo9iR-ydYi3B~7jl*J|S>e4n^sTRWphLBZ}M z$vOUvI*TJK1ROqgmc)BjSb=*^T7YATVZOi-0H!gKl9Wrvq*i%eRa|Esm~N08-QOU0 zMc%Eu1G^bwSFDxR1=Lv8@Wn>1nW?@!Gd?!*^eJU3cc~T#I3O{_aslwp!?Qo31BqMb zGUrupmQ{fjv^|Nkupy7HQk534V(8kwxsHWi~u3xsC&rv>>HgZ z<}rcX8Ta1e6IwjRp)gCs27HR{JQOA^BB@FL+?Zg*3Va6v{v}2SN%LIem?cI6?z6C+_Q`0Y zG5ZDHP;F)0Q8d{neD6MEy!k*jF8498C>KM|h^;)IiFWF5)T+RcI|Ux$R_HxSs~y2B z0@b{<6v`v+@3^|^UqfKV{@B4tVkS88h6&;K(sY6mknKKL4{n@F5tkv04>6%sNs3SQ z;{6KdW=aM;Y`}Jxpn^o$LLm<@X`gIU1-eCI^YIjg) z>g0%M5$*^t)(s}DmihoKs<8&TE52Y{JFI}^w^3!li~I8&8*#;FB!{>joLWPTvJ$j4 zsNlI9foK1sZD&@9R?XRxHy1xVsu|A~R>&k?W!w}bmYo`$GVb2bjw(vs0v6&^b-2)l z|J?aM5hbkQw-uA4e#}o(&WnoI2R>m4))W>JjhWK3UQA)4~xh7Y-3#FN2PfT|18;Z8SQW>?uf7ys7#h{X!vgjqW z{zXCen6F;z8NQwb|5DhPFNErf^4e#y3vzk4`$Y%V3E-0V&4?BwoL*j}JHIie#qHFZ z9*FTm+YxEG!ERG6&$C{# zYkN&`=&xPr1#ES?&FtYOTmeqaKh>q1p*sQn74*}Wv@zp*q7_IOmU0?x>525E2$m<& zVM1*K8)9v-S5di<4r-HlQGQ`e;K)?(<8$Rv<@*^@+1@GAw$#V(wDHu|F3#i_FWl`C z%0SnKFsyQ}sjr6G@Q8PXuKtnbXq-D$L3t_X`}7Tt*_LIKsxw&I==Dd8lp#*pgL~GwTg0_ukq24%f!dSf+K%g@DJciRa{ncp zn>e|}F|XOQH*`<^uRGPR5|H;$3iVP?#T^!8Zr6X)R8fi8wImTi7Wo4{8Z>7_DMkr6 zLc0ntz$)iR+HGgRpQZo5_dnBnUt4>RwJBbu$%h}gUlfW+!`*S%W6GdI)>R5xD>(l* z@t0j~*A}&GIiUv z6NA19o0x3AchL;nZ(%7FNOkfe64@%$wsJUXta-FG{o5~yYj}XNm-PPW!>rR*5$?-V z>p6CYEUWI0zVRuIJGkrcQKk^Gd~rXbp&?m=%bz$Z+zi??(ooBh`NgII4H>D4!<<+{p!ulJ@nO-AvBc!A~pE-vesTw(dT zd(g!PJZ@P7iSBj561!dple>q=@mz|2VBqloz!sMa?^uC2>AtQQ( z2Yf`$Z1>(0PemD{ZD{qlFkv|JPb_9r09M@gOdi+pQukaKt-C!FF|*=|IKpMGD%x5z zqjl9IAiWw|tUj9TPAt{k`XDv0X6>>Oss60J?*uBBcK!4P=yi?d<$f%&_`W*#^8O~Y 
zQ$pK1s-L11Zsn_OzK@#^fz$Bb+P!Fkc5c?AbZEb7_Dfn{+YinrIo1iBA#_Vz0dQaP zlght%#!*3a7mqJ@>n^DMJtuYg>bKT(xf#oaCTraEE)H$}v^5PqFbz47O)!rmBaDG?WnLw;nd}0(hj~7y)z8@oJ4G>AF zM))$jZi3(KJ1E_S+_q_hEVq8*&#xR4@2|Azf0i)Mjr+(|7ywSaau{FG+GFs7VOca( zoBt}~uwmr{?Lwm!5diH}XpafkJZb>OJRnMY3c2&8`P;rh%Mb2fp6c1aB7gg5o*SG} z&7HTmj>c}i6mrj&G8`6s2IjR3WSSEFH-yd zI#{3}&AU?}xRlNzq&ahHpkoCrs0P^P=nFj&t88bi^KI4GrlZG^V99f%C4&dAz%9{q7Zg6#`{KX2-y z_HRa*>Z6Chi!R}B?G>PXkSNBU6pw3cNwCaiMp;1lqx@<@M_2ASC)RnXBy6;##vn6nrU|n(zhV;$0Cf_}&C%jedc>F!S^{;-U4}6S+ zsuT{bBfM~GLrowMH12&I*GUo%g4S!7+a7(d{Ij#E@S`Xk=UnV$$L!pr;ZR5STFm!J zrFGD0<}36Ka(mNaBX+ho4SwtQ<6}BPqlb`814+i|Q>piv8XI&^!#-|Dg3PF84$&(x z!s|c>^Tv6kw?HoM7g>hPYV$WZ{Tj5jbgb!~JsdBLbhXYoY8oH!|9*f|Htj0O5CH~l z-{RPdfekt=7$$b;OLVpX9Zl_>Z0)dx+~{{U75cV;21oS}2H{P3@0L7a=BoTtlt6R+ z4e>x~9MU}NR<07yO4bn}zAzTK!lS>5eulreH2rh2sDSC~9u^6QeGd{(uL_7A5T_iR zQ}Gqi-II89z?EwNQ{xtEkATMWVf@ZLr%M)KPA0z?1|p2p#8Mj zuL)Hc42=tb(?Z___nn{Ue3?9&(Y?zxL_g4hN{E-_ClCeBqSdCSDkE_;KYs)!V=8lm zCeY-aa5*W$HtntH+w{d3KRAo#md+#|U0MO40_WZxezd`VBqpPwIm^-lo5*oR$QnZ& z^5-{~bP$ccxU#r6X&KZguV3Z9a!Dr*863N~Y;O{KM~tk{XhOcZ8t7!a>uDB}h_9hB z)+ek`&3_Mcyw1zdJf*QUC$9e}3eg!6S_$Stx8ZV*MRmJ0CYGP2bt6eH!Bu zdS=kLrB_`?*tTboBC_=~Seim-H>*9EqdO*nU z<$(wmvST!y0ZI zb<^$o7SImb_MbMb}%#CiOgbQ$g<3Vz;^&q~pGsVBrw#0k31)~Nj39NTSM0$q zu^%J{u{MUYXED<8W{>OSQlY`u_Xdqk?G6x1x-otBRxiyi~$w&`Sd)JO0@W-%bWrN{xPI?{)SiJV4(aEcRwFlaqIAvR!#E7b6%2BSR^5rB7A@@fR!!(oB4g}-9ol^G$17Ij48`Y2U++__SD19vLoNR7x#AOEze$XV6=gocBG-QlA6ziKc##FchFig2 z31-WnYmC$dK3~V6pS4%3G;Zo3Z?F4V4_^Ne#;199ND2NQZy5alPD?@F1O0!KTRL`d zW4B&w3-dWt9%3p^#+ims_k3)b=`KVjjiI%LX(2?ZuH&hvLs8zRJs@sV-YEgmqm5I$ zfBRU?*1X}Vh6ur&-xj-QvzzZqH}>-=2FUk6aU`F={*4+z0&N}a{SW^Q0zslHNR;~c z^S|zF$VsZhZy){FL=KVHEF)~zK8R~asotFE;GA+j>N>Pc)efXgz9AW&#PTWMAOWc; zPE@%VFPp%p;W4m&c{kAnzeeN{$>(v$y+Ws^asZmWtAiV`WtV|3C$o6Rt^0#ZVx^8W zamNkbrNq5=H*W#&@)jzb2gLkVwh_<-#NY?@lcuVGeR=G6BeV;a-z0glcaETf5UoC4_&b+f~Bz(!p`0A*?+nW ze=$9YO+9YWzC6>b_?`%A3|DEH&v;MuIKD0K8!s~<+Z{T5M7v-@4h7u(nFJ+-(zoE@|=nP-xdeE41JZvIa8 
z;*E5;3AkO(+KOI~weoZjqzg>2ADEb&m7$?}2M$sM`n?0KED5`xnR^1UeOkOacPv%P zPJQw6H|9EZNJ)==%{7@qn%rE|%-;a!=YAtN8BqU=!XGi1L>z##_~U>^TCy~%kuYx^?6q&3g z${moMfR17nIhp2vxX@TkQ1jFAetOnn3>h3X(r01~{J~fL4veAiK##aU-*XPI9vc-2 zqq$!yy6Uhz7G94yVL-OBNMYgi?YV2g1J9luMngNj36N><@AI#2K5DZ1XBCT=nLreWh|}Lm?+H$-10SnH3FTj9s+T?iFW0fc?eXj?`X(-HaRA5m&#T&`<| zBJsQRx%qp-gj>vpo_OEEn1N#Vj0}0#`z;TSB?DWM!DCa1N|y!sTZqw=uE!VDHJHU_oT72e`yVgrnG0OR>0)4h98K4fw<*G?_;7+UUYvu zay>CJr0usDaHZa~y=Ms$0MAf{gq6bLkwEZY4j8w7B_r1GNWwg)LeC0_7-f*j>(!%S zX8=ogAtS9g_o8w0zU75RJvs^}S=Dz92BQnPaw)hK_;%9N`@kx8XWX{02D@y}1_a+b zHu@O@{`)CnZK1v04gJ51V3NExgP~Z?Gth2-V4_d|5J<^0$r$^ZXXfb{R|7K;;;(p3 z5PdKmFc+at7u8lHgbmqRGdv>min;pGA$%?txV|I}C4qMk8R_#NMPn|o^P+QXa4%xKXMs4X zeEtmB$HQH%I(UvD@ga-v`=;t?!_@tdVJSuQ`!JIz_s@eSM>v)*TS4%?Cfa+2#e*4! z0NC7`e4fHW?fLJ=fsT4NZR*RjCRgp0Ib0m31?P?VhBB(-m@m5U&iW_z)tfvWbSE^- z@=&6$@fI&;>WT@S#AIS|h8(oK7qk!Gx57dx`${LF-%nyQm&C&H^Lg9p43kL8;JH9K z=9{|R?@$=a^0QZ_A3WT#jCoZY4&20wBg4yqIr|nz+qiLQG8n*l3Tz0}NPWmvN z`jtJwBX$;1Vn>EdDwm3x{&@_>A<|6VdxZ6^%p1n4*q{EagwtOm^3#V^T@UQpq|=k> zecvkPKK#w!v9vV@YL|f=wynRHyN&hKsds6^IluXLBXq_q{%Mf!(R5rsB`%??#Pv_= z7Jfmj2P^p{_rJ-g%kM|zV#d!U{2L*Yy|@6o_1{ggF6#eeC85|kZ+U0JYj>)KqzVEo ze3VRfD?W+dv0GKN-qk@EYW@<_pT4GD##lSrCrYCtRxk73(SU?@!-@ zj;2F#C$t-w%)E2;z}ihe)pSB;7ezB3iWxr`%}x%=6?K|cUXx$>=O@7S5{{#`u6ADj zC)ZutfxPl>54NR-{JMYX{2j=DnDhUe{tjhfCI1;kQqu;ByuMK3WZ;Q(i$CDW&T_MT z{VK$hk?xK(aGSJf1i|Bxjn%oiT^qdr+^j7$5{fq`POrkO5KD+pTR!1|(x~jTUB()V zgK`oH4{+4=@2?(ybO*9K?|D9iE!02_o`sru12Rp8HsW;gek`y8)L;w9MB zFWmytxsS_}I0SUz7^1no7K4Zl@Hlbal{r-qyWZ2VwT^vkc>c=b?c zVAnaZ^Z_yB!cDTC01p6GgaGN!!JZONPr-zOc-TqHfI+L+oU0y4!88HAhz2eYNU!epOE}?DoZRrzJ1G&-uO&$z>x(0U=5%jdQ@x)_M~5ssYeg)0;47#u{dd z^nruGEq&*h#kig@|ipkszH=aLX7SNh3Hd2!A zK%_QfLPyt`O!$R(CJsT4I2f`cDR$7O*z^+vOu~}IC`)jVC?Ybb z?P0z@UckG=kd!_!J>i2PD|hiX#6cB!C}r1QY*27#3Uk~j#^txJq5jLW&s%B1w+jBL zw>$?pw9~!rXA1;@0R6Ff0T!uyHAG5baLP4oyCpo*Z#^TlIfvRJ_l4#M7sGsl-Q|h6 z)ALA3HS z!U2(!54?~V7bJ;_ih)WaeT6WDd+ZbDBLV{WmtNADZ~to{4IZNcMd4(TV~An=j?60g 
z1y9#ZtqKV&hy@2!_9V*0ixmmDv_C#v9xjI5AI1FF?9I6BdI~3HvH%b44cjX%8<=_Y zyjkTli4o@0n)S*K1kgq)*2#89m0Y)vzD#T!Qbf6AF$mJNHeuOwf2^W`oG*k z&f;1t5$Ti|pWGyQbfT;P1L~S5{un<&5vCtN=+0F;J%RDA3#oJ<>V~0^sS?37R^O45 zPS1ncutf#dFCMT2KatH$&qpCwYwjGgtbSm|n^PF5M?nIMx#o$*riq#yDS-qvqsCCU z7Bk9|IF@)i$>;yFN(0_<&*`qP9N1sm+xPLh1>?6Gr3&I8skszYx1hP!BHWI&tkmz@wN@=MGo7Ise>X;(5H~90b=dTDFw^_vImM zBp>XzYE_MW7IW!$oemD~gFa;}F6~*wj}H$OLDC`(#>30YJlgaEqMj8q3X4nc6D+!6 zOKY+9^;aJ#*6N=lE+!yy%XjuCr?_u+%p0>gYgsGKee4I*mFpz#i7T4`FY!)-#z}^q zFMs7%KeL%8D**emo~4@g{J)-->b3omU4buHc(2(>Rnb?rej!OuMa=m+tC9j7M@iae zDau!IF>JDUjQ|m6qsm(e+n>agA)<^_P<9o78rqdRv*is!1oQj#0yH`Z?=< z#*PEh`{x3J87Hv-w-UiIC#$V2PM{ik_7=ARU2FKSif?VL%SH~Gg@GN&ilCE^NH?>r z@O#VPT&(IzN&hQJrUmI5pimvnK7&9TFu?4+1v?s;V|+c<7e)`i3UB(Em#Ii4y8V% zs%ZS5mr;!}O*-#m|MeS4-b8meIB+xm+lEk{!dT^h8?OKT{0AfX;49true + + + gdcc/dev_nginx:unstable + + ${project.basedir}/modules/nginx + + + true From eac4f10dfd11996d0e54c1d83941227e7efebb78 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Thu, 2 Nov 2023 13:52:49 -0400 Subject: [PATCH 193/252] Update dev-usage.rst Remove the instructions related to the re-deploy of containers in the background as of 11/02/2023 these instructions didn't work with the current containers. --- doc/sphinx-guides/source/container/dev-usage.rst | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 04c7eba7913..3a4426d5153 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -150,16 +150,6 @@ by recreating the container(s). In the future, more options may be added here. If you started your containers in foreground, just stop them and follow the steps for building and running again. The same goes for using Maven to start the containers in the background. 
-In case of using Docker Compose and starting the containers in the background, you can use a workaround to only -restart the application container: - -.. code-block:: - - # First rebuild the container (will complain about an image still in use, this is fine.) - mvn -Pct package - # Then re-create the container (will automatically restart the container for you) - docker compose -f docker-compose-dev.yml create dev_dataverse - Using ``docker container inspect dev_dataverse | grep Image`` you can verify the changed checksums. Using a Debugger From 3f59f7285b922326b6a4b49c04011eb275c05371 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 3 Nov 2023 10:31:41 -0400 Subject: [PATCH 194/252] Add the .gitattributes file to set LF by default on .sh files since these will run on docker. --- .gitattributes | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000000..9860024f70a --- /dev/null +++ b/.gitattributes @@ -0,0 +1,4 @@ +# https://www.git-scm.com/docs/gitattributes + +# This set mandatory LF line endings for .sh files preventing from windows users to having to change the value of their git config --global core.autocrlf to 'false' or 'input' +*.sh text eol=lf \ No newline at end of file From 7892ec70b456e47a8a90fbbdef088a5605eba0a6 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 3 Nov 2023 13:08:46 -0400 Subject: [PATCH 195/252] Fix the missing deoployment on Payera detected on Windows caused by a race condition due the database not being ready when the DV container started. 
---
 src/main/docker/scripts/init_3_wait_dataverse_db_host.sh | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 src/main/docker/scripts/init_3_wait_dataverse_db_host.sh

diff --git a/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh b/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh
new file mode 100644
index 00000000000..c234ad33307
--- /dev/null
+++ b/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh
@@ -0,0 +1,4 @@
+# It was reported in #9949 that on the first launch of the containers Dataverse would not be deployed on Payara.
+# This was caused by a race condition due to Postgres not being ready. A solution for Docker Compose was prepared,
+# but didn't work due to a compatibility issue in the Maven plugin [https://github.com/fabric8io/docker-maven-plugin/issues/888]
+wait-for "${DATAVERSE_DB_HOST:-postgres}:${DATAVERSE_DB_PORT:-5432}" -t 120
\ No newline at end of file

From c1afef9db9514567f064f336d7370d6f2173f74f Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 3 Nov 2023 14:06:26 -0400
Subject: [PATCH 196/252] corrected function

---
 .../V6.0.0.3__10095-guestbook-at-request2.sql | 34 +++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 src/main/resources/db/migration/V6.0.0.3__10095-guestbook-at-request2.sql

diff --git a/src/main/resources/db/migration/V6.0.0.3__10095-guestbook-at-request2.sql b/src/main/resources/db/migration/V6.0.0.3__10095-guestbook-at-request2.sql
new file mode 100644
index 00000000000..b6157e6a782
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.3__10095-guestbook-at-request2.sql
@@ -0,0 +1,34 @@
+-- This creates a function that ESTIMATES the size of the
+-- GuestbookResponse table (for the metrics display), instead
+-- of relying on straight "SELECT COUNT(*) ..."
+-- It uses statistics to estimate the number of guestbook entries
+-- and the fraction of them related to downloads,
+-- i.e. those that weren't created for 'AccessRequest' events.
+-- Significant potential savings for an active installation. +-- See https://github.com/IQSS/dataverse/issues/8840 and +-- https://github.com/IQSS/dataverse/pull/8972 for more details + +CREATE OR REPLACE FUNCTION estimateGuestBookResponseTableSize() +RETURNS bigint AS $$ +DECLARE + estimatedsize bigint; +BEGIN + SELECT CASE WHEN relpages<10 THEN 0 + ELSE ((reltuples / relpages) + * (pg_relation_size('public.guestbookresponse') / current_setting('block_size')::int))::bigint + * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0 + OR (select array_position(most_common_vals::text::text[], 'AccessRequest') + FROM pg_stats WHERE tablename='guestbookresponse' AND attname='eventtype') IS NULL) THEN 1 + ELSE 1 - (SELECT (most_common_freqs::text::text[])[array_position(most_common_vals::text::text[], 'AccessRequest')]::float + FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END) + END + FROM pg_class + WHERE oid = 'public.guestbookresponse'::regclass INTO estimatedsize; + + if estimatedsize = 0 then + SELECT COUNT(id) FROM guestbookresponse WHERE eventtype!= 'AccessRequest' INTO estimatedsize; + END if; + + RETURN estimatedsize; +END; +$$ LANGUAGE plpgsql IMMUTABLE; From 94b08c9f01fce8ac0cd9e77c2a38921c71257152 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 16:21:13 -0400 Subject: [PATCH 197/252] add user/name to privateURL token, refactor methods --- .../iq/dataverse/FileDownloadServiceBean.java | 25 +---------- .../edu/harvard/iq/dataverse/FilePage.java | 2 +- .../edu/harvard/iq/dataverse/api/Files.java | 6 +-- .../AuthenticationServiceBean.java | 42 ++++++++++++++++++- 4 files changed, 44 insertions(+), 31 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index 55817d4a746..7a03f1a35dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -4,7 +4,6 @@ import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.StorageIO; @@ -16,8 +15,6 @@ import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; -import edu.harvard.iq.dataverse.privateurl.PrivateUrl; -import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; @@ -75,8 +72,6 @@ public class FileDownloadServiceBean implements java.io.Serializable { @EJB AuthenticationServiceBean authService; @EJB - PrivateUrlServiceBean privateUrlService; - @EJB SettingsServiceBean settingsService; @EJB MailServiceBean mailService; @@ -352,7 +347,7 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter User user = session.getUser(); DatasetVersion version = fmd.getDatasetVersion(); if (version.isDraft() || fmd.getDatasetVersion().isDeaccessioned() || (fmd.getDataFile().isRestricted()) || (FileUtil.isActivelyEmbargoed(fmd))) { - apiToken = getApiToken(user); + apiToken = authService.getValidApiTokenForUser(user); } DataFile dataFile = null; if (fmd != null) { @@ -379,24 +374,6 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter } } - public ApiToken getApiToken(User user) { - ApiToken apiToken = null; - if (user instanceof AuthenticatedUser) { 
- AuthenticatedUser authenticatedUser = (AuthenticatedUser) user; - apiToken = authService.findApiTokenByUser(authenticatedUser); - if (apiToken == null || apiToken.isExpired()) { - //No un-expired token - apiToken = authService.generateApiTokenForUser(authenticatedUser); - } - } else if (user instanceof PrivateUrlUser) { - PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; - PrivateUrl privateUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); - apiToken = new ApiToken(); - apiToken.setTokenString(privateUrl.getToken()); - } - return apiToken; - } - public void downloadDatasetCitationXML(Dataset dataset) { downloadCitationXML(null, dataset, false); } diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index bfae80ade27..479c8a429c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -1069,7 +1069,7 @@ public String preview(ExternalTool externalTool) { ApiToken apiToken = null; User user = session.getUser(); if (fileMetadata.getDatasetVersion().isDraft() || fileMetadata.getDatasetVersion().isDeaccessioned() || (fileMetadata.getDataFile().isRestricted()) || (FileUtil.isActivelyEmbargoed(fileMetadata))) { - apiToken=fileDownloadService.getApiToken(user); + apiToken=authService.getValidApiTokenForUser(user); } if(externalTool == null){ return ""; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index ad24d81d996..d4fc92f912d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -814,10 +814,8 @@ public Response getExternalToolFMParams(@Context ContainerRequestContext crc, @P return error(BAD_REQUEST, "External tool does not have file scope."); } ApiToken apiToken = null; - User u = getRequestUser(crc); - if (u instanceof AuthenticatedUser) { - 
apiToken = authSvc.findApiTokenByUser((AuthenticatedUser) u); - } + User user = getRequestUser(crc); + apiToken = authSvc.getValidApiTokenForUser(user); FileMetadata target = fileSvc.findFileMetadata(fmid); if (target == null) { return error(BAD_REQUEST, "FileMetadata not found."); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 496620cd6e8..848e57bc6b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -21,10 +21,14 @@ import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; +import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean; +import edu.harvard.iq.dataverse.privateurl.PrivateUrl; +import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; @@ -118,6 +122,9 @@ public class AuthenticationServiceBean { @EJB SavedSearchServiceBean savedSearchService; + @EJB + PrivateUrlServiceBean privateUrlService; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -931,14 +938,45 @@ public List getWorkflowCommentsByAuthenticatedUser(Authenticat return query.getResultList(); } - 
public ApiToken getValidApiTokenForUser(AuthenticatedUser user) {
+    /**
+     * This method gets a valid api token for an AuthenticatedUser, creating a new
+     * token if one doesn't exist or if the token is expired.
+     *
+     * @param user
+     * @return
+     */
+    public ApiToken getValidApiTokenForAuthenticatedUser(AuthenticatedUser user) {
         ApiToken apiToken = null;
         apiToken = findApiTokenByUser(user);
-        if ((apiToken == null) || (apiToken.getExpireTime().before(new Date()))) {
+        if ((apiToken == null) || apiToken.isExpired()) {
             logger.fine("Created apiToken for user: " + user.getIdentifier());
             apiToken = generateApiTokenForUser(user);
         }
         return apiToken;
     }
 
+    /**
+     * Gets a token for an AuthenticatedUser or a PrivateUrlUser. It will create a
+     * new token if needed for an AuthenticatedUser. Note that, for a PrivateUrlUser, this method creates a token
+     * with a temporary AuthenticatedUser that only has a userIdentifier - needed when generating signed URLs.
+     * @param user
+     * @return a token or null (i.e.
if the user is not an AuthenticatedUser or PrivateUrlUser) + */ + + public ApiToken getValidApiTokenForUser(User user) { + ApiToken apiToken = null; + if (user instanceof AuthenticatedUser) { + apiToken = getValidApiTokenForAuthenticatedUser((AuthenticatedUser) user); + } else if (user instanceof PrivateUrlUser) { + PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; + + PrivateUrl privateUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); + apiToken = new ApiToken(); + apiToken.setTokenString(privateUrl.getToken()); + AuthenticatedUser au = new AuthenticatedUser(); + au.setUserIdentifier(privateUrlUser.getIdentifier()); + apiToken.setAuthenticatedUser(au); + } + return apiToken; + } } From b4cf71e6b6a8970fed5faaaec2c258a2e24865e9 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 16:22:27 -0400 Subject: [PATCH 198/252] Fix potential infinite loop --- .../iq/dataverse/authorization/AuthenticationServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 848e57bc6b0..1c0f5010059 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -622,6 +622,7 @@ public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecord String identifier = internalUserIdentifier + i; while ( identifierExists(identifier) ) { i += 1; + identifier = internalUserIdentifier + i; } authenticatedUser.setUserIdentifier(identifier); } else { From a7e4a7870ee85c3ba4d09396eb3a801b316f2c4c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 16:24:45 -0400 Subject: [PATCH 199/252] use authSvc method to handle both auth and privateUrl cases and get valid token if expired/missing (#10045) --- 
src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 292aba0cee3..cbc0bcda6ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3930,10 +3930,7 @@ public Response getExternalToolDVParams(@Context ContainerRequestContext crc, } ApiToken apiToken = null; User u = getRequestUser(crc); - if (u instanceof AuthenticatedUser) { - apiToken = authSvc.findApiTokenByUser((AuthenticatedUser) u); - } - + apiToken = authSvc.getValidApiTokenForUser(u); ExternalToolHandler eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale); return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())))); From 264448e98028f21dd9f61ec5f05e344044298315 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 16:28:38 -0400 Subject: [PATCH 200/252] handle urls signed for a privateUrlUser --- .../api/auth/SignedUrlAuthMechanism.java | 34 +++++++++++++------ 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java index f8572144236..e2c2f2381d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java @@ -3,7 +3,10 @@ import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.authorization.users.User; +import 
edu.harvard.iq.dataverse.privateurl.PrivateUrl; +import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.UrlSignerUtil; @@ -27,16 +30,18 @@ public class SignedUrlAuthMechanism implements AuthMechanism { @Inject protected AuthenticationServiceBean authSvc; - + @Inject + protected PrivateUrlServiceBean privateUrlSvc; + @Override public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse { String signedUrlRequestParameter = getSignedUrlRequestParameter(containerRequestContext); if (signedUrlRequestParameter == null) { return null; } - AuthenticatedUser authUser = getAuthenticatedUserFromSignedUrl(containerRequestContext); - if (authUser != null) { - return authUser; + User user = getAuthenticatedUserFromSignedUrl(containerRequestContext); + if (user != null) { + return user; } throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_SIGNED_URL); } @@ -45,8 +50,8 @@ private String getSignedUrlRequestParameter(ContainerRequestContext containerReq return containerRequestContext.getUriInfo().getQueryParameters().getFirst(SIGNED_URL_TOKEN); } - private AuthenticatedUser getAuthenticatedUserFromSignedUrl(ContainerRequestContext containerRequestContext) { - AuthenticatedUser authUser = null; + private User getAuthenticatedUserFromSignedUrl(ContainerRequestContext containerRequestContext) { + User user = null; // The signedUrl contains a param telling which user this is supposed to be for. // We don't trust this. So we lookup that user, and get their API key, and use // that as a secret in validating the signedURL. If the signature can't be @@ -54,17 +59,26 @@ private AuthenticatedUser getAuthenticatedUserFromSignedUrl(ContainerRequestCont // we reject the request. 
UriInfo uriInfo = containerRequestContext.getUriInfo(); String userId = uriInfo.getQueryParameters().getFirst(SIGNED_URL_USER); - AuthenticatedUser targetUser = authSvc.getAuthenticatedUser(userId); - ApiToken userApiToken = authSvc.findApiTokenByUser(targetUser); + User targetUser = null; + ApiToken userApiToken = null; + if(!userId.startsWith(PrivateUrlUser.PREFIX)) { + targetUser = authSvc.getAuthenticatedUser(userId); + userApiToken = authSvc.findApiTokenByUser((AuthenticatedUser)targetUser); + } else { + PrivateUrl privateUrl = privateUrlSvc.getPrivateUrlFromDatasetId(Long.parseLong(userId.substring(PrivateUrlUser.PREFIX.length()))); + userApiToken = new ApiToken(); + userApiToken.setTokenString(privateUrl.getToken()); + targetUser = privateUrlSvc.getPrivateUrlUserFromToken(privateUrl.getToken()); + } if (targetUser != null && userApiToken != null) { String signedUrl = URLDecoder.decode(uriInfo.getRequestUri().toString(), StandardCharsets.UTF_8); String requestMethod = containerRequestContext.getMethod(); String signedUrlSigningKey = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + userApiToken.getTokenString(); boolean isSignedUrlValid = UrlSignerUtil.isValidUrl(signedUrl, userId, requestMethod, signedUrlSigningKey); if (isSignedUrlValid) { - authUser = targetUser; + user = targetUser; } } - return authUser; + return user; } } From af490cdcb1b2b7ce713f67e93fe98028e596e9f2 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 16:31:42 -0400 Subject: [PATCH 201/252] use ! 
instead of # to avoid encoding issues in signedurls --- .../iq/dataverse/authorization/users/PrivateUrlUser.java | 2 +- .../db/migration/V6.0.0.3__10093-privateurluser_id_update.sql | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 src/main/resources/db/migration/V6.0.0.3__10093-privateurluser_id_update.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java index f64b5c301e7..03f018221fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java @@ -12,7 +12,7 @@ */ public class PrivateUrlUser implements User { - public static final String PREFIX = "#"; + public static final String PREFIX = "!"; /** * In the future, this could probably be dvObjectId rather than datasetId, diff --git a/src/main/resources/db/migration/V6.0.0.3__10093-privateurluser_id_update.sql b/src/main/resources/db/migration/V6.0.0.3__10093-privateurluser_id_update.sql new file mode 100644 index 00000000000..260f191f557 --- /dev/null +++ b/src/main/resources/db/migration/V6.0.0.3__10093-privateurluser_id_update.sql @@ -0,0 +1 @@ + update roleassignment set assigneeidentifier=replace(assigneeidentifier, '#','!') where assigneeidentifier like '#%'; \ No newline at end of file From 9f04e07b451e5705b69c2707e49420338fc084da Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 16:37:32 -0400 Subject: [PATCH 202/252] fine logging --- .../harvard/iq/dataverse/externaltools/ExternalToolHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index cdde9fbe0e8..2fc4df808bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java 
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -141,7 +141,7 @@ public String handleRequest(boolean preview) { if (requestMethod.equals(HttpMethod.POST)) { String body = JsonUtil.prettyPrint(createPostBody(params).build()); try { - logger.info("POST Body: " + body); + logger.fine("POST Body: " + body); return postFormData(body); } catch (IOException | InterruptedException ex) { Logger.getLogger(ExternalToolHandler.class.getName()).log(Level.SEVERE, null, ex); From 204487d04879be88a1566dd454c5130e6d86bb8c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 16:51:28 -0400 Subject: [PATCH 203/252] use authSvc methods, #10045 fix --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index fc18257196d..85a0277d39f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -5910,14 +5910,7 @@ public void setFolderPresort(boolean folderPresort) { public void explore(ExternalTool externalTool) { ApiToken apiToken = null; User user = session.getUser(); - if (user instanceof AuthenticatedUser) { - apiToken = authService.findApiTokenByUser((AuthenticatedUser) user); - } else if (user instanceof PrivateUrlUser) { - PrivateUrlUser privateUrlUser = (PrivateUrlUser) user; - PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId()); - apiToken = new ApiToken(); - apiToken.setTokenString(privUrl.getToken()); - } + apiToken = authService.getValidApiTokenForUser(user); ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataset, apiToken, session.getLocaleCode()); PrimeFaces.current().executeScript(externalToolHandler.getExploreScript()); } @@ -5925,8 +5918,9 @@ public void explore(ExternalTool 
externalTool) {
     public void configure(ExternalTool externalTool) {
         ApiToken apiToken = null;
         User user = session.getUser();
+        //Not enabled for PrivateUrlUsers (who wouldn't have write permissions anyway)
         if (user instanceof AuthenticatedUser) {
-            apiToken = authService.findApiTokenByUser((AuthenticatedUser) user);
+            apiToken = authService.getValidApiTokenForAuthenticatedUser((AuthenticatedUser) user);
         }
         ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataset, apiToken, session.getLocaleCode());
         PrimeFaces.current().executeScript(externalToolHandler.getConfigureScript());

From e08f5d79bdcf1351967d90bd832cf06ed5803364 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 3 Nov 2023 17:35:47 -0400
Subject: [PATCH 204/252] release note

---
 doc/release-notes/10093-signedUrl_improvements.md | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 doc/release-notes/10093-signedUrl_improvements.md

diff --git a/doc/release-notes/10093-signedUrl_improvements.md b/doc/release-notes/10093-signedUrl_improvements.md
new file mode 100644
index 00000000000..8f6ae89f981
--- /dev/null
+++ b/doc/release-notes/10093-signedUrl_improvements.md
@@ -0,0 +1,5 @@
+A new version of the standard Dataverse Previewers from https://github.com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended.
+
+SignedUrls can now be used with PrivateUrl access, i.e. allowing PrivateUrl users to view Previews when they are configured to use signedUrls
+
+Launching a Dataset-level Configuration tool will automatically generate an api key when needed. This is consistent with how other types of tools work.
From cb5848ebf3729b5f86e2bca5b349b42080babf9e Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 17:41:16 -0400 Subject: [PATCH 205/252] doc update --- doc/sphinx-guides/source/api/external-tools.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index c72b52637c7..d802bc8714a 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -187,6 +187,7 @@ Using Example Manifests to Get Started ++++++++++++++++++++++++++++++++++++++ Again, you can use :download:`fabulousFileTool.json <../_static/installation/files/root/external-tools/fabulousFileTool.json>` or :download:`dynamicDatasetTool.json <../_static/installation/files/root/external-tools/dynamicDatasetTool.json>` as a starting point for your own manifest file. +Additional working examples, including ones using signedUrls, are available at https://github.com/gdcc/dataverse-previewers . 
Testing Your External Tool -------------------------- From c208d04f1d5c7a325bcbf4628b550d9b58d713be Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 17:48:32 -0400 Subject: [PATCH 206/252] test updates --- .../harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java index 8c9e0261bfa..da94b288bee 100644 --- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java @@ -102,7 +102,7 @@ public void testGetDatasetFromRoleAssignmentSuccess() { RoleAssignment ra = this.createTestRoleAssignment(dataset); assertNotNull(PrivateUrlUtil.getDatasetFromRoleAssignment(ra)); - assertEquals("#42", ra.getAssigneeIdentifier()); + assertEquals(PrivateUrlUser.PREFIX + "42", ra.getAssigneeIdentifier()); } @Test @@ -137,7 +137,7 @@ public void testGetDraftDatasetVersionFromRoleAssignmentSuccess() { DatasetVersion datasetVersionOut = PrivateUrlUtil.getDraftDatasetVersionFromRoleAssignment(ra); assertNotNull(datasetVersionOut); - assertEquals("#42", ra.getAssigneeIdentifier()); + assertEquals(PrivateUrlUser.PREFIX + "42", ra.getAssigneeIdentifier()); } @Test From 98afbaf7057fa2f84cfa9fee9ac906a5eac4d48f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 3 Nov 2023 17:52:42 -0400 Subject: [PATCH 207/252] another test --- .../edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index 1d054040e84..88f6a5bdbce 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -114,7 +114,7 @@ public void testJson_RoleAssignment() { JsonObjectBuilder job = JsonPrinter.json(ra); assertNotNull(job); JsonObject jsonObject = job.build(); - assertEquals("#42", jsonObject.getString("assignee")); + assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getString("assignee")); assertEquals(123, jsonObject.getInt("definitionPointId")); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("privateUrlToken")); } @@ -135,7 +135,7 @@ public void testJson_PrivateUrl() { assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token")); assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken")); - assertEquals("#42", jsonObject.getJsonObject("roleAssignment").getString("assignee")); + assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getJsonObject("roleAssignment").getString("assignee")); } @Test From f2eb91e8f7d4776d891684ff9a0dd05f71617860 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Sat, 4 Nov 2023 17:22:36 -0400 Subject: [PATCH 208/252] try longer lock wait --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e3a7fd0cfc3..1414dd32864 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -62,7 +62,7 @@ public class UtilIT { private static final String BUILTIN_USER_KEY = "burrito"; private static final String EMPTY_STRING = ""; public static final int MAXIMUM_INGEST_LOCK_DURATION = 15; - public static final int MAXIMUM_PUBLISH_LOCK_DURATION = 15; + public static final 
int MAXIMUM_PUBLISH_LOCK_DURATION = 20; public static final int MAXIMUM_IMPORT_DURATION = 1; private static SwordConfigurationImpl swordConfiguration = new SwordConfigurationImpl(); From d074c257c845678c75690e067a68d26daab0993d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 6 Nov 2023 09:15:54 -0500 Subject: [PATCH 209/252] use prefixes in RoleAssigneeBean --- .../edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java index 059d5a8ffd3..5429f5952dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java @@ -99,15 +99,15 @@ public RoleAssignee getRoleAssignee(String identifier, Boolean augmented) { switch (identifier.charAt(0)) { case ':': return predefinedRoleAssignees.get(identifier); - case '@': + case AuthenticatedUser.IDENTIFIER_PREFIX: if (!augmented){ return authSvc.getAuthenticatedUser(identifier.substring(1)); } else { return authSvc.getAuthenticatedUserWithProvider(identifier.substring(1)); - } - case '&': + } + case Group.IDENTIFIER_PREFIX: return groupSvc.getGroup(identifier.substring(1)); - case '#': + case PrivateUrlUser.PREFIX: return PrivateUrlUtil.identifier2roleAssignee(identifier); default: throw new IllegalArgumentException("Unsupported assignee identifier '" + identifier + "'"); From 780a0d5ae70bbd5e83ce843b36800304dbb46a0e Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 6 Nov 2023 09:24:27 -0500 Subject: [PATCH 210/252] use string, add missing import --- .../edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java index 
5429f5952dd..88acc1916cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.mydata.MyDataFilterParams; import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil; @@ -96,8 +97,8 @@ public RoleAssignee getRoleAssignee(String identifier, Boolean augmented) { if (identifier == null || identifier.isEmpty()) { throw new IllegalArgumentException("Identifier cannot be null or empty string."); } - switch (identifier.charAt(0)) { - case ':': + switch (identifier.substring(0,1)) { + case ":": return predefinedRoleAssignees.get(identifier); case AuthenticatedUser.IDENTIFIER_PREFIX: if (!augmented){ From 62b85b6229dfde7baba0395b3ac2362b3e96e9ac Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 6 Nov 2023 13:49:59 -0500 Subject: [PATCH 211/252] add release note #9590 --- doc/release-notes/9590-intellij-redeploy.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/9590-intellij-redeploy.md diff --git a/doc/release-notes/9590-intellij-redeploy.md b/doc/release-notes/9590-intellij-redeploy.md new file mode 100644 index 00000000000..4c6ab29ea9f --- /dev/null +++ b/doc/release-notes/9590-intellij-redeploy.md @@ -0,0 +1,3 @@ +Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. 
+ +For details, see https://dataverse-guide--10088.org.readthedocs.build/en/10088/container/dev-usage.html From 334beb8beef6965bd94e6cfdc6955a4986853b87 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 6 Nov 2023 13:57:47 -0500 Subject: [PATCH 212/252] explain fast redeploy with IntelliJ #9590 --- .../source/container/dev-usage.rst | 44 ++++++++++++++++--- 1 file changed, 37 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 3a4426d5153..b2547306b03 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -141,16 +141,46 @@ Alternatives: Options are the same. -Re-Deploying ------------- +Redeploying +----------- + +Rebuild and Running Images +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The safest way to redeploy code is to stop the running containers (with Ctrl-c if you started them in the foreground) and then build and run them again with ``mvn -Pct clean package docker:run``. + +IntelliJ IDEA Ultimate and Payara Platform Tools +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you have IntelliJ IDEA Ultimate (note that `free educational licenses `_ are available), you can install `Payara Platform Tools `_ which can dramatically improve your feedback loop when iterating on code. + +The following steps are suggested: + +- Go to the Payara admin console (either at https://localhost:4848 or http://localhost:4849) and undeploy the dataverse application under "Applications". +- Install Payara Platform Tools. +- Under "Server": + + - Click "Run" then "Edit Configurations". + - Click the plus sign and scroll down to Payara Server and click "Remote". + - For "Name" put "Payara in Docker" or something reasonable. + - Under "Application server" select a local directory that has the same version of Payara used in the container. 
This should match the version of Payara mentioned in the Installation Guide under :ref:`payara`. + - Change "Admin Server Port" to 4849. + - For username, put "admin". + - For password, put "admin". + +- Under "Deployment": + + - Click the plus button and clien "Artifact" then "dataverse:war". -Currently, the only safe and tested way to re-deploy the Dataverse application after you applied code changes is -by recreating the container(s). In the future, more options may be added here. +- Under "Startup/Connection": -If you started your containers in foreground, just stop them and follow the steps for building and running again. -The same goes for using Maven to start the containers in the background. + - Click "Debug" and change the port to 9009. -Using ``docker container inspect dev_dataverse | grep Image`` you can verify the changed checksums. +- Click "Run" and then "Debug Payara in Docker". This initial deployment will take some time. +- Go to http://localhost:8080/api/info/version and make sure the API is responding. +- Edit ``Info.java`` and make a small change to the ``/api/info/version`` code. +- Click "Run" then "Debugging Actions" then "Reload Changed Classes". The deployment should only take a few seconds. +- Go to http://localhost:8080/api/info/version and verify the change you made. 
Using a Debugger ---------------- From d966ce1daa41e3ab3a155e2fde91e3fb00ab1b1d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 6 Nov 2023 13:59:24 -0500 Subject: [PATCH 213/252] remove unused image #9590 --- modules/nginx/img.png | Bin 71929 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 modules/nginx/img.png diff --git a/modules/nginx/img.png b/modules/nginx/img.png deleted file mode 100644 index 278ee490b2d6d7c61b641cae4133b022c66098e9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 71929 zcmeF3Wmr_-7w=I}P+C-aXb`1429O335a}E`q@-s^B?rj?5hVr@l$7q2kcOcrun*2WA$$?`;>k?vhj)xKEK%-W ziSAw$0&sWX+k?K}1P;!VPolTYQ|nJ)>brh@6sUuM^`9ZiGBVrA7b3WGC6-MW;XX3s7ozFm=_uq$<2=JK2I^F*L)d7Y$Fx%gAfKzE1 z|Mvq-6uXT#{%5OFH=@6eND0M`-_nF67N20y@NCmRRt)u-CnKCh?q-IFH2=TIRk zvYjjI*HtkTDp;1Yy{0bir$k!uW^$A6=2Q{3TYqx?P~&@dvU7v}tpCNfuau%B4NZKu%h9_5 zH?>TBPf>0t30*bu?K0Qp1YIKG*OYR{oos0wUUPn4)hDw)c~@%^Ws@-t#?bkYD^Ytt*%QUk)S8{7DQRyPG+3y z#~dYmYDV*GYY#gPHu4HTWyDC|$1NKB+mVMm1;pF-y}ORIlgC6xF25fw4{NG( za@Xz2une@_&zZAIH148yuH${)IJwAM_Y*h20&_ zhSc+%g{_vfQQ2#mcVmC}MXvzrRjjU@_0BU{+&@YkBaM}xzr)>li6-xeL0#Z zDlhS@V#+CR>1}DR(6ca*w=K)2w9?7AMGX3UvoPTO^Zih!HyeX@(U+BtS3mPh`^1*L z8ZV4{Tigg$n9?|p zx}W^rVb^D?_QnmBj#->(M-l$g0tsg$h5p*akHCHDm8e69$=_Z_7qeT5#b{&ExshF; z!)IH6?mZvQcuDh|=7CIM|IFvDs>Nt4;6df#=ItFHG*b^1Rf`SBn6`uA4T4i1sx@a1 zaAS&SB@&3ISi;pXqZA+;w~Ba(M}Xo)`_B2DB&bVWl|4wF@tUiT%apoWS0CA|UjAhG zNmEVqdxX(FwsGrZ$1XB;u!i!;tk15o%c#E1NL}OgQDL_-TdLMQ{a9WTEvw<1#Kz0- zaj#-7uGm0FjFw6no`uybN$3GZx&&Pl*moMwakB(Vl_}@Ot2dv+1eRbk9%Dai(4p5W z!i_nzn^Kqa#Kx9{ymP>`>Kam)GVZnw@^7XiF~&LBYauQ?2mV~KTzAlyw{!(edOz|e zZQ|P}{j$W`-|Cv~Qh%)7smb%b+;1=MQ{k%4%&%JbpdCZMZj4@7oDv5HBeHH8z zM(dgFecnA3&@YXvO+Datv0YhsHAqyr^Rsf+0OY&ZWDw(Dl2CW9;5>$sy15jKE+{Un znOHytT)T}RdZA&&lBa)inNT;QhR(*(qAQ;b+Qy66nzzIaORB|!o*aa3O3fT@_VAiT<#>-bLvs@SmNRf`?Z%o~q~+!&QoW2D*SL)2(>^A8Hr*k2r@j3*I<0X7 z(JAhmDpKlfDgFf&EF*-O-pSl45#oRLB9@Vrk~BX072{VW5@*4|5Z?k|_Li$H8K&Zi 
z>~g%`5Puy8#;2Uu#$i5RidW})_Ot9Y%jb@VuftznJTsD^1uug0H{^H_qT5Ukl7;1? z27&@O2wA!HCi^_&tARvWBGJsa>cwdBf^O`-AkL3+{+Wc71n8H-!AK{fdj%o0br~{B zWJmKBcs-a44FmG{jYj6`c!d78swdqQLe(@8jAU8aAXoRA{5s zi`5Lip0qFg$$YclPFHC04FWH6L8M7c^nD(HOt}w5%XTN(7wF#umqb_L7Y>l0O<+&_ z$Z9BQ(Nvev-qh@C>d?zU*;H6!Z+n~56DkWif7F;IyLT3qEE|4Jp+1&8djV`4O3deC zap0ItJapvNR{5Ao0uKX+(j#JqLHBlAFD=)w)4MPQEd&zSR;uD`GAS8k7)e!U6_%pe zPJjTE$4s=pM0!LEvyZp2)P2c0s7O3J+K*klnDCv+Ri4f9Aeb?F5MhRN-nNDK0pVJc z1rj51tjB^1xLl;OuN_1PA~iDk>UIu?xrJ=wY&?wJ2jsPdL7HM2wSCDepd-tv>jusL_{5 zU=``(lo-_v0`Il`g4*dy(KM{F)3o1uS0C(ehGYA_TttVKF zolL&K;?Xq@q{m(eh3|VazJ(jt@xb%Xj_|!1vSqMhmCq0|-g33gYJsQgl~sn@5p(i< zFu5tD4bCH8hYb$Gb-A4-MLywK=py%UcqC)kLdbGDb*hmK{uRNl*n{p+X(ZD$LkB;K zN2W75sz?mWRbB)hr*Wc$lY*{LZvRs+sRtvuA0aOpcu0u(F5IBt_mDESLd) z&0)xU5dl%2enZNu4_PGh`kv4ME@KkAML09l(8{OFHq!l3a!+{i-^}Bm9pi)b_+96M z#1U|y;ttdfrm(mynfcTli6|0+tw_uH$Nm7W8b_kY+^u5TVH7(KhecNzG2HHW` zlpv!ND~z*W7;t6J@~7kJ?RI+_vpaQlCX<@ZBpOQ-P48^J?6rp4Q0)(o?SN6ucaxU% z>6WC?u`lG*rJi33m4uI@7sQ})koAYHFwuxVEm9(;E{kYGd@+)L18j$y`)B1m#jG4);x65t-$zrHM8X+_l?VgFwUiG^ zXk|j=2qVGxQ*Q~I28o=D+?ObM75ZYCk#@5NRz|W^w|EsEAPnR*L~w6+TznvTAsbDq zKC5=N&IQIPmKQ<@a@R8Sfx8;=!a#LNXr?^UgM7$#5#iTb@3m|Y18bs*x81gQAe-f_ z&Xq4ZS^al1$weJN%&nrtT(HMtOzN#~gpYKxcx9uR9?C_Q`uhyLh9TE?I!eW0S+Flr z8RQqiuTGt|5H@FwOTnwNW^K2c#!JKp3%J{4X-#equR?R1kZtvB==_Gf!iJjYMpq)& zl@J~x%R0)Z@qZsWMDgziNZK4S&G53$r^Ap$AEXLWWX89X>40LBiZ2P9P#kXJ;~B-; zG{*k<4Yo`sCpD7^9wx4v>_`*4N{CKj%Brh@Kl+x#;rcY@m_#l(Q4O5Y;8PaUGpH*8 z{}y}C68{UCLOM9`vE1XFAGpn{Eb>TeJ0;;=dvOQswN~-y%RxhluO>J7EdCo&!a^q5 zAGx?0Iaxo?5|N(E+*s=o4AL%%`d^53xE+isFKLjA_Z#|c{Y)czeE8=UF}Eaz1up37 z1NmK2e@i2}_2rsGK7{cxA8zacTsOM^(PG)OYq$1ohq}d(rqfQ*8rx@E76l@o*EcyU zr(aJArUinT4#7hmZkLmIsS^oYbu$a;{0>Z*%<<|TCaI;*lX(zp%6wmWK19w421U^9 z=Qp#&n#7#^CV58I$g;&N*{VL&nE_^mzhuw=f87`FkRc8>g*Np=4;UH_!pdNF<%N#h^bv4(uN&7S$!4;$ zmV+Mgyt#DEB{P-ty@og3TZ|IT>VYYxA>czTVq#(A5$eXU>7Jma`*~k0!G{VjNME%| zI&?hiY1w*}StkaIT_-szmOUJRcn=b$$G9BNh$u=nX1^z3syVa}4|}rRh22pB0S`3n 
zXT)rp1$fB$wFykdqE5evcXfBG^@{$@o#g_a19O7{t{gjgTu#q92n{rjXus^`8%&a7 zVU2MggT})kEg3XN^4|1#I_l^0*Hs`fW#@{q<5vo2N)i6=U*VhRn7Dz|8ScwGYE=`S zdh^XLqC)M~wj3y+w$w4cOvsVY(MRYY+mIp%``)hmC6@p(&jZ@!Ng=z@3FGALhTkG5 znE@XiH>t*&C3}Tx?1j4&sp}`W{@&YlATuhw={9%Hsm(m~J_LITrtom>!G5F6i%WE{ zo*`&bC}&IMt1x$A7M6M1`Zme0iz(J(OmU^etUv%1O?Rp1<+oH?Mf;B#E9<}cJ-X7~ z{pa8#X4v1v5c5qjxwzH8f5oKJvj6|U|7}P>dq9H7|M&U$M;teNQeqAziUWILNAv-8 zkiky<$*e1oW3yngya8v^UGKH@ur8n7df{K!n6J_(BWrJ=eNhr8A2C@nm*n5abx2-! zeZIN8Q@dM`Hg#dhh@fy~hV9(#D2=a0vH$&CDNA1NwQOL$K&L1XP-6~CBRRqUId=Ld z$X^w?-2{}v*I2yM5)9~yO=afLN5G8*G5>RkEA4ZBSj9~Prh-)uIM=M+Z=>!8DnEf& z(?Hp}93?!fbUXF*_JZl1?J@@WxK*dvB~cdO^S$v!01N7P7E?hzSO8M&in_CvWZL=b zi(PK7X_xlrod=lO0F(G@pH8dMCA>E+=`NyAaa9(7SLb>TTWO;cXJmmO_wX#lm^^f%wd0|e)O&uQ~Oy}&UogT+%T~8Z%vZXX`R>? z*6Tz|p0AaRnfQ$CAD;!{b7g0qVG3_WS`RJQ(TFatN(}wXK-OkqV*{wIjPi((&|Fe! zNk0gvkKEmv&qiCesB?*J-^p> z7OyU8XG84(+bVAF0`P#S*5*rPWo60)D=j9}`2;nrB4)ydHO&{zcQmZ2`%-K@b2&3! zEw@4@S50I-*mb*n%v0qnZkzq&`u1*pdSzYx_j2ARu&Fb|%(H^3Z#_IM1S1i2CVx7F zpLldf5X`xDbIwf0wVw$$o~csW=NESYY(eY#`lK;2?dEcI8K_YmX;m!tQg?l`T!VyA z4r3#su)RL<-QoR1oR%THgWG!*Yu!L)HlyKJZx-88s@VjLvYz!9t4}XzT7DG(oTSMr zoaNJL;DFhyhg+P^&UUiYlN(Xs!kUfT<$&wefG&s9ULETcmo~m$fuWC%l`|D;zZ#xC zE39*!00_ZUBJR@w04H<-SVYe`7Z{J~mx3c&RzMxe$%cfQpKX<*CMR(9)g+FF`IbLw zj%u2}06tx@Dsihfw{IOJk3j%7)O|eV4557vRO+rQP6f(L2ff0O372--&DH+j$Y^*@ z-IZjggeM#YP^8=sS8LX>sS4uHFaL*c)d!*!T; zPtK!b1mA6cNC!8ydeFGsT%GKeSZS_e>fs)VlW8j;+s|H~TpgG79%<2xbbH%YB0bLQ zFiFc0Q$xA~3|KuFpgjq-pA~Nrp)iwv8DKPqy$3M?ml=6dK|#{2DEOjnMwCvc9!0mZ zIk3n%fU3i%GCp0jir^78RXV8mR`l+G_0ZdEA;MMu)Jbn5_O$_O7zTiK10=-+>9bS4 z5hbx(cd)^}0o1|HHni60v(>ap?xUv>Ts5|1oldV>)B7~(o34JU%ytktBk&c{CO-q=j%pk5y`vZ3?Wk& zLkOndrg;xw6_>L%;RcO{i<#X|-#;1Tddp7qgWB~iV+;T?Vn4397maxy2@Z>L_*_+{ zg#3G`5&z@TG;N!B`^gc1L$>MF%L5em0ZJ<1rw;%U@@jGnC-rSowO3NTX8DF*@+Sz6 z8I?ZhWPSgGy6|ZXr{br&jR^oTylE_BPvyc87?x7>{2N@Fx6iM*ZUEi_AtozO^f#lP zybW8Y-z5^?dUa3a>N!Kzg3P5?NfBjgADk3;0QgZQ1HiS;cZ6!>7cgxaAn82 z>Wz&ZD7`RK-*)fBhBb%dq&)1sqM!!eYuv4{4^VyrAtj>SD9teNm|q5W2} 
zv5<834Jzll{f%Ijw(j0#>u)Ho+O5~CD)4yFg%ZSb!m9RhF+J)rp$r>-IQ56m)cD+W zZ#gR$K0F(xPX4U2^6mF_yVSLdF%B?;UcZh53%|!=m$yfH=g zwEbyb{_A^*d>HI?Hd@)BO(eGVwMOb`L=b&eU6v%%`;^zmTD-$1Pi^N4$;Bh^+%(5nF;kW&PxV0Sx4U}7|XDCBWi$BjJ!Ra$%a#W zUVLMv%wcl4u8EQcD^MWs0tjL`3{=p^?T}1{i4K@g0m1#dU%*TaYWMDrK6eR-aL6+Ilsj9=Np=TfLn1{e8QTxpFvY$^`WG$u0>E~4 zoNr%wlqLzZ%Gn6F4%toEsZkl1Et0}LQ-0LM1Es6pNXT=;w- zbQ{k{YSu`y-r|Q)D_@-N5tJ^z7t|q@Xo`9YNuui>3%0w)-ELEY$=FYEhCG z?l7dPfBbWQyW9H+Mi)+aTGRb+o?>kj>hlNX5#8O`qZ@ z5%v(938ki|mZ-)9gb~$LIo6zZo>r!`s`TMytY3F=sy&hppkX3dB9SE9zg7Ie9csQ# zk{%2bU82$oabvjJ4VqMJJKP-LH?9CRJOX#l`_2Gz8cL&<;rc+E{hY;6JSSWLM=kWS&dzPUJnMoVmjT=xO?ox z{4siiP7?VUyL%+fGx8EC_zmuTmus?_Ab;>X#n!IM-wb!%Ke@C*S)MobB)D zg++gNHX_W4*-uD+WVg-J^iQ~T21$&5?1Hs;Z*1%jflmH~+^Fns^LbW}B0ClJrHR&I zWKExh`3*h2osL#iG#=7iduw~cmW66Ff_~cc}3%|)SFIV?@tY)f(5GfD(E(c4gR_*4dR{SWe zMbl!hq2%inM;eC>twyn}A$V?GU4IwjPvqqyD;^J<)uH0)Pxm8eSC7tolgEq*4s2!o zjrX*LFK!4JfA=>zCS%N%9oI1!`tj8fPbJz}K8UM7y4H{4RX?3ErJx3|^u97~P%rr+cMG$!?*-7pYtK7vQ zuK*7ZnnNp)Z~^gLbNY$XZ4*rok*0NSZdW*Jp$hRlzK5B$nQTe0%86#b4vdmU(kdJ# zW|J2$jYN6AToZXz`CS;D?~fm%sOK0?-5)jfP@O6zH!34M9|ejJhH5Q+%u695chOdL zdDSmTt%ub^dS%z(eii|_{MJBkI#?Pey3lHC)I+;{GkW-v_`}8`@2dNQ(Wk2ZxL7`{S0ZD9 z;7#sS@VY|l_C!ZjJ*9I|OsOb3Kj3_KhAH~lV!8x3jrWTJu6b@G>n4-WJeT>ekzBNGifc}b7?agEG}xtW**I?_U0`Z1q4JT9(0!w zXr2D3ZHhRT%)+8Bim<-vvteY)TT&w-9uucGWV($UFFH!u$d(R=)C>LkS$TCyHlWr0 zs(-#KyUEupBQZJc-H#ek;Ac~0Fe0ZvA)YuMfKpnh?jd>wW~hWr;BVW*sTkXzg}W~Z zz-e}~8kK1JEKsr>+d1>UWbfBkwjeA67J16jm@;}vZk=t}$UGps_e@CV^nHy4P< z)V}dOOjrsJ4K12+UcjdoQs7Yq_wCX%o*4^FnH|y_ym+Vwycj5X- z$d!-p)agF9V;~)ngh4vkh|yEN$|$BgWC@j&lvb7WC=jAZI@TthT3@@;v(+@&R04)f zmeLM0W!h0iN5VbHD@dIWYebjKoEM*(JacDI^K|0L()(JOMI?8HROlzx7*&jj-UKnM z7g8tp4-h)vN>AH0QxWb{pmx-9er3hU)tkujH;X(MhP9EAq@LdSp%y%bXvsB(l$snB zho3k=Q%N~5)ZXF-e4jcHxf&-KBQf}3>N*>Ot8c2M4Y~h7y7d#D{1G+%D)4N&UYqm=Z<~7}=LxUV@ z`;wzFvnXh1{58a}g-p~{s)M>(pgAwij^;~eIdvd$Lty@DRaIsS~DGZO+U zM1?hyc~Wq`KkkFDbr+qpu-0`k020W8dMoF&1k9j!DCIz*R<=RwIKL>&Ej$diI1249 
zO-wUG((6dbrA%qbro2Ke@V4cva25&HosT@ifPE&&;hpz`%$UBrJ*{T=S9Tv_abiR# zr8u)p@NpM96{MffXe~q~ogsANUy7sAM8c3K4j+=i{wuvpyrI1{k$LyV9v|}*O!9sr z5^l>CA87xaz0YT^$szPO_*BFD0RW0Qjyf;#dQb zDq`p%sn1P(O-+sH!5O4)2Ksgl;&B$J5RGR;wunmc2jq^~S;hfmq%RoKJd@5AeY~~< z%l7&iYdZ?-RQ)>5+Gz=2y~<(O z7B2{W=`H%!_VT_wjO--PJPQheS zaeN$Bmh`Cb)*>wbvFH{M4Ti`46O~?{=V1`4cUvqp1sh;}z@kdSM61`Y&*Py|PXO}@ z?^Y~uDSA|352R;(wFeu>ZFQDA@l%h5+_>5ftKSEWz*}kB)gJ7WxvSl*J;J z{>9rFdAriZ(U*WV#s{nH;J>^%($zNJM(zyI=AWvA_vNaAi**MPOX`{U?*D%ADfxeL z1et%7jR!Ne-<-BfbpeP$|N1aXDnaVz40Xf}K3~g7;9>g@-Ht2)h9kI316joTvfmS-mzj^z(`#=#mj(l%8ApOi}2GYzSPrJ2dKJ z>XuVTKU;(n6hKgj4(xW8s{Z+8=)?}lvXHZxU4PvIzmsV1Y5o$}U72}Dr0`8U*7X&puTco?uI|3y+5^)Br zt+dd3O>2BIa_tmLRIKm-q>Mv1v_p`@PSs);V9uE-*(&M&1Xxk3Ujm5LBR#5D7q`WJ zK+AxQqgjl(ckVcJ+h+)S&{zVU-7O;p7S?bCSh%}YlHCvX2+j(A-r8;fDlo%_ zvtH*<<%eXRgb14o+wx!s5}ASrNWL{-xWJuOh=fdUc2F$ZGHgQWvZk7E@pS-P=MOvMY@?F_zi-v`EYQ% zPh>H28N4AI*4TcP%XoVU^-WIc&JHiV(_cEPY#}200-m* z#?)6=0xZ0--!fKUmuBXWIXFBvEfC5=&`LbqjG}U_}XBj}=yCtQ79&zn`)udsOq znY6YlODrLPH9ejI1{7*Ni1J;}g4r0+PPfqo zTzc=_cM14wKT2hNe+#!UgxsVm+N@{sk&a%vw}*upU)YEZQM>weKelx7lP~Qnj~HiP z@;gu3P3mO9tR*ps(c}7#GFn&kBtewsBp6LZI{Nsazv?$c?3oRx=s^$vW;$cnz>I{& zc6#G}46k!cQxO)mBp|vhi_e{@)$78sqqE!rZdI7;-{u174LQeOvl=Lfg>1s0nkjud ziYIAVek*yrl|%p(HAo09y_0OOIkh*|cwOEtTUaNkI130fXZT^vM}~%ge6x`>SLJuL z2`Susx#qW1z51!RjVf=_K=ddtXeLU-L2aj8FX-d`$3BTsNpp#{ z#3f2-uF&+`mYQ3Pk#!c2{qac{TgbZO%YWU*{|Hp&^;}RP-+3bNXu(13goyQLv(Os& zK9*hzf|>gLowq<>%};Ix$U3CHPUx0n4_`Nj6>!f5An|1B&}T}A`02&G z=+J(|tXr?3?snp}&r3;Q=Sc+2tnGqP!X9aU$2sT1ou!1Em|80He&-j*iis}3rezGc zBvVWJW$;I$$eX@lqvlzY9bRuUN||T+n<#;pgA?l=`GoF!R_sITL^tzGb@;#-CLI+Y z4SpG~Dx`llf%4aR4D1dDYE|zDod=cKegAvfnoE{+ehmxbr?ltZa1En7@40Mhp-*=isGS4SRT z#O;Io)sIJmj(D)xDP9E_X2(Py%&MGxgOP6kHrtq*yY{RG6iNGwg5)tk zaxtpjso8wvA7*sWb7h50V#7;@b&^HeGL(VmRwU32j)1X{n!cAITGGL(K%baoLH@CR zW(7OpfbdCq4hoOec!v9YIXu!6A?wx;S?$E$JHxZ6P)lJ zb%`Ub@={4#Vep@}cIhbNb{e;y+%)WuqH2&6G4bD;Pdq(hHnaA5Ai*n<6363KKzYgy z{MUw#&KQYgaTg>c^>IFgWt8ZMt7h$+M}whP?2C%qZoNj|l8fCS*|HGx-o*C0*Z^SJ 
z=bn((8aC42KO{sPx5`SH6D~05woTI7?daO^_N}Ce_0qaMZ_F2#9+r4NfD|~%Ii?|+ zo`OrJmOG}Fdrl;lkyi-J{i^%Q7N|{=m>jCLyLy*4;vMJD7qq2XZgJY5{ z<*kfy5y!NQ7)NVEJVtAIAi|iYm{|8)NGhT{>I-fV4ZwL4u=J~905@b(C>BX@uX#I3 zGb@LuSw}^5%Y8@m7L|`)_EC4B|FDLlcF78l*`guvEc0|@#+4e6%L}y^WE@N%yO;=H z6-i?Z(h2WM3n}D;%Ga1?I+$wdS2h96q7kM{`|x>IZrZoeK-;KZmB<|P3AjcgcrHl{aw)Od=wE9ywUdSQbwnFbLDO4 zU9|YGYPw!ri=|%{zR3}Hr)HpwQ3-mCGQB?SHb>dS_dNRXY8?U3^HEVqtTXUjwQ*-( zxV1AtF&1Z`Y46RS_DKy&ZW1da7#FvJA~;pLV9AT;c?pcT*JF`E3+wFAcyx0DNaOii2N$OBx!3DkU3II|;8I3>#j+QV%R7Eun&Y3u z1;Q6(a#OzUUw<@$SGZ~oTO4#og%sWD~?y8EHz9hp^tl(^uom&13o}vmTjUMhGj)d1Wj4% z)%i8l7Cks|k5q`^@KNzwWB;YZFiN}P;=PAV@$d{;czCccyGksxiDYs-;VIx9{)y9L zgA2kX+0SraN{~N23elZXV1m$uMpDuq-uq(boZs@G9$|CL&s{ex$7V-lea@N^vP~?$ zRg`31H?6hRi%(@Dgrm?1y_~3-xh@K$iuXKWC1^>de^7g37qor}M>6dK+3%M8m* zjy_iQs3=I1Q4!?GBm)l*$}8KeC_!NiS)F)=zRBP`d{UQp+QJg!s(3iHUo4b(>fxfz z_*7wh2kb<;gRufaqvW9Hyrbv0R(^W1lKlMK;x2gi@Ov1l!wdTA2xYL?u=nuJ0KahH zsbLTeh&Gx;?98~*ntrC`z{<$aOo|!uB=|ysBCJ>-ddKag_(Idf+h7kC_cLddQhjgX z-8UMu zFgAG?yggc2h0W)wQ6stJ*{xn_izv;<46C+0X)|PP9hbIkgFt6TLl_W-*!%d+5#J(1 z!uRfrbWWwTUK_l0+!GQB#zhR>qUK}#k#f!fHJ{*k(^1s!@R%n;3KgMRryt`Pj4ub3 zB^jR&p-0u#oPSlvftA`GK*zTnxgL9ZGsz#0>QX;M%kI((!jWKU*mWF(vIl@Eu0P{yQ*1dPwDogTQ2%Aja(ILEU z7u4S5Ao~vgw&dn{)CFAVvS}=bauc6j)4}3euswWm$#_P6?;L- z22cm0$E*>g>e$>W;87MK%LogSVpiDQ)Y7vLkkUUY*dX;;U23}GS5KWwX$F?QI7u3w z!i;EWV&UWTFbRhjLQKKE5vD={LKhpljRthWCG8kv0v)-xDBO|Bq2))@P#C@8sl@cw58Y5gw5$*_5-k$L`@pBu5OV zZN%(Eh75*X45vFRt=W{byo?>CLV;fgx}jBVcelEJoUgxSx|=FerEeI>%$O~Kuz}I! 
zf)PU}Z-wX^9ITxvAq+xtRL1%I?(@G#b?Ph~W{Ulv*r(EByd(1}vv0k9Hx1`O#o9f%~N$v9K!4B=2pB~Ul{V2ul8FJy2iSxCi#T{hF<>{7W`pY zi}OVb{x0uKW&6T09OXu~kp*Qcy=+yaKb3;uiC_$5j3WPq&(@JTB*pzzu~R82mcXQp z6?RNHf6Q-GM>wxv_6K%2p5Kli+65g3aWj@5QGengffuVLnc&gZU%vbt)j`jY2^t8U z6)L8$mtkw=VtNWM^fs~(#?dg|Z~m$(14>Tm?PWT(ZCUws-GTf)*IGonR|9PYElrFA zH0Y>;Vaj%Sx&BASb@m)HN3w2NRu=!@P) z=zcfDr|9!dZboNSICKpdNk1rJb#HtFms-4)%3fXFJjxA%4z7uj`0|ZExhRH&yy7|5 zMEm?LQftklXS@Ci=ECEqL6dA10xMl#cDE>&Cd|STXVZgI*DS9H2A#koq)21j*RW|7 zhdeGzgCt+@teMwAe9JH2qlW{h0-dGRR7;f7j&ZAd{_PBUsVm&#uy58xwnOnOK9LRU z85eq#C&l(?@%(X)bw@%4_yj^?#J(5(P(gG_zX5RXNMTr=AN5rdIEZlKTi8nm`7Th0s<65o`Ssy zA$$Zp(f+BsYR)i@^M!EENt3ehw26q4UjO2(-^i>1kFjA+slq(M_haHjjPS=O6)1kW z^cXZhiia{QTh^aBK3XW9g2~bQ@+n>GXZg9z^^?8xoEj|Zy*N=C9PrFrw>73z5;#0( zSWy@elnicV-~{iW*Mh`g7s=@~EmT`;4S3?hg{6%y__wVsgu{}~z~MZ*3kE&p%QQ96 zuZs=#m4sXAyE3@Y14Jd9IsxPsd6jvmvmelhJEPCccI2eIUt~ba?27rb7PHvOR|_Sp z*&^LYs@JGve)#-8fA{sbSUL^k&xTWdp)n>%kH$fZ*P>+Iio>CP0NC$=On-{SR?u@p z#qnm3lk5x90rxpdpR<}|>+ z-FXvf&S{^FJc|CXmMAtp0}Y$$W$=tn`)YAR5$0*&!!cr3&_4%NOhx=i8|;Z+F=Bx`C03vYh1f~GHck9UaL84YR=5M6rqaI%w3h6PRA+}9poO2butiWYI4ld z)G|{*X1U{BYV?4$v&D*=;qLtVA2ZmF=<&Lh+GM!Rm#`R{gv6N25@nok1v_mndfyyZ zkp=C&b3H>tb#BIUlPAVr@CL=}lf*nLo?SSpur z(o^|g;>IE8AtkzQf}J?w%(JnSqioDL8)epY zmR2w{IBxairsa=w&GxlxyDTa(4Czw}KHZ|ucxd9kOgD1+pHsreAOZE@lWDhQK&{w> zv!SNv0%~sxm+1-{UQ3>yTnJu!X$7`mQ~&FBn9o^b=X4={<-4&qA1<$U0}9io*8hF6 zRya=dIe?7+s~<4m4gIfRAoqXNX4Z5&V1I|FA#gfgK^3^hA$1+VJeG5^kW_!dy2vp9 z3k7Kr+eGr@#mFCe7z&O(Mf~9^9?W)rrzeap&9`J4@Zfp`CPDw+PvjH&USjax^O@w( zkEPk0>se`B0P~x}!ja&0n3uZ4SP|4LfjO7(<=dMPHjLq$gQ*Vy}WK4KdQ+syx zBPkoej9$X_nsCJz{yE{_Dp?!>Av)6i-k1_)F&g7cc3BG!V8#4a;0FWFY2|elzOoMf zop?U;W#DdA(J2&X|MsJED+3K1iVEy#jya%k)B+$k;XU`Q7GkOVw2^JMEHRG{X4<>;Kx-1Czp8>ZLquPD(6KIn;ltS)>4 zpg>~`t@6&O_-*#`9OJ#g2w0x3oB<=+x+CbBE7x$o!TLHhU{0-!$0a!{AG5p0D3J)| z^9QhFds4vLt1zSKJ@pJ%aqsJP^;?-YZMG4wXK8{oBiD?r!fxnZ;_@yXZWPE)|fZ$AM5`VB0* zMZiPK4m5y!T{*UU)(5@zQeWeQINKp+8%C+UyXOoiznYpZonh>(lz_pb8xV3#Fp!dO 
zJv06jMgvQDbu2%SrLCj?{Uvz6)x%#Euv%IH&wf_}f?EUR+2zO1pY*NS0SB;ZJ%cRG zV}GEFGO?32Io}uX?Q_pv%%1#h5>MD^({=*-Yo7qZk=qx~NxRJ`kZT7+*l@`T^F_`k zFrwQpobM0*)5Y5@g)tP%lN*O!@AFbmrodL}YPcW)LH$Z80$F6~;Qu9~#sGRfn z5#KcJ1e6!ZrpcR}?@y&00aa>K65UFsURHYicO-AuU^ zh0bj28})!j^^}*#&V5+DQ%*niWTM;}s+f6;anZ(xkwZ2WV|Zl&Prg?3E}h+b`HcEp zfRM&Ek>y(KzCWSkz_bFI z+?%ZcSMAT>7{x;X!*c>~MF-$(7By&kzf^CfWk>&=pDAS+kNW1tFLXSxxGVdNrJ@di zxuxyg1T05GiAG0_IiS5kPsi9}gv*BmXl}Oxa?p)8UKm6B@>TmYPl$8<(FWsF?k@q? zXWanK92J~>JcU7@KL+NL=Ps>9Z!QluPxm;U)V2_byRY|u#B|X50<4@nE6v}#_of-2 z`y=q;fYR9*j5X~_HNUA|S$&-;32b-+gu$bl#66;^3zuwFd|tOdlrY(`9RE@ zSsu_;qOs=$--wo`w)1ooE}fBw7|?LhjgbQZZNvAw_hkX#BsD0ErwMu%VR;R_fTG~& zHsd$Hb0c7l3hrWFaZn!!@5Q)ZalhIrQWNW7i0AvH+VF~)AjQQE`jp+;eEv4CY!3|s%6xY2x2ns${R ztq#W~`xZ`5Rf~DSL*6u%eZEFFshk_25O5eemTRlJ1=+R_g>*CmEBzBj63g2mITj!$ zk~r)?Pjw96-{DG;sjuB_@PPq>aBV>`i@M8raF$tN{qe?&ljH=K2fga~A=Wlc#nIu3(vW?Y@2VEU$fW{)JC8jLIc-&=>Uceg_xQ!N5 z<*`(aEE61Q{lSwZMI^Y7LDPF2ds>D#?vHZS{e9GRD~>Q@?#)uw%7FFTz5 zJGsHlpUZ#Kkn6ad&WGvN)AINOyT+)u7^wyO%ioMkr4_xjVI?GPUGuM@uM7`I%#|xw zl+)Tx(l}GjH>`Kn%G&*R#1F>bH#KjVyufVN&I{}8gYEOpq$HSkk8Dtl4aK*T0v&0~ zKnJo~rCJh+PxaA+zXSK7WFlMCJP&E88F!0uY$G6zR-o~HZar{HTexho+PXsfbgh?W zMMK~Dz0#-Iq@Fuf_yU*?>}Mu_Q8r7KDBt0g{3z1{_b>&}y(Chm=z zaRL#=^Ml#p+~taJ`H&K6iT!~hK?)z;tS)YmmBBcGrxdNMTraKu)wZN_9Ea@fk>La|F z99O_MUN9?nr76W2jU^oZBo+E$<|S*5^tTvl{An!sPjrEgnF|mq@|KOpkdh)Uxru&8 z2fK>KZj%JbVfEu3&5M>QieW(*JLoZ6U9CflzuNA(SkeAA5nzs7!7h|qh^ZSKQVowy zDw<@LtkqcW`*i*z=6?|PoWUymL0 zUI4K)?BprsL!}y0oL%Hy*VoJLjOH`@ja>xmBFv4R02-sq%F@g)B||fkCBeX zrP=QIO{JOL{-8d3yyA3EURIKZ!be0rE3Jp;%7FapW0T`&>A{WW2oev2U(kYjTr}2& z$dD|O&~$opVp3AdZxHG0~y zm)B$1*Z0!ysx#-+;5vs_l(#Ozvd7D+B>72~`+*N|C_YGIoMF7#=R3tB&9SKfmkd(u zZJh9!@#{I(MN=22ISy|5Ecnuu2gcN|=}3@@`DsBVS65Vvo^JNbPk?R zqE3uNF^R-6ZT%)v9#<5ixkqDEE&O07KENfS-Dvv^hxo1$5-^zF@LQceh^DY*;=vro z*wBWAFRFAfrH?$@YP95C8pl>mKhIB>?E6wA1SEK~m-t~S@t8g;q4EdqLUKWtm~q+j_zXQ*b_?Hw_Cm?LSM~NwE?&Iynrk;8X~AAoObLew-bl9K zmY4i#FCE3=u9l=Lowz8om`wRnES>Yha)afX+d930$88R;FSkPWBc(SE18w26mqQNA 
zsP{jW2i|V3`I%G6kiz-AnIpS8@!=$KEAcma^B%4K@I^|;b3|3ckCzQ1^}z8{Sp>Sr z5~l8r<7P9q|HztUaPD97^b~y+P1ivsLakv- zvwL_jE04;mz1zFWS+|z0#gdiF$6zAm29}sHS&+Qq^88Rx;nAu!VlFx`k)8-@zCzPV`}#XO2dC zif!--SVY$eyD+&zaE%5iJGLCm>~A$iM`%vNYeUQ;GO??CjVqw(^UZpDQ)B(KW;y9A z)H|di_I0XreVms3xYTUeFkE4UNWl%GG_EkGIS?z6gjlO&LdkcVG-Dq7b1Yr)!X+i>y>F&YGsgQ;94-X(U|pn zy;fg`?@OK(xqEB{W+B=h8;X$PCcT0m=m%8pE1|}T6LD9Dj5XFDq@~EC9IsiVdY0E# zJVd27A3_wUn$Y<{<2fNKtsRRN(;Hm(kZwlAG{x81wk>n$7^Bimn796*c=d&)F0EVM zm*p~!XV90T{n#|1o*$wCZ-t4=pjC+-?nDX6lr;HANL0KF@Jmvo@mwdoYRcSseVxZR z7G~R|B(8Un$$}t&;i;8%Y&{(=myJ6;7-lS{bRm8E3r7$oBasXx0ZFzO;D8Wd-+mG> zer^01S-SF++NqVu<*s}U<4eCkwy`jOpUPx>QPXegKTq17Q^SL^bkNPi9HcroV{P1pfpX1fRO79C*P0UE>Nm!|-!se-5TB5eNPm+bH zrKqds7-mBTEd!+Hh(qGH}k&oDcLf{jT;p89euhtE-Cv%;%rZh{bg zB`Gt0)`mFyU!b2!{#8ck67DQnjVC&Bi!q(8y{amjp8F9q5a{RKJ=FQLMYs)QJ$@vP z()T;PpO3%G5vWg1m#2lZbK3s?nwd!{#D9SihSK!l!b4d+?_9t8t|};byfLLSm(=35 z)1Q*KJh@{I?p9MJk7jpb#V|UA`+{CZ%_|#0CSqba$D^kv2q*lr&~3+S?!P}riT(1T z(kO_P%yc+w=QCneI%&-@nr2bP^-!3vTr*nGv;Gvgn)!Ir*+s-5RD@p80`>Z_PcY3n zKO_}cJdXhz15)Fc=ePxohn-QYXNm@0%+r&)wSAV^knI4sHyzIF@g!1K6jo%skqo`_ zf%2YYkpaA81`>tq1|-7rD9q-fs^SJ#@7T{bU|H|WI~l=gzG5p+PBlc1cT_h1?*(z2 zvJ^6*9=TwFh?YrUII*Ms)^-0RU4c7(?g@V*6Zx0k^1;&C;>Z+!-1WMN*sS@wR88FB zhE%)IA)AeS4f1MC>P;2*FB!%YAs#DH?KWY8ovDm8T>NrT@YA|Ghxv77d2vt68=f(E zHghPZR2dRyd<`_hzyo@Q-%XYm6jv7Bzk?rE`RG|Kh<_f;T$|_2%gBhdT;A|m5(t2^ zDo?&#bx|F-W8g@T&mnzK@G5UHpG> z5{KNs8Fqo>+nWyq<8B^*26lh8E?Y`>5{}y^d}davw5$!Mx0xDd_SSM2E%{VkgRQFm zK|H}hJ21rVu{+ZGYap{8Zs3wAXE!lb;pzv%_6=9V-U>7D&Np~Z#{P9&uRb*4Kyy8U z8SOux8Pt*6Sw6Qy&~}85&Ab9@6#+rZsa;Cjl_%oZDo)mS8-gKS z46gTuRP?=T-U`gn+nc|LEnES37CQW;CI5`#!iuB?Y-k3gn>KF+oOi#c9)|?In`nXv zr8c==kI3Z?hoj}0nt;sc<8NsA*E*i~QsY{R*8`(a8vp$lyF^Ej%y)HyPWG?7Nb_$0 zERONs%XRQ6bTsrK>-GOJZvU?|;s4WA{r|ve{O`ZnajrvgumPk?Ft2DWY?PYWhWt>nkEX!EE8@TM;L7L0Sf2-etPwfKwj+be|}W+IYIBxW_$5g2$Xsb z7n`uQNQIYu`==i0yxI+$-kY8lV7v2!&Dyns?Y(jxPoWzgIN@)CB)4)Py5FsM9XLdd 
z3*1#YW0pZ8|D&4N_^aWh&JH05#+$1+y_EV1CpvHP^&FAP^D_=Zd`hI1^(P*cDH_;B;TK!*2RZn_4gd{CcvLHG(CjeSUFtgx^1oFSM2_bCuwSiUr< z{oB`IyxI?LOJzvaxQ2qbkJ@t+z5cs6F|KGxDerp3m-n3XR%V{P1saHf*S^2C<|=Wh zJF*OP673dN#3p$}uSG*8ebzRQxbzcZl)(a1A%*0hkN%wi`l$;UP9N3KyiyQ)WsE2P z22-d5?$dY^{HpFfDx5$%YxEJM0@0cuL;in@x}&8ZUnavos7m4ZuMm*`c`Q`>yaNXa zltaViK$p*l1l}As2}nkf8puVVh;Tsmy&VO^sUpGS{UfuBW} zGhTdf${C@c%2mMc7b?t!KH04IO$^qzzT{9+L?w6n=4ijX-l&Nm5pD{c^50fgzzWVuhaS~G0tW*BnmqOxEMtpkW- z3}g5*5SCx!RCJ_k3L5VqULy4fR<-F#sc`GO0NRIUFmidy1RmQTXLYtw?hGAPny#!s z**7w_oMnL&)T)}gv;3cp|5)_g8_iWRF)TNs+*aU@Dv_T6D%I_$`(2!7RrBt7i@y8g zR<>iLIH8hecT8Uk~01S%mheef3-o! zS{zr4&oZP%78Evga(y&zX9i7=*ONaP{vguo_wpR#fm~NktHaIm@PXJ@t{mf{!3JO* z5e^6clVl~*T+>)H0=R4|Afuw+kq7d=D`@{ypit(~+VQe9CuSc4sBPYYj;tv=NoC@N zhjIP9-U-1z;OS9@Yt;fh?#Z4e@zF><6th_6w`=N(*_B5jCyA9&|G8&q&mYutqu;Xp zBSW2kJ!t&#TjR9(+N)*M>B+YFm8(K6Gmf1 zjpMAyC8Q2p|JSgYv?ZqHmEnZLMG}mrG6u@8z;!Dw3l*B|zkKek8+0&v@m(Uz>|W-p zlG$ph0INUD%v(-er+|@YuI>v#qd<2?Pf?}WZE)(Mzpi1r3xt09`K6dx=@+y(aEtFL z+kM@ByYu=-S83l@J>59d`^UnoAOz9(AnmiU&^69qu(^EEeJHZfB+QwiNO;G0Io*9k z)|zWKQJJ_(^|Nn)#~&+q@Bit`h6C=@J6aNsyXT}Nngfp=gISzZBZ3pyUX_uiM-%l> zc3@8PH#R!O2j@0_t-dyCOL>AiFC(5c@ySGBfM{kFD(g)wbpMUUw{5u7zU$3<_%ymt{Th zs9yh_sb>Xkq=MMMR~`#M8w{w?6bD|Xyx=$lkXpOIN;$k%1#y|7i(XxTpZ zJArzbVHAbev8yA>)l>5Fxoy~rRa7581YP7}2EdrDGhfxM&qB3EWmQ|1MH@^SiF@dE z)-}9un(u+_V#d@^B4ZgN3-0u2IU^B@4|z-8ut@3zIYyaM%&puOp}IpeXot}Skx}yj zM%kDnss`GHkm$96ht=!_SBEpfU$;SGm+{RocD&2c_(P#Mi>h~Pk24Mxjykr{i56!& z?kjX>I|miKU0gzSMqTA=b_yAG(d45~oYk*20SGt|`4DJ!N&MQI4F$yCkC-|tL8%*P zcJJM%J)VGT5l=uV@o^orPZj|VS^#9kv~FCzb*o7q(A4O0v&P#BpHK4W5Kp&(HpeuV zcr`D%HjjBA&(^<)=JL?Djs3k(6j(-L#t(Z&>?_g|-qgeN>jFnL?f9_SL_q%XI8( zwp)i~3s(p$Kc_PAyJO(x0EHFhqI8_%3RcF!(rJzxhqV_AeStQ<+JFaL$sgvQ*!`>O zTi6PL`Wp|H+4|4-N^E|-dnenv`q7{Ay9R}a^z%Q15U=V9_y(GyL~dH|iw{xE_ffu{ z@|z3Dh9c$UY74idx(Eo7?;`uF3$uIRVWDZ zCn?7@|B2IjvNG`&Q2}@+4kKkH<11B-g3$7i4Biv*3WAG8ma z1V4SvE8?!LU9rz62Z+w5=X11JO?e5skC~~i9?AE70}7XvBacTK+(AU$dEDr8cF=)% 
zoW5l?el8bqBxkFF-1C(aWKKVSw5Q~*fD&+nQh!XK4(SB%D=qQv7mWPVP}NDX%^}>;&(HnMDf{hmyPgL%T0?&~FjVw`<4CBIbaFKoA5{Eg}@-(0o{Hx9(F42ce~f=H3rv()>s5<7Qy z6f!`}W6!EV!D|Kz=S1|y{_V5+k@DPx*fgCW>}0|Nx|u})cbEuH?#J{}6+OUZ>LM5I z*2FM>s8|C5&nz|YIJ-WH0qc4e#4FD;yam}ssK%C;!UB63C>pGgU-VQ?OB47(PtO9{ z-yL`(vOvuD^wG_{8zU`G#CY7G9d#!Z4m@E<&A3P56)830P2z1ds^h+BxIC`B4!T#% zDZ0skaa|e?y84TN6=#ms$eo4#!4ckyJbOIq>naYXSy|P5a%7zt*$HJLhqa~#ul+!c zZQ*Z+x#CY|&V7tN_l%CY=Y|!k9NI_e14GD~M6*7dt@n_6`I1mgK;5>{RNW8h{dZ3W z%>{ox+?8(qZc{#%Vdn6NJBX+I2XtNGxSH;VLORXM>StT8k@$>^xF|V%&AN9Jv&A>M zbCZc*?V^e@pyd@$o`IMP9d7ah+ti`UX~fR&F4NyM0ZAM_$@zC>9Q;M?oO>w|reEWA zzK8h*z63|{gZ`^y3&cn-U=GLtS?uA|_eoqH^1Y=qJZLd7|IluAkVPKG`~-n+-Bbf^ z27T*KP~9^eXgCe}uAe-Z_;Vn*m7ZN;OQ#sdPD`evbBCv`5zb)*l{!8k!G|=eDYa?b zswT$d(*y%`!KF5>0EhFO3a&-9X0nQRJl6hSkB0c-?ly zBh?;X+wTlatgixV^iFaMZSzWXjGR3%X@7o`M&BdtD5eBG_X$^{OH~yG5vjayf8-WD zD!t4oWSdLyqesj0sl7jiSPp#6zZA=7a`dPY)D7R&=e!wGZ$?QnA7LHVglkzl14nEW zXAmvmg^z(?l8I0cz7-I2eJkeOo4%3-I)5xLlX%gWERzQo5CF*o=s4|v9Y8bcw5%sT zj%8v>64rQ$+n%bhYVR$s7y~224|~cr(08}QCfe34h^4YZ3;(Vi&41$BiTOcbFt$T*gLMuzRu7C z2y#BFz64~W;C}Y4tfb7`&Dfs_7*H&*@*JexRlf zL){U(6qefNA99shtX2Y!v-_>{!ZdJIrf;7{h2?1-h>wmW`o(?j0^1A|2Cgd48k>vC z)Uco{9tsTS6mW1&ZUbMt^4`MhHli=A%tV>i7mK4GrCKEDtttn9Nl_EKPv#TdBmcl* zK~I86V~5_r?&&8g-gK8|&?pCt-j6=*b#9CcFQvplsu?O$v{%B_l&TV>&sT|>gT&Zo z+3a(`ga0`kph#U~&8ZqKAE z@40hJ(PWZ~l5e9+48hoQvOy{Q5Oq#wGiQ-U!E#CLdZhn(*R=OrJ)Jn)8C?;T`g6FaR1DQ!Y_IA)P;)KywR`7>P~015EkRBcH9J&862)V z*Nr?56lqR>V4hj{Y)X8e}x!sZ|m;XT}O zs`=+>{9>SxmnrK(&t<4AzB&$ex^8(?j__{IWA!oo+Q+jrwu#<~_HXxV>i#?s`hH|< zU_pm6O?##>AMSA{EPW*_HNTf2ps=u?B{Of=cQo|vu+>of3%3X(jj#*OsYT#^*K;Kw ztlOwtby$6LrCQ4tEWszkrBhE`yd|8z*z&vNN*%Ge`abxou3D~If&7uRzgHieUPHA?*HMvraiq?;FdWz5+{oY6fjT@oXPWx-6 z@|~7!rWeCg|4b6jy|8R~f}m{^u$%v3f`83=%Bt~P+;Q|^VZWvL6yZW1J#gn2wI^Xp z34$Y1VlISZ`xL?_cu>Uk+_$B+GiLHHa)#1r6ESLRM9w@WsH?H>U4|VYCGjL_ivYv3 z&(Yf7T-<=_lLK3|wF?fK@DW(B(cm{^*<>9{ubPsQ)(k>M%}0&DBuP1zwjd%fU`^30 zX-h4WM|JuR_BBllS8wbq#|Ia}wuej%ZRN@==^yfNTRMprR?2pEH9biSn`L`=+Qt)M z 
zrWI%WwwvcJmT)c4m!ouBd2hAn95&<7@#2-83$f-k0sh9u`aoqS;y7I?{EiOrnBD#d z^XkzST}-Q_AErXcC0zV@ZK%R5Yv*V0P_0Nhh2)<%DhxcY@$!>*$21qWR9q(FdJ$1f znD1uZ+35H&?|Age1ZL`SmhYpCY{PS&NAc>$OXP1pk~o-8jCo~l-Y&-hG|0eS`7cE1 zl#%t^5qS0FjAFc+kP4@-3r4cbfRZP*DWQ|zEfNVo!PkY@_Ogau-3cjk54WA6H$JX% zYt7ZR?t0N7mSRuBU74NNK3Tn8|Y| zDIPVllCTnl4U3ps^>M_7noKkLf2m@Qrl2=lNV+)8-SA_j>_LvCK6?YMnotxYz_ZIo6%jtb-owK86YY zzgskBT;ZR|;raBkdg-XcX~I%B5hCiAR^j@m#%A*|g^>uZq4fFk)$$(EHNz7lhgS0+ zY*?7ZI52FjAvM`XQGKF7$kAcU>vec1nE?YP|OwRxMfl?FgJnYO>&V3;@q~7?*gQvYgrnYg00nKg!UGZ=IHwHH^FRp)>@B%z zYxoyjsc8Z@qNV+n*BMWV&^raN!DwD@3o;U`c*RMlgqlzOZB&aI52_Ob6h zyRM5a)jf)Y`Z3-9I?`+W;@X#BR`OBe+Ofl>?4VY38JkOC2NF5bM7Mkc@XJE_)zoVOf z?kx1IjWeKmo->$?$7>yTzRILS!+(iduc|41k)lBpb?~5&g%nA#im3iz^wvK{^I*lX-*G=sY<*O5>{z{)O zb5w?+e z6u~%iJg0Z~!NawsSR@6HctaJ7nUclygV*vm{rzKmF9IotpT}IMp7_`^FP{6Hfsn z6Bx5r1Vc?vs|D87)c%x%rL;iWptY*(z5}CZ{1nTp^_LU9hdt6yU}SQG`LM}O0#`9z zYiSX*z{1zk?8{-*mt>jgpex`qWy$b@qI#7AsmS*3zW zTtQ1h<=3fnO)=N`UmPe5YmHAmndvi{?a5EB5aptTsuwpqp0q5?T5DsM#n zioH#!7Ed!){MX6GEn9-ZtF;gB*AY1^^0K8O#17qCit(L-?8b}=eXq>sv@B7K{yT5x zkWNUHtBxt-M5tu%T+6FQgFkLr!esuX7g6E?H}7pxSv9r5n14=9T;Xg)Tt^y{5Xo`E zhbVf$o42r|246F$@G>seDyZD>B;?5XM==u`Nw)&%S_o8jO=auG2sv(|F=RP=-hEm8 z5jm}T>jlQg;bE|NoM@0l)>Po^Hunn*!JB5fnDMJ<*hg=4vD?-p!Nt)f#uaMMPHVfn zf)d8Eb!+oP9rWl9b=yI_exyf$M{82wWNOnX~rNXp3w87C%OfxXUKT^lJtlc!Qj#j}i zdSo(toVw4(@4bU+<4&v;&A>#%X(y?YS9N5oOQzhMq?$b^N_xom!+@uD`q)y0&q0Po zI)MYbjzkm>isGlvI1!93ioWw>|J|yKnEN(;%RTqD?ryKWV0*oMt)w*gV)4{9?tF3{ zBD>`inEqH$FdwtwC9UokKNa^BuNr6l*c(Dgm7h%v^ z9OOz&>d%QBTy>TGqQomgtqNU>(*Z8XX&qe? 
zSMtO6qZ6*8raO_=8U#K&QXW5pllJ?@!&TO#k&PL9nXeCpM=UZ<2pTS_+gz!1t41N&G{?OXdUrBfjLW)Ld2$r>=st42dbd z+__ecRPv#hR7!P>vc7At*Ze)PumN!cPIY|ED)e_kkU7j@PJB)LyH98L_D8Q?1HeZA zfw-|BH|0~Wz36XF;_Ww-PPTaUAeUl2v7UI>P{nWTKy18G%vjqmMPfIVcR@hHj#p>6 zt#D#M!LX|AV5Y2p>H4&ddBXOC=?SIuBrDyx@jXqIxn%0m*5=}1TsKAU;w}1Me)th{ zaW~!S%QP1mL($RuIVtIXWMG%;?{fZ z?lV3YDXi{0OLsfMD2RIOYgx^yg*By2J{!cRTF)vlS+lHpO?S;sjY$$}%pd zln2}wv1Wdi`nC*h#aUYWp^halSMfBUF9Qt zFA>W!aaGkBAGUU}d}?d@3Vh8eQG_~uskM8cepM^y+z*-Sqw0!PI8k~&EsX4^om1ub zL|!9isS!zdnwZ$_f*pLMf#l*>S z;p6GVaXm-J@pmUA_s6Ge+LuUA;0}KGr;p$Iz*!QaoF|T;G)~b9@56rT_$pX46!;Y9`G>qjDGvn;p`~x5HB}6X(R^weOz>GJQ{y*pDlej z!>qJ6U!XeO!?W`I;-ar*in`{0`RJ(>#n|45T{PB{d?IjF?Cj0*!Qg7v;D9{=^4#or zy0#rV=_A#&D5K9*Jrn%cmS0U3CTe_*gKCbsg>C11iEocUccXxA<9L8vO6j3gY*;;4 zZM=Unxq1Lkv1LGMN9#i0l%>3>0#EcQb@g6Mc4&5O#1|wzCaP@BVuWpAV5#sM8a%s} zz0Q+wW=LP*O)F$i?OH^4#WSN0naOD!{7vnz;LlV`FedJ%!Dp=pP*w$cB?W8yo5D`dX28Sf3EHzEQtS zeX5(U$zu#cnZ)@{2-4j>(jCJpO-Xq|y0YODNoah$6k@ZuQEr2oF0dkrz2+@2SJ@5avoiM)+wbe!Nhd|~H= zDZYxPvyD}NH6hmBUtP5FW29)?x*)4-epnLe<~sW1=CrCibGBvp_;GOX-ytQ}?lg5x z25blH2Mgb$-iDE%AD+}J?aJX<-_}#bDkq(i-$tcwzpj{4$^BDPI2Kp5Er>EzV%UH+#RI4tn`yRLT5lW&^Y&nTf8(|cNwm1w7IuE- z1nUNS7KhiZs}oMMy{w-kI4_7uli z9|ch>FoI32>EvL53W*jUURIhGzk?FM?<$gTz&yhXXwV!1)CwdV~`u`lJ`u}p+JjuDb(BE%VV>_O(mEc6RkxtBicr17e^c9UOfBTXB z``z4rbyYw!AsWb_LC+In9f7)@0QB%|7=SVU{TZgTj=ex6Jbo0L4$o2Snx) zLDllTzS>w0ivxPWcW4+6ergy3KSFW2I4A(uWNhVndW7~h#vm#@qz)j4qQl?6)-I*E z{v6M~n7bo%+sb6iH@ITUA(2vAj?xVJa}*&|-J)p5ks9Qk$SCs$L?b@-h7({#{;YH@(WEjLEgp4VWI+3A-OpD%ANmYh4gN#f;tndXMhH*=i^^UC*N1OCV? 
zE>s&yObw806Eu8qv1n>El*-a`>bPOsgp)!8Lr*CUNhUNt=AS-j2t(xMD&Dh!(z8sz90nNb9YA{3#b$ zir{~tm;gp-Z~(@XE-q>pcxX$`g>b#DuiUIV=>+XFgApV5sVZn-ssM20dH*F&*6-NW zFWoEn=FXfl1saapUR29_|K5`NqI1bFW#@C9gC)R874VfgsmVruW2f0Z9fnU*kKoW%s^Wg)BqXG7mEF_yZ%y2+^A~eCO*_1DqX%-CEggDQ=YlCX_C~ zTGVR6yjTqT?_Qla%~;(UOCxdB+!564oYScy z6DZNV`niW>Rl{qJPFoYl_Jf$CGwvL+_aFyTr=O!;(*wkR<|9VVa7#CqX0?UpiUc_0 zd;!$+`<$`tL<7~=Aeoy~M9*M+fc!<2b+U^`C#XduSBjCC&G6uI{S;kFKPX}Z%O zP&GQXQh1jIyukt=Z^3B6{1y)z3cBByb$LX}2=9rkn9*&{fVaZ;t%W^1zb<#SUSKjP zfHIX9_`Ojf2V|GQck97VA*$1BmNQ4a?kJw$RWqcrb9%Mud`89|vs7oaiK|VrXWLPu z=Ao)^wNuQUvB-mAs=CHhwsE!>dtmEMTp5Xd5tc52A_jP~>?N(I(vqP=$*) zD0LZhloM-9?q_tAy8UF0_}?i+tc&zT99c(@Doa5@d(sU!mWjA`R9?Sr%2NSJv;YT8 zVzmkB(Dlj1^Zi{Xmi%rjq@!WkY!ythVEgzn5SyanqlwQ|E}CS z4Ix${z2ehOww23uCtZ3Br!!8yfeaedW+zLS^WqG{=A9QHKnakO$do^&=}C4X=;j?; z(Av62jP0Rpqz)NbGUcvNrbWLAS6*{q4(WU79dz z4vq;Wb}0L~s`w7pb^LB8Fz;@$x_4YeN0^oIxlJO*ph%peR6nE}3E zNKBX^o1OOlCH(iEvH=XJ^PMLM}64D@AgVdnrMJ zOXoRx0IB?g1rd(RN{>8;$AA-3zcw7u5q32hiMRPJ6H8rEUh;4+4x@dkiD=OFN8(&@ zft+`@*NdIR=tlaKtVJ7lp>x{jzQ_|Ib2=){Vega_dEvs_3lEj#pI;uZLmAIrtt)@A z$b(v}1p)Er4&I}PXMxwY{@H=irS-&Bk{ZoYDLg~CyV{Wv`k^RzQPor|v}gtC026Ao zfLZ{}_kbedhz%?=r*U|xCJP61KGQZv;pTCxmH=U3c8x9Dp@CJp>#9|wuBwV?;}mCZ ze4ANri@5k88x(P=HQ>-Ou3z)0Zm}yUn|Qrf9QZ-P3H$HHZ>M3y_jiNSd|r9{dELph z6-+{cwh?Aub2Q@fY#2I0{d_uyi$cE02+WO;n%Q4`V=(Jw0TRM%+v}gCEx5m@m5=D# zYEy9(aD7sRLEYI!5F#ru)}Fp!J~JHraaT%Gq}e=gCZJhV=!>}ZT`pAWa;Kxv-+bXr52d17P~svCBBh17kU z@|D@RiNj4Bs@ooep+AY?Jg@w>xGBe>KzEaTnu-hUz(gudx0!z`V!x6Pg~SZU-bxi_ zM%gtQk{6bw|M1z@<9;E$a*Cz6J)xXRU7v?tTx`%-KBgfV-s-zPn^zmyM$%yIe#z%} zFhi%1U*-fkM~Y9F29p|1PEamzEm_AYdak-3)tZ#<^1IS<3UhVC>f;lag#Pma3pF`A zkvr29JjGaOUpWYLV!w2UD1QI3O*Mfb=X6zyTf0+YX7XHDU=&Kyp6s$9|7TodK~GH>wY%XyBK1tp7uzqO zC>lnG{UUI(sWswO(~B!Fi2t$JkDu&8ALJ+Lm>@rr&g9z&?d_L49KRwq>|Vmow#6aKR_`9&D{uz6MZ<_@Q!@BsLvNju--QbrF<5 z9ZBFs%m;wO&LU*pp-=+wDE#ywWJfNocO{z+e4Pt8gMX;ESyBhYZZ+@7ot?=o?(%I4rn0#x9*tcbO{Cy`Yc7Ud9g{P1 z(>+?A$w;0?@HnfGm^#q`Z6Y{ic`UBXiVqd|a*NyH6vN#4$aDI+&dk9Gjh;n@LFaqV 
zsb@N#t2>W8r=E+!wvL@fwU!@!&aiKKDO92)2^HYk5k2@#^kp87#VtKOqV%M!prT;v zR^EAI7+;7y9+dw%RKLkRrr+dfG6xt;{9(5;IH)N6=oM?*w~^@UG@EJb-|;kfIpy!~ zc*ub|2i0Vk$BB`y&dlZrt{?BfoG`p!ooDBB=o_DY?&Yp`@(Z(8xF+k`n|=Tj&P43r z|6KkK7$?~lp?Qmv(9rhH8HZ-2La>b<4Q35y27!B=3>f*?36+n>S!vM3TCa6;c0OP} z+YhmL<`X-eShxGt>1Z)A3SmgX5#C&J6|Ej%3=D6JR&pnLN6~AF~Loa15mxiGv;Rg zU_wA0?g?@?aZoJ9I9t98j80dFRc}CedQiomJ%gsY9w!7qc4gnOUv9uG`M3PvyJvxJ zyTXdzf{QP|icXv`=sWwa&>GQUA{7MQLrAQh?ztjxd2(-6%9od3xpAb|PG zBrEy@Z&s%2)_C^Ep>jZ0DAWTQ%u#Q3A6I5O|-nrUF zEC?f43R*zMpc3zb#_|5EhyXBB%`An(O~~z82@3p^V9Hm=QrnKesJH7L(XrbES&>*= z`sDFOg^n-58+K?aDq>(Jult)sLC4SnX(@6nj*&!$CD3o6+tP6LdNI?$;KZJCLe_ zV>GE+xmIv03(`VM$D?EUUOvtc?n$89JdM}LIm*Q-EZ?~a*;~E|%A35S{VsL|#M#Sk zM)$d7P1yR+rcN^BEk9_Wh_2(e`vEHk)3}7&Z!s4}iC+1Cx;V5y3#%ifLWEYDZ5&=N8i5dNcF#B@4{Y&TH(yipx;kIAZ8;1F8b<0W26VNp|q zD?GJdoC2Pt*D@??hRAZ;pdP4AbL$AiCm-)7$5?JkV~U|8Mjfpj-7 zP!jVycnqcnHRtW56-+^_xgsF)jv5N~g$53A}{b)D6d<$@q?2f=p zQbS@7HbnwAxf1^uN09DW`<{n%CBI_sJp>~24MI!IGq~#?9KCDd8W^xCzfWF|$K4$p3P^$)8ud87g*$_Q6i#ZwG=RpJaI{q;7kYFVt zWEnJGb>tkHNV5D`s}2CEHiLR^>;p32FAtVN^*1SopsAK82#o#umUOHRax_wsYzR-R6RoPKZ@+%u^-tVbvdfT=<2bP2Q|wwd%>+GUmv9R zabw4i3*F4Ux4Q{cn(sCg0T0|r)x);Eq4E@f$Fi`1JiS2K>gg*RYfjc=%j|Thh>}*V zDZ>YnrN+%AizEI0&x=`(%89$&&s^N~wr|bWjDI@UK3i*0>7vtE#D5;#yv2vb3q6%d zLDBEjz z+bs+Ql$Vyu0oCW?uW*Zf=O-maNp_GM=?;ioj-IJE)9nxJrWU@q5;@z8lCYke-dut} zbl90zWApDK<70!(AQ%Ido1&2i9zMFzu!}BWhL`t;tLS&gpvv z5b$67rY&7;2x-GxjW@>{hyZ5O9_i2Fghv;NS+>HfzIA~R<8$C5Ep(c6Y#Z*7XQp=I zkz~L$Z}&L(6u-a4##nAy8ch6Jjh5$nY~=oka%T&HfW|VdTcj)RuS)vM6LC4tGW@Se z2phe?)RXV<`J}|!b&JyOiA>+T3^Jh6l|B`X-2?MedOh6zY*a>3iPPNn{g#>T*rCyM zazA~xV?S?52eQp5jdm0?T{E0jpf{MP2(+kpknaoHTKGRIKZ4bhjTEtVPn-Bh_@9x` z3m_) zVvKv7X#R;x&+-D_xp8ejY>ss`xD4vp2Aarcs%IX+QNep*o`n6 zD9xL?I&ZCMe8T@Kect^hYdz_rsSnxG^?5d{%PQeBS0h%-bxDJF#F~ zdCMY>@A~acz*Q^~Cr2X(R=;7Q1Y#=J>M2#PD!l4pF$AA-&75tx;WpHf_k=E(^r1oW z1+}rxucut0nwNT&=FGan+HZWf6K6_cNyosBcy1kyX(Z`!=VWwr=V?wqT1ipjSDg?r zvLRvZSDsM!UKm!J3PHGDii#HdlXy^R1|*w}z~dLx$?K&VRo1+gw9BTvL8N5fjj!PE 
zOqrvSf;CgOUse{sWJhxsI7uf4|49Ea^GAjLUeBpAqMtt}$@W#|vTH(AUH>}Bq~yD^ z458gFsm{F0KLO}FF&De_@#Hg&n+Cau6o)538#QZ|ZQ=Qf$S?SU*(`I~{d%r7SU2CG zVtZaWi&E(fsy{(jZThlvvKYbmik%~5nu|o*#+IJMUCwmu^6PzJI{7B|*oiwQ)_hmm zRJB#Vjq#l1DSik^AL7H?daw8YVD7!6;oARy{}f4x2;xLZbV8yA(Faiy5`;9O2N8tP z%Zz%WcgaEYQIe2E3&ALZV3;6`-bNQC7>v#+_r2wO@A}+(*ShPs*6;Vb>;896CS=dv z@BMzi%Jcbnog3eLI4#8=;A)7B;%`=qiQ_Yb6F`mG#_&PI^D|k zEE1^eS6vJ`Yd*<*)__h3lOrH&IGKv@Xscd4coRYu&}kl&S8u>`AMdBqBgIaAy7V=s zse}FwA8qz0?X$2j?59X)1$2pYxg&Agc=Or`>!toynt1)vc1w6iLv@NG*XE@>dcM(T5Q-)Pjc zPNfY4G;T9;xsu|w{F%bY#pcBkEu-OnOuHYgT1hRR_CYa=j{ADQn639_(#qN!fBWua z=kH0DlLr6^`gLow%l@B;xr4)ND5b46J<&*YdSvb?uqWKu&mFIb-AdKhs=b|kv#%;jxcw% zrCW92tn^FLKzHd5nsX__+S3Gqjm|}h7UQMrPS*_;&KVfUstP?%b_@3o7}{Az3#M-w zd19RLFb2FUC0?gF9T8IB2Djs)J8K@89>9#Dl1Bj=)eU+))`kJEHo-Y%zz&Axw!Bd< zmyEeJa!DE?ZZRSl8}^u-M0YOklykSr-IQ~onX_<++=1+mpUOX=0~(M%Pam$VUWlF)^SZ- z_8BFe?bfye9#g=_?ruj^m4Ol@)v3y0E17}poN#NlA0GZ>E5AaJ1Lfte#xHxKKbaWK zGS-|)^l(zxcQ+AMG3HmvzIxgh48%!*a7a-O{qwRc%AC5%ON~^sqn}zH7S8AVYo2UhQf0+5cu^~oS*LkC?4+fa8y=Wht_OUEElQE~7!aNT6 zhzs1MvKKa-r_sgwS4PEIio#8^UYRERmY#`LW2N-t5VmjRegeCd8$f!R6mll}%_UoD z{I~JgBDeTq3_5Z`vD2|iwIMGkwQuUGvWnODVE!f+aI`8XtzBmj2>MI; z9v%Je6&i}fX{1DNpBit-Am>k`JI~OBd9^PsZA5LFF^eq(jma%qPWqm;C7kT`qRP^` zOe(XM5T>7zo{yCwtUSKxUegCli!Q3tF36^)xZ9%Bzi5%IZ;F-;cVfsDU%M{v30M*N z4Dap)h^N9v^?)31;9Z!$2(b)w>z~rv`)B(=544)_y{+%3uvuKO)N_nQVXU8xbLP=1 zH??*b6Cy#u*d~C6;kW&;i{)Jeve*p1n#eD{o0&u$uhz4bj) z@h-MaU8u$*u(@O)FO?nHJ|Z;`SAwr z^fg+Ew+lt`e$Cbgx^;Fd_xo|-4@j-TT8%;VkxR-dQ*I!qV?94@6KOm4-0~mNP}2Nz ziddmHuBZ!xZ|1eVqKVj%Wf8g<-Q~yBtiMZF(=2{4>KHQb?s<0zE)mO|8fBGnv_F?8 zonoVBJ)O57b*?bFcw9!{LOb*4Wgzc>DX}C@v7D9Uz7u79b@f7r8tVLwG?h!DXXp7) z;U*}&)Zg1ZM2j`f=O5b}Pn|A*J*xN{bih!TPD>PVIP!`WMPa02eP^8lh4E3Z4J@ye zjtV^Sn+luP=P)&A_~P0`D?bpoz{l?{7EZ57l(e-V-?bC9k~B|xJ{-g&YGEkbr<(rZ z2AqL`_RW@aVTF=A)7IoOv?`T&T!P&7qV+4V)CC8X62$KiAG!gxti|_nm1i_JhQ>v= zUF$)nb=sdDv2aE7iT8;FgTLoR_f)(;l_{jndA;&J6*PxJ51q?)VP1O zs>*+wcy)8rz42@7m~M|wO?+n|F%?gIAR+9+jFV<%!f1i8?4r5g5GwnRsl_4 zeSsrGQauM^&Ea8v345qoPgvz 
z!}PtfeVS`j@7(Vju_tR2tlpHrV$zcx_I__9U+U&7%{4TxtuOoQh7zF{CpPM;E)aLi z$e6SD{ei@p)e~i&&va;_fX+O~q^^*@e7Hx97escd5VD)kLyo!pgyPsC=Q0=C_$~?f2x$ z=wjh^2PD~Qg`Lp-wS5OG+n_2kVhfnMg>~xet6c!UNS9+0MKi4jdrz};ws`XF=;YZS z#eY?ON`eveEp^eE)i z9aX-(McOxJdH6F@_^;Wal<`E}?5&Fg@O{973H-Zu8kNg9;Ykx>nckzr4@;L)q;hnX z{&Bh!HbmdS1@jjZiLwJfo_!J#9G*fI{2+)_-+LW}jL{xB=!^^(Lw^nKSiO(SxtRK7 ziUqs+Y(9e{1UL9VsM=1VOyL)+O46DdOiApfJs+1m0pd zP7A&b)3Imvj|?H9R+K1IXq~9nUa{ye4o{)u<8L^DMJ>7p!mvQ&HCf3I3by|5@uWkf zU6^#m>e-N+m0W4-wG*HMt^~wiomrdm+;TxTl)Yyx>qLQ+EJ^PF$0GQA=iVqZo^u%t z7N+BqAN2!8YV(DEFOgnC-`M%iF>ndk`ZyY1%m4uPbx4>1mAxcpgXtP%xc+<_0L6Yf z1C&bwz~28H5;&fDCRiJNCe8UQmvj=tS7PoPrj{TV9 z&z*EzdS-ur2pnJRkRon63@WPwaam639iWSA*x{Z+GNalsxgLD_1g%0h+K=CyLAPo5gGLE(tjM94wDQ{fBcT>u9HEJYB(CIiH5tf7h^!1y7!8D^igm% zpiwW#d;8V_5zm(sPj>uLx_5v=&}3{QM>+veLCPE?l5H=Lp?2s(-@6&N zKHl^QL8JMjbfx3>>}UhW2TOm3po5Znq_EB|RNTA{V#S3wh;lQCfNLVGD$Rit%yp%D zLN+9Ov^jw?*Z>iJ1QoFbnxW~vdw5m*xbmagT!k`qO`O@Le#qN3dy8#oj3&fwd?k-Q zD(N3CFiHgFz4Ali*vU69<+pND9llsCiZGX=o9@&doVB_|!t_4U1W5H6!7|uFmJi3_ zYUpCNwn8;u!R$SbmJLgHtGyYgq8|(~A8H{^bx(Pv9#m2@2`hz-NEW|&;y<8X?V(+R z0#K4Y1Mf0;H%Euj=y1x7JTKhca;d*xCRLIXkCG(rCsLfNCzd?Jn8tk zO-T1gb7R5^!aCR~n(cKbwt@uaX%9~?GJnP$ORxTfv<9XcSLKKSl}oSgrSb9A5b54& zDmSDZj7C)L1tc4T3fH?p-6+euSMBbxzg)dyia#ex1S5I7N}xK197z2bg6a-HhwabE zYP6kT*4Y&#-uo3x{30YOx~!-DMpNfd7#{G%8v;wI5}hHxk%t!;il9%Jgg_&X-UPyz zd3`3N@3Lz~-X_=BuOp!P$(#LL0E+UCy z0b8kqKRgyb=k`mv>)$bQ9|H8EHY)zKn{M#xKkry1RJQr_9oq~kECoJQkr63Wp?oTo zXHUiKxJIUGkUK*ap9CAXHJULnWs%;YF&_`BjR{mXV?-U_g~u~sd_g`+hJlys6*p#Y zAun$`@x;T6+1(^Q9rg)mI5h@{e}v5Tr19I0wD9Wfe{I_ zaxETQmEzn51hC z3CV-QKqK@D8P)u3(bev|r50BuInmG1iZ$GgG06&(wts)3rq zt;O21swFUupbQ^r&Lo9>U$9aLCL36VUwijSNyZoJ-pynp?P);yiZ~AIXJpwUfGNwH z3q;TK54mjV?{z2%Z^~Uw?0#3(7Ut=4-d$ScDZHv&(9v26_&uJH9E&XUOJ2_|P#wJ5 zamadME|d1>$g}_(XL3p>)zmm3oivVR^Z)b!HBo^}h`exj+fkR)v2xC~Q@q!ynC^ZBS5s6p6^yKS=oKXMb{y8#%_UBy zVv*okoS^cx#R_&p19zA>ZOx69BEQYN z;SNlswr-jKWv%?SMW(u)JDVS*L5FZSMn464B04vP-WU|0rw1KShoh2{cXh=h*{SK= 
z<3;H|k9G6!hd&9(?(6fUrsJcrrC$_=Dc3T})h_-lY}YujR?&(=lt<6KGvKV`>6ScI zu~PhU!J@v`BEBrNDPe0sX@siFqurQ)>{t0sCwrqOH>sI??<*uJ78HzJdp_JU`j;=k z*^jy%W^d}w0k&rfl)KtNM7w7c} zzj&&W{5|ayX{)kbJBf}8yN>riT$lU&n$r(VM=%r|62|yqR4tsfExNCDunCU!7I*VM zfV}+GVZ(9VqJDbKT zYRO@)RxSnc^$j0`SED{Y-b!XB{Bc+ld?q0OT|b&fOtehoU`qhYH$2)sG%c64@9OJ!A`fnWp-^RoqIScIiKQ1 zA5ct{ddLI2SJyYNx!xrNVoJJZCQxe(l;Px~vP7w8zekEuQKCsTCto9+E|3PXt_;|~ z2y65=Lh;(QAXc6i-=$6^+_nX>TYK8)^a5M-^4*@1X$Yx@mwu=nLKKe01)s6ID5E$y z+PHxgyLQpHr#gLHi4nd)OLVfuID0V>8C1+k)4<#NfJ7MxQP%arwmq5 z*kC_$_>v-5yv19Fq_v-?UWdQX3U-H3w(90MQLV;P(Ilz!==qe2m!7nHIUH3O%7jE4(IKUi^%@%GVS#j)gj&T5NBY!7O`Ag-eCdUz6a5ij$@BJ zO~bZ=;1?YBK8~l!^D+Rv&%d#v7VCA&uBk0?$D3XJvq{p4bB|7j)1y<7x~T}pR?b4C z3~9+jP{)=5A2Tz#3~p#CmBJ)tgDApVf%R^wTx~dfs$}%#N`!SlXl7YPV#54WYfzbj zwT`-c7NpHeGPihlefFA9SET8aesILg?OrmPdKE6lMXho@$Y|9xB|csAmz{pYQ_L-r zmfDTeCzoWH-touhR)-Jk*DO4J`a)8tXb;J$bZHy6J-QQw!YYda1~ya9%x{e^ zhpqAIJc9ixP7|++gq5%}Zj4r9k?Tuq;*qg%$Et~7UPQIysn;v8K#Hj8pqmKJ6-y5C zpsxMRE{#>XIZOoXL&sBxQ=e*NU++BP!^!I>QR~>+sXFqLI5{5LJ#4sYn{kde!Qf!H zQD?2}@2_>iou!L>9+fjU^3`gn5I9OD&fYYBwb!`X8dKdWirlJCf+U^bUCiXfV+xL4Kp8T{u;)8{or|3PTh^VDu%7SKBpX!F&&V>7e+VH zO_TZ#3nR#P)1q`@k(yfr5QZsz{=1Opmp4{4ApU;ptPK|mJa!5LGRUekpK*5d3 zu#w6GA`cs(+u`?Sn>a9|&do&{^sYXm{z%GvUwxLH+kTR(_@3RL=sr`TxeRg9{Lv-S zzUVrcp^yuMd)~SE2*+3OUs*NKkkR9dZ@~Zj{V>GEtkd*87GajfH_ZS0A5D(WyRtvp zSbmwIkk8cc>qCt@E_2xTCfKVRdNn}`esniu8qo9rl*c<6d0I7Tgju&{Ox)5GltgHu zk_ecT)EP~eL-8L)hGV_o|D__o|J}8%roCCb3h5e2KY$7h((WNaa?uJ_<}0EqrAMCLsdN14d&wvi6DLgRzT-WWSF*$Nuybe!}tn)#4Qfc!fF!s(WiKoLZyz64O3 z-p>HvoRHa015-$=H$Z$PXiE)!NCOrP_ai{tdRz|58j#`c328)Brw%iUVI8m`8!Emx z2|Ru(_f8gwi8QSP*@%JSR&gv%xCE*jUAtNE$6g0Skai~_R<8@BMFY69ya<&!X#7*3 z>S*`&_7=gh2>0oy)0}L`M2{|1uNs~E2Dm2cpUQoz-2Yhy^SAWwtV2bySgy5%ZSQfi z>E-yCgnl@BRBw^ncRaK$6*ug>Fbe12bjpV}e(Y0tAiIBUtlFwcgCe*@;XUMZKfW7VkY%tL7cVy?_v&@qpC z(jmh|v$1%q?7=668B2e3;jsiZ?jL7iloBair?O2#~8~4CK)9 zkYf-76hNYof$Fl~v9uy$FaHt+i}?=7M~csjPE`Y8PXl!#q~^=vzSEex8J>_8Z_ ziD2*b*yoNEderkfg!`8A9^^1QRso7(u{HbQ%fK~Rxa=8oGX&&2)j3TP0Bfwyv<0H5 
z+J5Eq8L0c#=LK|R7t0PQmH}njcnsJI^;QbkmO;$a=Pe|>Ij!CA(7JKBU00V6K%2Swu9}-fC1ODYe^{KTZ;4#vF zXYA4<4uk;-PjQ^+A;q zsvg8c`D&Rkm1E>FHEh`of~N!B?WL1!&GuNb=+6Sl#dtE#UddLq2`9Wna=niv^Uhfc=m!D3V>_J+JJg1r6r;aFVYFbnH zOq^&|4XEuVLNdhJ=_@j3UJWWxSxz8H_+{8{NWNxj#Kj3ck;P&Pj2DCh6SxX5eK=Yq zOl3Vcw30#u7DZL|HAO~Pp~>-Hr_o96OVh=S1}eju_L+oz$YFZthBEH4)*Jf9hO?9= z77^&*n&Ot#t`S-un;aiBEzv zEvt&_J9RO2ShlR)1^v3Jh{vbmc8(wy!s2`9vXfl05Fg%0*{4c(jjuddlMWkHQWCk% zXEAh~QGcF&o$!tt=ek&bKl@R|4=}LMFqUX`d$T0k)97m6x}E8IO3Co6ygsi>3&Uhc zT2%>Ta(OtT+=-`*jXz;FHeFl_YeO%gJ3#f%47_;0uS|qYDLtCKwdstO2609J zq1!e<)c&5A=W#55ZCwO1+65|oMXNc5mF?t^t@0CHe#bAi?|hI4x~7OHMeho@OV1&X zM=PXf7=K=WS}Tp-9U28db9FV70O^eueq(^T_R9`~_x=#xuiVQ-0qY4c_x1_U*I<59ztS@*g1cR&0Mx-|Ktst z@kT(qNBhK?GW*ewkj`@rhA>wP#DSz*?Zf4{TV zhph+54=4`2=7faPb_PK$rBW{Jq!q8~Lkdar4$UH8At|Ad(!i4K#j{rDq1%&02bNFY zebDx%CU9N~hn?Z7qEsaZxl&%`v=8+65PhkwfecIc@?^lfg*Y10HlW2Z-3#b}vBedM<7QP!T?s!yZ z20x*zQ4yzX>Yr#8e3|4l!v$pSzlWDE`w9DeNPSmgr$3eKM2xh?$uC2*-Dt`>a|-e; zqGs-VU4xPHV-7ZqD5>zyMlS{nO|anv(V|*}ULqeVxW7Vii!{qHR!GHgMI4-_1Lv-k z8F^RevV^^aYuv78f4`Y^e7c33j{sayJJ!|v`PXWEfgeg2^&*(>SJ?BZWX|c`YhJ

    !}_5PbMe1lZgl)(_CycJzro_W*k`AE)F z!BKP{FyjpmD9zF55vfhL7=kb*6pjECrKu}-*GD2tM}{GKDi;|NoTt&Y3;VY4;c(%4 zNQEdW;b}$W8(Ma&ES}c;8*6I}?X3cQY1T%cf;Vcu8Scect6VoFMd-&Lk0elkZs*Vb z;&)4K6Ab;8#un5(A@^tb$7z_Z?PR7FY#g=^+bUEbfMB2vjA1n)N6WZ-vMK^Bet+rR zMb+cCfuRw^MiksdMV*ngC!Sm<38*jzU7`D)K^~d=5{eEt1bIiFe(hSq=Aa&nn1~8} z8gBiDFG?#rVQ{7$Zhkd}X>N&ladevM#mq_wytK4w`O?pr?VFjQw5gnW{6|qiZ~kH! zo%)-~q9NU8DFl9d6ru+R3^tL=VIP~~?7TH26HQW86ewTeHVJx0rC*uM=?e1M_3U@$ zW2KGvf`qt)9vBRF%&{@zi9W?M!(_0ZxJMKC>}TGm$G=8+z=Pa0^nJr$q1>_wp6(0` zBDd7F$Us%uve$o6Wo*+3@C5Aa%Qy55tP-jU2#0?T_6VTFRmNu5u>lna$)$YzBzaM} zgwH3~H&c*PT@J6v-n*3(HW`C1O2s=_m89qPt9q!&?$OLM&&fC6;Sx%fuE>yHiu0$& z&Cd;Vuc5Cta|+vT^<4d&&&XEp-uA`5RmK>g2lkic%Z!j|J)(ic(KQ*IA9D2vq!d@B z)vNNpoli|;TXfa!Vv#Ggt1SUK1?l)CL7qZT8W!8S!8%-wP1;cS>xU*1^2Ea#cw+C@h zJE-x+{JJpWQF+ilL=(#9X-YYon;bn-PfXxqikHliXv%L>eyBcARGV*C^<% zfHA>}uo8b&cvmZF9n4b;vsVR|Y{XI>?+}+@D^LdzvJ5dmiOBJP{l7zM9RCWg3lMpO zL3z&_{K}ieo9~bcpfV-q0vZ5`;E+5Ma0gd`bh3ai~uUT}8vK zzme(R+0^{+!a{G}-T$%g3u05q?R1mo`}$8=sj$N+kd{T!;pP1QwT?GJz9Nt(LJZ7f zUlw3%14-qJ|MFqkwsE6!IT?&P!WBee+y7EGq~HJy_w9Bq=%+p_F!nd&z{Pw#SvBW= z6m!|?aWgbU2QqFjJPo-~4`nEA^P=p)kP}|$Iz5C^x)Jt=)nPvjsz%{zD2R8k3)(Y= z&}16JmP0Wb6(=A<7l1UJNZ^pjDE8-N^@ci{U}SLd2rwsDzWVNQ2+df9AQcICKvb{- zmXbUsr_vHdrt5ex{M8WBox6a6qVdpv?U4!1ip>me(STU{_(7?UEKgw7wZju&+Qh=j z7o`u7-D7ia6f`IRdjo1;xpd#-Pk?67iYQij0D(b&IrO1jb@Lwz2?8KTG!V|TvcKM} z1u$VUBQ6z*{N{My-M+4g@qOf zKqoX3>MOkjD34)N(7s&MH4J=rIcOW|n?i%j1^B9WweJC6@IoDE>h;xbfH>CErdsji z_)h>3umVux>(Ka`WBI@TGhculZETG+-N=UM2ni5z*@g^tiV5A75bzi|x@f8ojr#WN zh&tx1NHHGY0(WRmv^ba%lmK-yXo zC4@*bc#;nx_CMF_d6R%qxiRpbZq^M~&~>wS#4SA&0?vb{ZZ{Iw1n|)0qM9UZ9qfZU z@Iei@T-RwsCp=7E(hq_B?h)M%-h|mfJ?nAg6jH+%ou0H_Gvkh9^MnaU^P!ZsEAf3U zt&edKgS`#f!iQhRbTM{>mfqDglkX636S9I7V*_^;hs`^0rrvAfoI(P})0ctPY-oJ; zY^TQo8EOj2f>dD9bHE;=C=Ba&m_hD;@KAgg588)TfTR9}ODxLM z8%()o?QdlJgBfXncr>5rUduqexA!q}U%cF6K|abw%U>kAGNvWb{^U55f*ef1$I+^q z9h$fbcl0*8Tn=l5MM4vvH{6C6Q=0}VXU@;ueuWBMgasK+! 
zyN9WCtx%30YPz89LqD!oS{mkIHJuzk2W0@FpDbyCLcp;3slG|sPZ00TfcQ;y7m9$7 zY03v{)gV#J%Z>9)lsWE>w-p&fT}OpbswNxT&w{ZCX`heb$FPo^f~AF8;1!Dp(1LH2wDYBwI>Zy%h&{jyM* zGSCPXM}Zt1noT1Fa2*4oTj`+BI|DV=6mo_jQA3gHfFhKFx`r=F*ZkI2oiZT*yxDkR z)3aNPlaF zd)$sV8p$39QGwkq@~rO>lr7iffD1?GQT7h1sD`-`B!5B4bxl;zWqP4f@w(Iv1%@BX@m zF$YwYJlYc_P$n4>4`tuyQI0;-S7)HT<8qIH=#$$&Kc71qgodyOr`gB&~_^if2e9-P?*w54s$mNBe=!-&1G2>OPb+g~VLvf2iYj01XBs&Z``g zHrOpx;wTxV>ZeD?{J3~kDvzC8ay8%vu?a-3o}U!#e&mN+{j~ODog%XDvS}C7uCD0R z4%G37%g^k5Kc}dxFbHz@2PV9$QAY7}-3td$-N6eR-hA9RF%F+CzxgKd%^A0*}oUniu zTRf(MH7o;JOVQM${BM?2B2$Jo8fL(qcPwBi?YQPbB@pnL@O}6Lm3D)^DMhZE^w5jA zVGryr<-4D<`$DaUQrQr))!D{!TkCfjx+yB%eytbB`sPFGg1)#KpbpT3xJ`PbFHu2d zZlQ}53uRCa_$C(0K636vF?298s9d3RPQS;(S_DVD4UVD1t7Row{|=nzN5umAbS^TP znEKCw6)#X?)SP)1$v6!vwU7D49_Yi<{M`f=<#UVA_Is9XYt}&(n)xQ*Q>&A^|N$ zGkO3kL67Cpf~}9l@S&R2!IFW{*mbZri;$05S{+s@6tJY`(`~MThZW(P0WtjgEJ75& z`m_f;HZLn?#i(5D?efTeP*sc8$BrVVqc=rYDpq4AG1IzI=2wMZd_XRK{5bvW$tuj- zl|n2U$`QPDYob-31#CG?VZyCZ>mVL+nb*EvE{%7qyAT)o7Aw((|K@OV%5d6B)Z4(M z^O=v>0NzNZYF2%-fvNUuc{svjKvy9v8sn^51#lL63WuQ0>RQ9zz9Bm)$q$B6be12x z*)0N%uXE4}j9E(2Dg&DaC@-BniT-Ys`Ce zAu%tb`Hc;@6dV&f&q7^wJ7DwD^G8-bSomZz!*!wAb8egCYnpKsFN$EKAGUus3nGry zqHqWzB*{vIv_O3lzECiJ?v6>1o{sO2%I=b|9!JENkB)CAl^c$&YD6B!yDF+9XPo^0 zM3=?TJ>k66#2@+i8urQ0ExvxrRK73Bpm1qN!ZQ%)ES@$T`$xHx^4diB?C)!eHEp{l zD_;7Wy_cF6u3dP&I?KXnG%{Asg=ruN#y3?`h;{Wb<;1wlkE_~X;xMH_34{8$VI4~E z&MvYT3X~IofB}f!*}OT;4z3-wgy6!bIii_fimTTy19pb zNJ>qoD6iY8!!PMG3xc^R2{yjUDX7ETw|c=YJ6rBod+1HLhcPhzgJv9-RDzFkD*=|@B& zFAxVhiNXwOsOjO#t=+K(wyJS5Zci(yeC6Q4%-F)rXsBN#DAGdd^|k%_xpGcs z;ZX;ZDng$@=$ni>;^gfawi^W@8>Mi6Al=p583`h5{6=3K9$OzNe0WsR(((-JYDBl$ z9&ksYlx)-A;40KvHCrA0&te15zfs5(U@LGl1EFs<`ANm7>B`DtuGsMQ;apb?(K18a z?asOr*+>>Ng>Y!%d*WiQK;#&Q@+k9tOVe$} zZ^|3iAOo_*7Okr_Ds?0oKu$=j^E)|g5jxWXb#Jb#T`G51NowvU;bYLVkC|hz3R#ojy5NJ|h z&7MZ!2{zbyAH=wlyzhwBzz1Y!+JPRowyHVYTU_*H(nYcoL=*Q@m`uDD`eJAT#kffM z(GVlZE1f(m2Q^B{bmrBM0ePsO?ykV@BMe3Zk*ZAOAR#7^q22f~PJH0u_}VBEnibmV z{0C;j2N5J+(2T>vM~7;3Z-2eR9IT}PVcqd~uQTYo 
zpwrdV2GwmDP29S~p|Puy{mjGFgAA+^khyuIRRIhJoAA^%Q>5A;i} z5gzcxwY(5F=;kA_@)w6$+y8jO#b>cfF)a&$-;HD06|^{c?&s+jIk2n1BCJ~D#hin< zP(2^+O;@=uzWnW$W;XE6T4!-p%(A&cu&|;*J4104;2e2tyf#ZpStMRoM6pOz71bRM zFNChD*i06nl>BR{94Q!2H&$Vg>%+BSxn7$K$)EUdJ#UJ*%r_?Iv1GGpSmBgKwGPJH zbq`-zW};6AbRgfsFHZ#rip-Sc{leetWGL1g%<$9hJpKHrsUeWAjq>;T%K{ZZn>_++ zin=`AEQVz}(=2IH$bD zq50-3`@l*Z%%P9F2ET-No@$aX9sG*mEANf8rf>|oz%l~zW?L>g`@nNdVnNa*u*rEw zGKoq#2jl2zdDCxMU@6x*d%D05EERAW|EnBMuxBIQIl-PozHayF>Mh>&DWf48BooFD6A2C5S1<+(*>9|W3{d@ z1Zf2AXjYxMX3pMA6T1a^-*iLzf1Y#$^rTH;(~@d`?`(z*USyL?LcCPqUI)!a=gWe6 zrK^;f0N1&WvU7DnO)WtE{?o(vA@y)KOl^?i1rEg#3ej=jG_f&didZvsZ+gwZvos|x z)t)O3=m{zsXb_`Gj#jdkMzJWFhE=AsTovYelZ43fBOaqy3sso-kHFdIi>=Hi%0(U>BY;PaKAALklg zLD@7&%dlg_9J;U)xZt|)ji0WfY?q(?cmX{;&0${DGzCd3>`WHjt@9_(Q$B7GH7vKc z&F=HIwcQ^svl|7@04ov>5nDO)AWfV*?7>itH#+$Kn{BY~nwwg|{!u1HWo|8E5QkuD zRsrp*#-&ze1?J9%VCrhnfac^zRo+=?l5jlD<%5m17CE&FlX!zrJlZVynfAzSRU0FL z>EzOMJbi~1%ijZ2l z?E~2ju4pyVx7Nl8#m!EK8mvr*`(8P%Qd?h=U0|8K`Af6%0tND+NYU46uDS^iKc%tV z#h$<6^#ih5UJox44%mFt{{h$M{XUi|vc5utM7w4lX3-o$%@(DzA~x&2y6GZxZu^?( zk;B|Xx&8F|I%9|Ce}B<^F6xTV1O_d9_gWFYn2V%mdq6F!Zo~v1@^rc>JnS*Ku_{$- zTJPSqcIjSRJmMX6^;+*rw5ANg*2A#hj-S!zHo!WSiPeC$9e-GL{2}F3GRNP49$~LJ zERUBYghdAEcau6Z{5dM1Q16ixM(!E(YbcIJ=8u*Ei7ANg21myt#zKbi;08%ZEzQYJ z9s0x1v8%F5R{$Xq`X3c6_*ee~tzz6HAt=PXh7G;efg{7xFQ;yuSpxZ;7XkX5VI4Bf9{%`{n6n8xWF_~-aoIfrU@T(Thyiu zj+c6`)g}DYt$EE&>os8gJw9_6#KY&lO#u0X6E2p+Q$2=N>{(2%^VF2`fBcBEiY3@8 zXTV5xs%thF?T)%Rs_5Bly{_z9O@6ti%k@r$%2+3-$lJ zQBzH>;0Lu}-(r|vZZ8e>JpH6wPaLW283ibX+31c$(G0;yAHuFG`C-C!7luIyl&IwI zr=NNIq&_%trNKta@OknUb_eYCqd`;*#-ypH(f$G>?aoIR3HHOKEkS$0nl$qT1VWoC ze~p9My9jt-BKn2Kg+9A$QYR?g_QLl13XBFW3mf(-(vK*3Z8#Z}yl@7cf#_;LFEc4H zEHjSd)6Ulp`T#2R)O=r#S`{B?|Ho@3`v8OR`}I~m|1^E_tyM5e)u}yI8XYoS*%TL< z$n@5v+}?oTxH?hKXHVi?&zOhi4~LuEPkeu!`w@V~9xsFXl~by;qZGZOUB3{+V%Rib z7h$#2<@&Z%mmq);-v>~la?md@ltg>xY9|hhl~7`Dj)g}kckM4RD)VDN&8<*3OJ(*g zySzt!(|w+#T}L;LSIQ_J^_bF9(;BbzcOe7JcyZb_`p3}-htaAsZ=j#hdch=OJa_<# 
zgN*F|NLWAcM$pqr{!Eu2ppZQX^1M2^4*;Kp?>ty}&t?0m6gUg#LWc_o@8E3Nspb0# z2dTpv8ZcK_yuj1ekJ_pI(JT^t3tlma6JE=AA;_Dlaxi)-pi){ zMi%UGh!_S@)cS1uqMY0O^DzSz4rrGMh{NAc5U2$HuzoestV=--bRNq56W}ZIxZNXz zYtbJ3N80x3mnY#Lps*Md1=e>J)i8KZtHC>+e47tz(HeiF(Cv3OU;cGzqjG!3h1U}~ zJ@ka3Z`Pg?)*Yo@)5zwwpPsE4@Z^5BX|dG8K}aEnmxE`SO|#r`<^xF41_#Q-dvScx zAh`5=anKxn)nxW(GF*X4tE=B1^Z|1V@bNc#SQf(@w-?D<#H$9P$4AuCh}M+s1E0ZW zkk;C;g;6#|Mn0dBCv{Mo$uVBNW%H)#CJ(AIIz~DXMwvGOXTx6W5i4!XrLU^LD*7XI z23QH}DP)~nG3>OCH&3PAq^G#ab@f;5w)}jPnlS?z^QuoEk7J7E^#c?lnG~`9j&7k{ z?ra1d!wPaMa9@AR@f8mjDazRyi}-4SP{3)cEV9IAI-h*k=_T^6;eT914D^wqQu}=8 zyMazqjJhY26sAuh*yi>Z?S!)?JAF{*9ZsH1`Y#9;5;s0ctg$;xzwmKx%*?%|(mE?0 zjdQ3e@Q{-9od>^~>(OGx0@!1vXa1o@6X^%sV1Ehf{iI>O`F-t$_d=4G}}oHfA8O*4oEqvDg@#87%!Gs zGy(2Tht;cEf&(T0wX)*}7pW=SCv+DV2v|)6J>vs*#jF>>^Rt ztJsYH*fe&9I57b+)J5mUbvd$?r%5Awx88+6oTZ`i8>*X=HB7-L!mmpV>aEef)2>{? z9meB5SAX{g`UH?CUN?UuqG{*aw*1|XasVso_}zuhEFVVfBX+_Lv4L=|r-3)&-Jr`O zq#B4B$U(J{J8%C42}174;P{)P2c9J;3V4)w4@T|G{y%*PwuuwRAHVqAh+sdC8z}sH zIs#K*NjJbj8x~JHrGFeED4n3V#cuQG@RcblGOiTo`#%vQr18`p`E?%5jr*^yjN?b{ zQaU>~RQ_amz@YL$3yOI(=~_aMRy@&mD<{$BWJUC{|90gj|L*uGd6eU5cQnWi!NH}r z5W3F9j{|#Ub-KLg@ICi|LX)i(tNejo?XdW0?7Er8EBl_N)-3;w{mCW8iQ1&?y5;b$ zt0wR37mW{IFB&&0FOCHANLAD-Ik?)PiLRj!BId{6FZpM2(yzv2|!9`TNEw#eyQcRChMb_y4ESsN{V8=Dv( zG07Tx;k8o({3kI$FM6bJV=wFZ{bfu8P6p=Ti_0o3C6D1KaOh# zwZl13qf-S_Za4<#28%66LDFx*Fkaao3|l=`-1ZNV+zeIT;6TgVYLf^OXfoLRO~z|| ziSmogAL{P^%~ig&H00X6*Zbq>fb6;5P68d2tNe^{Wh_4jZ&{Ob0}x80ok7Wmr0<(A@9W8xbp2bqlz&)7@) zB$>Uc{%)rB9q)8^AePrWw9NcH{^e<26)=clXxB!1iL_3Yr(f&7KhKo~kvlEV^c$UeT30TKAs6Y%>(q^Fs zWw|QGG7=kH-_kLi-e>1U=|s$(rMXh!K^z`}i(P4IWMh-}a08LB`{UdP(TGf283snS zQt)Y*&7=n_>J>X3h`z#&zCM3Fqb$eGs3R4S*3zOQ1Jh{mSf9hA4{lfz%Hi+oic&c^ z`-Axnx_#0km_!}Ms21>Y!$G?Ij%TBY2hR%(zBQjs#(&gIG_2Kn6R+Sn220g2P)`oK zu5_=^u&j0X06^L94%b-XE<)!=W%iv&!)k+Q_8I}+)1F|_`J3m5q(d+21AAX{G7Aj`u^5XqHmJm-6&@Bb(*;<@a7NJ z{*<>@9V>0e)8Cf;Ntsf`3CK$e^LZy(nMJE2?Z^-OOG$P%>K09jlc1@nVmOT z3Z7D68Sp;t^Huh9m{ai_h4MEkO<`m&KXAvEfoDQ8_ROvVP*3m$43-;Lx_H>J$KC~t 
zupm9G6g;9a43W~j97k@!Y8(Io1DodiQBZetZw#gNgfg!TpSAF6V1_1g2W~kF{u7R( zyNnU|eDC(UM>J~OK~dQF^yL1|4$+}6MrovLnt!DHKz(#k!ZV#OlccC5;}b~EFd@IM zZSIJ)-b=RHyHK185{qq6`M)DFze?$v{o;bmSr*5bYt0E_cHJMsq4K)PJ0SxtpY4Ud zwtiRjkB>gy=ZhFZb<(X(3_jPo5g4i1ybc=i5;`(C`rX9q4;!Go;=x3+gmrl}TQ8|- z-}k(8Rv-dPZd;N1Z`HkLSW|2FHzGrGtVfRRn@`3HS|!F5>ylfq=YUt^zN+9IsbW{cdl!`%*=;(&L>ZlknFwhd#ztvH}A0G zCvL7CH5qz-%d%(D1t23{FF+LdSjjIFO z&j-P{+|(4as-X2gFr-}b*#Xzgb)VWCqhvRNeduTV1PamcQcH@2JHfun!Q+b$E5%WV zvTlEqdLT%w@y)rHXMfngTf#KpkKJphyW>R?QVrR{E$-*PNgbqo(epwMt5SFwKy5v* zws(JG9UPkBn9OG}hdP)--4eh>+d0==PB)tUKvq|M>!DQ#)X~-#13q7v!lYqxQ0C?E z`7~*Vh>wSotlwWY$|&M=D|Xncds;4im|;kD#-Sk3EX!lYIdowbkS~KQNqw{u;`yRG z%J(M3hz~l`Pd~Kp);c*}W5ml-_T{_Uu{)L09@3XzXOv9-i5S-@*&aNzsJV$|<`o=~ z;3j^SyfmS2iKX5C0R*!@#QO~-+hv_wuR4WU@?voX5ibIZ0e9 zDu{I@kO#RB&9cEtWF^zHv6HPMg^XXC0h@0Pe4pF<`udaBGHmQ6U(hE=1k7ix5EL)5 zbGk)X0r+J$auJi%9>=Kdvb*Dc+?l6rQQr7-@kdUGv+#=QM88i=Jj2L#nB4i$ok7Uq z>0{X|cLNT-V3{lmr)QK2y`B|=p@|HmHKhXy0JpNQ&rfBDrTfXf6w0M~IAJI5 zL=o8bT%p}7Wlet29!^XDs~-BFTelSrNIt{KZofj8I@9N?{ifF#7~znL;A<5U?`mt3 zDeIw*>||~om_-q1bDCNiyt(quq5e*ahEq!^hA%!fhT7yaGPl1V2%}v&2Qy z7IGfjSgm)fcWgmpMWg_s>>fpFNR|Df`kSa^VSl3_4)anTZR@zTb!M8+bE3S%CCwqI zPJ6;ul~=QuDZj2DcGX-|Y^VS?X6k5(UCbDLdIZzHPjOs1Wb^Svx)m|%k?MoT)oDVF zahy&*ShA+{xZG;0gw+o<=(aAj!vem(w`brG<7y20B~Q%wZH8DEQH)2ZkZ79l{NKY2 zHOFI9^Dn%;gwO|L=g00omS*^~Bp-q6d~wFFCAQmH_b&=3&3hcxkxd-8H%)lbSX4q( zizMmo6KzA1;=PQG9ASS~A_n9xb74K*$$-wpDC2qTuxoZOj=-QuWfNQ0#F|SkJE%Eu z7acOwg*N9+Rn;#*-1{=@F>BHCBssA?8Xi<`j$<**&lJkbZ!2tb-uFJsZ)Uq4I3G0e zV{59QL30uPWEH!}dv!-sw zFv98^-zbXOpPs7xlX6TnbKs-dvjZCBI}LIK7i~ErZY(DrHZgRapnKhAhWOQf{6?p2b3Nd$fo73vouWiLG&SJRKCEPn2qS-P2qWEXS zbj0>hb!kig4$l>Xzqi-~LPvSp*{ML1NcDDqDdAwZf^<@Pq=kK^itmjR*E*x)ST?=g zyxhJHM7!{%+{TG^MoTkE5ip-@D*Qf5vm`K4xXYAAEJsGT+7{z*V@f&VaLHMtl10g- zu=AX6=Nw`?*7BI%`>B^L#XpW?EO{B1M*U0ZZoLc{@3`iJX79}L8Na{&Xh7Nr#hi&~ zO5DODO+lL7`OzL3eg6;kFM2dQKw79(>6|9n?LLm$o!>+WE_mCBb_ldxM?&2uSvd`cB_H!os)3Rnhj*Yigrd?ZOe2esp&5)3DWDzmvYY(svWBXgbzwhEGjnAy5;~RsV>%3_8<4_OGVhl*A{)?K{ 
zH!6;hrF@?hhNyZ33W!rXWy=ORM@~M*nbV0!JY;~yB9n{a6g67Wc|05-szE1XF^jr7 zICD7X%S)80sGhb&UCH9Jp$3evL!Af^O8TYdu|js)C=}s5w(J&9g4$C9=Qjk34b(s$DLZ4>sHuB-nZ|Db|+`) zmydi&^BD@wiHgZ*`-K1K5@7?wI(`$#b~XuZk@p!#xfq(1%m=(b%{G;X~UGr zw40&+;is7zf5bX|riA~P{Z-MsPtd7B$%iY|pYUM-h~#UYiBEzS>>URk6NZIP*c@a? z;$Bnq;D72QLlAQaF`2M#x)t(gi@A(LU)sl3fG#$8U1HImZ^%MPX|3&stS8WrZJ5?t zBFm#Pnnao9#c{Ea&1f&viLI0}Ll>)+;aRqE?)HYQahfS5G0JI5t>M?`VCPvw_r(fy zp&)h3F3l3nV=Sod*TtAP=w~dSgVRZ)%Tv2)Xs`|82mzx?U;5#;e3R8-|XN{XL|6b zt`ea>|Ccm$^C_8|zu2BBriZC7m-M;ZbuM+O*}r-Ffe6=1sm>qMH5@Zv;1XBUxzayd zM(v4O^Q9;Nap`k4-S7Ftv`ESW!IugohvsU7ZntPho3Ak%&texeCH;e+a*iJ&NjZdX zF|(<5B{f>N?32|_Pd>3&=s%&UJ$`7+NyC$V(NS%A$tT=war{7arADaCz-hA;CIUHP zF&53v#Y(0*`i;%>l$$D^nT4K`yyJ>_L{jAY{1rw;+K1p}^M@gUgQqZron$MlBQ%3cQ z+^maFF>tJ&%lZ2BZEUe^bS!5GjOZRmWokC>>&N?49?AP<{EqZ-=V9_;TbMI|@n_r# z?i`vS9otz{b@J_K+q{yhrrT_#)D!x#R&DFM{_5wFj9;tAcSil@RK3n#%__@B3ocW} z-5l@7nCt5f$BX9Nb}`)2Sld4DQ-Ky-Pr{jsZq2-(-}pcyz;)_{{tX>2bSs)%W7|59 z9tz#ruGkL$Yc71hw(7isLEwiwXbL4vSst6HZzYB|4ETSdj{Qe#bD?Q+!Qxku(|>e7 zgH#o*G}JvZcLRteg78-9T9XV9qRi?3)}&m_;ZTB8YzEuuI~~5|nye%`6w3OH86z|l z&o|`X{H;k@ub$7p^=0RK;vGU_^~>BhAPbcbHzig8sc-4h_{cPmPd!NKo%&OrrUaf3 zTg|DX^9t-oisZWWcQ%4ReenLc86+LJ;)XaJ{3)au;o5F`8VQyn5bTl`l5pw_OzBGA zPcy)3;wt!X?t^pN4X36AlM9709Cm>7r+PlV1`M;=2XgiJk9<2m%v=sROGD$D3Qzm0 zfX*FDjRx60BPBN{&+cM%U~MY8>_UHSaMJMhuV_00apwUF-b$uI5RA}!2d)%RYbV#$ z^*uCX?>Z`H;!wC~+;BejT_o~pwhXaZbeI5eH!I)z++nz7 zXX%k*z|aawk0v!3AONR|<`Hhe*chmjgIA>q9eS~cFVYXu=%op~n79Xgi>^w7)slnO z_&&QBRuMsMo^F=#9r(_dlpA8%{lOxU9W2K4<^1!LhfX!+nq|%27;m4s@W#?xpHu6_ z+#Mz^>vz^=yBZiloeuA36TJLAWwt?NeaO||!H4N4`|q#L{~*A5stwG0=JttY7GC<` z9IvJsRT4B*7x}|rrzSgu+kRRRIGEmc#)~WCs-p-ex(!y1q#a=?NpyecpJ0T|=uHWs zR1BobcBiUrR@tjR|3sHll6{vAd?1gmxS0%%HL##3dBL<~w*S39!9mkmI}v4LCO>Jw-A+6<@KL5T(?-n!2a%NJ*Yb&6Ib9r3iv9wG^|X~Y%i9Q^Ul zrdMSV21fckJZ|Cl)~X}{ABaQ00!hth^6PWCGwU!-t`s=9bbjHsrWMf#rkRe1$ZoMH zeJ?@h+qWD1fA5EsQ+&n3=i|SkF+$VfSVy9qbL&K7BA@3K!MmSm$#{@4m4#`3s?hM-dW3 z^u2P&gn_Qq)QjURsw;B?0}sV7eocsHU=y{>Yyxbyd7s{wfDL%hlNL<{pICPT&C`#d 
zzbWWj$KBI#kR_EhZA!oR&toxfMg{VMB};$+y!&J{j#ts+aez0b=q5ViAl1llkK^Gv ziGCBjP~lh7F`--anRx#P3Bq&0;glJt%U;CZ@IsHZXI`I-*3%Vm+=8A%7Q>l{8xzo4 z$knJ1{wzBaj0l+{g5s4g8oy2J7@*0!tOwouwkEz%H$je}m~l?B6g|_7;N{zXiAPR+ zcJC|wa-}7uZAH)PtCV$ihx2a}zF!dHPLUdY@3H**dy>D9P#9*R^GmGD%JY6$rA298 ztx*nWogJW{E>^x^TVC?rfpe6NjfFyf?zj!~{MSv#ipWw=vYre#T%5vO1|w^9jLVO2 zme?mK6Vmaa1Gvb^doWV-Go54vGp0!$n1yIS2Uu73hzDCoQ?F3)@O0l|WT5sB+N*eu z9lh`-t@0Rc?0KwtDE>NJ@Hf1*Xx1vBo9iR-ydYi3B~7jl*J|S>e4n^sTRWphLBZ}M z$vOUvI*TJK1ROqgmc)BjSb=*^T7YATVZOi-0H!gKl9Wrvq*i%eRa|Esm~N08-QOU0 zMc%Eu1G^bwSFDxR1=Lv8@Wn>1nW?@!Gd?!*^eJU3cc~T#I3O{_aslwp!?Qo31BqMb zGUrupmQ{fjv^|Nkupy7HQk534V(8kwxsHWi~u3xsC&rv>>HgZ z<}rcX8Ta1e6IwjRp)gCs27HR{JQOA^BB@FL+?Zg*3Va6v{v}2SN%LIem?cI6?z6C+_Q`0Y zG5ZDHP;F)0Q8d{neD6MEy!k*jF8498C>KM|h^;)IiFWF5)T+RcI|Ux$R_HxSs~y2B z0@b{<6v`v+@3^|^UqfKV{@B4tVkS88h6&;K(sY6mknKKL4{n@F5tkv04>6%sNs3SQ z;{6KdW=aM;Y`}Jxpn^o$LLm<@X`gIU1-eCI^YIjg) z>g0%M5$*^t)(s}DmihoKs<8&TE52Y{JFI}^w^3!li~I8&8*#;FB!{>joLWPTvJ$j4 zsNlI9foK1sZD&@9R?XRxHy1xVsu|A~R>&k?W!w}bmYo`$GVb2bjw(vs0v6&^b-2)l z|J?aM5hbkQw-uA4e#}o(&WnoI2R>m4))W>JjhWK3UQA)4~xh7Y-3#FN2PfT|18;Z8SQW>?uf7ys7#h{X!vgjqW z{zXCen6F;z8NQwb|5DhPFNErf^4e#y3vzk4`$Y%V3E-0V&4?BwoL*j}JHIie#qHFZ z9*FTm+YxEG!ERG6&$C{# zYkN&`=&xPr1#ES?&FtYOTmeqaKh>q1p*sQn74*}Wv@zp*q7_IOmU0?x>525E2$m<& zVM1*K8)9v-S5di<4r-HlQGQ`e;K)?(<8$Rv<@*^@+1@GAw$#V(wDHu|F3#i_FWl`C z%0SnKFsyQ}sjr6G@Q8PXuKtnbXq-D$L3t_X`}7Tt*_LIKsxw&I==Dd8lp#*pgL~GwTg0_ukq24%f!dSf+K%g@DJciRa{ncp zn>e|}F|XOQH*`<^uRGPR5|H;$3iVP?#T^!8Zr6X)R8fi8wImTi7Wo4{8Z>7_DMkr6 zLc0ntz$)iR+HGgRpQZo5_dnBnUt4>RwJBbu$%h}gUlfW+!`*S%W6GdI)>R5xD>(l* z@t0j~*A}&GIiUv z6NA19o0x3AchL;nZ(%7FNOkfe64@%$wsJUXta-FG{o5~yYj}XNm-PPW!>rR*5$?-V z>p6CYEUWI0zVRuIJGkrcQKk^Gd~rXbp&?m=%bz$Z+zi??(ooBh`NgII4H>D4!<<+{p!ulJ@nO-AvBc!A~pE-vesTw(dT zd(g!PJZ@P7iSBj561!dple>q=@mz|2VBqloz!sMa?^uC2>AtQQ( z2Yf`$Z1>(0PemD{ZD{qlFkv|JPb_9r09M@gOdi+pQukaKt-C!FF|*=|IKpMGD%x5z zqjl9IAiWw|tUj9TPAt{k`XDv0X6>>Oss60J?*uBBcK!4P=yi?d<$f%&_`W*#^8O~Y 
zQ$pK1s-L11Zsn_OzK@#^fz$Bb+P!Fkc5c?AbZEb7_Dfn{+YinrIo1iBA#_Vz0dQaP zlght%#!*3a7mqJ@>n^DMJtuYg>bKT(xf#oaCTraEE)H$}v^5PqFbz47O)!rmBaDG?WnLw;nd}0(hj~7y)z8@oJ4G>AF zM))$jZi3(KJ1E_S+_q_hEVq8*&#xR4@2|Azf0i)Mjr+(|7ywSaau{FG+GFs7VOca( zoBt}~uwmr{?Lwm!5diH}XpafkJZb>OJRnMY3c2&8`P;rh%Mb2fp6c1aB7gg5o*SG} z&7HTmj>c}i6mrj&G8`6s2IjR3WSSEFH-yd zI#{3}&AU?}xRlNzq&ahHpkoCrs0P^P=nFj&t88bi^KI4GrlZG^V99f%C4&dAz%9{q7Zg6#`{KX2-y z_HRa*>Z6Chi!R}B?G>PXkSNBU6pw3cNwCaiMp;1lqx@<@M_2ASC)RnXBy6;##vn6nrU|n(zhV;$0Cf_}&C%jedc>F!S^{;-U4}6S+ zsuT{bBfM~GLrowMH12&I*GUo%g4S!7+a7(d{Ij#E@S`Xk=UnV$$L!pr;ZR5STFm!J zrFGD0<}36Ka(mNaBX+ho4SwtQ<6}BPqlb`814+i|Q>piv8XI&^!#-|Dg3PF84$&(x z!s|c>^Tv6kw?HoM7g>hPYV$WZ{Tj5jbgb!~JsdBLbhXYoY8oH!|9*f|Htj0O5CH~l z-{RPdfekt=7$$b;OLVpX9Zl_>Z0)dx+~{{U75cV;21oS}2H{P3@0L7a=BoTtlt6R+ z4e>x~9MU}NR<07yO4bn}zAzTK!lS>5eulreH2rh2sDSC~9u^6QeGd{(uL_7A5T_iR zQ}Gqi-II89z?EwNQ{xtEkATMWVf@ZLr%M)KPA0z?1|p2p#8Mj zuL)Hc42=tb(?Z___nn{Ue3?9&(Y?zxL_g4hN{E-_ClCeBqSdCSDkE_;KYs)!V=8lm zCeY-aa5*W$HtntH+w{d3KRAo#md+#|U0MO40_WZxezd`VBqpPwIm^-lo5*oR$QnZ& z^5-{~bP$ccxU#r6X&KZguV3Z9a!Dr*863N~Y;O{KM~tk{XhOcZ8t7!a>uDB}h_9hB z)+ek`&3_Mcyw1zdJf*QUC$9e}3eg!6S_$Stx8ZV*MRmJ0CYGP2bt6eH!Bu zdS=kLrB_`?*tTboBC_=~Seim-H>*9EqdO*nU z<$(wmvST!y0ZI zb<^$o7SImb_MbMb}%#CiOgbQ$g<3Vz;^&q~pGsVBrw#0k31)~Nj39NTSM0$q zu^%J{u{MUYXED<8W{>OSQlY`u_Xdqk?G6x1x-otBRxiyi~$w&`Sd)JO0@W-%bWrN{xPI?{)SiJV4(aEcRwFlaqIAvR!#E7b6%2BSR^5rB7A@@fR!!(oB4g}-9ol^G$17Ij48`Y2U++__SD19vLoNR7x#AOEze$XV6=gocBG-QlA6ziKc##FchFig2 z31-WnYmC$dK3~V6pS4%3G;Zo3Z?F4V4_^Ne#;199ND2NQZy5alPD?@F1O0!KTRL`d zW4B&w3-dWt9%3p^#+ims_k3)b=`KVjjiI%LX(2?ZuH&hvLs8zRJs@sV-YEgmqm5I$ zfBRU?*1X}Vh6ur&-xj-QvzzZqH}>-=2FUk6aU`F={*4+z0&N}a{SW^Q0zslHNR;~c z^S|zF$VsZhZy){FL=KVHEF)~zK8R~asotFE;GA+j>N>Pc)efXgz9AW&#PTWMAOWc; zPE@%VFPp%p;W4m&c{kAnzeeN{$>(v$y+Ws^asZmWtAiV`WtV|3C$o6Rt^0#ZVx^8W zamNkbrNq5=H*W#&@)jzb2gLkVwh_<-#NY?@lcuVGeR=G6BeV;a-z0glcaETf5UoC4_&b+f~Bz(!p`0A*?+nW ze=$9YO+9YWzC6>b_?`%A3|DEH&v;MuIKD0K8!s~<+Z{T5M7v-@4h7u(nFJ+-(zoE@|=nP-xdeE41JZvIa8 
z;*E5;3AkO(+KOI~weoZjqzg>2ADEb&m7$?}2M$sM`n?0KED5`xnR^1UeOkOacPv%P zPJQw6H|9EZNJ)==%{7@qn%rE|%-;a!=YAtN8BqU=!XGi1L>z##_~U>^TCy~%kuYx^?6q&3g z${moMfR17nIhp2vxX@TkQ1jFAetOnn3>h3X(r01~{J~fL4veAiK##aU-*XPI9vc-2 zqq$!yy6Uhz7G94yVL-OBNMYgi?YV2g1J9luMngNj36N><@AI#2K5DZ1XBCT=nLreWh|}Lm?+H$-10SnH3FTj9s+T?iFW0fc?eXj?`X(-HaRA5m&#T&`<| zBJsQRx%qp-gj>vpo_OEEn1N#Vj0}0#`z;TSB?DWM!DCa1N|y!sTZqw=uE!VDHJHU_oT72e`yVgrnG0OR>0)4h98K4fw<*G?_;7+UUYvu zay>CJr0usDaHZa~y=Ms$0MAf{gq6bLkwEZY4j8w7B_r1GNWwg)LeC0_7-f*j>(!%S zX8=ogAtS9g_o8w0zU75RJvs^}S=Dz92BQnPaw)hK_;%9N`@kx8XWX{02D@y}1_a+b zHu@O@{`)CnZK1v04gJ51V3NExgP~Z?Gth2-V4_d|5J<^0$r$^ZXXfb{R|7K;;;(p3 z5PdKmFc+at7u8lHgbmqRGdv>min;pGA$%?txV|I}C4qMk8R_#NMPn|o^P+QXa4%xKXMs4X zeEtmB$HQH%I(UvD@ga-v`=;t?!_@tdVJSuQ`!JIz_s@eSM>v)*TS4%?Cfa+2#e*4! z0NC7`e4fHW?fLJ=fsT4NZR*RjCRgp0Ib0m31?P?VhBB(-m@m5U&iW_z)tfvWbSE^- z@=&6$@fI&;>WT@S#AIS|h8(oK7qk!Gx57dx`${LF-%nyQm&C&H^Lg9p43kL8;JH9K z=9{|R?@$=a^0QZ_A3WT#jCoZY4&20wBg4yqIr|nz+qiLQG8n*l3Tz0}NPWmvN z`jtJwBX$;1Vn>EdDwm3x{&@_>A<|6VdxZ6^%p1n4*q{EagwtOm^3#V^T@UQpq|=k> zecvkPKK#w!v9vV@YL|f=wynRHyN&hKsds6^IluXLBXq_q{%Mf!(R5rsB`%??#Pv_= z7Jfmj2P^p{_rJ-g%kM|zV#d!U{2L*Yy|@6o_1{ggF6#eeC85|kZ+U0JYj>)KqzVEo ze3VRfD?W+dv0GKN-qk@EYW@<_pT4GD##lSrCrYCtRxk73(SU?@!-@ zj;2F#C$t-w%)E2;z}ihe)pSB;7ezB3iWxr`%}x%=6?K|cUXx$>=O@7S5{{#`u6ADj zC)ZutfxPl>54NR-{JMYX{2j=DnDhUe{tjhfCI1;kQqu;ByuMK3WZ;Q(i$CDW&T_MT z{VK$hk?xK(aGSJf1i|Bxjn%oiT^qdr+^j7$5{fq`POrkO5KD+pTR!1|(x~jTUB()V zgK`oH4{+4=@2?(ybO*9K?|D9iE!02_o`sru12Rp8HsW;gek`y8)L;w9MB zFWmytxsS_}I0SUz7^1no7K4Zl@Hlbal{r-qyWZ2VwT^vkc>c=b?c zVAnaZ^Z_yB!cDTC01p6GgaGN!!JZONPr-zOc-TqHfI+L+oU0y4!88HAhz2eYNU!epOE}?DoZRrzJ1G&-uO&$z>x(0U=5%jdQ@x)_M~5ssYeg)0;47#u{dd z^nruGEq&*h#kig@|ipkszH=aLX7SNh3Hd2!A zK%_QfLPyt`O!$R(CJsT4I2f`cDR$7O*z^+vOu~}IC`)jVC?Ybb z?P0z@UckG=kd!_!J>i2PD|hiX#6cB!C}r1QY*27#3Uk~j#^txJq5jLW&s%B1w+jBL zw>$?pw9~!rXA1;@0R6Ff0T!uyHAG5baLP4oyCpo*Z#^TlIfvRJ_l4#M7sGsl-Q|h6 z)ALA3HS z!U2(!54?~V7bJ;_ih)WaeT6WDd+ZbDBLV{WmtNADZ~to{4IZNcMd4(TV~An=j?60g 
z1y9#ZtqKV&hy@2!_9V*0ixmmDv_C#v9xjI5AI1FF?9I6BdI~3HvH%b44cjX%8<=_Y zyjkTli4o@0n)S*K1kgq)*2#89m0Y)vzD#T!Qbf6AF$mJNHeuOwf2^W`oG*k z&f;1t5$Ti|pWGyQbfT;P1L~S5{un<&5vCtN=+0F;J%RDA3#oJ<>V~0^sS?37R^O45 zPS1ncutf#dFCMT2KatH$&qpCwYwjGgtbSm|n^PF5M?nIMx#o$*riq#yDS-qvqsCCU z7Bk9|IF@)i$>;yFN(0_<&*`qP9N1sm+xPLh1>?6Gr3&I8skszYx1hP!BHWI&tkmz@wN@=MGo7Ise>X;(5H~90b=dTDFw^_vImM zBp>XzYE_MW7IW!$oemD~gFa;}F6~*wj}H$OLDC`(#>30YJlgaEqMj8q3X4nc6D+!6 zOKY+9^;aJ#*6N=lE+!yy%XjuCr?_u+%p0>gYgsGKee4I*mFpz#i7T4`FY!)-#z}^q zFMs7%KeL%8D**emo~4@g{J)-->b3omU4buHc(2(>Rnb?rej!OuMa=m+tC9j7M@iae zDau!IF>JDUjQ|m6qsm(e+n>agA)<^_P<9o78rqdRv*is!1oQj#0yH`Z?=< z#*PEh`{x3J87Hv-w-UiIC#$V2PM{ik_7=ARU2FKSif?VL%SH~Gg@GN&ilCE^NH?>r z@O#VPT&(IzN&hQJrUmI5pimvnK7&9TFu?4+1v?s;V|+c<7e)`i3UB(Em#Ii4y8V% zs%ZS5mr;!}O*-#m|MeS4-b8meIB+xm+lEk{!dT^h8?OKT{0AfX;49 Date: Mon, 6 Nov 2023 14:19:20 -0500 Subject: [PATCH 214/252] update release note #9590 --- doc/release-notes/9590-intellij-redeploy.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9590-intellij-redeploy.md b/doc/release-notes/9590-intellij-redeploy.md index 4c6ab29ea9f..07af352ece4 100644 --- a/doc/release-notes/9590-intellij-redeploy.md +++ b/doc/release-notes/9590-intellij-redeploy.md @@ -1,3 +1,3 @@ Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using IntelliJ IDEA Ultimate (free educational licenses are available) and the Payara Platform Tools plugin. 
-For details, see https://dataverse-guide--10088.org.readthedocs.build/en/10088/container/dev-usage.html +For details, see http://preview.guides.gdcc.io/en/develop/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools From 2ce299738bb4aa121e09af6ea18bb30662f2f72a Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Thu, 9 Nov 2023 17:13:46 -0500 Subject: [PATCH 215/252] Fix ProvIT enabling the provenance feature --- src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 9b3b66538d7..3bfa3d72fbd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -17,6 +17,8 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + public class ProvIT { @BeforeAll @@ -27,6 +29,8 @@ public static void setUpClass() { @Test public void testFreeformDraftActions() { + + UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); createDepositor.then().assertThat() @@ -71,6 +75,7 @@ public void testFreeformDraftActions() { JsonObject provFreeFormGood = Json.createObjectBuilder() .add("text", "I inherited this file from my grandfather.") .build(); + Response uploadProvFreeForm = UtilIT.uploadProvFreeForm(dataFileId.toString(), provFreeFormGood, apiTokenForDepositor); uploadProvFreeForm.prettyPrint(); uploadProvFreeForm.then().assertThat() @@ -81,12 +86,15 @@ public void testFreeformDraftActions() { datasetVersions.then().assertThat() .body("data[0].versionState", equalTo("DRAFT")); + UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); } @Test public void testAddProvFile() { + 
UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); createDepositor.then().assertThat() @@ -196,6 +204,7 @@ public void testAddProvFile() { .body("data.json", notNullValue(String.class)); assertEquals(200, getProvJson.getStatusCode()); + // TODO: Test that if provenance already exists in CPL (e.g. cplId in fileMetadata is not 0) upload returns error. // There are currently no api endpoints to set up up this test. @@ -204,6 +213,7 @@ public void testAddProvFile() { deleteProvJson.then().assertThat() .statusCode(FORBIDDEN.getStatusCode()); //cannot delete json of a published dataset + UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); // Command removed, redundant // Response deleteProvFreeForm = UtilIT.deleteProvFreeForm(dataFileId.toString(), apiTokenForDepositor); // deleteProvFreeForm.prettyPrint(); From 6485444c26ecc2bab226aae75c7ee99f0b1b45e6 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 10 Nov 2023 15:26:02 +0100 Subject: [PATCH 216/252] single retry when datafile checksum validation fails --- .../harvard/iq/dataverse/util/FileUtil.java | 30 ++++++++++++++----- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 1ad389fb0e2..8e408e51f39 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1449,6 +1449,16 @@ public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { return s3io; } + private static InputStream getInputStream(StorageIO storage, boolean isTabularData) throws IOException { + if (!isTabularData) { + return storage.getInputStream(); + } else { + // if this is a tabular file, read the preserved original "auxiliary file" + // instead: + return 
storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); + } + } + public static void validateDataFileChecksum(DataFile dataFile) throws IOException { DataFile.ChecksumType checksumType = dataFile.getChecksumType(); if (checksumType == null) { @@ -1462,14 +1472,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio try { storage.open(DataAccessOption.READ_ACCESS); - - if (!dataFile.isTabularData()) { - in = storage.getInputStream(); - } else { - // if this is a tabular file, read the preserved original "auxiliary file" - // instead: - in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); - } + in = getInputStream(storage, dataFile.isTabularData()); } catch (IOException ioex) { in = null; } @@ -1484,7 +1487,18 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio try { recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); } catch (RuntimeException rte) { + logger.log(Level.SEVERE, "failed to calculated checksum, one retry", rte); recalculatedChecksum = null; + IOUtils.closeQuietly(in); + storage = dataFile.getStorageIO(); + try { + storage.open(DataAccessOption.READ_ACCESS); + in = getInputStream(storage, dataFile.isTabularData()); + recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); + } catch (RuntimeException rte2) { + logger.log(Level.SEVERE, "failed to calculated checksum, no retry", rte2); + recalculatedChecksum = null; + } } finally { IOUtils.closeQuietly(in); } From 160f9f77f7a958f28faa2bafdfff775b69dff695 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 10 Nov 2023 16:19:40 +0100 Subject: [PATCH 217/252] rename getInputStream -> getOriginalFileInputStream and moved storage.open inside that method --- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java 
b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 8e408e51f39..4fdeed421f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1449,7 +1449,8 @@ public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { return s3io; } - private static InputStream getInputStream(StorageIO storage, boolean isTabularData) throws IOException { + private static InputStream getOriginalFileInputStream(StorageIO storage, boolean isTabularData) throws IOException { + storage.open(DataAccessOption.READ_ACCESS); if (!isTabularData) { return storage.getInputStream(); } else { @@ -1471,8 +1472,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio InputStream in = null; try { - storage.open(DataAccessOption.READ_ACCESS); - in = getInputStream(storage, dataFile.isTabularData()); + in = getOriginalFileInputStream(storage, dataFile.isTabularData()); } catch (IOException ioex) { in = null; } @@ -1492,8 +1492,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio IOUtils.closeQuietly(in); storage = dataFile.getStorageIO(); try { - storage.open(DataAccessOption.READ_ACCESS); - in = getInputStream(storage, dataFile.isTabularData()); + in = getOriginalFileInputStream(storage, dataFile.isTabularData()); recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); } catch (RuntimeException rte2) { logger.log(Level.SEVERE, "failed to calculated checksum, no retry", rte2); From eee3d87716dab80a12e650a88497ecfb1d8ecb07 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 10 Nov 2023 16:41:31 +0100 Subject: [PATCH 218/252] try-with-resources --- .../harvard/iq/dataverse/util/FileUtil.java | 29 +++++-------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 4fdeed421f1..26cfb97a0b6 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1469,37 +1469,24 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio } StorageIO storage = dataFile.getStorageIO(); - InputStream in = null; + String recalculatedChecksum = null; - try { - in = getOriginalFileInputStream(storage, dataFile.isTabularData()); + try (InputStream inputStream = getOriginalFileInputStream(storage, dataFile.isTabularData())) { + recalculatedChecksum = FileUtil.calculateChecksum(inputStream, checksumType); } catch (IOException ioex) { - in = null; - } - - if (in == null) { String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.failRead", Arrays.asList(dataFile.getId().toString())); logger.log(Level.INFO, info); throw new IOException(info); - } - - String recalculatedChecksum = null; - try { - recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); } catch (RuntimeException rte) { logger.log(Level.SEVERE, "failed to calculated checksum, one retry", rte); recalculatedChecksum = null; - IOUtils.closeQuietly(in); + } + + if (recalculatedChecksum == null) { //retry once storage = dataFile.getStorageIO(); - try { - in = getOriginalFileInputStream(storage, dataFile.isTabularData()); - recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); - } catch (RuntimeException rte2) { - logger.log(Level.SEVERE, "failed to calculated checksum, no retry", rte2); - recalculatedChecksum = null; + try (InputStream inputStream = getOriginalFileInputStream(storage, dataFile.isTabularData())) { + recalculatedChecksum = FileUtil.calculateChecksum(inputStream, checksumType); } - } finally { - IOUtils.closeQuietly(in); } if (recalculatedChecksum == null) { From 50c3620106b1b4ab98242c9d9e4e2d9914d70d48 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 10 Nov 2023 16:45:05 +0100 Subject: [PATCH 219/252] fixed compile error --- 
src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 26cfb97a0b6..6c540e88fb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1504,6 +1504,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio boolean fixed = false; if (!dataFile.isTabularData() && dataFile.getIngestReport() != null) { // try again, see if the .orig file happens to be there: + InputStream in = null; try { in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); } catch (IOException ioex) { From 7715ff9aa78364d2d49296ceb079145838cb401c Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 10 Nov 2023 16:49:09 +0100 Subject: [PATCH 220/252] try-with-resources --- .../harvard/iq/dataverse/util/FileUtil.java | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6c540e88fb3..df0c3e5a019 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1504,20 +1504,12 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio boolean fixed = false; if (!dataFile.isTabularData() && dataFile.getIngestReport() != null) { // try again, see if the .orig file happens to be there: - InputStream in = null; - try { - in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); - } catch (IOException ioex) { - in = null; + try (InputStream in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION)) { + recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); + } catch (RuntimeException 
rte) { + recalculatedChecksum = null; } - if (in != null) { - try { - recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); - } catch (RuntimeException rte) { - recalculatedChecksum = null; - } finally { - IOUtils.closeQuietly(in); - } + if (recalculatedChecksum != null) { // try again: if (recalculatedChecksum.equals(dataFile.getChecksumValue())) { fixed = true; From aa7eceeb762eca045127cf91acb35d6c62b00d79 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 09:43:52 -0500 Subject: [PATCH 221/252] add return null if commandexception --- src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java index 9fb584a9133..8b09291d052 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java @@ -320,7 +320,7 @@ public String save() { logger.info("Guestbook Page Command Exception. Dataverse: " + dataverse.getName()); logger.info(ex.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("guestbook.save.fail"), " - " + ex.toString())); - //logger.severe(ex.getMessage()); + return null; } editMode = null; String msg = (create)? 
BundleUtil.getStringFromBundle("guestbook.create"): BundleUtil.getStringFromBundle("guestbook.save"); From ba4d178f5c541ec88ea0879ec5c715bda529f2c9 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 11:25:30 -0500 Subject: [PATCH 222/252] allow longer custom questions --- src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java index 2cb6f27c3e4..d880da5b4a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java @@ -2,7 +2,7 @@ import java.io.Serializable; import java.util.List; import jakarta.persistence.*; -import org.hibernate.validator.constraints.NotBlank; +import jakarta.validation.constraints.NotBlank; /** * @@ -41,7 +41,7 @@ public void setId(Long id) { private String questionType; @NotBlank(message = "{custom.questiontext}") - @Column( nullable = false ) + @Column( nullable = false, columnDefinition = "TEXT") private String questionString; private boolean required; From 6cb1b9a961e5c4c5179f425c4c89dc478c29341a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 11:25:48 -0500 Subject: [PATCH 223/252] fix gb error message display --- src/main/webapp/resources/iqbs/messages.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/resources/iqbs/messages.xhtml b/src/main/webapp/resources/iqbs/messages.xhtml index bd17cf34d21..f8e1f5e8e9d 100644 --- a/src/main/webapp/resources/iqbs/messages.xhtml +++ b/src/main/webapp/resources/iqbs/messages.xhtml @@ -63,7 +63,7 @@ Server: - #{systemConfig.dataverseServer} + #{systemConfig.dataverseSiteUrl} #{msg.rendered()} From 1652764b44abbdd887a2957ec42ad31e108dd864 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 13:36:27 -0500 Subject: [PATCH 224/252] flyway script and note --- 
.../10117-support for longer custom questions in guestbooks.md | 1 + .../V6.0.0.4__10017-failure-with-long-custom-question.sql | 1 + 2 files changed, 2 insertions(+) create mode 100644 doc/release-notes/10117-support for longer custom questions in guestbooks.md create mode 100644 src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql diff --git a/doc/release-notes/10117-support for longer custom questions in guestbooks.md b/doc/release-notes/10117-support for longer custom questions in guestbooks.md new file mode 100644 index 00000000000..ab5e84d78fe --- /dev/null +++ b/doc/release-notes/10117-support for longer custom questions in guestbooks.md @@ -0,0 +1 @@ +Custom questions in Guestbooks can now be more than 255 characters and the bug causing a silent failure when questions were longer than this limit has been fixed. \ No newline at end of file diff --git a/src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql b/src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql new file mode 100644 index 00000000000..9a3002378b3 --- /dev/null +++ b/src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql @@ -0,0 +1 @@ +ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text; From 2424a55235cc015cfba283146dc2f97d2b8699af Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 13:38:23 -0500 Subject: [PATCH 225/252] Revert "flyway script and note" This reverts commit 1652764b44abbdd887a2957ec42ad31e108dd864. 
--- .../10117-support for longer custom questions in guestbooks.md | 1 - .../V6.0.0.4__10017-failure-with-long-custom-question.sql | 1 - 2 files changed, 2 deletions(-) delete mode 100644 doc/release-notes/10117-support for longer custom questions in guestbooks.md delete mode 100644 src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql diff --git a/doc/release-notes/10117-support for longer custom questions in guestbooks.md b/doc/release-notes/10117-support for longer custom questions in guestbooks.md deleted file mode 100644 index ab5e84d78fe..00000000000 --- a/doc/release-notes/10117-support for longer custom questions in guestbooks.md +++ /dev/null @@ -1 +0,0 @@ -Custom questions in Guestbooks can now be more than 255 characters and the bug causing a silent failure when questions were longer than this limit has been fixed. \ No newline at end of file diff --git a/src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql b/src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql deleted file mode 100644 index 9a3002378b3..00000000000 --- a/src/main/resources/db/migration/V6.0.0.4__10017-failure-with-long-custom-question.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text; From 6fc0f358ae53fec619b93a6deafd9297ce6a9294 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 13:38:30 -0500 Subject: [PATCH 226/252] Revert "fix gb error message display" This reverts commit 6cb1b9a961e5c4c5179f425c4c89dc478c29341a. 
--- src/main/webapp/resources/iqbs/messages.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/resources/iqbs/messages.xhtml b/src/main/webapp/resources/iqbs/messages.xhtml index f8e1f5e8e9d..bd17cf34d21 100644 --- a/src/main/webapp/resources/iqbs/messages.xhtml +++ b/src/main/webapp/resources/iqbs/messages.xhtml @@ -63,7 +63,7 @@ Server: - #{systemConfig.dataverseSiteUrl} + #{systemConfig.dataverseServer} #{msg.rendered()} From 00a17071c358b7ebee09e77130cb7319c665dfb5 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 13:38:36 -0500 Subject: [PATCH 227/252] Revert "allow longer custom questions" This reverts commit ba4d178f5c541ec88ea0879ec5c715bda529f2c9. --- src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java index d880da5b4a8..2cb6f27c3e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java @@ -2,7 +2,7 @@ import java.io.Serializable; import java.util.List; import jakarta.persistence.*; -import jakarta.validation.constraints.NotBlank; +import org.hibernate.validator.constraints.NotBlank; /** * @@ -41,7 +41,7 @@ public void setId(Long id) { private String questionType; @NotBlank(message = "{custom.questiontext}") - @Column( nullable = false, columnDefinition = "TEXT") + @Column( nullable = false ) private String questionString; private boolean required; From d3fbee58262ac439a0b10f4ca7e1494dea4a6c5d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 10 Nov 2023 13:38:43 -0500 Subject: [PATCH 228/252] Revert "add return null if commandexception" This reverts commit aa7eceeb762eca045127cf91acb35d6c62b00d79. 
--- src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java index 8b09291d052..9fb584a9133 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java @@ -320,7 +320,7 @@ public String save() { logger.info("Guestbook Page Command Exception. Dataverse: " + dataverse.getName()); logger.info(ex.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("guestbook.save.fail"), " - " + ex.toString())); - return null; + //logger.severe(ex.getMessage()); } editMode = null; String msg = (create)? BundleUtil.getStringFromBundle("guestbook.create"): BundleUtil.getStringFromBundle("guestbook.save"); From c33f07aad938f4707e6985ddeeec801969e4a3fc Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Sat, 11 Nov 2023 14:38:00 -0500 Subject: [PATCH 229/252] Add logic to leave settings as found before test --- .../edu/harvard/iq/dataverse/api/ProvIT.java | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 3bfa3d72fbd..6b9b59f431d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -30,7 +30,12 @@ public static void setUpClass() { @Test public void testFreeformDraftActions() { - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + boolean provEnabled = provCollectionStatus.getStatusCode() == 200; + if(!provEnabled){ + UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } 
+ Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); createDepositor.then().assertThat() @@ -85,15 +90,20 @@ public void testFreeformDraftActions() { datasetVersions.prettyPrint(); datasetVersions.then().assertThat() .body("data[0].versionState", equalTo("DRAFT")); - - UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - + if(!provEnabled){ + UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } + } @Test public void testAddProvFile() { - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + boolean provEnabled = provCollectionStatus.getStatusCode() == 200; + if(!provEnabled){ + UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); @@ -213,12 +223,8 @@ public void testAddProvFile() { deleteProvJson.then().assertThat() .statusCode(FORBIDDEN.getStatusCode()); //cannot delete json of a published dataset - UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); -// Command removed, redundant -// Response deleteProvFreeForm = UtilIT.deleteProvFreeForm(dataFileId.toString(), apiTokenForDepositor); -// deleteProvFreeForm.prettyPrint(); -// deleteProvFreeForm.then().assertThat() -// .statusCode(OK.getStatusCode()); - + if(!provEnabled){ + UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } } } From 3407fb9f813984c857ef7708af7d6dc239b8f8ee Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 13 Nov 2023 07:04:15 -0500 Subject: [PATCH 230/252] Add ProvIT to integration-tests.txt --- tests/integration-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration-tests.txt b/tests/integration-tests.txt index 18911b3164a..bb3bc7f9ce6 100644 --- a/tests/integration-tests.txt +++ b/tests/integration-tests.txt @@ -1 +1 @@ 
-DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT +DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,ProvIT From 2842cdaf246c531b04449ac4c8b20fc4a09c2668 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 13 Nov 2023 08:42:31 -0500 Subject: [PATCH 231/252] Move this change into BeforeAll/AfterAll --- .../edu/harvard/iq/dataverse/api/ProvIT.java | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 6b9b59f431d..69a87869fe1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -11,6 +11,9 @@ import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.AfterAll; + import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; @@ -20,22 +23,24 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; public class ProvIT { + + private 
static boolean provEnabled = false; @BeforeAll - public static void setUpClass() { + public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + + provEnabled = provCollectionStatus.getStatusCode() == 200; + if(!provEnabled){ + UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); + } } @Test public void testFreeformDraftActions() { - Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - boolean provEnabled = provCollectionStatus.getStatusCode() == 200; - if(!provEnabled){ - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - } - Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); createDepositor.then().assertThat() @@ -90,20 +95,11 @@ public void testFreeformDraftActions() { datasetVersions.prettyPrint(); datasetVersions.then().assertThat() .body("data[0].versionState", equalTo("DRAFT")); - if(!provEnabled){ - UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - } - + } @Test - public void testAddProvFile() { - - Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - boolean provEnabled = provCollectionStatus.getStatusCode() == 200; - if(!provEnabled){ - UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - } + public void testAddProvFile() { Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); @@ -223,6 +219,11 @@ public void testAddProvFile() { deleteProvJson.then().assertThat() .statusCode(FORBIDDEN.getStatusCode()); //cannot delete json of a published dataset + + } + + @AfterAll + public static void tearDownClass() { if(!provEnabled){ UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled); } From d029cacc9aae5e361869b73f7e76661c5ab8d549 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 13 Nov 
2023 11:35:28 -0500 Subject: [PATCH 232/252] remove extra whitespace #10112 --- src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 69a87869fe1..a944c6aa926 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -27,12 +27,12 @@ public class ProvIT { private static boolean provEnabled = false; @BeforeAll - public static void setUpClass() { + public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled); - + provEnabled = provCollectionStatus.getStatusCode() == 200; - if(!provEnabled){ + if (!provEnabled) { UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled); } } @@ -99,7 +99,7 @@ public void testFreeformDraftActions() { } @Test - public void testAddProvFile() { + public void testAddProvFile() { Response createDepositor = UtilIT.createRandomUser(); createDepositor.prettyPrint(); From c09034d638147c5cd618e5ff4a460e1840b8cd0a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 13 Nov 2023 11:37:16 -0500 Subject: [PATCH 233/252] organize imports #10112 --- .../java/edu/harvard/iq/dataverse/api/ProvIT.java | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index a944c6aa926..33323ff4239 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -1,27 +1,23 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import io.restassured.RestAssured; import io.restassured.path.json.JsonPath; import 
io.restassured.response.Response; import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.OK; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import org.junit.jupiter.api.AfterAll; - +import static jakarta.ws.rs.core.Response.Status.OK; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; - +import org.junit.jupiter.api.AfterAll; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; - public class ProvIT { private static boolean provEnabled = false; From c49036bf3d67d22cec384a8fe4f7cb23ed3d9a46 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 15 Nov 2023 12:06:43 +0000 Subject: [PATCH 234/252] Added: includeDeaccessioned support to getDatasetVersionCitation API endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 9 ++++++-- .../harvard/iq/dataverse/api/DatasetsIT.java | 21 ++++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 3 ++- 3 files changed, 29 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 292aba0cee3..68c618b0f1f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3995,9 +3995,14 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken" @GET @AuthRequired @Path("{id}/versions/{versionId}/citation") - public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, 
@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response(req -> ok( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getCitation(true, false)), getRequestUser(crc)); + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned).getCitation(true, false)), getRequestUser(crc)); } @POST diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 56bf53c1c99..d20f1e8a58b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3371,13 +3371,32 @@ public void getDatasetVersionCitation() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, apiToken); + Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, false, apiToken); getDatasetVersionCitationResponse.prettyPrint(); getDatasetVersionCitationResponse.then().assertThat() .statusCode(OK.getStatusCode()) // We check that the returned message contains information expected for the citation string .body("data.message", containsString("DRAFT VERSION")); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + 
publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getDatasetVersionCitationNotDeaccessioned = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); + getDatasetVersionCitationNotDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getDatasetVersionCitationDeaccessioned = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_LATEST_PUBLISHED, true, apiToken); + getDatasetVersionCitationDeaccessioned.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", containsString("DEACCESSIONED VERSION")); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e3a7fd0cfc3..2336bf8beb8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3345,10 +3345,11 @@ static Response getPrivateUrlDatasetVersionCitation(String privateUrlToken) { return response; } - static Response getDatasetVersionCitation(Integer datasetId, String version, String apiToken) { + static Response getDatasetVersionCitation(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json") + .queryParam("includeDeaccessioned", includeDeaccessioned) .get("/api/datasets/" + datasetId + "/versions/" + version + "/citation"); return 
response; } From 75ff2fbad275a4543525ac0dc62f65d3eaa0e5c1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 15 Nov 2023 12:10:14 +0000 Subject: [PATCH 235/252] Added: API docs for #10104 --- doc/sphinx-guides/source/api/native-api.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 1992390410c..2e3a0b2af08 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2502,6 +2502,16 @@ Get Citation curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/{version}/citation?persistentId=$PERSISTENT_IDENTIFIER" +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. 
code-block:: bash + + curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/{version}/citation?persistentId=$PERSISTENT_IDENTIFIER&includeDeaccessioned=true" + Get Citation by Private URL Token ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From be631af6e5fd5dd181aebdb0ee8a2dd1da3ff789 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 15 Nov 2023 12:12:31 +0000 Subject: [PATCH 236/252] Added: release notes for #10104 --- doc/release-notes/10104-dataset-citation-deaccessioned.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10104-dataset-citation-deaccessioned.md diff --git a/doc/release-notes/10104-dataset-citation-deaccessioned.md b/doc/release-notes/10104-dataset-citation-deaccessioned.md new file mode 100644 index 00000000000..0ba06d729c4 --- /dev/null +++ b/doc/release-notes/10104-dataset-citation-deaccessioned.md @@ -0,0 +1 @@ +The getDatasetVersionCitation (/api/datasets/{id}/versions/{versionId}/citation) endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation. 
From a376b4e3f4bacc8dc651b7048d9a323535dc92f7 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 17 Nov 2023 10:01:33 -0500 Subject: [PATCH 237/252] Add condition for 401 when a invalid key is provided and create changelog on API Guide --- doc/sphinx-guides/source/api/changelog.rst | 13 +++++++++++++ doc/sphinx-guides/source/api/index.rst | 1 + .../java/edu/harvard/iq/dataverse/api/AccessIT.java | 11 ++++++----- 3 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 doc/sphinx-guides/source/api/changelog.rst diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst new file mode 100644 index 00000000000..b78d268db33 --- /dev/null +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -0,0 +1,13 @@ +API Changelog +============= + +.. contents:: |toctitle| + :local: + :depth: 1 + +6.0.0 +----- + +Changes +~~~~~~~ + - **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst index c9e79098546..dd195aa9d62 100755 --- a/doc/sphinx-guides/source/api/index.rst +++ b/doc/sphinx-guides/source/api/index.rst @@ -24,3 +24,4 @@ API Guide linkeddatanotification apps faq + changelog \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 42e21e53101..d08f916243f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -198,6 +198,8 @@ public void testDownloadSingleFile() { //Not logged in non-restricted Response anonDownloadOriginal = UtilIT.downloadFileOriginal(tabFile1Id); Response anonDownloadConverted = UtilIT.downloadFile(tabFile1Id); + Response anonDownloadConvertedNullKey = UtilIT.downloadFile(tabFile1Id, null); + // ... 
and download the same tabular data file, but without the variable name header added: Response anonDownloadTabularNoHeader = UtilIT.downloadTabularFileNoVarHeader(tabFile1Id); // ... and download the same tabular file, this time requesting the "format=tab" explicitly: @@ -206,6 +208,8 @@ public void testDownloadSingleFile() { assertEquals(OK.getStatusCode(), anonDownloadConverted.getStatusCode()); assertEquals(OK.getStatusCode(), anonDownloadTabularNoHeader.getStatusCode()); assertEquals(OK.getStatusCode(), anonDownloadTabularWithFormatName.getStatusCode()); + assertEquals(UNAUTHORIZED.getStatusCode(), anonDownloadConvertedNullKey.getStatusCode()); + int origSizeAnon = anonDownloadOriginal.getBody().asByteArray().length; int convertSizeAnon = anonDownloadConverted.getBody().asByteArray().length; int tabularSizeNoVarHeader = anonDownloadTabularNoHeader.getBody().asByteArray().length; @@ -423,10 +427,7 @@ private HashMap readZipResponse(InputStream iStrea } String name = entry.getName(); -// String s = String.format("Entry: %s len %d added %TD", -// entry.getName(), entry.getSize(), -// new Date(entry.getTime())); -// System.out.println(s); + // Once we get the entry from the zStream, the zStream is // positioned read to read the raw data, and we keep @@ -466,7 +467,7 @@ private HashMap readZipResponse(InputStream iStrea @Test public void testRequestAccess() throws InterruptedException { - + String pathToJsonFile = "scripts/api/data/dataset-create-new.json"; Response createDatasetResponse = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken); createDatasetResponse.prettyPrint(); From 63725d75c115352ff9d0bb94f2e5b6b4d7ca5d05 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 11:07:17 -0500 Subject: [PATCH 238/252] remove cruft: mdc logs #9115 --- mdc-logs/raw-mdc-2019-01-07.log | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 mdc-logs/raw-mdc-2019-01-07.log diff --git a/mdc-logs/raw-mdc-2019-01-07.log 
b/mdc-logs/raw-mdc-2019-01-07.log deleted file mode 100644 index d7a6386160e..00000000000 --- a/mdc-logs/raw-mdc-2019-01-07.log +++ /dev/null @@ -1,6 +0,0 @@ -#Fields: event_time client_ip session_cookie_id user_cookie_id user_id request_url identifier filename size user-agent title publisher publisher_id authors publication_date version other_id target_url publication_year -2019-01-07T15:14:51-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV - - Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:15:15-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV - - Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:16:04-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV - - Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:16:14-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV 168298bae7c-2c5bbc1a9c8c 1 Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 -2019-01-07T15:16:19-0500 0:0:0:0:0:0:0:1 9f4209d3c177d3cb77f4d06cf3ba - :guest http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV doi:10.5072/FK2/XTT5BV 168298bb8ce-337d8df49763 4026 Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 Dataset One - 1 Smith, Robert| Kew, Susie 2019-01-07T18:20:54Z 1 - http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV 2019 From 2433114ec7b8430753bc730056a07e24ac0bb5d3 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 11:20:03 -0500 Subject: [PATCH 239/252] fix bullet #10060 #10070 --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index b78d268db33..a1cffd84f33 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ - - **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. +- **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. 
From e0350e735551270f9bd23bfa226b6946282df467 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 17 Nov 2023 11:38:53 -0500 Subject: [PATCH 240/252] Change 6.0.0 to 6.0 --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index a1cffd84f33..086ff4a20e5 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -5,7 +5,7 @@ API Changelog :local: :depth: 1 -6.0.0 +6.0 ----- Changes From 437e3b94edf89a2245310709c07d8238c0df4235 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Fri, 17 Nov 2023 11:42:17 -0500 Subject: [PATCH 241/252] Update doc/sphinx-guides/source/api/changelog.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 086ff4a20e5..2698ba3debf 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ -- **api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. +- **/api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. 
From 640f69e39f71244b9ba1d7f534180a6b4c8b58cc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 Nov 2023 13:19:14 -0500 Subject: [PATCH 242/252] add release note for API changelog #10060 --- doc/release-notes/10060-api-changelog.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10060-api-changelog.md diff --git a/doc/release-notes/10060-api-changelog.md b/doc/release-notes/10060-api-changelog.md new file mode 100644 index 00000000000..56ac96e3564 --- /dev/null +++ b/doc/release-notes/10060-api-changelog.md @@ -0,0 +1,3 @@ +We have started maintaining an API changelog: https://dataverse-guide--10127.org.readthedocs.build/en/10127/api/changelog.html + +See also #10060. From 83a66aac65db2f7634b3917d332b0e4253be3c84 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Fri, 17 Nov 2023 14:55:58 -0500 Subject: [PATCH 243/252] Update doc/sphinx-guides/source/api/changelog.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 2698ba3debf..f518a9b542d 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ -- **/api/access/datafile**: When a null or invalid API Key is provided to download a public with this API call, it will result on a ``401`` error response. +- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed to happy (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. 
From 70edaa789e84c99b110036c232155337afb5c459 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 17 Nov 2023 15:02:32 -0500 Subject: [PATCH 244/252] Remove "to happy " --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index f518a9b542d..d6742252d27 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,4 +10,4 @@ API Changelog Changes ~~~~~~~ -- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed to happy (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. +- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. 
From d34e92711a118b43b90023b458116dba651fe8b0 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 20 Nov 2023 15:58:06 -0500 Subject: [PATCH 245/252] Remove the authentication annotation from info API --- .../harvard/iq/dataverse/api/AbstractApiBean.java | 10 ++++++++++ .../java/edu/harvard/iq/dataverse/api/Info.java | 15 ++++++--------- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 027f9e0fcb1..754ea95e427 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -636,6 +636,16 @@ protected Response response(DataverseRequestHandler hdl, User user) { } } + /*** + * The preferred way of handling a request from an open API. + * + * @param hdl handling code block. + * @return HTTP Response appropriate for the way {@code hdl} executed. + */ + protected Response response(DataverseRequestHandler hdl) { + return response(hdl, null); + } + private Response handleDataverseRequestHandlerException(Exception ex) { String incidentId = UUID.randomUUID().toString(); logger.log(Level.SEVERE, "API internal error " + incidentId +": " + ex.getMessage(), ex); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index 0652539b595..0a3887cbcb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -35,30 +35,27 @@ public Response getMaxEmbargoDurationInMonths() { } @GET - @AuthRequired @Path("version") - public Response getInfo(@Context ContainerRequestContext crc) { + public Response getInfo() { String versionStr = systemConfig.getVersion(true); String[] comps = versionStr.split("build",2); String version = comps[0].trim(); JsonValue build = comps.length > 1 ? 
Json.createArrayBuilder().add(comps[1].trim()).build().get(0) : JsonValue.NULL; return response( req -> ok( Json.createObjectBuilder().add("version", version) - .add("build", build)), getRequestUser(crc)); + .add("build", build))); } @GET - @AuthRequired @Path("server") - public Response getServer(@Context ContainerRequestContext crc) { - return response( req -> ok(JvmSettings.FQDN.lookup()), getRequestUser(crc)); + public Response getServer() { + return response( req -> ok(JvmSettings.FQDN.lookup())); } @GET - @AuthRequired @Path("apiTermsOfUse") - public Response getTermsOfUse(@Context ContainerRequestContext crc) { - return response( req -> ok(systemConfig.getApiTermsOfUse()), getRequestUser(crc)); + public Response getTermsOfUse() { + return response( req -> ok(systemConfig.getApiTermsOfUse())); } @GET From 9a5f523a36b3e3aa708628a8de8fcb1bbe52565d Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Mon, 20 Nov 2023 16:03:00 -0500 Subject: [PATCH 246/252] Remove unused libraries --- src/main/java/edu/harvard/iq/dataverse/api/Info.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index 0a3887cbcb3..5478c3be0bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -9,8 +8,6 @@ import jakarta.json.JsonValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; -import jakarta.ws.rs.container.ContainerRequestContext; -import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Response; @Path("info") From 233da54f29b8a741d65fcb54e2f394a98485d6a5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: 
Tue, 21 Nov 2023 10:51:10 -0500 Subject: [PATCH 247/252] add to API changelog #10104 --- doc/sphinx-guides/source/api/changelog.rst | 11 +++++++++-- doc/sphinx-guides/source/api/native-api.rst | 2 ++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index d6742252d27..1726736e75c 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -5,9 +5,16 @@ API Changelog :local: :depth: 1 +6.1 +--- + +Changes +~~~~~~~ +- **/api/datasets/{id}/versions/{versionId}/citation**: This endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation. See :ref:`get-citation`. + 6.0 ------ +--- Changes ~~~~~~~ -- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. +- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result on a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. See :doc:`dataaccess`. 
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2e3a0b2af08..2713580f238 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2491,6 +2491,8 @@ Get Dataset By Private URL Token curl "$SERVER_URL/api/datasets/privateUrlDatasetVersion/$PRIVATE_URL_TOKEN" +.. _get-citation: + Get Citation ~~~~~~~~~~~~ From d720aeaaffce7a9196769789dc888f6ea05c193a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 21 Nov 2023 11:57:14 -0500 Subject: [PATCH 248/252] rename flyway script #10093 --- ...id_update.sql => V6.0.0.4__10093-privateurluser_id_update.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.3__10093-privateurluser_id_update.sql => V6.0.0.4__10093-privateurluser_id_update.sql} (100%) diff --git a/src/main/resources/db/migration/V6.0.0.3__10093-privateurluser_id_update.sql b/src/main/resources/db/migration/V6.0.0.4__10093-privateurluser_id_update.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.3__10093-privateurluser_id_update.sql rename to src/main/resources/db/migration/V6.0.0.4__10093-privateurluser_id_update.sql From 1c2a20d940632da96c9d56c1bb40367c7902b398 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 21 Nov 2023 14:25:26 -0500 Subject: [PATCH 249/252] fix formatting (indentation) #10093 --- .../iq/dataverse/api/auth/SignedUrlAuthMechanism.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java index e2c2f2381d8..258661f6495 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java @@ -61,9 +61,9 @@ private User getAuthenticatedUserFromSignedUrl(ContainerRequestContext 
container String userId = uriInfo.getQueryParameters().getFirst(SIGNED_URL_USER); User targetUser = null; ApiToken userApiToken = null; - if(!userId.startsWith(PrivateUrlUser.PREFIX)) { - targetUser = authSvc.getAuthenticatedUser(userId); - userApiToken = authSvc.findApiTokenByUser((AuthenticatedUser)targetUser); + if (!userId.startsWith(PrivateUrlUser.PREFIX)) { + targetUser = authSvc.getAuthenticatedUser(userId); + userApiToken = authSvc.findApiTokenByUser((AuthenticatedUser) targetUser); } else { PrivateUrl privateUrl = privateUrlSvc.getPrivateUrlFromDatasetId(Long.parseLong(userId.substring(PrivateUrlUser.PREFIX.length()))); userApiToken = new ApiToken(); From 949b0885f84d953ab51cc458b4f61c5d376b12a9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 21 Nov 2023 14:25:59 -0500 Subject: [PATCH 250/252] give Signed URLs a "ref" in the guides, link to it #10093 --- doc/sphinx-guides/source/api/auth.rst | 5 ++++ .../source/api/external-tools.rst | 26 ++++++++++++------- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst index eced7afbbcf..eae3bd3c969 100644 --- a/doc/sphinx-guides/source/api/auth.rst +++ b/doc/sphinx-guides/source/api/auth.rst @@ -80,3 +80,8 @@ To test if bearer tokens are working, you can try something like the following ( export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/test/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=user&password=user&grant_type=password&client_id=test&client_secret=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8" | jq '.access_token' -r | tr -d "\n"` curl -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/users/:me + +Signed URLs +----------- + +See :ref:`signed-urls`. 
diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index d802bc8714a..ae0e44b36aa 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -160,17 +160,25 @@ Authorization Options When called for datasets or data files that are not public (i.e. in a draft dataset or for a restricted file), external tools are allowed access via the user's credentials. This is accomplished by one of two mechanisms: -* Signed URLs (more secure, recommended) +.. _signed-urls: - - Configured via the ``allowedApiCalls`` section of the manifest. The tool will be provided with signed URLs allowing the specified access to the given dataset or datafile for the specified amount of time. The tool will not be able to access any other datasets or files the user may have access to and will not be able to make calls other than those specified. - - For tools invoked via a GET call, Dataverse will include a callback query parameter with a Base64 encoded value. The decoded value is a signed URL that can be called to retrieve a JSON response containing all of the queryParameters and allowedApiCalls specified in the manfiest. - - For tools invoked via POST, Dataverse will send a JSON body including the requested queryParameters and allowedApiCalls. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool. +Signed URLs +^^^^^^^^^^^ -* API Token (deprecated, less secure, not recommended) +The signed URL mechanism is more secure than exposing API tokens and therefore recommended. - - Configured via the ``queryParameters`` by including an ``{apiToken}`` value. When this is present Dataverse will send the user's apiToken to the tool. With the user's API token, the tool can perform any action via the Dataverse API that the user could. External tools configured via this method should be assessed for their trustworthiness. 
- - For tools invoked via GET, this will be done via a query parameter in the request URL which could be cached in the browser's history. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool. - - For tools invoked via POST, Dataverse will send a JSON body including the apiToken. +- Configured via the ``allowedApiCalls`` section of the manifest. The tool will be provided with signed URLs allowing the specified access to the given dataset or datafile for the specified amount of time. The tool will not be able to access any other datasets or files the user may have access to and will not be able to make calls other than those specified. +- For tools invoked via a GET call, Dataverse will include a callback query parameter with a Base64 encoded value. The decoded value is a signed URL that can be called to retrieve a JSON response containing all of the queryParameters and allowedApiCalls specified in the manfiest. +- For tools invoked via POST, Dataverse will send a JSON body including the requested queryParameters and allowedApiCalls. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool. + +API Token +^^^^^^^^^ + +The API token mechanism is deprecated. Because it is less secure than signed URLs, it is not recommended for new external tools. + +- Configured via the ``queryParameters`` by including an ``{apiToken}`` value. When this is present Dataverse will send the user's apiToken to the tool. With the user's API token, the tool can perform any action via the Dataverse API that the user could. External tools configured via this method should be assessed for their trustworthiness. +- For tools invoked via GET, this will be done via a query parameter in the request URL which could be cached in the browser's history. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool. 
+- For tools invoked via POST, Dataverse will send a JSON body including the apiToken. Internationalization of Your External Tool ++++++++++++++++++++++++++++++++++++++++++ @@ -187,7 +195,7 @@ Using Example Manifests to Get Started ++++++++++++++++++++++++++++++++++++++ Again, you can use :download:`fabulousFileTool.json <../_static/installation/files/root/external-tools/fabulousFileTool.json>` or :download:`dynamicDatasetTool.json <../_static/installation/files/root/external-tools/dynamicDatasetTool.json>` as a starting point for your own manifest file. -Additional working examples, including ones using signedUrls, are available at https://github.com/gdcc/dataverse-previewers . +Additional working examples, including ones using :ref:`signed-urls`, are available at https://github.com/gdcc/dataverse-previewers . Testing Your External Tool -------------------------- From 551194671f69b2bc9a400a1dc5e52926f0867c8e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 21 Nov 2023 14:26:44 -0500 Subject: [PATCH 251/252] tweak release note, add issue numbers #10093 #10045 --- doc/release-notes/10093-signedUrl_improvements.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/10093-signedUrl_improvements.md b/doc/release-notes/10093-signedUrl_improvements.md index 8f6ae89f981..26a17c65e3f 100644 --- a/doc/release-notes/10093-signedUrl_improvements.md +++ b/doc/release-notes/10093-signedUrl_improvements.md @@ -1,5 +1,5 @@ A new version of the standard Dataverse Previewers from https://github/com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. -SignedUrls can now be used with PrivateUrl access, i.e. 
allowing PrivateUrl users to view Previews when they are configured to use signedUrls +SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093. -Launching a Dataset-level Configuration tool will automatically generate an api key when needed. This is consistent with how other types of tools work. +Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045. From 8e69c6d8d7b995607d121a87561f9850ce387ae0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 28 Nov 2023 10:29:12 -0500 Subject: [PATCH 252/252] remove new `response()` method, use regular `ok()` instead #9466 --- .../edu/harvard/iq/dataverse/api/AbstractApiBean.java | 10 ---------- src/main/java/edu/harvard/iq/dataverse/api/Info.java | 10 +++++----- 2 files changed, 5 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 754ea95e427..027f9e0fcb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -636,16 +636,6 @@ protected Response response(DataverseRequestHandler hdl, User user) { } } - /*** - * The preferred way of handling a request from an open API. - * - * @param hdl handling code block. - * @return HTTP Response appropriate for the way {@code hdl} executed. 
- */ - protected Response response(DataverseRequestHandler hdl) { - return response(hdl, null); - } - private Response handleDataverseRequestHandlerException(Exception ex) { String incidentId = UUID.randomUUID().toString(); logger.log(Level.SEVERE, "API internal error " + incidentId +": " + ex.getMessage(), ex); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index 5478c3be0bd..40ce6cd25b7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -38,21 +38,21 @@ public Response getInfo() { String[] comps = versionStr.split("build",2); String version = comps[0].trim(); JsonValue build = comps.length > 1 ? Json.createArrayBuilder().add(comps[1].trim()).build().get(0) : JsonValue.NULL; - - return response( req -> ok( Json.createObjectBuilder().add("version", version) - .add("build", build))); + return ok(Json.createObjectBuilder() + .add("version", version) + .add("build", build)); } @GET @Path("server") public Response getServer() { - return response( req -> ok(JvmSettings.FQDN.lookup())); + return ok(JvmSettings.FQDN.lookup()); } @GET @Path("apiTermsOfUse") public Response getTermsOfUse() { - return response( req -> ok(systemConfig.getApiTermsOfUse())); + return ok(systemConfig.getApiTermsOfUse()); } @GET