diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst index 9fc58ef4e5a..c9e79098546 100755 --- a/doc/sphinx-guides/source/api/index.rst +++ b/doc/sphinx-guides/source/api/index.rst @@ -21,5 +21,6 @@ API Guide client-libraries external-tools curation-labels + linkeddatanotification apps faq diff --git a/doc/sphinx-guides/source/api/linkeddatanotification.rst b/doc/sphinx-guides/source/api/linkeddatanotification.rst new file mode 100644 index 00000000000..d55dc4da084 --- /dev/null +++ b/doc/sphinx-guides/source/api/linkeddatanotification.rst @@ -0,0 +1,65 @@ +Linked Data Notification API +============================ + +Dataverse has a limited, experimental API implementing a Linked Data Notification inbox allowing it to receive messages indicating a link between an external resource and a Dataverse dataset. +The motivating use case is one where Dataverse administrators may wish to create back-links to the remote resource (e.g. as a Related Publication, Related Material, etc.). + +Upon receipt of a relevant message, Dataverse will create Announcement Received notifications for superusers, who can edit the dataset involved. (In the motivating use case, these users may then add an appropriate relationship and use the Update Current Version publishing option to add it to the most recently published version of the dataset.) + +The ``:LDNMessageHosts`` setting is a comma-separated whitelist of hosts from which Dataverse will accept and process messages. By default, no hosts are allowed. ``*`` can be used in testing to indicate all hosts are allowed. + +Messages can be sent via POST, using the application/ld+json Content-Type: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + + curl -X POST -H 'Content-Type: application/ld+json' $SERVER_URL/api/inbox --upload-file message.jsonld + +The supported message format is described by `our preliminary specification `_. 
The format is expected to change in the near future to match the standard for relationship announcements being developed as part of `the COAR Notify Project `_. + +An example message is shown below. It indicates that a resource with the name "An Interesting Title" exists and "IsSupplementedBy" the dataset with DOI https://doi.org/10.5072/FK2/GGCCDL. If this dataset is managed in the receiving Dataverse, a notification will be sent to user with the relevant permissions (as described above). + +.. code:: json + + { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://purl.org/coar/notify" + ], + "id": "urn:uuid:94ecae35-dcfd-4182-8550-22c7164fe23f", + "actor": { + "id": "https://research-organisation.org/dspace", + "name": "DSpace Repository", + "type": "Service" + }, + "context": { + "IsSupplementedBy": + { + "id": "http://dev-hdc3b.lib.harvard.edu/dataset.xhtml?persistentId=doi:10.5072/FK2/GGCCDL", + "ietf:cite-as": "https://doi.org/10.5072/FK2/GGCCDL", + "type": "sorg:Dataset" + } + }, + "object": { + "id": "https://research-organisation.org/dspace/item/35759679-5df3-4633-b7e5-4cf24b4d0614", + "ietf:cite-as": "https://research-organisation.org/authority/resolve/35759679-5df3-4633-b7e5-4cf24b4d0614", + "sorg:name": "An Interesting Title", + "type": "sorg:ScholarlyArticle" + }, + "origin": { + "id": "https://research-organisation.org/dspace", + "inbox": "https://research-organisation.org/dspace/inbox/", + "type": "Service" + }, + "target": { + "id": "https://research-organisation.org/dataverse", + "inbox": "https://research-organisation.org/dataverse/inbox/", + "type": "Service" + }, + "type": [ + "Announce", + "coar-notify:ReleaseAction" + ] + } + diff --git a/doc/sphinx-guides/source/developers/workflows.rst b/doc/sphinx-guides/source/developers/workflows.rst index df63bf239fe..38ca6f4e141 100644 --- a/doc/sphinx-guides/source/developers/workflows.rst +++ b/doc/sphinx-guides/source/developers/workflows.rst @@ -201,3 +201,31 @@ Note - the example 
step includes two settings required for any archiver, three ( } } + +ldnannounce ++++++++++++ + +An experimental step that sends a Linked Data Notification (LDN) message to a specific LDN Inbox announcing the publication/availability of a dataset meeting certain criteria. + +The two parameters are: +* ``:LDNAnnounceRequiredFields`` - a list of metadata fields that must exist to trigger the message. Currently, the message also includes the values for these fields but future versions may only send the dataset's persistent identifier (making the receiver responsible for making a call-back to get any metadata). +* ``:LDNTarget`` - a JSON object containing an ``inbox`` key whose value is the URL of the target LDN inbox to which messages should be sent, e.g. ``{"id": "https://dashv7-dev.lib.harvard.edu","inbox": "https://dashv7-api-dev.lib.harvard.edu/server/ldn/inbox","type": "Service"}``. + +The supported message format is described by `our preliminary specification `_. The format is expected to change in the near future to match the standard for relationship announcements being developed as part of `the COAR Notify Project `_. + + +.. code:: json + + + { + "provider":":internal", + "stepType":"ldnannounce", + "parameters": { + "stepName":"LDN Announce" + }, + "requiredSettings": { + ":LDNAnnounceRequiredFields": "string", + ":LDNTarget": "string" + } + } + diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 3018f838ccb..22ef2af769c 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2904,3 +2904,19 @@ For configuration details, see :ref:`mute-notifications`. Overrides the default empty list of never muted notifications. Never muted notifications cannot be muted by the users. Always muted notifications are grayed out and are not adjustable by the user. For configuration details, see :ref:`mute-notifications`. 
+ +:LDNMessageHosts +++++++++++++++++ + +The comma-separated list of hosts allowed to send Dataverse Linked Data Notification messages. See :doc:`/api/linkeddatanotification` for details. ``*`` allows messages from anywhere (not recommended for production). By default, messages are not accepted from any host. + + +:LDNTarget ++++++++++++ + +The URL of an LDN Inbox to which the LDN Announce workflow step will send messages. See :doc:`/developers/workflows` for details. + +:LDNAnnounceRequiredFields +++++++++++++++++++++++++++ + +The list of parent dataset field names for which the LDN Announce workflow step should send messages. See :doc:`/developers/workflows` for details. diff --git a/doc/sphinx-guides/source/user/dataverse-management.rst b/doc/sphinx-guides/source/user/dataverse-management.rst index efe98e8327c..ed90497da8c 100755 --- a/doc/sphinx-guides/source/user/dataverse-management.rst +++ b/doc/sphinx-guides/source/user/dataverse-management.rst @@ -44,7 +44,7 @@ To edit your Dataverse collection, navigate to your Dataverse collection's landi - :ref:`Theme `: upload a logo for your Dataverse collection, add a link to your department or personal website, add a custom footer image, and select colors for your Dataverse collection in order to brand it - :ref:`Widgets `: get code to add to your website to have your Dataverse collection display on it - :ref:`Permissions `: give other users permissions to your Dataverse collection, i.e.-can edit datasets, and see which users already have which permissions for your Dataverse collection -- :ref:`Dataset Templates `: these are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in +- :ref:`Dataset Templates `: these are useful when you want to provide custom instructions on how to fill out fields or have several datasets that have the same information in multiple metadata fields that you would prefer not to have to 
keep manually typing in - :ref:`Dataset Guestbooks `: allows you to collect data about who is downloading the files from your datasets - :ref:`Featured Dataverse collections `: if you have one or more Dataverse collection, you can use this option to show them at the top of your Dataverse collection page to help others easily find interesting or important Dataverse collections - **Delete Dataverse**: you are able to delete your Dataverse collection as long as it is not published and does not have any draft datasets @@ -52,7 +52,7 @@ To edit your Dataverse collection, navigate to your Dataverse collection's landi .. _general-information: General Information ---------------------- +------------------- The General Information page is how you edit the information you filled in while creating your Dataverse collection. If you need to change or add a contact email address, this is the place to do it. Additionally, you can update the metadata elements used for datasets within the Dataverse collection, change which metadata fields are hidden, required, or optional, and update the facets you would like displayed for browsing the Dataverse collection. If you plan on using templates, you need to select the metadata fields on the General Information page. @@ -60,8 +60,8 @@ Tip: The metadata fields you select as required will appear on the Create Datase .. _theme: -Theme ---------- +Theme +----- The Theme features provides you with a way to customize the look of your Dataverse collection. You can: @@ -77,7 +77,7 @@ Supported image types for logo images and footer images are JPEG, TIFF, or PNG a .. _dataverse-widgets: Widgets --------------- +------- The Widgets feature provides you with code for you to put on your personal website to have your Dataverse collection displayed there. There are two types of Widgets for a Dataverse collection, a Dataverse collection Search Box widget and a Dataverse collection Listing widget. 
Once a Dataverse collection has been published, from the Widgets tab on the Dataverse collection's Theme + Widgets page, it is possible to copy the code snippets for the widget(s) you would like to add to your website. If you need to adjust the height of the widget on your website, you may do so by editing the `heightPx=500` parameter in the code snippet. @@ -94,7 +94,7 @@ The Dataverse Collection Listing Widget provides a listing of all your Dataverse .. _openscholar-dataverse-level: Adding Widgets to an OpenScholar Website -****************************************** +**************************************** #. Log in to your OpenScholar website #. Either build a new page or navigate to the page you would like to use to show the Dataverse collection widgets. #. Click on the Settings Cog and select Layout @@ -102,8 +102,8 @@ Adding Widgets to an OpenScholar Website .. _dataverse-permissions: -Roles & Permissions ---------------------- +Roles & Permissions +------------------- Dataverse installation user accounts can be granted roles that define which actions they are allowed to take on specific Dataverse collections, datasets, and/or files. Each role comes with a set of permissions, which define the specific actions that users may take. Roles and permissions may also be granted to groups. Groups can be defined as a collection of Dataverse installation user accounts, a collection of IP addresses (e.g. all users of a library's computers), or a collection of all users who log in using a particular institutional login (e.g. everyone who logs in with a particular university's account credentials). @@ -127,7 +127,7 @@ When you access a Dataverse collection's permissions page, you will see three se Please note that even on a newly created Dataverse collection, you may see user and groups have already been granted role(s) if your installation has ``:InheritParentRoleAssignments`` set. 
For more on this setting, see the :doc:`/installation/config` section of the Installation Guide. Setting Access Configurations -******************************* +***************************** Under the Permissions tab, you can click the "Edit Access" button to open a box where you can add to your Dataverse collection and what permissions are granted to those who add to your Dataverse collection. @@ -140,7 +140,7 @@ The second question on this page allows you to choose the role (and thus the per Both of these settings can be changed at any time. Assigning Roles to Users and Groups -************************************* +*********************************** Under the Users/Groups tab, you can add, edit, or remove the roles granted to users and groups on your Dataverse collection. A role is a set of permissions granted to a user or group when they're using your Dataverse collection. For example, giving your research assistant the "Contributor" role would give them the following self-explanatory permissions on your Dataverse collection and all datasets within your Dataverse collection: "ViewUnpublishedDataset", "DownloadFile", "EditDataset", and "DeleteDatasetDraft". They would, however, lack the "PublishDataset" permission, and thus would be unable to publish datasets on your Dataverse collection. If you wanted to give them that permission, you would give them a role with that permission, like the Curator role. Users and groups can hold multiple roles at the same time if needed. Roles can be removed at any time. All roles and their associated permissions are listed under the "Roles" tab of the same page. @@ -155,15 +155,16 @@ Note: If you need to assign a role to ALL user accounts in a Dataverse installat .. 
_dataset-templates: Dataset Templates -------------------- +----------------- -Templates are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in, or if you want to use a custom set of Terms of Use and Access for multiple datasets in a Dataverse collection. In Dataverse Software 4.0+, templates are created at the Dataverse collection level, can be deleted (so it does not show for future datasets), set to default (not required), or can be copied so you do not have to start over when creating a new template with similar metadata from another template. When a template is deleted, it does not impact the datasets that have used the template already. +Templates are useful when you want to provide custom instructions on how to fill out a field, have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in, or if you want to use a custom set of Terms of Use and Access for multiple datasets in a Dataverse collection. In Dataverse Software 4.0+, templates are created at the Dataverse collection level, can be deleted (so it does not show for future datasets), set to default (not required), or can be copied so you do not have to start over when creating a new template with similar metadata from another template. When a template is deleted, it does not impact the datasets that have used the template already. How do you create a template? #. Navigate to your Dataverse collection, click on the Edit Dataverse button and select Dataset Templates. #. Once you have clicked on Dataset Templates, you will be brought to the Dataset Templates page. On this page, you can 1) decide to use the dataset templates from your parent Dataverse collection 2) create a new dataset template or 3) do both. #. Click on the Create Dataset Template to get started. 
You will see that the template is the same as the create dataset page with an additional field at the top of the page to add a name for the template. +#. To add custom instructions, click on ''(None - click to add)'' and enter the instructions you wish users to see. If you wish to edit existing instructions, click on them to make the text editable. #. After adding information into the metadata fields you have information for and clicking Save and Add Terms, you will be brought to the page where you can add custom Terms of Use and Access. If you do not need custom Terms of Use and Access, click the Save Dataset Template, and only the metadata fields will be saved. #. After clicking Save Dataset Template, you will be brought back to the Manage Dataset Templates page and should see your template listed there now with the make default, edit, view, or delete options. #. A Dataverse collection does not have to have a default template and users can select which template they would like to use while on the Create Dataset page. @@ -174,7 +175,7 @@ How do you create a template? .. _dataset-guestbooks: Dataset Guestbooks ------------------------------ +------------------ Guestbooks allow you to collect data about who is downloading the files from your datasets. You can decide to collect account information (username, given name & last name, affiliation, etc.) as well as create custom questions (e.g., What do you plan to use this data for?). You are also able to download the data collected from the enabled guestbooks as CSV files to store and use outside of the Dataverse installation. @@ -227,7 +228,7 @@ Similarly to dataset linking, Dataverse collection linking allows a Dataverse co If you need to have a Dataverse collection linked to your Dataverse collection, please contact the support team for the Dataverse installation you are using. 
Publish Your Dataverse Collection -================================================================= +================================= Once your Dataverse collection is ready to go public, go to your Dataverse collection page, click on the "Publish" button on the right hand side of the page. A pop-up will appear to confirm that you are ready to actually Publish, since once a Dataverse collection diff --git a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar b/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar new file mode 100644 index 00000000000..ee499ae4b76 Binary files /dev/null and b/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar differ diff --git a/pom.xml b/pom.xml index e5f7ab836dc..7f8b3ffa258 100644 --- a/pom.xml +++ b/pom.xml @@ -112,7 +112,7 @@ com.apicatalog titanium-json-ld - 0.8.6 + 1.3.0-SNAPSHOT com.google.code.gson diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index 94aa509334f..29d121aae16 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -111,6 +111,7 @@ publicationIDType upc 14 publicationIDType url 15 publicationIDType urn 16 + publicationIDType DASH-NRS 17 contributorType Data Collector 0 contributorType Data Curator 1 contributorType Data Manager 2 diff --git a/scripts/api/data/workflows/internal-ldnannounce-workflow.json b/scripts/api/data/workflows/internal-ldnannounce-workflow.json new file mode 100644 index 00000000000..9cf058b68a1 --- /dev/null +++ b/scripts/api/data/workflows/internal-ldnannounce-workflow.json @@ -0,0 +1,16 @@ +{ + "name": "LDN Announce workflow", + "steps": [ + { + "provider":":internal", + "stepType":"ldnannounce", + "parameters": { + "stepName":"LDN Announce" + }, + "requiredSettings": { + ":LDNAnnounceRequiredFields": "string", + ":LDNTarget": "string" + } + } + ] +} 
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index c60ea7020bd..a4f82d41bac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -33,8 +33,8 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; -import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.SystemConfig; /** * @@ -152,6 +152,19 @@ public void setCitationDateDatasetFieldType(DatasetFieldType citationDateDataset this.citationDateDatasetFieldType = citationDateDatasetFieldType; } + + @ManyToOne + @JoinColumn(name="template_id",nullable = true) + private Template template; + + public Template getTemplate() { + return template; + } + + public void setTemplate(Template template) { + this.template = template; + } + public Dataset() { DatasetVersion datasetVersion = new DatasetVersion(); datasetVersion.setDataset(this); @@ -743,6 +756,11 @@ public void setHarvestIdentifier(String harvestIdentifier) { this.harvestIdentifier = harvestIdentifier; } + public String getLocalURL() { + //Assumes GlobalId != null + return SystemConfig.getDataverseSiteUrlStatic() + "/dataset.xhtml?persistentId=" + this.getGlobalId().asString(); + } + public String getRemoteArchiveURL() { if (isHarvested()) { if (HarvestingClient.HARVEST_STYLE_DATAVERSE.equals(this.getHarvestedFrom().getHarvestStyle())) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 2236bdc24ba..d80aa4ab141 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -1768,6 +1768,7 @@ public void handleChangeButton() { workingVersion.initDefaultValues(licenseServiceBean.getDefault()); updateDatasetFieldInputLevels(); } + 
dataset.setTemplate(selectedTemplate); /* Issue 8646: necessary for the access popup which is shared by the dataset page and the file page */ @@ -2055,6 +2056,8 @@ private String init(boolean initFull) { selectedTemplate = testT; } } + //Initalize with the default if there is one + dataset.setTemplate(selectedTemplate); workingVersion = dataset.getEditVersion(selectedTemplate, null); updateDatasetFieldInputLevels(); } else { @@ -3572,6 +3575,7 @@ public String save() { if (editMode == EditMode.CREATE) { //Lock the metadataLanguage once created dataset.setMetadataLanguage(getEffectiveMetadataLanguage()); + //ToDo - could drop use of selectedTemplate and just use the persistent dataset.getTemplate() if ( selectedTemplate != null ) { if ( isSessionUserAuthenticated() ) { cmd = new CreateNewDatasetCommand(dataset, dvRequestService.getDataverseRequest(), false, selectedTemplate); diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index f39fb8b0a32..fa5216140c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -16,6 +16,8 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.MailUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.io.UnsupportedEncodingException; import java.text.MessageFormat; import java.util.ArrayList; @@ -169,7 +171,7 @@ public boolean sendSystemEmail(String to, String subject, String messageText, bo return sent; } - private InternetAddress getSystemAddress() { + public InternetAddress getSystemAddress() { String systemEmail = settingsService.getValueForKey(Key.SystemEmail); return MailUtil.parseSystemAddress(systemEmail); } @@ -608,6 +610,26 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio )); return ingestedCompletedWithErrorsMessage; + 
case DATASETMENTIONED: + String additionalInfo = userNotification.getAdditionalInfo(); + dataset = (Dataset) targetObject; + javax.json.JsonObject citingResource = null; + citingResource = JsonUtil.getJsonObject(additionalInfo); + + + pattern = BundleUtil.getStringFromBundle("notification.email.datasetWasMentioned"); + Object[] paramArrayDatasetMentioned = { + userNotification.getUser().getName(), + BrandingUtil.getInstallationBrandName(), + citingResource.getString("@type"), + citingResource.getString("@id"), + citingResource.getString("name"), + citingResource.getString("relationship"), + systemConfig.getDataverseSiteUrl(), + dataset.getGlobalId().toString(), + dataset.getDisplayName()}; + messageText = MessageFormat.format(pattern, paramArrayDatasetMentioned); + return messageText; } return ""; @@ -632,6 +654,7 @@ public Object getObjectOfNotification (UserNotification userNotification){ case GRANTFILEACCESS: case REJECTFILEACCESS: case DATASETCREATED: + case DATASETMENTIONED: return datasetService.find(userNotification.getObjectId()); case CREATEDS: case SUBMITTEDDS: diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index 9bf155740af..8370030e44b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -13,6 +13,7 @@ import edu.harvard.iq.dataverse.util.MailUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.UserNotification.Type; import java.time.LocalDate; @@ -646,5 +647,4 @@ public boolean isCustomLicenseAllowed() { } return customLicenseAllowed; } -} - +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java index 4a2e8272c7d..61f0a78656f 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/Template.java +++ b/src/main/java/edu/harvard/iq/dataverse/Template.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse; import java.io.Serializable; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -10,6 +9,11 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; + +import javax.json.Json; +import javax.json.JsonObjectBuilder; +import javax.json.JsonString; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; @@ -28,6 +32,8 @@ import javax.validation.constraints.Size; import edu.harvard.iq.dataverse.util.DateUtil; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; import org.hibernate.validator.constraints.NotBlank; @@ -125,7 +131,13 @@ public void setTermsOfUseAndAccess(TermsOfUseAndAccess termsOfUseAndAccess) { public List getDatasetFields() { return datasetFields; } + + @Column(columnDefinition="TEXT", nullable = true ) + private String instructions; + @Transient + private Map instructionsMap = null; + @Transient private Map> metadataBlocksForView = new HashMap<>(); @Transient @@ -256,26 +268,31 @@ public void setMetadataValueBlocks() { metadataBlocksForView.clear(); metadataBlocksForEdit.clear(); List filledInFields = this.getDatasetFields(); + + Map instructionsMap = getInstructionsMap(); List viewMDB = new ArrayList<>(); - List editMDB=this.getDataverse().getMetadataBlocks(true); + List editMDB=this.getDataverse().getMetadataBlocks(false); + //The metadatablocks in this template include any from the Dataverse it is associated with + //plus any others where the template has a displayable field (i.e. 
from before a block was dropped in the dataverse/collection) viewMDB.addAll(this.getDataverse().getMetadataBlocks(true)); - for (DatasetField dsfv : filledInFields) { - if (!dsfv.isEmptyForDisplay()) { - MetadataBlock mdbTest = dsfv.getDatasetFieldType().getMetadataBlock(); + for (DatasetField dsf : filledInFields) { + if (!dsf.isEmptyForDisplay()) { + MetadataBlock mdbTest = dsf.getDatasetFieldType().getMetadataBlock(); if (!viewMDB.contains(mdbTest)) { viewMDB.add(mdbTest); } } - } - + } + for (MetadataBlock mdb : viewMDB) { List datasetFieldsForView = new ArrayList<>(); for (DatasetField dsf : this.getDatasetFields()) { if (dsf.getDatasetFieldType().getMetadataBlock().equals(mdb)) { - if (!dsf.isEmpty()) { + //For viewing, show the field if it has a value or custom instructions + if (!dsf.isEmpty() || instructionsMap.containsKey(dsf.getDatasetFieldType().getName())) { datasetFieldsForView.add(dsf); } } @@ -344,6 +361,9 @@ public Template cloneNewTemplate(Template source) { } terms.setTemplate(newTemplate); newTemplate.setTermsOfUseAndAccess(terms); + + newTemplate.getInstructionsMap().putAll(source.getInstructionsMap()); + newTemplate.updateInstructions(); return newTemplate; } @@ -383,6 +403,45 @@ private List getFlatDatasetFields(List dsfList) { return retList; } + //Cache values in map for reading + public Map getInstructionsMap() { + if(instructionsMap==null) + if(instructions != null) { + instructionsMap = JsonUtil.getJsonObject(instructions).entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey(),entry -> ((JsonString)entry.getValue()).getString())); + } else { + instructionsMap = new HashMap(); + } + return instructionsMap; + } + + //Get the cutstom instructions defined for a give fieldType + public String getInstructionsFor(String fieldType) { + return getInstructionsMap().get(fieldType); + } + + /* + //Add/change or remove (null instructionString) instructions for a given fieldType + public void setInstructionsFor(String fieldType, String 
instructionString) { + if(instructionString==null) { + getInstructionsMap().remove(fieldType); + } else { + getInstructionsMap().put(fieldType, instructionString); + } + updateInstructions(); + } + */ + + //Keep instructions up-to-date on any change + public void updateInstructions() { + JsonObjectBuilder builder = Json.createObjectBuilder(); + getInstructionsMap().forEach((key, value) -> { + if (value != null) + builder.add(key, value); + }); + instructions = JsonUtil.prettyPrint(builder.build()); + } + + @Override public int hashCode() { int hash = 0; diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java index 5f574c07d17..6da0d99da20 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java @@ -185,6 +185,8 @@ public String save(String redirectPage) { DatasetFieldUtil.tidyUpFields( template.getDatasetFields(), false ); + template.updateInstructions(); + if (editMode == EditMode.CREATE) { template.setCreateTime(new Timestamp(new Date().getTime())); template.setUsageCount(new Long(0)); @@ -247,5 +249,11 @@ public String deleteTemplate(Long templateId) { } return "/manage-templates.xhtml?dataverseId=" + dataverse.getId() + "&faces-redirect=true"; } + + //Get the cutstom instructions defined for a give fieldType + public String getInstructionsLabelFor(String fieldType) { + String fieldInstructions = template.getInstructionsMap().get(fieldType); + return (fieldInstructions!=null && !fieldInstructions.isBlank()) ? 
fieldInstructions : BundleUtil.getStringFromBundle("template.instructions.empty.label"); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index 5714a879527..2a3c63c1c31 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -37,7 +37,7 @@ public enum Type { ASSIGNROLE, REVOKEROLE, CREATEDV, CREATEDS, CREATEACC, SUBMITTEDDS, RETURNEDDS, PUBLISHEDDS, REQUESTFILEACCESS, GRANTFILEACCESS, REJECTFILEACCESS, FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, - PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, STATUSUPDATED, DATASETCREATED; + PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, STATUSUPDATED, DATASETCREATED, DATASETMENTIONED; public String getDescription() { return BundleUtil.getStringFromBundle("notification.typeDescription." 
+ this.name()); @@ -88,6 +88,8 @@ public static String toStringValue(Set typesSet) { @Column( nullable = false ) private Type type; private Long objectId; + + private String additionalInfo; @Transient private boolean displayAsRead; @@ -196,4 +198,12 @@ public void setRoleString(String roleString) { public String getLocaleSendDate() { return DateUtil.formatDate(sendDate); } + + public String getAdditionalInfo() { + return additionalInfo; + } + + public void setAdditionalInfo(String additionalInfo) { + this.additionalInfo = additionalInfo; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java index 6792a7bedc7..947ee3ce989 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java @@ -110,12 +110,16 @@ public void sendNotification(AuthenticatedUser dataverseUser, Timestamp sendDate } public void sendNotification(AuthenticatedUser dataverseUser, Timestamp sendDate, Type type, Long objectId, String comment, AuthenticatedUser requestor, boolean isHtmlContent) { + sendNotification(dataverseUser, sendDate, type, objectId, comment, requestor, isHtmlContent, null); + } + public void sendNotification(AuthenticatedUser dataverseUser, Timestamp sendDate, Type type, Long objectId, String comment, AuthenticatedUser requestor, boolean isHtmlContent, String additionalInfo) { UserNotification userNotification = new UserNotification(); userNotification.setUser(dataverseUser); userNotification.setSendDate(sendDate); userNotification.setType(type); userNotification.setObjectId(objectId); userNotification.setRequestor(requestor); + userNotification.setAdditionalInfo(additionalInfo); if (!isEmailMuted(userNotification) && mailService.sendNotificationEmail(userNotification, comment, requestor, isHtmlContent)) { logger.fine("email was sent"); diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7941dfd70c8..b65d0a254fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1592,8 +1592,7 @@ public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versio User user = session.getUser(); String persistentId; try { - if (getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers) - .getTermsOfUseAndAccess().getLicense() != null) { + if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) { return error(Status.NOT_FOUND, "This Dataset has no custom license"); } persistentId = getRequestParameter(":persistentId".substring(1)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java new file mode 100644 index 00000000000..3912b9102e2 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java @@ -0,0 +1,195 @@ +package edu.harvard.iq.dataverse.api; + +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DataverseRoleServiceBean; +import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.MailServiceBean; +import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; +import edu.harvard.iq.dataverse.RoleAssignment; +import edu.harvard.iq.dataverse.UserNotification; +import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.json.JSONLDUtil; 
+import edu.harvard.iq.dataverse.util.json.JsonLDNamespace; +import edu.harvard.iq.dataverse.util.json.JsonLDTerm; + +import java.util.Date; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.io.StringWriter; +import java.sql.Timestamp; +import java.util.logging.Logger; + +import javax.ejb.EJB; +import javax.json.Json; +import javax.json.JsonObject; +import javax.json.JsonValue; +import javax.json.JsonWriter; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.BadRequestException; +import javax.ws.rs.ServiceUnavailableException; +import javax.ws.rs.Consumes; +import javax.ws.rs.ForbiddenException; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.Response; + +@Path("inbox") +public class LDNInbox extends AbstractApiBean { + + private static final Logger logger = Logger.getLogger(LDNInbox.class.getName()); + + @EJB + SettingsServiceBean settingsService; + + @EJB + DatasetServiceBean datasetService; + + @EJB + MailServiceBean mailService; + + @EJB + UserNotificationServiceBean userNotificationService; + + @EJB + DataverseRoleServiceBean roleService; + + @EJB + RoleAssigneeServiceBean roleAssigneeService; + @Context + protected HttpServletRequest httpRequest; + + @POST + @Path("/") + @Consumes("application/ld+json, application/json-ld") + public Response acceptMessage(String body) { + IpAddress origin = new DataverseRequest(null, httpRequest).getSourceAddress(); + String whitelist = settingsService.get(SettingsServiceBean.Key.LDNMessageHosts.toString(), ""); + // Only do something if we listen to this host + if (whitelist.equals("*") || whitelist.contains(origin.toString())) { + String citingPID = null; + String citingType = null; + boolean sent = false; + + JsonObject jsonld = null; + jsonld = JSONLDUtil.decontextualizeJsonLD(body); + if (jsonld == null) { + // Kludge - something about the coar notify URL causes a + // LOADING_REMOTE_CONTEXT_FAILED error in 
the titanium library - so replace it + // and try with a local copy + body = body.replace("\"https://purl.org/coar/notify\"", + "{\n" + " \"@vocab\": \"http://purl.org/coar/notify_vocabulary/\",\n" + + " \"ietf\": \"http://www.iana.org/assignments/relation/\",\n" + + " \"coar-notify\": \"http://purl.org/coar/notify_vocabulary/\",\n" + + " \"sorg\": \"http://schema.org/\",\n" + + " \"ReviewAction\": \"coar-notify:ReviewAction\",\n" + + " \"EndorsementAction\": \"coar-notify:EndorsementAction\",\n" + + " \"IngestAction\": \"coar-notify:IngestAction\",\n" + + " \"ietf:cite-as\": {\n" + " \"@type\": \"@id\"\n" + + " }}"); + jsonld = JSONLDUtil.decontextualizeJsonLD(body); + } + if (jsonld == null) { + throw new BadRequestException("Could not parse message to find acceptable citation link to a dataset."); + } + String relationship = "isRelatedTo"; + String name = null; + JsonLDNamespace activityStreams = JsonLDNamespace.defineNamespace("as", + "https://www.w3.org/ns/activitystreams#"); + JsonLDNamespace ietf = JsonLDNamespace.defineNamespace("ietf", "http://www.iana.org/assignments/relation/"); + String objectKey = new JsonLDTerm(activityStreams, "object").getUrl(); + if (jsonld.containsKey(objectKey)) { + JsonObject msgObject = jsonld.getJsonObject(objectKey); + + citingPID = msgObject.getJsonObject(new JsonLDTerm(ietf, "cite-as").getUrl()).getString("@id"); + logger.fine("Citing PID: " + citingPID); + if (msgObject.containsKey("@type")) { + citingType = msgObject.getString("@type"); + if (citingType.startsWith(JsonLDNamespace.schema.getUrl())) { + citingType = citingType.replace(JsonLDNamespace.schema.getUrl(), ""); + } + if (msgObject.containsKey(JsonLDTerm.schemaOrg("name").getUrl())) { + name = msgObject.getString(JsonLDTerm.schemaOrg("name").getUrl()); + } + logger.fine("Citing Type: " + citingType); + String contextKey = new JsonLDTerm(activityStreams, "context").getUrl(); + + if (jsonld.containsKey(contextKey)) { + JsonObject context = 
jsonld.getJsonObject(contextKey); + for (Map.Entry entry : context.entrySet()) { + + relationship = entry.getKey().replace("_:", ""); + // Assuming only one for now - should check for array and loop + JsonObject citedResource = (JsonObject) entry.getValue(); + String pid = citedResource.getJsonObject(new JsonLDTerm(ietf, "cite-as").getUrl()) + .getString("@id"); + if (citedResource.getString("@type").equals(JsonLDTerm.schemaOrg("Dataset").getUrl())) { + logger.fine("Raw PID: " + pid); + if (pid.startsWith(GlobalId.DOI_RESOLVER_URL)) { + pid = pid.replace(GlobalId.DOI_RESOLVER_URL, GlobalId.DOI_PROTOCOL + ":"); + } else if (pid.startsWith(GlobalId.HDL_RESOLVER_URL)) { + pid = pid.replace(GlobalId.HDL_RESOLVER_URL, GlobalId.HDL_PROTOCOL + ":"); + } + logger.fine("Protocol PID: " + pid); + Optional id = GlobalId.parse(pid); + Dataset dataset = datasetSvc.findByGlobalId(pid); + if (dataset != null) { + JsonObject citingResource = Json.createObjectBuilder().add("@id", citingPID) + .add("@type", citingType).add("relationship", relationship) + .add("name", name).build(); + StringWriter sw = new StringWriter(128); + try (JsonWriter jw = Json.createWriter(sw)) { + jw.write(citingResource); + } + String jsonstring = sw.toString(); + Set ras = roleService.rolesAssignments(dataset); + + roleService.rolesAssignments(dataset).stream() + .filter(ra -> ra.getRole().permissions() + .contains(Permission.PublishDataset)) + .flatMap( + ra -> roleAssigneeService + .getExplicitUsers(roleAssigneeService + .getRoleAssignee(ra.getAssigneeIdentifier())) + .stream()) + .distinct() // prevent double-send + .forEach(au -> { + + if (au.isSuperuser()) { + userNotificationService.sendNotification(au, + new Timestamp(new Date().getTime()), + UserNotification.Type.DATASETMENTIONED, dataset.getId(), + null, null, true, jsonstring); + + } + }); + sent = true; + } + } + } + } + } + } + + if (!sent) { + if (citingPID == null || citingType == null) { + throw new BadRequestException( + "Could not parse 
message to find acceptable citation link to a dataset."); + } else { + throw new ServiceUnavailableException( + "Unable to process message. Please contact the administrators."); + } + } + } else { + logger.info("Ignoring message from IP address: " + origin.toString()); + throw new ForbiddenException("Inbox does not acept messages from this address"); + } + return ok("Message Received"); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java index 96df3ab400a..2e093dbcf36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -163,7 +164,7 @@ public void setDatasetLicenseAndTermsOfUse(DatasetVersion datasetVersionToMutate terms.setDatasetVersion(datasetVersionToMutate); if (listOfLicensesProvided == null) { - License existingLicense = datasetVersionToMutate.getTermsOfUseAndAccess().getLicense(); + License existingLicense = DatasetUtil.getLicense(datasetVersionToMutate); if (existingLicense != null) { // leave the license alone but set terms of use setTermsOfUse(datasetVersionToMutate, dcterms, existingLicense); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java index 142420bc7d9..4df567815d5 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java @@ -496,6 +496,7 @@ public void displayNotification() { case GRANTFILEACCESS: case REJECTFILEACCESS: case DATASETCREATED: + case DATASETMENTIONED: userNotification.setTheObject(datasetService.find(userNotification.getObjectId())); break; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index ccf947b8868..b45d958e918 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; @@ -538,14 +539,23 @@ public static boolean validateDatasetMetadataExternally(Dataset ds, String execu } + public static License getLicense(DatasetVersion dsv) { + License license = null; + TermsOfUseAndAccess tua = dsv.getTermsOfUseAndAccess(); + if(tua!=null) { + license = tua.getLicense(); + } + return license; + } + public static String getLicenseName(DatasetVersion dsv) { - License license = dsv.getTermsOfUseAndAccess().getLicense(); + License license = DatasetUtil.getLicense(dsv); return license != null ? 
license.getName() : BundleUtil.getStringFromBundle("license.custom"); } public static String getLicenseURI(DatasetVersion dsv) { - License license = dsv.getTermsOfUseAndAccess().getLicense(); + License license = DatasetUtil.getLicense(dsv); // Return the URI // For standard licenses, just return the stored URI return (license != null) ? license.getUri().toString() @@ -560,12 +570,12 @@ public static String getLicenseURI(DatasetVersion dsv) { } public static String getLicenseIcon(DatasetVersion dsv) { - License license = dsv.getTermsOfUseAndAccess().getLicense(); + License license = DatasetUtil.getLicense(dsv); return license != null && license.getIconUrl() != null ? license.getIconUrl().toString() : null; } public static String getLicenseDescription(DatasetVersion dsv) { - License license = dsv.getTermsOfUseAndAccess().getLicense(); + License license = DatasetUtil.getLicense(dsv); return license != null ? license.getShortDescription() : BundleUtil.getStringFromBundle("license.custom.description"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 12ae777f3f8..e2be7050cae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -522,7 +522,11 @@ Whether Harvesting (OAI) service is enabled * would also work) of never muted notifications that cannot be turned off by the users. AlwaysMuted setting overrides * Nevermuted setting warning is logged. 
*/ - NeverMuted + NeverMuted, + /** + * LDN Inbox Allowed Hosts - a comma separated list of IP addresses allowed to submit messages to the inbox + */ + LDNMessageHosts ; @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 67c1c464cb9..d76aa2fbd91 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -32,6 +32,7 @@ import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; +import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.ingest.IngestReport; @@ -1534,7 +1535,7 @@ private static Boolean popupDueToStateOrTerms(DatasetVersion datasetVersion) { } // 1. 
License and Terms of Use: if (datasetVersion.getTermsOfUseAndAccess() != null) { - License license = datasetVersion.getTermsOfUseAndAccess().getLicense(); + License license = DatasetUtil.getLicense(datasetVersion); if ((license == null && StringUtils.isNotBlank(datasetVersion.getTermsOfUseAndAccess().getTermsOfUse())) || (license != null && !license.isDefault())) { logger.fine("Popup required because of license or terms of use."); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index 55c6f4d83d6..03ab6da1d31 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -86,8 +86,10 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti return BundleUtil.getStringFromBundle("notification.email.ingestCompleted.subject", rootDvNameAsList); case INGESTCOMPLETEDWITHERRORS: return BundleUtil.getStringFromBundle("notification.email.ingestCompletedWithErrors.subject", rootDvNameAsList); + case DATASETMENTIONED: + return BundleUtil.getStringFromBundle("notification.email.datasetWasMentioned.subject", rootDvNameAsList); } return ""; } -} +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index a295f264d66..4b31e5cf0a4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -97,82 +97,11 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except for (DatasetField field : fields) { if (!field.isEmpty()) { DatasetFieldType dfType = field.getDatasetFieldType(); - if (excludeEmail && DatasetFieldType.FieldType.EMAIL.equals(dfType.getFieldType())) { - continue; - } JsonLDTerm fieldName = dfType.getJsonLDTerm(); - if (fieldName.inNamespace()) { - 
localContext.putIfAbsent(fieldName.getNamespace().getPrefix(), fieldName.getNamespace().getUrl()); - } else { - localContext.putIfAbsent(fieldName.getLabel(), fieldName.getUrl()); + JsonValue jv = getJsonLDForField(field, excludeEmail, cvocMap, localContext); + if(jv!=null) { + aggBuilder.add(fieldName.getLabel(), jv); } - JsonArrayBuilder vals = Json.createArrayBuilder(); - if (!dfType.isCompound()) { - for (String val : field.getValues_nondisplay()) { - if (cvocMap.containsKey(dfType.getId())) { - try { - JsonObject cvocEntry = cvocMap.get(dfType.getId()); - if (cvocEntry.containsKey("retrieval-filtering")) { - JsonObject filtering = cvocEntry.getJsonObject("retrieval-filtering"); - JsonObject context = filtering.getJsonObject("@context"); - for (String prefix : context.keySet()) { - localContext.putIfAbsent(prefix, context.getString(prefix)); - } - vals.add(datasetFieldService.getExternalVocabularyValue(val)); - } else { - vals.add(val); - } - } catch(Exception e) { - logger.warning("Couldn't interpret value for : " + val + " : " + e.getMessage()); - logger.log(Level.FINE, ExceptionUtils.getStackTrace(e)); - vals.add(val); - } - } else { - vals.add(val); - } - } - } else { - // ToDo: Needs to be recursive (as in JsonPrinter?) - for (DatasetFieldCompoundValue dscv : field.getDatasetFieldCompoundValues()) { - // compound values are of different types - JsonObjectBuilder child = Json.createObjectBuilder(); - - for (DatasetField dsf : dscv.getChildDatasetFields()) { - DatasetFieldType dsft = dsf.getDatasetFieldType(); - if (excludeEmail && DatasetFieldType.FieldType.EMAIL.equals(dsft.getFieldType())) { - continue; - } - // which may have multiple values - if (!dsf.isEmpty()) { - // Add context entry - // ToDo - also needs to recurse here? 
- JsonLDTerm subFieldName = dsft.getJsonLDTerm(); - if (subFieldName.inNamespace()) { - localContext.putIfAbsent(subFieldName.getNamespace().getPrefix(), - subFieldName.getNamespace().getUrl()); - } else { - localContext.putIfAbsent(subFieldName.getLabel(), subFieldName.getUrl()); - } - - List values = dsf.getValues_nondisplay(); - if (values.size() > 1) { - JsonArrayBuilder childVals = Json.createArrayBuilder(); - - for (String val : dsf.getValues_nondisplay()) { - childVals.add(val); - } - child.add(subFieldName.getLabel(), childVals); - } else { - child.add(subFieldName.getLabel(), values.get(0)); - } - } - } - vals.add(child); - } - } - // Add metadata value to aggregation, suppress array when only one value - JsonArray valArray = vals.build(); - aggBuilder.add(fieldName.getLabel(), (valArray.size() != 1) ? valArray : valArray.get(0)); } } // Add metadata related to the Dataset/DatasetVersion @@ -403,6 +332,89 @@ private JsonLDTerm getTermFor(String fieldTypeName) { } return null; } + + public static JsonValue getJsonLDForField(DatasetField field, Boolean excludeEmail, Map cvocMap, + Map localContext) { + + DatasetFieldType dfType = field.getDatasetFieldType(); + if (excludeEmail && DatasetFieldType.FieldType.EMAIL.equals(dfType.getFieldType())) { + return null; + } + + JsonLDTerm fieldName = dfType.getJsonLDTerm(); + if (fieldName.inNamespace()) { + localContext.putIfAbsent(fieldName.getNamespace().getPrefix(), fieldName.getNamespace().getUrl()); + } else { + localContext.putIfAbsent(fieldName.getLabel(), fieldName.getUrl()); + } + JsonArrayBuilder vals = Json.createArrayBuilder(); + if (!dfType.isCompound()) { + for (String val : field.getValues_nondisplay()) { + if (cvocMap.containsKey(dfType.getId())) { + try { + JsonObject cvocEntry = cvocMap.get(dfType.getId()); + if (cvocEntry.containsKey("retrieval-filtering")) { + JsonObject filtering = cvocEntry.getJsonObject("retrieval-filtering"); + JsonObject context = filtering.getJsonObject("@context"); + for 
(String prefix : context.keySet()) { + localContext.putIfAbsent(prefix, context.getString(prefix)); + } + vals.add(datasetFieldService.getExternalVocabularyValue(val)); + } else { + vals.add(val); + } + } catch (Exception e) { + logger.warning("Couldn't interpret value for : " + val + " : " + e.getMessage()); + logger.log(Level.FINE, ExceptionUtils.getStackTrace(e)); + vals.add(val); + } + } else { + vals.add(val); + } + } + } else { + // ToDo: Needs to be recursive (as in JsonPrinter?) + for (DatasetFieldCompoundValue dscv : field.getDatasetFieldCompoundValues()) { + // compound values are of different types + JsonObjectBuilder child = Json.createObjectBuilder(); + + for (DatasetField dsf : dscv.getChildDatasetFields()) { + DatasetFieldType dsft = dsf.getDatasetFieldType(); + if (excludeEmail && DatasetFieldType.FieldType.EMAIL.equals(dsft.getFieldType())) { + continue; + } + // which may have multiple values + if (!dsf.isEmpty()) { + // Add context entry + // ToDo - also needs to recurse here? + JsonLDTerm subFieldName = dsft.getJsonLDTerm(); + if (subFieldName.inNamespace()) { + localContext.putIfAbsent(subFieldName.getNamespace().getPrefix(), + subFieldName.getNamespace().getUrl()); + } else { + localContext.putIfAbsent(subFieldName.getLabel(), subFieldName.getUrl()); + } + + List values = dsf.getValues_nondisplay(); + if (values.size() > 1) { + JsonArrayBuilder childVals = Json.createArrayBuilder(); + + for (String val : dsf.getValues_nondisplay()) { + childVals.add(val); + } + child.add(subFieldName.getLabel(), childVals); + } else { + child.add(subFieldName.getLabel(), values.get(0)); + } + } + } + vals.add(child); + } + } + // Add metadata value to aggregation, suppress array when only one value + JsonArray valArray = vals.build(); + return (valArray.size() != 1) ? 
valArray : valArray.get(0); + } public static void injectSettingsService(SettingsServiceBean settingsSvc, DatasetFieldServiceBean datasetFieldSvc) { settingsService = settingsSvc; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 465360f84cc..127632bf711 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -45,7 +45,7 @@ import org.apache.commons.lang3.StringUtils; import com.apicatalog.jsonld.JsonLd; -import com.apicatalog.jsonld.api.JsonLdError; +import com.apicatalog.jsonld.JsonLdError; import com.apicatalog.jsonld.document.JsonDocument; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 91f1ac2cfbc..4316ea58667 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -338,7 +338,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv) { .add("UNF", dsv.getUNF()).add("archiveTime", format(dsv.getArchiveTime())) .add("lastUpdateTime", format(dsv.getLastUpdateTime())).add("releaseTime", format(dsv.getReleaseTime())) .add("createTime", format(dsv.getCreateTime())); - License license = dsv.getTermsOfUseAndAccess().getLicense(); + License license = DatasetUtil.getLicense(dsv);; if (license != null) { // Standard license bld.add("license", jsonObjectBuilder() diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java index ef11d306cd3..d99e0901d3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java @@ -25,6 +25,8 @@ public WorkflowStep getStep(String stepType, Map stepParameters) return new AuthorizedExternalStep(stepParameters); case "archiver": return new ArchivalSubmissionWorkflowStep(stepParameters); + case "ldnannounce": + return new LDNAnnounceDatasetVersionStep(stepParameters); default: throw new IllegalArgumentException("Unsupported step type: '" + stepType + "'."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java new file mode 100644 index 00000000000..3478d9398f0 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java @@ -0,0 +1,279 @@ +package edu.harvard.iq.dataverse.workflow.internalspi; + +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.bagit.OREMap; +import edu.harvard.iq.dataverse.util.json.JsonLDTerm; +import edu.harvard.iq.dataverse.util.json.JsonUtil; +import edu.harvard.iq.dataverse.workflow.WorkflowContext; +import edu.harvard.iq.dataverse.workflow.step.Failure; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; +import static edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult.OK; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.UUID; +import 
java.util.logging.Level; +import java.util.logging.Logger; +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; +import javax.json.JsonValue; + +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; + +/** + * A workflow step that generates and sends an LDN Announcement message to the + * inbox of a configured target. The initial use case is for Dataverse to + * announce new dataset versions to the Harvard DASH preprint repository so that + * a DASH admin can create a backlink for any dataset versions that reference a + * DASH deposit or a paper with a DOI where DASH has a preprint copy. + * + * @author qqmyers + */ + +public class LDNAnnounceDatasetVersionStep implements WorkflowStep { + private static final Logger logger = Logger.getLogger(LDNAnnounceDatasetVersionStep.class.getName()); + private static final String REQUIRED_FIELDS = ":LDNAnnounceRequiredFields"; + private static final String LDN_TARGET = ":LDNTarget"; + private static final String RELATED_PUBLICATION = "publication"; + + JsonLDTerm publicationIDType = null; + JsonLDTerm publicationIDNumber = null; + JsonLDTerm publicationURL = null; + + public LDNAnnounceDatasetVersionStep(Map paramSet) { + new HashMap<>(paramSet); + } + + @Override + public WorkflowStepResult run(WorkflowContext context) { + + JsonObject target = JsonUtil.getJsonObject((String) context.getSettings().get(LDN_TARGET)); + if (target != null) { + String inboxUrl = target.getString("inbox"); + + CloseableHttpClient client = HttpClients.createDefault(); + + // build method + + HttpPost announcement; + try { + announcement = buildAnnouncement(false, context, target); + } catch (URISyntaxException e) { + return new
Failure("LDNAnnounceDatasetVersion workflow step failed: unable to parse inbox in :LDNTarget setting."); + } + if(announcement==null) { + logger.info(context.getDataset().getGlobalId().asString() + "does not have metadata required to send LDN message. Nothing sent."); + return OK; + } + // execute + try (CloseableHttpResponse response = client.execute(announcement)) { + int code = response.getStatusLine().getStatusCode(); + if (code >= 200 && code < 300) { + // HTTP OK range + return OK; + } else { + String responseBody = new String(response.getEntity().getContent().readAllBytes(), + StandardCharsets.UTF_8); + ; + return new Failure("Error communicating with " + inboxUrl + ". Server response: " + responseBody + + " (" + response + ")."); + } + + } catch (Exception ex) { + logger.log(Level.SEVERE, "Error communicating with remote server: " + ex.getMessage(), ex); + return new Failure("Error executing request: " + ex.getLocalizedMessage(), + "Cannot communicate with remote server."); + } + } + return new Failure("LDNAnnounceDatasetVersion workflow step failed: :LDNTarget setting missing or invalid."); + } + + @Override + public WorkflowStepResult resume(WorkflowContext context, Map internalData, String externalData) { + throw new UnsupportedOperationException("Not supported yet."); // This class does not need to resume. + } + + @Override + public void rollback(WorkflowContext context, Failure reason) { + throw new UnsupportedOperationException("Not supported yet."); // This class does not need to roll back.
+ } + + HttpPost buildAnnouncement(boolean qb, WorkflowContext ctxt, JsonObject target) throws URISyntaxException { + + // First check that we have what is required + DatasetVersion dv = ctxt.getDataset().getReleasedVersion(); + List dvf = dv.getDatasetFields(); + Map fields = new HashMap(); + String[] requiredFields = ((String) ctxt.getSettings().getOrDefault(REQUIRED_FIELDS, "")).split(",\\s*"); + for (String field : requiredFields) { + fields.put(field, null); + } + Set reqFields = fields.keySet(); + for (DatasetField df : dvf) { + if(!df.isEmpty() && reqFields.contains(df.getDatasetFieldType().getName())) { + fields.put(df.getDatasetFieldType().getName(), df); + } + } + if (fields.containsValue(null)) { + logger.fine("DatasetVersion doesn't contain metadata required to trigger announcement"); + return null; + } + // We do, so construct the json-ld body and method + + Map localContext = new HashMap(); + JsonObjectBuilder coarContext = Json.createObjectBuilder(); + Map emptyCvocMap = new HashMap(); + boolean includeLocalContext = false; + for (Entry entry : fields.entrySet()) { + DatasetField field = entry.getValue(); + DatasetFieldType dft = field.getDatasetFieldType(); + String dfTypeName = entry.getKey(); + JsonValue jv = OREMap.getJsonLDForField(field, false, emptyCvocMap, localContext); + switch (dfTypeName) { + case RELATED_PUBLICATION: + JsonArrayBuilder relArrayBuilder = Json.createArrayBuilder(); + publicationIDType = null; + publicationIDNumber = null; + publicationURL = null; + Collection childTypes = dft.getChildDatasetFieldTypes(); + for (DatasetFieldType cdft : childTypes) { + switch (cdft.getName()) { + case "publicationURL": + publicationURL = cdft.getJsonLDTerm(); + break; + case "publicationIDType": + publicationIDType = cdft.getJsonLDTerm(); + break; + case "publicationIDNumber": + publicationIDNumber = cdft.getJsonLDTerm(); + break; + } + + } + + if (jv != null) { + if (jv instanceof JsonArray) { + JsonArray rels = (JsonArray) jv; + for 
(JsonObject jo : rels.getValuesAs(JsonObject.class)) { + String id = getBestPubId(jo); + relArrayBuilder.add(Json.createObjectBuilder().add("id", id).add("ietf:cite-as", id) + .add("type", "sorg:ScholaryArticle").build()); + } + } + + else { // JsonObject + String id = getBestPubId((JsonObject) jv); + relArrayBuilder.add(Json.createObjectBuilder().add("id", id).add("ietf:cite-as", id) + .add("type", "sorg:ScholaryArticle").build()); + } + } + coarContext.add("IsSupplementTo", relArrayBuilder); + break; + default: + if (jv != null) { + includeLocalContext = true; + coarContext.add(dft.getJsonLDTerm().getLabel(), jv); + } + + } + } + dvf.get(0).getDatasetFieldType().getName(); + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder context = Json.createArrayBuilder().add("https://purl.org/coar/notify") + .add("https://www.w3.org/ns/activitystreams"); + if (includeLocalContext && !localContext.isEmpty()) { + JsonObjectBuilder contextBuilder = Json.createObjectBuilder(); + for (Entry e : localContext.entrySet()) { + contextBuilder.add(e.getKey(), e.getValue()); + } + context.add(contextBuilder); + } + job.add("@context", context); + job.add("id", "urn:uuid:" + UUID.randomUUID().toString()); + job.add("actor", Json.createObjectBuilder().add("id", SystemConfig.getDataverseSiteUrlStatic()) + .add("name", BrandingUtil.getInstallationBrandName()).add("type", "Service")); + job.add("context", coarContext); + Dataset d = ctxt.getDataset(); + job.add("object", + Json.createObjectBuilder().add("id", d.getLocalURL()) + .add("ietf:cite-as", d.getGlobalId().toURL().toExternalForm()) + .add("sorg:name", d.getDisplayName()).add("type", "sorg:Dataset")); + job.add("origin", Json.createObjectBuilder().add("id", SystemConfig.getDataverseSiteUrlStatic()) + .add("inbox", SystemConfig.getDataverseSiteUrlStatic() + "/api/inbox").add("type", "Service")); + job.add("target", target); + job.add("type", 
Json.createArrayBuilder().add("Announce").add("coar-notify:ReleaseAction")); + + HttpPost annPost = new HttpPost(); + annPost.setURI(new URI(target.getString("inbox"))); + String body = JsonUtil.prettyPrint(job.build()); + logger.fine("Body: " + body); + annPost.setEntity(new StringEntity(body, "utf-8")); + annPost.setHeader("Content-Type", "application/ld+json"); + return annPost; + } + + private String getBestPubId(JsonObject jo) { + String id = null; + if (jo.containsKey(publicationURL.getLabel())) { + id = jo.getString(publicationURL.getLabel()); + } else if (jo.containsKey(publicationIDType.getLabel())) { + if ((jo.containsKey(publicationIDNumber.getLabel()))) { + String number = jo.getString(publicationIDNumber.getLabel()); + + switch (jo.getString(publicationIDType.getLabel())) { + case "doi": + if (number.startsWith("https://doi.org/")) { + id = number; + } else if (number.startsWith("doi:")) { + id = "https://doi.org/" + number.substring(4); + } + + break; + case "DASH-NRS": + if (number.startsWith("http")) { + id = number; + } + break; + } + } + } + return id; + } + + String process(String template, Map<String, String> values) { + String curValue = template; + for (Map.Entry<String, String> ent : values.entrySet()) { + String val = ent.getValue(); + if (val == null) { + val = ""; + } + String varRef = "${" + ent.getKey() + "}"; + while (curValue.contains(varRef)) { + curValue = curValue.replace(varRef, val); + } + } + + return curValue; + } + +} diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index cfaeab739ef..5b6216aaff1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -216,6 +216,7 @@ notification.publishFailedPidReg={0} in {1} could not be published due to a fail notification.workflowFailed=An external workflow run on {0} in {1} has failed. Check your email and/or view the Dataset page which may have additional details.
Contact support if this continues to happen. notification.workflowSucceeded=An external workflow run on {0} in {1} has succeeded. Check your email and/or view the Dataset page which may have additional details. notification.statusUpdated=The status of dataset {0} has been updated to {1}. +notification.datasetMentioned=Announcement Received: Newly released {0} {2} {3} Dataset {4}. notification.ingestCompleted=Dataset {1} has one or more tabular files that completed the tabular ingest process and are available in archival formats. notification.ingestCompletedWithErrors=Dataset {1} has one or more tabular files that are available but are not supported for tabular ingest. @@ -265,6 +266,7 @@ notification.typeDescription.WORKFLOW_SUCCESS=External workflow run has succeede notification.typeDescription.WORKFLOW_FAILURE=External workflow run has failed notification.typeDescription.STATUSUPDATED=Status of dataset has been updated notification.typeDescription.DATASETCREATED=Dataset was created by user +notification.typeDescription.DATASETMENTIONED=Dataset was referenced in remote system groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. user.message.signup.label=Create Account @@ -780,6 +782,9 @@ contact.delegation.default_personal=Dataverse Installation Admin notification.email.info.unavailable=Unavailable notification.email.apiTokenGenerated=Hello {0} {1},\n\nAPI Token has been generated. Please keep it secure as you would do with a password. notification.email.apiTokenGenerated.subject=API Token was generated +notification.email.datasetWasMentioned=Hello {0},\n\nThe {1} has just been notified that the {2}, {4}, {5} "{8}" in this repository. +notification.email.datasetWasMentioned.subject={0}: A Dataset Relationship has been reported! + # dataverse.xhtml dataverse.name=Dataverse Name @@ -2474,6 +2479,10 @@ template.delete.error=The dataset template cannot be deleted. template.update=Template data updated template.update.error=Template update failed template.makeDefault.error=The dataset template cannot be made default. +template.instructions.label=Custom Instructions: +template.instructions.label.tip=Click to Edit +template.instructions.empty.label=(None - click to add) + page.copy=Copy of #RolePermissionFragment.java @@ -2821,3 +2830,5 @@ publishDatasetCommand.pidNotReserved=Cannot publish dataset because its persiste # APIs api.errors.invalidApiToken=Invalid API token. +api.ldninbox.citation.alert={0},\n\nThe {1} has just been notified that the {2}, {3}, cites "{6}" in this repository. +api.ldninbox.citation.subject={0}: A Dataset Citation has been reported! diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties index bdcc48b5bf1..b69a8e1549c 100644 --- a/src/main/java/propertyFiles/citation.properties +++ b/src/main/java/propertyFiles/citation.properties @@ -265,6 +265,7 @@ controlledvocabulary.publicationIDType.purl=purl controlledvocabulary.publicationIDType.upc=upc controlledvocabulary.publicationIDType.url=url controlledvocabulary.publicationIDType.urn=urn +controlledvocabulary.publicationIDType.dash-nrs=DASH-NRS controlledvocabulary.contributorType.data_collector=Data Collector controlledvocabulary.contributorType.data_curator=Data Curator controlledvocabulary.contributorType.data_manager=Data Manager diff --git a/src/main/resources/db/migration/V5.11.1.3__hdc-3b.sql b/src/main/resources/db/migration/V5.11.1.3__hdc-3b.sql new file mode 100644 index 00000000000..af8143a97d6 --- /dev/null +++ b/src/main/resources/db/migration/V5.11.1.3__hdc-3b.sql @@ -0,0 +1 @@ +ALTER TABLE usernotification ADD COLUMN IF NOT EXISTS additionalinfo VARCHAR; diff --git a/src/main/resources/db/migration/V5.11.1.4__hdc-3b2-template-instructions.sql b/src/main/resources/db/migration/V5.11.1.4__hdc-3b2-template-instructions.sql new file mode 100644 index 00000000000..df1d3068159 --- /dev/null +++ b/src/main/resources/db/migration/V5.11.1.4__hdc-3b2-template-instructions.sql @@ -0,0 +1,14 @@ +ALTER TABLE template ADD COLUMN IF NOT EXISTS instructions TEXT; + +ALTER TABLE dataset ADD COLUMN IF NOT EXISTS template_id BIGINT; + +DO $$ +BEGIN + + BEGIN + ALTER TABLE dataset ADD CONSTRAINT fk_dataset_template_id FOREIGN KEY (template_id) REFERENCES template(id); + EXCEPTION + WHEN duplicate_object THEN RAISE NOTICE 'Table constraint fk_dataset_template_id already exists'; + END; + +END $$; diff --git 
a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 1a190064a42..f51c899f479 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -248,7 +248,7 @@
-
+