From 07e4d1d85b7782f879a2b790848b0cf37a105a70 Mon Sep 17 00:00:00 2001 From: Jan van Mansum Date: Fri, 13 Dec 2024 16:14:56 +0100 Subject: [PATCH] Merged Jims PR --- .../dataverse/ControlledVocabularyValue.java | 10 +- .../edu/harvard/iq/dataverse/Dataset.java | 2 + .../iq/dataverse/DatasetFieldServiceBean.java | 10 +- .../edu/harvard/iq/dataverse/DatasetPage.java | 61 +- .../iq/dataverse/DatasetServiceBean.java | 3 +- .../harvard/iq/dataverse/DatasetVersion.java | 48 +- .../dataverse/DatasetVersionDifference.java | 800 +++++++++--------- .../dataverse/DatasetVersionModifiedDate.java | 51 ++ .../harvard/iq/dataverse/FileMetadata.java | 2 +- .../edu/harvard/iq/dataverse/api/Access.java | 4 +- .../harvard/iq/dataverse/api/Datasets.java | 10 +- .../command/impl/AbstractDatasetCommand.java | 105 ++- .../CuratePublishedDatasetVersionCommand.java | 2 +- .../impl/DuraCloudSubmitToArchiveCommand.java | 2 +- .../command/impl/PublishDatasetCommand.java | 29 +- .../impl/UpdateDatasetVersionCommand.java | 226 +++-- .../UpdateDatasetVersionMetadataCommand.java | 280 ++++++ .../dataverse/util/json/BriefJsonPrinter.java | 2 +- .../iq/dataverse/util/json/JSONLDUtil.java | 2 +- src/main/java/propertyFiles/Bundle.properties | 6 +- .../webapp/file-edit-button-fragment.xhtml | 4 +- .../edu/harvard/iq/dataverse/api/FilesIT.java | 1 + .../impl/CreateDatasetVersionCommandTest.java | 29 +- .../util/json/BriefJsonPrinterTest.java | 3 +- 24 files changed, 1113 insertions(+), 579 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/DatasetVersionModifiedDate.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionMetadataCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java index 5dcce98a90f..2dcf54974a2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java @@ -34,7 +34,7 @@ */ @Entity @Table(indexes = {@Index(columnList="datasetfieldtype_id"), @Index(columnList="displayorder")}) -public class ControlledVocabularyValue implements Serializable { +public class ControlledVocabularyValue implements Serializable, Comparable { private static final Logger logger = Logger.getLogger(ControlledVocabularyValue.class.getCanonicalName()); @@ -167,7 +167,13 @@ public boolean equals(Object object) { } ControlledVocabularyValue other = (ControlledVocabularyValue) object; return Objects.equals(getId(), other.getId()); - } + } + + @Override + public int compareTo(ControlledVocabularyValue o) { + //Display order may be better but the raw return from the db is by id, so for now we use id. 
+ return Long.compare(this.getId(), o.getId()); + } @Override public String toString() { diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 98766dca447..12c6fcc8e2e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -348,6 +348,8 @@ public void setVersions(List versions) { private DatasetVersion createNewDatasetVersion(Template template, FileMetadata fmVarMet) { DatasetVersion dsv = new DatasetVersion(); + DatasetVersionModifiedDate date = new DatasetVersionModifiedDate(); + dsv.setModifiedDate(date); dsv.setVersionState(DatasetVersion.VersionState.DRAFT); dsv.setFileMetadatas(new ArrayList<>()); DatasetVersion latestVersion; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index ff78b0c83ec..aa380e5cb57 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -276,9 +276,11 @@ public ControlledVocabAlternate save(ControlledVocabAlternate alt) { * @return - a map of JsonObjects containing configuration information keyed by the DatasetFieldType id (Long) */ public Map getCVocConf(boolean byTermUriField){ - + return getCVocConf(byTermUriField, settingsService.getValueForKey(SettingsServiceBean.Key.CVocConf)); + } + + public Map getCVocConf(boolean byTermUriField, String cvocSetting) { //ToDo - change to an API call to be able to provide feedback if the json is invalid? - String cvocSetting = settingsService.getValueForKey(SettingsServiceBean.Key.CVocConf); if (cvocSetting == null || cvocSetting.isEmpty()) { oldHash=null; //Release old maps @@ -356,11 +358,11 @@ public Set getCvocFieldSet() { /** * Adds information about the external vocabulary term being used in this DatasetField to the ExternalVocabularyValue table if it doesn't already exist. 
* @param df - the primitive/parent compound field containing a newly saved value + * @param cvocEntry */ - public void registerExternalVocabValues(DatasetField df) { + public void registerExternalVocabValues(DatasetField df, JsonObject cvocEntry) { DatasetFieldType dft = df.getDatasetFieldType(); logger.fine("Registering for field: " + dft.getName()); - JsonObject cvocEntry = getCVocConf(true).get(dft.getId()); if (dft.isPrimitive()) { List siblingsDatasetFields = new ArrayList<>(); if(dft.getParentDatasetFieldType()!=null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index eae4a9f2977..a474253978a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -37,6 +37,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionMetadataCommand; import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean; import io.gdcc.spi.export.ExportException; @@ -101,6 +102,7 @@ import jakarta.faces.view.ViewScoped; import jakarta.inject.Inject; import jakarta.inject.Named; +import jakarta.persistence.OptimisticLockException; import org.apache.commons.lang3.StringUtils; import org.primefaces.event.FileUploadEvent; @@ -182,7 +184,7 @@ public class DatasetPage implements java.io.Serializable { public enum EditMode { - CREATE, INFO, FILE, METADATA, LICENSE + CREATE, INFO, FILE, METADATA, LICENSE, DEFAULT }; public enum DisplayMode { @@ -2887,6 +2889,9 @@ private String releaseDataset(boolean minor) { // the lock info system. JsfHelper.addErrorMessage(ex.getLocalizedMessage()); } + if(ex.getCause()!=null && ex.getCause() instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelPublishError")); + } logger.severe(ex.getMessage()); } @@ -2896,21 +2901,6 @@ private String releaseDataset(boolean minor) { return returnToDraftVersion(); } - @Deprecated - public String registerDataset() { - try { - UpdateDatasetVersionCommand cmd = new UpdateDatasetVersionCommand(dataset, dvRequestService.getDataverseRequest()); - cmd.setValidateLenient(true); - dataset = commandEngine.submit(cmd); - } catch (CommandException ex) { - FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_WARN,BundleUtil.getStringFromBundle( "dataset.registration.failed"), " - " + ex.toString())); - logger.severe(ex.getMessage()); - } - FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.registered"), BundleUtil.getStringFromBundle("dataset.registered.msg")); - FacesContext.getCurrentInstance().addMessage(null, message); - return returnToDatasetOnly(); - } - public String updateCurrentVersion() { /* * Note: The code here mirrors that in the @@ -3918,8 +3908,10 @@ public String save() { Map deleteStorageLocations = null; try { - if (editMode == EditMode.CREATE) { - //Lock the metadataLanguage once created + EditMode currentMode = (editMode == null? 
EditMode.DEFAULT : editMode); + switch (currentMode) { + case CREATE: + //Lock the metadataLanguage once created dataset.setMetadataLanguage(getEffectiveMetadataLanguage()); //ToDo - could drop use of selectedTemplate and just use the persistent dataset.getTemplate() if ( selectedTemplate != null ) { @@ -3932,8 +3924,9 @@ public String save() { } else { cmd = new CreateNewDatasetCommand(dataset, dvRequestService.getDataverseRequest()); } - - } else { + break; + case METADATA: + case LICENSE: //Precheck - also checking db copy of dataset to catch edits in progress that would cause update command transaction to fail if (dataset.getId() != null) { Dataset lockTest = datasetService.find(dataset.getId()); @@ -3944,6 +3937,21 @@ public String save() { return returnToDraftVersion(); } } + + cmd = new UpdateDatasetVersionMetadataCommand(dataset, dvRequestService.getDataverseRequest(), clone ); + ((UpdateDatasetVersionMetadataCommand) cmd).setValidateLenient(true); + break; + default: + //Precheck - also checking db copy of dataset to catch edits in progress that would cause update command transaction to fail + if (dataset.getId() != null) { + Dataset lockTest = datasetService.find(dataset.getId()); + if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress) || lockTest.isLockedFor(DatasetLock.Reason.EditInProgress)) { + logger.log(Level.INFO, "Couldn''t save dataset: {0}", "It is locked." + + ""); + JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); + return returnToDraftVersion(); + } + } if (!filesToBeDeleted.isEmpty()) { deleteStorageLocations = datafileService.getPhysicalFilesToDelete(filesToBeDeleted); } @@ -3964,6 +3972,10 @@ public String save() { Throwable cause = ex; while (cause.getCause()!= null) { cause = cause.getCause(); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } error.append(cause).append(" "); error.append(cause.getMessage()).append(" "); } @@ -3973,6 +3985,15 @@ public String save() { } catch (CommandException ex) { //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); logger.log(Level.SEVERE, "CommandException, when attempting to update the dataset: " + ex.getMessage(), ex); + Throwable cause = ex; + while (cause.getCause()!= null) { + cause = cause.getCause(); + logger.info("Cause is: " + cause.getClass().getName() + ", Message: " + cause.getMessage()); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } + } populateDatasetUpdateFailureMessage(); return returnToDraftVersion(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index dab0ff43fcf..c46751986e5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -442,7 +442,8 @@ public DatasetLock addDatasetLock(Long datasetId, DatasetLock.Reason reason, Lon // (to prevent multiple, duplicate locks on the dataset!) 
DatasetLock lock = dataset.getLockFor(reason); if (lock != null) { - return lock; + //return lock; + return null; } // Create new: diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 943693355a3..8c2c6e370bc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -83,6 +83,12 @@ @ValidateVersionNote(versionNote = "versionNote", versionState = "versionState") public class DatasetVersion implements Serializable { + public DatasetVersion() { + super(); + this.modifiedDate = new DatasetVersionModifiedDate(); + + } + private static final Logger logger = Logger.getLogger(DatasetVersion.class.getCanonicalName()); private static final Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); @@ -127,8 +133,17 @@ public enum VersionState { private String UNF; - @Version - private Long version; + + @OneToOne(cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + private DatasetVersionModifiedDate modifiedDate; + + public void setModifiedDate(DatasetVersionModifiedDate modifiedDate) { + this.modifiedDate = modifiedDate; + } + + public DatasetVersionModifiedDate getModifiedDate() { + return modifiedDate; + } private Long versionNumber; private Long minorVersionNumber; @@ -163,10 +178,6 @@ public enum VersionState { @Column( nullable=false ) private Date createTime; - @Temporal(value = TemporalType.TIMESTAMP) - @Column( nullable=false ) - private Date lastUpdateTime; - @Temporal(value = TemporalType.TIMESTAMP) private Date releaseTime; @@ -231,17 +242,6 @@ public void setUNF(String UNF) { this.UNF = UNF; } - /** - * This is JPA's optimistic locking mechanism, and has no semantic meaning in the DV object model. 
- * @return the object db version - */ - public Long getVersion() { - return this.version; - } - - public void setVersion(Long version) { - } - public String getDataverseSiteUrl() { return dataverseSiteUrl; } @@ -429,25 +429,25 @@ public void setCreateTime(Date createTime) { } public Date getLastUpdateTime() { - return lastUpdateTime; + return modifiedDate.getLastUpdateTime(); } public void setLastUpdateTime(Date lastUpdateTime) { if (createTime == null) { createTime = lastUpdateTime; } - this.lastUpdateTime = lastUpdateTime; + modifiedDate.setLastUpdateTime(lastUpdateTime); } public String getVersionDate() { - if (this.lastUpdateTime == null){ + if (modifiedDate.getLastUpdateTime() == null){ return null; } - return DateUtil.formatDate(lastUpdateTime); + return DateUtil.formatDate(modifiedDate.getLastUpdateTime()); } public String getVersionYear() { - return new SimpleDateFormat("yyyy").format(lastUpdateTime); + return new SimpleDateFormat("yyyy").format(modifiedDate.getLastUpdateTime()); } public Date getReleaseTime() { @@ -672,7 +672,7 @@ public void updateDefaultValuesFromTemplate(Template template) { public DatasetVersion cloneDatasetVersion(){ DatasetVersion dsv = new DatasetVersion(); - dsv.setVersionState(this.getPriorVersionState()); + dsv.setVersionState(this.getVersionState()); dsv.setFileMetadatas(new ArrayList<>()); if (this.getUNF() != null){ @@ -2122,7 +2122,7 @@ public String getJsonLd() { } public String getLocaleLastUpdateTime() { - return DateUtil.formatDate(new Timestamp(lastUpdateTime.getTime())); + return DateUtil.formatDate(new Timestamp(modifiedDate.getLastUpdateTime().getTime())); } public String getExternalStatusLabel() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index eca0c84ae84..22569e8790a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -5,16 +5,24 @@ import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.util.StringUtil; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.logging.Logger; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.lang3.StringUtils; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; @@ -29,19 +37,18 @@ public final class DatasetVersionDifference { private DatasetVersion newVersion; private DatasetVersion originalVersion; private List> detailDataByBlock = new ArrayList<>(); - private List datasetFilesDiffList; - private List datasetFilesReplacementList; + private List datasetFilesDiffList = new ArrayList<>(); + private List datasetFilesReplacementList= new ArrayList<>(); private List addedFiles = new ArrayList<>(); private List removedFiles = new ArrayList<>(); private List changedFileMetadata = new ArrayList<>(); + private Map>> changedFileMetadataDiff = new HashMap<>(); private List changedVariableMetadata = new ArrayList<>(); private List replacedFiles = new ArrayList<>(); private List changedTermsAccess = new ArrayList<>(); 
private List summaryDataForNote = new ArrayList<>(); private List blockDataForNote = new ArrayList<>(); - private VariableMetadataUtil variableMetadataUtil; - private List differenceSummaryGroups = new ArrayList<>(); public List getDifferenceSummaryGroups() { @@ -53,6 +60,9 @@ public void setDifferenceSummaryGroups(List differenceSu } public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion originalVersion) { + this(newVersion, originalVersion, true); + } + public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion originalVersion, boolean checkForFileChanges) { setOriginalVersion(originalVersion); setNewVersion(newVersion); //Compare Data @@ -99,61 +109,86 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin updateBlockSummary(dsfn, dsfn.getControlledVocabularyValues().size(), 0, 0); } else { updateBlockSummary(dsfn, dsfn.getDatasetFieldValues().size(), 0, 0); - } + } } else { updateBlockSummary(dsfn, dsfn.getDatasetFieldCompoundValues().size(), 0, 0); } addToSummary(null, dsfn); } } - - // TODO: ? - // It looks like we are going through the filemetadatas in both versions, - // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of - // changed, deleted and added files between the 2 versions... But why - // are we doing it, if we are doing virtually the same thing inside - // the initDatasetFilesDifferenceList(), below - but in a more efficient - // way (sorting both lists, then goint through them in parallel, at the - // cost of (N+M) max.? - // -- 4.6 Nov. 2016 - - for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - boolean deleted = true; + + long startTime = System.currentTimeMillis(); + if (checkForFileChanges) { + Map originalFileMetadataMap = new HashMap<>(); + Map previousIDtoFileMetadataMap = new HashMap<>(); + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + originalFileMetadataMap.put(fmdo.getDataFile().getId(), fmdo); + } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - deleted = false; - if (!compareFileMetadatas(fmdo, fmdn)) { + DataFile ndf = fmdn.getDataFile(); + Long id = ndf.getId(); + FileMetadata fmdo = originalFileMetadataMap.get(id); + // If this file was in the original version + if (fmdo != null) { + // Check for differences + Map> fileMetadataDiff = compareFileMetadatas(fmdo, fmdn); + if (!fileMetadataDiff.isEmpty()) { changedFileMetadata.add(fmdo); changedFileMetadata.add(fmdn); + // TODO: find a better key for the map. 
needs to be something that doesn't + // change + changedFileMetadataDiff.put(fmdo, fileMetadataDiff); } - if (!variableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { + if (!VariableMetadataUtil.compareVariableMetadata(fmdo, fmdn) || !compareVarGroup(fmdo, fmdn)) { changedVariableMetadata.add(fmdo); changedVariableMetadata.add(fmdn); } - break; + // And drop it from the list since it can't be a deleted file + originalFileMetadataMap.remove(id); + } else { + // It wasn't in the original version + Long prevID = ndf.getPreviousDataFileId(); + // It might be a replacement file or an added file + if (prevID != null) { + // Add it to a map so we can check later to see if it's a replacement + previousIDtoFileMetadataMap.put(prevID, fmdn); + } else { + // Otherwise make it an added file now + addedFiles.add(fmdn); + } } } - if (deleted) { - removedFiles.add(fmdo); - } - } - for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - boolean added = true; - for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - added = false; - break; + // Finally check any remaining files from the original version that weren't in + // the new version' + for (Long removedId : originalFileMetadataMap.keySet()) { + // See if it has been replaced + FileMetadata replacingFmd = previousIDtoFileMetadataMap.get(removedId); + FileMetadata fmdRemoved = originalFileMetadataMap.get(removedId); + if (replacingFmd != null) { + // This is a replacement + replacedFiles.add(new FileMetadata[] { fmdRemoved, replacingFmd }); + // Drop if from the map + previousIDtoFileMetadataMap.remove(removedId); + } else { + // This is a removed file + removedFiles.add(fmdRemoved); } } - if (added) { - addedFiles.add(fmdn); + // Any fms left are not updating existing files and aren't replacing a file, but + // they are claiming a previous file id. That shouldn't be possible, but this + // will + // make sure they get listed in the difference if they do + for (Entry entry : previousIDtoFileMetadataMap.entrySet()) { + logger.warning("Previous file id claimed for a new file: fmd id: " + entry.getValue() + + ", previous file id: " + entry.getKey()); + addedFiles.add(entry.getValue()); } - } - getReplacedFiles(); - initDatasetFilesDifferencesList(); - //Sort within blocks by datasetfieldtype dispaly order then.... - //sort via metadatablock order - citation first... 
+ logger.fine("Main difference loop execution time: " + (System.currentTimeMillis() - startTime) + " ms"); + initDatasetFilesDifferencesList(); + } + //Sort within blocks by datasetfieldtype display order for (List blockList : detailDataByBlock) { Collections.sort(blockList, (DatasetField[] l1, DatasetField[] l2) -> { DatasetField dsfa = l1[0]; //(DatasetField[]) l1.get(0); @@ -163,304 +198,90 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin return Integer.valueOf(a).compareTo(b); }); } + //Sort existing compoundValues by datasetfieldtype display order + for (List blockList : detailDataByBlock) { + for (DatasetField[] dfarr : blockList) { + for (DatasetField df : dfarr) { + for (DatasetFieldCompoundValue dfcv : df.getDatasetFieldCompoundValues()) { + Collections.sort(dfcv.getChildDatasetFields(), DatasetField.DisplayOrder); + } + } + } + } + //Sort via metadatablock order Collections.sort(detailDataByBlock, (List l1, List l2) -> { - DatasetField dsfa[] = (DatasetField[]) l1.get(0); - DatasetField dsfb[] = (DatasetField[]) l2.get(0); - int a = dsfa[0].getDatasetFieldType().getMetadataBlock().getId().intValue(); - int b = dsfb[0].getDatasetFieldType().getMetadataBlock().getId().intValue(); + DatasetField dsfa[] = (DatasetField[]) l1.get(0); + DatasetField dsfb[] = (DatasetField[]) l2.get(0); + int a = dsfa[0].getDatasetFieldType().getMetadataBlock().getId().intValue(); + int b = dsfb[0].getDatasetFieldType().getMetadataBlock().getId().intValue(); return Integer.valueOf(a).compareTo(b); }); getTermsDifferences(); } - - private void getReplacedFiles() { - if (addedFiles.isEmpty() || removedFiles.isEmpty()) { - return; - } - List addedToReplaced = new ArrayList<>(); - List removedToReplaced = new ArrayList<>(); - for (FileMetadata added : addedFiles) { - DataFile addedDF = added.getDataFile(); - Long replacedId = addedDF.getPreviousDataFileId(); - if (added.getDataFile().getPreviousDataFileId() != null){ - } - for (FileMetadata removed : removedFiles) { - DataFile test = removed.getDataFile(); - if (test.getId().equals(replacedId)) { - addedToReplaced.add(added); - removedToReplaced.add(removed); - FileMetadata[] replacedArray = new FileMetadata[2]; - replacedArray[0] = removed; - replacedArray[1] = added; - replacedFiles.add(replacedArray); - } - } - } - if(addedToReplaced.isEmpty()){ - } else{ - addedToReplaced.stream().forEach((delete) -> { - addedFiles.remove(delete); - }); - removedToReplaced.stream().forEach((delete) -> { - removedFiles.remove(delete); - }); - } - } - - private void getTermsDifferences() { - changedTermsAccess = new ArrayList<>(); - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()))) { - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, 
StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if 
(!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - } - } + private void getTermsDifferences() { - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() == null) { - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - } - } - - if (newVersion.getTermsOfUseAndAccess() == null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), ""); - } - if 
(!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - 
StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), ""); - } - } - } - - private DifferenceSummaryItem createSummaryItem(){ - return null; - } - - private List addToSummaryGroup(String displayName, DifferenceSummaryItem differenceSummaryItem){ - - return null; + TermsOfUseAndAccess originalTerms = originalVersion.getTermsOfUseAndAccess(); + if(originalTerms == null) { + originalTerms = new TermsOfUseAndAccess(); + } + // newTerms should never be null + TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess(); + if(newTerms == null) { + logger.warning("New version does not have TermsOfUseAndAccess"); + newTerms = new TermsOfUseAndAccess(); + } + + checkAndAddToChangeList(originalTerms.getTermsOfUse(), newTerms.getTermsOfUse(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header")); + checkAndAddToChangeList(originalTerms.getConfidentialityDeclaration(), newTerms.getConfidentialityDeclaration(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration")); + checkAndAddToChangeList(originalTerms.getSpecialPermissions(), newTerms.getSpecialPermissions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions")); + checkAndAddToChangeList(originalTerms.getRestrictions(), newTerms.getRestrictions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions")); + checkAndAddToChangeList(originalTerms.getCitationRequirements(), 
newTerms.getCitationRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements")); + checkAndAddToChangeList(originalTerms.getDepositorRequirements(), newTerms.getDepositorRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements")); + checkAndAddToChangeList(originalTerms.getConditions(), newTerms.getConditions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions")); + checkAndAddToChangeList(originalTerms.getDisclaimer(), newTerms.getDisclaimer(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer")); + checkAndAddToChangeList(originalTerms.getTermsOfAccess(), newTerms.getTermsOfAccess(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess")); + checkAndAddToChangeList(originalTerms.getDataAccessPlace(), newTerms.getDataAccessPlace(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace")); + checkAndAddToChangeList(originalTerms.getOriginalArchive(), newTerms.getOriginalArchive(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive")); + checkAndAddToChangeList(originalTerms.getAvailabilityStatus(), newTerms.getAvailabilityStatus(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus")); + checkAndAddToChangeList(originalTerms.getContactForAccess(), newTerms.getContactForAccess(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess")); + checkAndAddToChangeList(originalTerms.getSizeOfCollection(), newTerms.getSizeOfCollection(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection")); + checkAndAddToChangeList(originalTerms.getStudyCompletion(), newTerms.getStudyCompletion(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion")); + checkAndAddToChangeList(Boolean.toString(originalTerms.isFileAccessRequest()), Boolean.toString(newTerms.isFileAccessRequest()), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.fileAccessRequest")); + String customTerms = BundleUtil.getStringFromBundle("license.custom"); + checkAndAddToChangeList( + (originalTerms.getLicense() == null) ? customTerms : originalTerms.getLicense().getName(), + (newTerms.getLicense() == null) ? 
customTerms : newTerms.getLicense().getName(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.license")); } - private List addToTermsChangedList(List listIn, String label, String origVal, String newVal) { - String[] diffArray; - diffArray = new String[3]; - diffArray[0] = label; - diffArray[1] = origVal; - diffArray[2] = newVal; - listIn.add(diffArray); - return listIn; + private void checkAndAddToChangeList(String originalTerm, String newTerm, + String termLabel) { + originalTerm = StringUtil.nullToEmpty(originalTerm); + newTerm = StringUtil.nullToEmpty(newTerm); + if(!originalTerm.equals(newTerm)) { + changedTermsAccess.add(new String[]{termLabel, originalTerm, newTerm}); + } } - private void addToList(List listIn, DatasetField dsfo, DatasetField dsfn) { DatasetField[] dsfArray; dsfArray = new DatasetField[2]; @@ -523,7 +344,7 @@ private void addToNoteSummary(DatasetField dsfo, int added, int deleted, int cha summaryDataForNote.add(noteArray); } - private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { + static boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { List vglo = fmdo.getVarGroups(); List vgln = fmdn.getVarGroups(); @@ -533,7 +354,7 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { int count = 0; for (VarGroup vgo : vglo) { for (VarGroup vgn : vgln) { - if (!variableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { + if (!VariableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { Set dvo = vgo.getVarsInGroup(); Set dvn = vgn.getVarsInGroup(); if (dvo.equals(dvn)) { @@ -551,27 +372,36 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { } } - public static boolean compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { - + public static Map> compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { + Map> fileMetadataChanged = new HashMap<>(); if (!StringUtils.equals(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))) { - return false; + fileMetadataChanged.put("Description", + List.of(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))); } if (!StringUtils.equals(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())) { - return false; + fileMetadataChanged.put("Categories", + List.of(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())); } - + if (!StringUtils.equals(fmdo.getLabel(), fmdn.getLabel())) { - return false; + fileMetadataChanged.put("Label", + List.of(fmdo.getLabel(), fmdn.getLabel())); } - + if (!StringUtils.equals(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())) { - return false; + fileMetadataChanged.put("ProvFreeForm", + List.of(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())); + } + + if (fmdo.isRestricted() != fmdn.isRestricted()) { + fileMetadataChanged.put("isRestricted", + List.of(String.valueOf(fmdo.isRestricted()), String.valueOf(fmdn.isRestricted()))); } - - return fmdo.isRestricted() == fmdn.isRestricted(); + + return fileMetadataChanged; } - + private void compareValues(DatasetField originalField, DatasetField newField, boolean compound) { String originalValue = ""; String newValue = ""; @@ -632,7 +462,7 @@ private void compareValues(DatasetField originalField, DatasetField newField, bo totalChanged = 0; } } - + if (countNew > countOriginal) { totalAdded = countNew - countOriginal; } @@ -665,7 +495,7 @@ public String getFileNote() { retString += 
BundleUtil.getStringFromBundle("dataset.version.file.removed2", Arrays.asList(removedFiles.size()+"")); } } - + if (replacedFiles.size() > 0) { if (retString.isEmpty()) { retString = BundleUtil.getStringFromBundle("dataset.version.file.replaced", Arrays.asList(replacedFiles.size()+"")); @@ -673,7 +503,7 @@ public String getFileNote() { retString += BundleUtil.getStringFromBundle("dataset.version.file.replaced2", Arrays.asList(replacedFiles.size()+"")); } } - + if (changedFileMetadata.size() > 0) { if (retString.isEmpty()) { @@ -697,7 +527,7 @@ public String getFileNote() { return retString; } - + public List getDatasetFilesReplacementList() { return datasetFilesReplacementList; } @@ -769,8 +599,8 @@ public void setSummaryDataForNote(List summaryDataForNote) { public void setBlockDataForNote(List blockDataForNote) { this.blockDataForNote = blockDataForNote; } - - + + public List getChangedTermsAccess() { return changedTermsAccess; } @@ -780,9 +610,7 @@ public void setChangedTermsAccess(List changedTermsAccess) { } private void initDatasetFilesDifferencesList() { - datasetFilesDiffList = new ArrayList<>(); - datasetFilesReplacementList = new ArrayList <>(); - + // Study Files themselves are version-less; // In other words, 2 different versions can have different sets of // study files, but the files themselves don't have versions. @@ -800,20 +628,20 @@ private void initDatasetFilesDifferencesList() { FileMetadata fm1; FileMetadata fm2; - - // We also have to be careful sorting this FileMetadatas. If we sort the + + // We also have to be careful sorting this FileMetadatas. If we sort the // lists as they are still attached to their respective versions, we may end - // up messing up the page, which was rendered based on the specific order - // of these in the working version! + // up messing up the page, which was rendered based on the specific order + // of these in the working version! // So the right way of doing this is to create defensive copies of the - // lists; extra memory, but safer. + // lists; extra memory, but safer. // -- L.A. Nov. 2016 - + List fileMetadatasNew = new ArrayList<>(newVersion.getFileMetadatas()); List fileMetadatasOriginal = new ArrayList<>(originalVersion.getFileMetadatas()); - + if (!replacedFiles.isEmpty()) { - + replacedFiles.stream().map((replacedPair) -> { FileMetadata replacedFile = replacedPair[0]; FileMetadata newFile = replacedPair[1]; @@ -848,15 +676,15 @@ private void initDatasetFilesDifferencesList() { }); // Here's a potential problem: this new version may have been created - // specifically because new files are being added to the dataset. - // In which case there may be files associated with this new version - // with no database ids - since they haven't been saved yet. - // So if we try to sort the files in the version the way we did above, - // by ID, it may fail with a null pointer. - // To solve this, we should simply check if the file has the id; and if not, + // specifically because new files are being added to the dataset. + // In which case there may be files associated with this new version + // with no database ids - since they haven't been saved yet. + // So if we try to sort the files in the version the way we did above, + // by ID, it may fail with a null pointer. + // To solve this, we should simply check if the file has the id; and if not, // sort it higher than any file with an id - because it is a most recently // added file. Since we are only doing this for the purposes of generating - // version differences, this should be OK. 
+ // version differences, this should be OK. // -- L.A. Aug. 2014 Collections.sort(fileMetadatasNew, (FileMetadata l1, FileMetadata l2) -> { @@ -864,18 +692,18 @@ private void initDatasetFilesDifferencesList() { FileMetadata fm4 = l2; Long a = fm3.getDataFile().getId(); Long b = fm4.getDataFile().getId(); - if (a == null && b == null) { - return 0; - } else if (a == null) { - return 1; - } else if (b == null) { - return -1; - } - return a.compareTo(b); + if (a == null && b == null) { + return 0; + } else if (a == null) { + return 1; + } else if (b == null) { + return -1; + } + return a.compareTo(b); }); while (i < fileMetadatasOriginal.size() - && j < fileMetadatasNew.size()) { + && j < fileMetadatasNew.size()) { fm1 = fileMetadatasOriginal.get(i); fm2 = fileMetadatasNew.get(j); @@ -1042,11 +870,11 @@ private boolean fileMetadataIsDifferent(FileMetadata fm1, FileMetadata fm2) { if (!value1.equals(value2)) { return true; } - + // Provenance Freeform Text value1 = fm1.getProvFreeForm(); value2 = fm2.getProvFreeForm(); - + if (value1 == null || value1.isEmpty() || value1.equals(" ")) { value1 = ""; } @@ -1057,7 +885,7 @@ private boolean fileMetadataIsDifferent(FileMetadata fm1, FileMetadata fm2) { if (!value1.equals(value2)) { return true; } - + // File restrictions return fm1.isRestricted() != fm2.isRestricted(); } @@ -1091,7 +919,7 @@ private datasetFileDifferenceItem selectFileMetadataDiffs(FileMetadata fm1, File fdi.setFileName2(fm2.getLabel()); fdi.setFileType2(fm2.getDataFile().getFriendlyType()); - + //fdi.setFileSize2(FileUtil.byteCountToDisplaySize(new File(fm2.getStudyFile().getFileSystemLocation()).length())); // deprecated: fdi.setFileCat2(fm2.getCategory()); fdi.setFileDesc2(fm2.getDescription()); @@ -1143,7 +971,7 @@ private datasetFileDifferenceItem selectFileMetadataDiffs(FileMetadata fm1, File if (!value1.equals(value2)) { fdi.setFileCat1(value1); fdi.setFileCat2(value2); - } + } // file description: value1 = fm1.getDescription(); @@ -1178,7 +1006,7 @@ private datasetFileDifferenceItem selectFileMetadataDiffs(FileMetadata fm1, File fdi.setFileProvFree1(value1); fdi.setFileProvFree2(value2); } - + // file restricted: if (fm1.isRestricted() != fm2.isRestricted() || fm1.getDataFile().getEmbargo() != fm2.getDataFile().getEmbargo()) { fdi.setFileRest1(BundleUtil.getStringFromBundle(getAccessLabel(fm1))); @@ -1187,7 +1015,7 @@ private datasetFileDifferenceItem selectFileMetadataDiffs(FileMetadata fm1, File } return fdi; } - + private String getAccessLabel(FileMetadata fm) { boolean embargoed = fm.getDataFile().getEmbargo()!=null; boolean restricted = fm.isRestricted(); @@ -1198,14 +1026,14 @@ private String getAccessLabel(FileMetadata fm) { } public String getEditSummaryForLog() { - - String retVal = ""; - + + String retVal = ""; + retVal = System.lineSeparator() + this.newVersion.getTitle() + " (" + this.originalVersion.getDataset().getIdentifier() + ") was updated " + new Date(); - + String valueString = ""; String groupString = ""; - + //Metadata differences displayed by Metdata block if (!this.detailDataByBlock.isEmpty()) { for (List blocks : detailDataByBlock) { @@ -1217,7 +1045,7 @@ public String getEditSummaryForLog() { String title = dsfArray[0].getDatasetFieldType().getTitle(); valueString += title; String oldValue = " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.changed") + " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.from") + ": "; - + if (!dsfArray[0].isEmpty()) { if (dsfArray[0].getDatasetFieldType().isPrimitive()) { oldValue += 
dsfArray[0].getRawValue(); @@ -1226,7 +1054,7 @@ public String getEditSummaryForLog() { } } valueString += oldValue; - + String newValue = " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.to") + ": "; if (!dsfArray[1].isEmpty()) { if (dsfArray[1].getDatasetFieldType().isPrimitive()) { @@ -1242,16 +1070,16 @@ public String getEditSummaryForLog() { retVal += groupString + System.lineSeparator(); } } - + // File Differences String fileDiff = System.lineSeparator() + BundleUtil.getStringFromBundle("file.viewDiffDialog.files.header") + ": " + System.lineSeparator(); if(!this.getDatasetFilesDiffList().isEmpty()){ - + String itemDiff; - + for (datasetFileDifferenceItem item : this.getDatasetFilesDiffList()) { - itemDiff = BundleUtil.getStringFromBundle("file.viewDiffDialog.fileID") + ": " + item.fileId; - + itemDiff = BundleUtil.getStringFromBundle("file.viewDiffDialog.fileID") + ": " + item.fileId; + if (item.fileName1 != null || item.fileName2 != null) { itemDiff = System.lineSeparator() + " " + BundleUtil.getStringFromBundle("file.viewDiffDialog.fileName") + ": "; itemDiff += item.fileName1 != null ? item.fileName1 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable"); @@ -1272,14 +1100,14 @@ public String getEditSummaryForLog() { itemDiff += " : "; itemDiff += item.fileSize2 != null ? item.fileSize2 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable") + " "; } - + if (item.fileCat1 != null || item.fileCat2 != null) { itemDiff += System.lineSeparator() + " " + BundleUtil.getStringFromBundle("file.viewDiffDialog.category") + ": "; itemDiff += item.fileCat1 != null ? item.fileCat1 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable"); itemDiff += " : "; itemDiff += item.fileCat2 != null ? item.fileCat2 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable") + " "; } - + if (item.fileDesc1 != null || item.fileDesc2 != null) { itemDiff += System.lineSeparator() + " " + BundleUtil.getStringFromBundle("file.viewDiffDialog.description") + ": "; itemDiff += item.fileDesc1 != null ? item.fileDesc1 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable"); @@ -1293,7 +1121,7 @@ public String getEditSummaryForLog() { itemDiff += " : "; itemDiff += item.fileProvFree2 != null ? item.fileProvFree2 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable") + " "; } - + if (item.fileRest1 != null || item.fileRest2 != null) { itemDiff += System.lineSeparator() + " " + BundleUtil.getStringFromBundle("file.viewDiffDialog.fileAccess") + ": "; itemDiff += item.fileRest1 != null ? item.fileRest1 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable"); @@ -1301,16 +1129,16 @@ public String getEditSummaryForLog() { itemDiff += item.fileRest2 != null ? item.fileRest2 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable") + " "; } - + fileDiff += itemDiff; } - + retVal += fileDiff; } - + String fileReplaced = System.lineSeparator() + BundleUtil.getStringFromBundle("file.viewDiffDialog.filesReplaced")+ ": "+ System.lineSeparator(); - if(!this.getDatasetFilesReplacementList().isEmpty()){ - String itemDiff; + if(!this.getDatasetFilesReplacementList().isEmpty()){ + String itemDiff; for (datasetReplaceFileItem item : this.getDatasetFilesReplacementList()) { itemDiff = ""; itemDiff = System.lineSeparator() + " " + BundleUtil.getStringFromBundle("file.viewDiffDialog.fileName") + ": "; @@ -1342,28 +1170,28 @@ public String getEditSummaryForLog() { itemDiff += " : "; itemDiff += item.fdi.fileRest2 != null ? 
item.fdi.fileRest2 : BundleUtil.getStringFromBundle("file.viewDiffDialog.notAvailable") + " "; fileReplaced += itemDiff; - } + } retVal += fileReplaced; } - + String termsOfUseDiff = System.lineSeparator() + "Terms of Use and Access Changes: "+ System.lineSeparator(); - + if (!this.changedTermsAccess.isEmpty()){ for (String[] blocks : changedTermsAccess) { - String itemDiff = System.lineSeparator() + blocks[0] + " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.changed") + " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.from") + ": "; - itemDiff += blocks[1]; - itemDiff += " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.to") + ": "+ blocks[2]; - termsOfUseDiff +=itemDiff; + String itemDiff = System.lineSeparator() + blocks[0] + " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.changed") + " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.from") + ": "; + itemDiff += blocks[1]; + itemDiff += " " + BundleUtil.getStringFromBundle("dataset.versionDifferences.to") + ": "+ blocks[2]; + termsOfUseDiff +=itemDiff; } retVal +=termsOfUseDiff; } - + return retVal; } - - + + public class DifferenceSummaryGroup { - + private String displayName; private String type; private List differenceSummaryItems; @@ -1392,7 +1220,7 @@ public void setDifferenceSummaryItems(List differenceSumm this.differenceSummaryItems = differenceSummaryItems; } } - + public class DifferenceSummaryItem { private String displayName; private int changed; @@ -1449,7 +1277,7 @@ public void setMultiple(boolean multiple) { this.multiple = multiple; } } - + public class datasetReplaceFileItem { public datasetFileDifferenceItem getFdi() { @@ -1522,7 +1350,7 @@ public void setFile2ChecksumValue(String file2ChecksumValue) { private DataFile.ChecksumType file1ChecksumType; private DataFile.ChecksumType file2ChecksumType; private String file1ChecksumValue; - private String file2ChecksumValue; + private String file2ChecksumValue; } public class datasetFileDifferenceItem { @@ -1541,7 +1369,7 @@ public datasetFileDifferenceItem() { private String fileDesc1; private String fileProvFree1; private String fileRest1; - + private String fileName2; private String fileType2; private String fileSize2; @@ -1565,7 +1393,7 @@ public String getFileProvFree2() { public void setFileProvFree2(String fileProvFree2) { this.fileProvFree2 = fileProvFree2; } - + public String getFileRest1() { return fileRest1; } @@ -1719,7 +1547,7 @@ public void setDatasetFilesDiffList(List datasetFiles * DatasetVersions. Currently used to assess whether 'system metadatablocks' * (protected by a separate key) have changed. 
(Simplified from the methods * above that track all the individual changes) - * + * */ public static Set getBlocksWithChanges(DatasetVersion newVersion, DatasetVersion originalVersion) { Set changedBlockSet = new HashSet(); @@ -1790,7 +1618,7 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie if (compound) { for (DatasetFieldCompoundValue datasetFieldCompoundValueOriginal : originalField - .getDatasetFieldCompoundValues()) { + .getDatasetFieldCompoundValues()) { int loopIndex = 0; if (newField.getDatasetFieldCompoundValues().size() >= loopIndex + 1) { for (DatasetField dsfo : datasetFieldCompoundValueOriginal.getChildDatasetFields()) { @@ -1799,7 +1627,7 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie } } for (DatasetField dsfn : newField.getDatasetFieldCompoundValues().get(loopIndex) - .getChildDatasetFields()) { + .getChildDatasetFields()) { if (!dsfn.getDisplayValue().isEmpty()) { newValue += dsfn.getDisplayValue() + ", "; } @@ -1811,6 +1639,10 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie loopIndex++; } } else { + //New CVV values may be in the order selected rather than the display order + if(!newField.getControlledVocabularyValues().isEmpty()) { + newField.getControlledVocabularyValues().sort(null); + } originalValue = originalField.getDisplayValue(); newValue = newField.getDisplayValue(); if (!originalValue.equalsIgnoreCase(newValue)) { @@ -1819,4 +1651,138 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie } return false; } + + List getChangedVariableMetadata() { + return changedVariableMetadata; + } + + List getReplacedFiles() { + return replacedFiles; + } + public JsonObjectBuilder compareVersionsAsJson() { + JsonObjectBuilder job = new NullSafeJsonBuilder(); + JsonObjectBuilder jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", originalVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(originalVersion.getLastUpdateTime())); + job.add("oldVersion", jobVersion); + jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", newVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(newVersion.getLastUpdateTime())); + job.add("newVersion", jobVersion); + + if (!this.detailDataByBlock.isEmpty()) { + JsonArrayBuilder jabMetadata = Json.createArrayBuilder(); + for (List blocks : detailDataByBlock) { + JsonObjectBuilder jobMetadata = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + String blockDisplay = blocks.get(0)[0].getDatasetFieldType().getMetadataBlock().getDisplayName(); + for (DatasetField[] dsfArray : blocks) { + JsonObjectBuilder jb = new NullSafeJsonBuilder(); + jb.add("fieldName", dsfArray[0].getDatasetFieldType().getTitle()); + if (dsfArray[0].getDatasetFieldType().isPrimitive()) { + jb.add("oldValue", dsfArray[0].getRawValue()); + } else { + jb.add("oldValue", dsfArray[0].getCompoundRawValue()); + } + if (dsfArray[1].getDatasetFieldType().isPrimitive()) { + jb.add("newValue", dsfArray[1].getRawValue()); + } else { + jb.add("newValue", dsfArray[1].getCompoundRawValue()); + } + jab.add(jb); + } + jobMetadata.add("blockName", blockDisplay); + jobMetadata.add("changed", jab); + jabMetadata.add(jobMetadata); + } + job.add("metadataChanges", jabMetadata); + } + + // Format added, removed, and modified files + JsonArrayBuilder 
jabDiffFiles = Json.createArrayBuilder(); + if (!addedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + addedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesAdded", jab); + } + if (!removedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + removedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesRemoved", jab); + } + if (!replacedFiles.isEmpty()) { + JsonArrayBuilder jabReplaced = Json.createArrayBuilder(); + replacedFiles.forEach(fm -> { + if (fm.length == 2) { + JsonObjectBuilder jobReplaced = new NullSafeJsonBuilder(); + jobReplaced.add("oldFile", filesDiffJson(fm[0])); + jobReplaced.add("newFile", filesDiffJson(fm[1])); + jabReplaced.add(jobReplaced); + } + }); + job.add("filesReplaced", jabReplaced); + } + if (!changedFileMetadata.isEmpty()) { + changedFileMetadataDiff.entrySet().forEach(entry -> { + JsonArrayBuilder jab = Json.createArrayBuilder(); + JsonObjectBuilder jobChanged = new NullSafeJsonBuilder(); + jobChanged.add("fileName", entry.getKey().getDataFile().getDisplayName()); + jobChanged.add(entry.getKey().getDataFile().getChecksumType().name(), entry.getKey().getDataFile().getChecksumValue()); + jobChanged.add("fileId", entry.getKey().getDataFile().getId()); + entry.getValue().entrySet().forEach(e -> { + JsonObjectBuilder jobDiffField = new NullSafeJsonBuilder(); + jobDiffField.add("fieldName",e.getKey()); + jobDiffField.add("oldValue",e.getValue().get(0)); + jobDiffField.add("newValue",e.getValue().get(1)); + jab.add(jobDiffField); + }); + jobChanged.add("changed", jab); + jabDiffFiles.add(jobChanged); + }); + job.add("fileChanges", jabDiffFiles); + } + + // Format Terms Of Access changes + if (!changedTermsAccess.isEmpty()) { + JsonObjectBuilder jobTOA = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + changedTermsAccess.forEach(toa -> { + JsonObjectBuilder jobValue = new NullSafeJsonBuilder(); + jobValue.add("fieldName",toa[0]); + jobValue.add("oldValue",toa[1]); + jobValue.add("newValue",toa[2]); + jab.add(jobValue); + }); + jobTOA.add("changed", jab); + job.add("TermsOfAccess", jobTOA); + } + + return job; + } + private JsonObjectBuilder filesDiffJson(FileMetadata fileMetadata) { + NullSafeJsonBuilder job = new NullSafeJsonBuilder(); + DataFile df = fileMetadata.getDataFile(); + job.add("fileName", df.getDisplayName()) + .add("filePath", fileMetadata.getDirectoryLabel()) + .add(df.getChecksumType().name(), df.getChecksumValue()) + .add("type",df.getContentType()) + .add("fileId", df.getId()) + .add("description", fileMetadata.getDescription()) + .add("isRestricted", df.isRestricted()); + if (fileMetadata.getCategories() != null && !fileMetadata.getCategories().isEmpty()) { + JsonArrayBuilder jabCategories = Json.createArrayBuilder(); + fileMetadata.getCategories().forEach(c -> jabCategories.add(c.getName())); + job.add("categories", jabCategories); + } + if (df.getTags() != null && !df.getTags().isEmpty()) { + JsonArrayBuilder jabTags = Json.createArrayBuilder(); + df.getTags().forEach(t -> jabTags.add(t.getTypeLabel())); + job.add("tags", jabTags); + } + return job; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionModifiedDate.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionModifiedDate.java new file mode 100644 index 00000000000..60c2f3038a7 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionModifiedDate.java @@ -0,0 +1,51 @@ +package edu.harvard.iq.dataverse; + +import java.io.Serializable; 
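For orientation, the JSON assembled by compareVersionsAsJson() above can be pulled off a difference object directly; a minimal sketch (variable names illustrative, keys as built in that method, values abbreviated, assuming the usual jakarta.json imports):

    DatasetVersionDifference diff = new DatasetVersionDifference(newVersion, originalVersion);
    JsonObject json = diff.compareVersionsAsJson().build();
    // Abbreviated shape of the result:
    // { "oldVersion":  { "versionNumber": "1.0", "lastUpdatedDate": "2024-12-13T15:00:00Z" },
    //   "newVersion":  { "versionNumber": "1.1", "lastUpdatedDate": "..." },
    //   "metadataChanges": [ { "blockName": "...", "changed": [ { "fieldName": "...", "oldValue": "...", "newValue": "..." } ] } ],
    //   "filesAdded": [ ... ], "filesRemoved": [ ... ], "filesReplaced": [ ... ],
    //   "fileChanges": [ ... ], "TermsOfAccess": { "changed": [ ... ] } }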
+import java.util.Date; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import jakarta.persistence.Version; + +@Entity +public class DatasetVersionModifiedDate implements Serializable{ + /** + * + */ + private static final long serialVersionUID = 1L; + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Version + private Long version; + + @Temporal(value = TemporalType.TIMESTAMP) + @Column( nullable=false ) + private Date lastUpdateTime; + + public Date getLastUpdateTime() { + return lastUpdateTime; + } + + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + /** + * This is JPA's optimistic locking mechanism, and has no semantic meaning in the DV object model. + * @return the object db version + */ + public Long getVersion() { + return this.version; + } + + public void setVersion(Long version) { + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java index 461c8b14e46..567f8fdf489 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java @@ -205,7 +205,7 @@ public void setVarGroups(List varGroups) { @OrderBy("name") private List fileCategories; - public List getCategories() { + public synchronized List getCategories() { if (fileCategories != null) { /* * fileCategories can sometimes be an diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 00da4990996..ff06a0adf9b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -63,7 +63,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.RequestAccessCommand; import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionMetadataCommand; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; @@ -1390,7 +1390,7 @@ public Response allowAccessRequest(@Context ContainerRequestContext crc, @PathPa dataset.getOrCreateEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(allowRequest); try { - engineSvc.submit(new UpdateDatasetVersionCommand(dataset, dataverseRequest)); + engineSvc.submit(new UpdateDatasetVersionMetadataCommand(dataset, dataverseRequest)); } catch (CommandException ex) { List args = Arrays.asList(dataset.getDisplayName(), ex.getLocalizedMessage()); return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noSave", args)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 4b919c5ed82..8c41ce5796e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -765,7 +765,7 @@ public Response 
updateDraftVersion(@Context ContainerRequestContext crc, String if (!hasValidTerms) { return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid")); } - Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); + Dataset managedDataset = execCommand(new UpdateDatasetVersionMetadataCommand(ds, req)); managedVersion = managedDataset.getOrCreateEditVersion(); } else { boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null); @@ -837,7 +837,7 @@ public Response updateVersionMetadata(@Context ContainerRequestContext crc, Stri return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid")); } DatasetVersion managedVersion; - Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); + Dataset managedDataset = execCommand(new UpdateDatasetVersionMetadataCommand(ds, req)); managedVersion = managedDataset.getLatestVersion(); String info = updateDraft ? "Version Updated" : "Version Created"; return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate())); @@ -866,7 +866,7 @@ public Response deleteMetadata(@Context ContainerRequestContext crc, String json dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); DatasetVersion managedVersion; - Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); + Dataset managedDataset = execCommand(new UpdateDatasetVersionMetadataCommand(ds, req)); managedVersion = managedDataset.getLatestVersion(); String info = updateDraft ? "Version Updated" : "Version Created"; return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate())); @@ -1008,7 +1008,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav } - DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); + DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionMetadataCommand(ds, req)).getLatestVersion(); return ok(json(managedVersion, true)); } catch (JsonParseException ex) { @@ -1156,7 +1156,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque dsv.getDatasetFields().add(updateField); } } - DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); + DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionMetadataCommand(ds, req)).getLatestVersion(); return ok(json(managedVersion, true)); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index 1a1f4f9318b..f8e177e537e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -1,8 +1,8 @@ package edu.harvard.iq.dataverse.engine.command.impl; +import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersionDifference; import edu.harvard.iq.dataverse.DatasetVersionUser; @@ -18,16 +18,22 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import 
edu.harvard.iq.dataverse.pidproviders.PidProvider; import edu.harvard.iq.dataverse.pidproviders.PidUtil; +import edu.harvard.iq.dataverse.pidproviders.doi.fake.FakeDOIProvider; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import java.sql.Timestamp; +import java.util.Arrays; import java.util.Date; +import java.util.HashSet; +import java.util.List; +import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import static java.util.stream.Collectors.joining; -import jakarta.ejb.EJB; +import jakarta.json.JsonObject; import jakarta.validation.ConstraintViolation; import edu.harvard.iq.dataverse.settings.JvmSettings; @@ -217,8 +223,88 @@ protected Timestamp getTimestamp() { return timestamp; } - protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, DatasetVersion persistedVersion) throws IllegalCommandException { - Set changedMDBs = DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion); + protected void registerFilePidsIfNeeded(Dataset theDataset, CommandContext ctxt, boolean b) throws CommandException { + // Register file PIDs if needed + PidProvider pidGenerator = ctxt.dvObjects().getEffectivePidGenerator(getDataset()); + boolean shouldRegister = !pidGenerator.registerWhenPublished() && + ctxt.systemConfig().isFilePIDsEnabledForCollection(getDataset().getOwner()) && + pidGenerator.canCreatePidsLike(getDataset().getGlobalId()); + if (shouldRegister) { + for (DataFile dataFile : theDataset.getFiles()) { + logger.fine(dataFile.getId() + " is registered?: " + dataFile.isIdentifierRegistered()); + if (!dataFile.isIdentifierRegistered()) { + // pre-register a persistent id + registerFileExternalIdentifier(dataFile, pidGenerator, ctxt, true); + } + } + } + } + + private void registerFileExternalIdentifier(DataFile dataFile, PidProvider pidProvider, CommandContext ctxt, boolean retry) throws CommandException { + + if (!dataFile.isIdentifierRegistered()) { + + if (pidProvider instanceof FakeDOIProvider) { + retry = false; // No reason to allow a retry with the FakeProvider (even if it allows + // pre-registration someday), so set false for efficiency + } + try { + if (pidProvider.alreadyRegistered(dataFile)) { + int attempts = 0; + if (retry) { + do { + pidProvider.generatePid(dataFile); + logger.log(Level.INFO, "Attempting to register external identifier for datafile {0} (trying: {1}).", + new Object[] { dataFile.getId(), dataFile.getIdentifier() }); + attempts++; + } while (pidProvider.alreadyRegistered(dataFile) && attempts <= FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT); + } + if (!retry) { + logger.warning("Reserving File PID for: " + getDataset().getId() + ", fileId: " + dataFile.getId() + ", during publication failed."); + throw new CommandExecutionException(BundleUtil.getStringFromBundle("abstractDatasetCommand.filePidNotReserved", Arrays.asList(getDataset().getIdentifier())), this); + } + if (attempts > FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT) { + // Didn't work - we existed the loop with too many tries + throw new CommandExecutionException("This dataset may not be published because its identifier is already in use by another dataset; " + + "gave up after " + attempts + " attempts. 
Current (last requested) identifier: " + dataFile.getIdentifier(), this); + } + } + // Invariant: DataFile identifier does not exist in the remote registry + try { + pidProvider.createIdentifier(dataFile); + dataFile.setGlobalIdCreateTime(getTimestamp()); + dataFile.setIdentifierRegistered(true); + } catch (Throwable ex) { + logger.info("Call to globalIdServiceBean.createIdentifier failed: " + ex); + } + + } catch (Throwable e) { + if (e instanceof CommandException) { + throw (CommandException) e; + } + throw new CommandException(BundleUtil.getStringFromBundle("file.register.error", pidProvider.getProviderInformation()), this); + } + } else { + throw new IllegalCommandException("This datafile may not have a PID because its id registry service is not supported.", this); + } + + } + + + void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, DatasetVersion persistedVersion) throws IllegalCommandException { + checkSystemMetadataKeyIfNeeded(DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion)); + } + + protected void checkSystemMetadataKeyIfNeeded(DatasetVersionDifference dvDifference) throws IllegalCommandException { + List> changeListsByBlock = dvDifference.getDetailDataByBlock(); + Set changedMDBs = new HashSet<>(); + for (List changeList : changeListsByBlock) { + changedMDBs.add(changeList.get(0)[0].getDatasetFieldType().getMetadataBlock()); + } + checkSystemMetadataKeyIfNeeded(changedMDBs); + } + + private void checkSystemMetadataKeyIfNeeded(Set changedMDBs) throws IllegalCommandException { for (MetadataBlock mdb : changedMDBs) { logger.fine(mdb.getName() + " has been changed"); String smdbString = JvmSettings.MDB_SYSTEM_KEY_FOR.lookupOptional(mdb.getName()) @@ -235,10 +321,15 @@ protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, Dataset } protected void registerExternalVocabValuesIfAny(CommandContext ctxt, DatasetVersion newVersion) { + registerExternalVocabValuesIfAny(ctxt, newVersion, ctxt.settings().getValueForKey(SettingsServiceBean.Key.CVocConf)); + } + protected void registerExternalVocabValuesIfAny(CommandContext ctxt, DatasetVersion newVersion, String cvocSetting) { + Map cvocConf = ctxt.dsField().getCVocConf(true, cvocSetting); for (DatasetField df : newVersion.getFlatDatasetFields()) { - logger.fine("Found id: " + df.getDatasetFieldType().getId()); - if (ctxt.dsField().getCVocConf(true).containsKey(df.getDatasetFieldType().getId())) { - ctxt.dsField().registerExternalVocabValues(df); + long typeId = df.getDatasetFieldType().getId(); + if (cvocConf.containsKey(typeId)) { + ctxt.dsField().registerExternalVocabValues(df, cvocConf.get(typeId)); + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index e6e8279a314..e378e2e2ef7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -131,7 +131,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasetversion.update.failure"), this); } else { - metadataUpdated = DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd); + metadataUpdated = !DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd).isEmpty(); 
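As the curate command above now does, callers treat DatasetVersionDifference.compareFileMetadatas as reporting which fields differ rather than returning a boolean. The exact return type is not visible in this hunk; going by the fileChanges JSON builder earlier it looks like a map from field name to old/new values, so under that assumption a caller might do:

    Map<String, List<String>> changes = DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd);
    boolean metadataUpdated = !changes.isEmpty();
    changes.forEach((field, values) ->
            logger.fine(field + ": '" + values.get(0) + "' -> '" + values.get(1) + "'"));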
publishedFmd.setLabel(draftFmd.getLabel()); publishedFmd.setDescription(draftFmd.getDescription()); publishedFmd.setCategories(draftFmd.getCategories()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index d6d7b49d172..0de911e34fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -215,7 +215,7 @@ public void run() { logger.warning(e.getMessage()); e.printStackTrace(); String mesg = "DuraCloud Submission Failure"; - if (!(1 == dv.getVersion()) || !(0 == dv.getMinorVersionNumber())) { + if (!(1 == dv.getVersionNumber()) || !(0 == dv.getMinorVersionNumber())) { mesg = mesg + ": Prior Version archiving not yet complete?"; } return new Failure("Unable to create DuraCloud space with name: " + baseFileName, mesg); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 6b95f3b6de1..650fedb4d8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -4,20 +4,15 @@ import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; -import edu.harvard.iq.dataverse.pidproviders.PidProvider; -import edu.harvard.iq.dataverse.privateurl.PrivateUrl; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; -import java.util.Date; -import java.util.List; +import jakarta.persistence.OptimisticLockException; + import java.util.Optional; import java.util.logging.Logger; import static java.util.stream.Collectors.joining; @@ -112,10 +107,15 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException Optional prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset); if ( prePubWf.isPresent() ) { // We start a workflow - theDataset = ctxt.em().merge(theDataset); - ctxt.em().flush(); - ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); - return new PublishDatasetResult(theDataset, Status.Workflow); + try { + theDataset = ctxt.em().merge(theDataset); + ctxt.em().flush(); + ctxt.workflows().start(prePubWf.get(), + buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); + return new PublishDatasetResult(theDataset, Status.Workflow); + } catch (OptimisticLockException e) { + throw new CommandException(e.getMessage(), e, this); + } } else{ // We will skip trying to register the global 
identifiers for datafiles @@ -164,7 +164,12 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException lock.setInfo(info); ctxt.datasets().addDatasetLock(theDataset, lock); } - theDataset = ctxt.em().merge(theDataset); + try { + theDataset = ctxt.em().merge(theDataset); + } catch (OptimisticLockException e) { + ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.finalizePublication); + throw new CommandException(e.getMessage(), e, this); + } // The call to FinalizePublicationCommand has been moved to the new @onSuccess() // method: //ctxt.datasets().callFinalizePublishCommandAsynchronously(theDataset.getId(), ctxt, request, datasetExternallyReleased); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index 994f4c7dfb6..f6e5eaddcb2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileCategory; +import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetVersion; @@ -14,16 +15,16 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.DatasetFieldUtil; import edu.harvard.iq.dataverse.util.FileMetadataUtil; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import jakarta.validation.ConstraintViolationException; - /** * * @author skraffmiller @@ -37,6 +38,8 @@ public class UpdateDatasetVersionCommand extends AbstractDatasetCommand private final DatasetVersion clone; final FileMetadata fmVarMet; + private String cvocSetting=null; + public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest) { super(aRequest, theDataset); this.filesToDelete = new ArrayList<>(); @@ -100,67 +103,119 @@ public Dataset execute(CommandContext ctxt) throws CommandException { if ( ! (getUser() instanceof AuthenticatedUser) ) { throw new IllegalCommandException("Only authenticated users can update datasets", this); } - - Dataset theDataset = getDataset(); - ctxt.permissions().checkUpdateDatasetVersionLock(theDataset, getRequest(), this); - Dataset savedDataset = null; - - DatasetVersion persistedVersion = clone; - /* - * Unless a pre-change clone has been provided, we need to get it from the db. - * There are two cases: We're updating an existing draft, which has an id, and - * exists in the database We've created a new draft, with null id, and we need - * to get the lastest version in the db - * - */ - if(persistedVersion==null) { - Long id = getDataset().getLatestVersion().getId(); - persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy().getId()); - } - - //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied. 
- checkSystemMetadataKeyIfNeeded(getDataset().getOrCreateEditVersion(fmVarMet), persistedVersion); - - getDataset().getOrCreateEditVersion().setLastUpdateTime(getTimestamp()); + long startTime = System.currentTimeMillis(); + logger.fine("Executing UpdateDatasetVersionCommand at: " + startTime); - registerExternalVocabValuesIfAny(ctxt, getDataset().getOrCreateEditVersion(fmVarMet)); + cvocSetting = ctxt.settings().getValueForKey(SettingsServiceBean.Key.CVocConf); + Dataset theDataset = getDataset(); + for(DataFile f:theDataset.getFiles()) { + f.getLatestFileMetadata(); + List dftList = f.getTags(); + if (dftList != null) { + for (DataFileTag dft : f.getTags()) { + logger.info("Found tag: " + dft.getTypeLabel() + " on " + f.getId()); + if(dft.getId()==null) { + ctxt.em().persist(dft); + } + } + } + } + theDataset.getLatestVersion().getFileMetadatas(); + //logger.info("Dataset fmd " + theDataset.getFiles().get(0).getLatestFileMetadata().getId() + " is restricted: " + theDataset.getFiles().get(0).getLatestFileMetadata().isRestricted()); + //logger.info("Dataset latest version fmd " + theDataset.getLatestVersion().getFileMetadatas().get(0).getId() + " is restricted: " + theDataset.getLatestVersion().getFileMetadatas().get(0).isRestricted()); + //Check for an existing lock + ctxt.permissions().checkUpdateDatasetVersionLock(theDataset, getRequest(), this); try { + logger.info("Getting lock"); // Invariant: Dataset has no locks preventing the update String lockInfoMessage = "saving current edits"; DatasetLock lock = ctxt.datasets().addDatasetLock(getDataset().getId(), DatasetLock.Reason.EditInProgress, ((AuthenticatedUser) getUser()).getId(), lockInfoMessage); if (lock != null) { + lock = ctxt.em().merge(lock); theDataset.addLock(lock); } else { logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", getDataset().getId()); } - - getDataset().getOrCreateEditVersion(fmVarMet).setDatasetFields(getDataset().getOrCreateEditVersion(fmVarMet).initDatasetFields()); - validateOrDie(getDataset().getOrCreateEditVersion(fmVarMet), isValidateLenient()); - final DatasetVersion editVersion = getDataset().getOrCreateEditVersion(fmVarMet); + DatasetVersion persistedVersion = clone; + /* + * Unless a pre-change clone has been provided, we need to get it from the db. + * There are two cases: We're updating an existing draft, which has an id, and + * exists in the database We've created a new draft, with null id, and we need + * to get the lastest version in the db + * + */ + DatasetVersion latestVersion = theDataset.getLatestVersion(); + logger.info("lates Version num: " + latestVersion.getSemanticVersion()); + logger.info("Ready to get peristent version: " +(System.currentTimeMillis() - startTime) + " ms"); + if (persistedVersion == null) { + Long id = latestVersion.getId(); + persistedVersion = ctxt.datasetVersion() + .find(id != null ? 
id : getDataset().getLatestVersionForCopy().getId()); + } + logger.info("Done getting peristent version: " +(System.currentTimeMillis() - startTime) + " ms"); + + // Get or create (currently only when called with fmVarMet != null) a new edit + // version + DatasetVersion editVersion = theDataset.getOrCreateEditVersion(fmVarMet); + //logger.info("Dataset orig edit version fmd " + editVersion.getFileMetadatas().get(0).getId() + " is restricted: " + editVersion.getFileMetadatas().get(0).isRestricted()); - DatasetFieldUtil.tidyUpFields(editVersion.getDatasetFields(), true); + logger.info("Starting Version num: " + editVersion.getSemanticVersion()); - // Merge the new version into out JPA context, if needed. - if (editVersion.getId() == null || editVersion.getId() == 0L) { - ctxt.em().persist(editVersion); - } else { - try { - ctxt.em().merge(editVersion); - } catch (ConstraintViolationException e) { - logger.log(Level.SEVERE,"Exception: "); - e.getConstraintViolations().forEach(err->logger.log(Level.SEVERE,err.toString())); - throw e; - } + // Now merge the dataset + theDataset = ctxt.em().merge(theDataset); + setDataset(theDataset); + logger.info("Dataset merge done at: " + (System.currentTimeMillis() - startTime) + " ms"); + //Lookup of merged version + if (!ctxt.em().contains(editVersion)) { + logger.info("Orig Edit Version not merged"); + } + editVersion = theDataset.getOrCreateEditVersion(fmVarMet); + + //if (!latestVersion.isWorkingCopy()) { + // logger.info("Edit Version had to be created"); + if (!ctxt.em().contains(editVersion)) { + logger.info("Edit Version had to be merged"); + editVersion = ctxt.em().merge(editVersion); + } + // } + //logger.info("Dataset final edit version fmd " + editVersion.getFileMetadatas().get(0).getId() + " is restricted: " + editVersion.getFileMetadatas().get(0).isRestricted()); + + List metadatas = new ArrayList(editVersion.getFileMetadatas()); + boolean changed = false; + for (FileMetadata fmd : editVersion.getFileMetadatas()) { + if (!ctxt.em().contains(fmd)) { + logger.info("FMD " + fmd.getLabel() + " was not merged " + fmd.getId()); + metadatas.remove(fmd); + fmd = ctxt.em().merge(fmd); + metadatas.add(fmd); + changed = true; + } + } + if(changed) { + editVersion.setFileMetadatas(metadatas); } + // Will throw an IllegalCommandException if a system metadatablock is changed + // and the appropriate key is not supplied. + checkSystemMetadataKeyIfNeeded(editVersion, persistedVersion); + + editVersion.setLastUpdateTime(getTimestamp()); + + editVersion.setDatasetFields(editVersion.initDatasetFields()); + validateOrDie(editVersion, isValidateLenient()); + + DatasetFieldUtil.tidyUpFields(editVersion.getDatasetFields(), true); + + registerExternalVocabValuesIfAny(ctxt, editVersion, cvocSetting); + + //Set creator and create date for files if needed for (DataFile dataFile : theDataset.getFiles()) { if (dataFile.getCreateDate() == null) { dataFile.setCreateDate(getTimestamp()); dataFile.setCreator((AuthenticatedUser) getUser()); } - dataFile.setModificationTime(getTimestamp()); } // Remove / delete any files that were removed @@ -187,14 +242,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { recalculateUNF = true; } } - // we have to merge to update the database but not flush because - // we don't want to create two draft versions! 
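Worth calling out here: the per-version last-update timestamp is now backed by the new DatasetVersionModifiedDate entity (added earlier in this patch), whose @Version column gives JPA an optimistic lock separate from the rest of the DatasetVersion row. A minimal sketch of the intended handling, mirroring the new metadata command further below (the assumption being that setLastUpdateTime ultimately writes through to that entity):

    DatasetVersionModifiedDate mDate = editVersion.getModifiedDate();
    if (!ctxt.em().contains(mDate)) {
        mDate = ctxt.em().merge(mDate);      // merge first so the timestamp lands on the managed copy
        editVersion.setModifiedDate(mDate);
    }
    editVersion.setLastUpdateTime(getTimestamp());  // a stale concurrent edit should then fail with an OptimisticLockException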
- // Although not completely tested, it looks like this merge handles the - // thumbnail case - if the filemetadata is removed from the context below and - // the dataset still references it, that could cause an issue. Merging here - // avoids any reference from it being the dataset thumbnail - theDataset = ctxt.em().merge(theDataset); - /* * This code has to handle many cases, and anyone making changes should * carefully check tests and basic methods that update the dataset version. The @@ -209,7 +256,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { * the fmd.getId() is null, which just removes the first element found. */ for (FileMetadata fmd : filesToDelete) { - logger.fine("Deleting fmd: " + fmd.getId() + " for file: " + fmd.getDataFile().getId()); // if file is draft (ie. new to this version), delete it. Otherwise just remove // filemetadata object) // There are a few cases to handle: @@ -225,13 +271,19 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // If the datasetversion doesn't match, we have the fmd from a published version // and we need to remove the one for the newly created draft instead, so we find // it here - logger.fine("Edit ver: " + theDataset.getOrCreateEditVersion().getId()); - logger.fine("fmd ver: " + fmd.getDatasetVersion().getId()); - if (!theDataset.getOrCreateEditVersion().equals(fmd.getDatasetVersion())) { - fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, theDataset.getOrCreateEditVersion()); + if (!editVersion.equals(fmd.getDatasetVersion())) { + fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, editVersion); } - } - fmd = ctxt.em().merge(fmd); + } + if(!ctxt.em().contains(fmd)) { + logger.info("FMD wasn't merged"); + fmd.setDataFile(ctxt.em().merge(fmd.getDataFile())); + fmd = ctxt.em().merge(fmd); + } + if(!ctxt.em().contains(fmd.getDataFile())) { + logger.info("DF wasn't merged"); + ctxt.em().merge(fmd.getDataFile()); + } // There are two datafile cases as well - the file has been released, so we're // just removing it from the current draft version or it is only in the draft @@ -241,6 +293,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest())); // and remove the file from the dataset's list theDataset.getFiles().remove(fmd.getDataFile()); + ctxt.em().remove(fmd.getDataFile()); } else { // if we aren't removing the file, we need to explicitly remove the fmd from the // context and then remove it from the datafile's list @@ -250,42 +303,69 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // In either case, to fully remove the fmd, we have to remove any other possible // references // From the datasetversion - FileMetadataUtil.removeFileMetadataFromList(theDataset.getOrCreateEditVersion().getFileMetadatas(), fmd); + FileMetadataUtil.removeFileMetadataFromList(editVersion.getFileMetadatas(), fmd); // and from the list associated with each category for (DataFileCategory cat : theDataset.getCategories()) { FileMetadataUtil.removeFileMetadataFromList(cat.getFileMetadatas(), fmd); } + ctxt.em().remove(fmd); } - for(FileMetadata fmd: theDataset.getOrCreateEditVersion().getFileMetadatas()) { - logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + "is in final draft version"); + + if (logger.isLoggable(Level.FINE)) { + for (FileMetadata fmd : editVersion.getFileMetadatas()) { + logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + 
+ "is in final draft version"); + } } - + + registerFilePidsIfNeeded(theDataset, ctxt, true); + if (recalculateUNF) { - ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getOrCreateEditVersion()); + ctxt.ingest().recalculateDatasetVersionUNF(editVersion); } theDataset.setModificationTime(getTimestamp()); - savedDataset = ctxt.em().merge(theDataset); - ctxt.em().flush(); - + //Update the DatasetUser (which merges it into the context) updateDatasetUser(ctxt); + if (clone != null) { - DatasetVersionDifference dvd = new DatasetVersionDifference(editVersion, clone); AuthenticatedUser au = (AuthenticatedUser) getUser(); - ctxt.datasetVersion().writeEditVersionLog(dvd, au); + DatasetVersionDifference dvDifference = new DatasetVersionDifference(editVersion, clone); + ctxt.datasetVersion().writeEditVersionLog(dvDifference, au); + logger.fine("log done at: " + (System.currentTimeMillis()-startTime)); + } + if ( theDataset != null ) { + final Dataset savedDataset=theDataset; + logger.info("Locks found: " + theDataset.getLocks().size()); + new HashSet<>(savedDataset.getLocks()).stream() + .filter( l -> l.getReason() == DatasetLock.Reason.EditInProgress ) + .forEach( existingLock -> { + logger.info("Removing lock: " + existingLock.getId() + " reason: " + existingLock.getReason()); + existingLock = ctxt.em().merge(existingLock); + savedDataset.removeLock(existingLock); + + AuthenticatedUser user = existingLock.getUser(); + user.getDatasetLocks().remove(existingLock); + + ctxt.em().remove(existingLock); + }); + + logger.info("theD locked: " + !theDataset.getLocks().isEmpty()); + theDataset.removeLock(theDataset.getLockFor(DatasetLock.Reason.EditInProgress)); + logger.info("2nd time theD locked: " + !theDataset.getLocks().isEmpty()); } + logger.info("Done with changes at " + (System.currentTimeMillis()-startTime)); } finally { // We're done making changes - remove the lock... 
- //Failures above may occur before savedDataset is set, in which case we need to remove the lock on theDataset instead - if(savedDataset!=null) { - ctxt.datasets().removeDatasetLocks(savedDataset, DatasetLock.Reason.EditInProgress); - } else { + //Only happens if an exception has caused us to miss the lock removal in this transaction + if(!theDataset.getLocks().isEmpty()) { ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.EditInProgress); + } else { + logger.info("No locks to remove"); } } - - return savedDataset; + return theDataset; } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionMetadataCommand.java new file mode 100644 index 00000000000..ece3c3c747a --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionMetadataCommand.java @@ -0,0 +1,280 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.ControlledVocabularyValue; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileCategory; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetLock; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersionDifference; +import edu.harvard.iq.dataverse.DatasetVersionModifiedDate; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.DatasetFieldUtil; +import edu.harvard.iq.dataverse.util.FileMetadataUtil; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import jakarta.validation.ConstraintViolationException; + +/** + * + * @author skraffmiller + */ +@RequiredPermissions(Permission.EditDataset) +public class UpdateDatasetVersionMetadataCommand extends AbstractDatasetCommand { + + static final Logger logger = Logger.getLogger(UpdateDatasetVersionMetadataCommand.class.getCanonicalName()); + private boolean validateLenient = false; + private final DatasetVersion clone; + + private String cvocSetting = null; + + public UpdateDatasetVersionMetadataCommand(Dataset theDataset, DataverseRequest aRequest) { + super(aRequest, theDataset); + this.clone = null; + } + + public UpdateDatasetVersionMetadataCommand(Dataset theDataset, DataverseRequest aRequest, DatasetVersion clone) { + super(aRequest, theDataset); + this.clone = clone; + } + + public boolean isValidateLenient() { + return validateLenient; + } + + public void setValidateLenient(boolean validateLenient) { + this.validateLenient = validateLenient; + } + + @Override + public Dataset execute(CommandContext ctxt) throws CommandException { + if 
(!(getUser() instanceof AuthenticatedUser)) { + throw new IllegalCommandException("Only authenticated users can update datasets", this); + } + long startTime = System.currentTimeMillis(); + logger.info("Starting update: " + startTime); + Dataset theDataset = getDataset(); + ctxt.permissions().checkUpdateDatasetVersionLock(theDataset, getRequest(), this); + Dataset savedDataset = null; + + try { + logger.info("Getting lock"); + // Invariant: Dataset has no locks preventing the update + String lockInfoMessage = "saving current edits"; + DatasetLock lock = ctxt.datasets().addDatasetLock(getDataset().getId(), DatasetLock.Reason.EditInProgress, + ((AuthenticatedUser) getUser()).getId(), lockInfoMessage); + if (lock != null) { + lock = ctxt.em().merge(lock); + theDataset.addLock(lock); + } else { + logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", getDataset().getId()); + } + + DatasetVersion persistedVersion = clone; + /* + * Unless a pre-change clone has been provided, we need to get it from the db. + * There are two cases: We're updating an existing draft, which has an id, and + * exists in the database We've created a new draft, with null id, and we need + * to get the lastest version in the db + * + */ + if (persistedVersion == null) { + Long id = getDataset().getLatestVersion().getId(); + persistedVersion = ctxt.datasetVersion() + .find(id != null ? id : getDataset().getLatestVersionForCopy().getId()); + } + + DatasetVersion editVersion = getDataset().getOrCreateEditVersion(); + + // Calculate the difference from the in-database version and use it to optimize + // the update. + + DatasetVersionDifference dvDifference = new DatasetVersionDifference(editVersion, persistedVersion, false); + logger.info(dvDifference.getEditSummaryForLog()); + logger.info("difference done at: " + (System.currentTimeMillis() - startTime)); + + // Will throw an IllegalCommandException if a system metadatablock is changed + // and the appropriate key is not supplied. + checkSystemMetadataKeyIfNeeded(dvDifference); + // ToDo - validation goes through file list? + editVersion.setDatasetFields(editVersion.initDatasetFields()); + validateOrDie(editVersion, isValidateLenient()); + + DatasetFieldUtil.tidyUpFields(editVersion.getDatasetFields(), true); + logger.info("validation done at: " + (System.currentTimeMillis() - startTime)); + + cvocSetting = ctxt.settings().getValueForKey(SettingsServiceBean.Key.CVocConf); + + /* + * If the edit version is new, we need to bring it into the context. There are + * several steps to this and it is important to not make any calls between them + * that would cause an implicit flush of a potentially incomplete + * datasetversion. This includes calls like ctxt.settings().getValueForKey() + * above and ctxt.em().createNativeQuery used below. + * + * Start of editVersion setup: + */ + // If the editVersion is new, we need to persist it. If it already exists in the + // db, we will avoid even merging it for efficiency's sake. 
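Related to the comment above about avoiding implicit flushes: both update commands now read the :CVocConf setting once, up front, and pass it down, so no settings lookup happens while a half-built version sits in the persistence context. A sketch of the resulting call pattern (the generic types are my reading of the code, since this diff strips them):

    String cvocSetting = ctxt.settings().getValueForKey(SettingsServiceBean.Key.CVocConf);
    Map<Long, JsonObject> cvocConf = ctxt.dsField().getCVocConf(true, cvocSetting);
    for (DatasetField df : editVersion.getFlatDatasetFields()) {
        JsonObject conf = cvocConf.get(df.getDatasetFieldType().getId());
        if (conf != null) {
            ctxt.dsField().registerExternalVocabValues(df, conf);
        }
    }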
+ if (editVersion.getId() == null || editVersion.getId() == 0L) { + ctxt.em().persist(editVersion); + logger.info("Persisted new version at: " + (System.currentTimeMillis() - startTime)); + + } + // + DatasetVersionModifiedDate mDate = editVersion.getModifiedDate(); + /* + * //Shouldn't be needed anymore as the date should be added at construction if + * (mDate == null) { mDate = new DatasetVersionModifiedDate(); + * editVersion.setModifiedDate(mDate); logger.info("created date at: " + + * (System.currentTimeMillis() - startTime)); } + */ + // If we have not persisted a new version, the date will not be merged yet, so + // we do it now. + if (!ctxt.em().contains(mDate)) { + mDate = ctxt.em().merge(mDate); + // Make sure the merged date is the one in the version so the setLastUpdateTime + // call changes the merged version + editVersion.setModifiedDate(mDate); + logger.info("merged date at: " + (System.currentTimeMillis() - startTime)); + } + // Update the time/make sure it is non null for a new version + editVersion.setLastUpdateTime(getTimestamp()); + + /* + * Two cases: a new version which has been persisted, but, for some reason, if there are datasetfield changes (not just terms changes) + * the controlled vocabulary fields will have the field merged but the cvv value not yet merged. Nominally this makes sense in that + * the datasetfield list of cvvs is cascade: merge only, but it is not clear why this is not needed when only terms have changed + * (there is still a new version, it still has new fields) + * + * an existing version which has not been merged into the context, in which case + * we need to merge any changed/added fields + * + * ToDo iterating through all the fields isn't needed - just the cvv ones for case one or the updated ones for case 2 + */ + + if (!dvDifference.getDetailDataByBlock().isEmpty()) { + List mergedFields = new ArrayList<>(); + final DatasetVersion dbVersion = persistedVersion; + boolean dbIsDraft = dbVersion.isDraft(); + editVersion.getDatasetFields().forEach(df -> { + if (df.getId() == null) { + logger.info("Swapping fields of type: " + df.getDatasetFieldType()); + ctxt.em().persist(df); + if (dbIsDraft) { + DatasetField obsolete = dbVersion.getDatasetField(df.getDatasetFieldType()); + if (obsolete != null) { + ctxt.em().remove(obsolete); + } + } + } + logger.info("Merging existing field at: " + (System.currentTimeMillis() - startTime)); + df = ctxt.em().merge(df); + mergedFields.add(df); + }); + editVersion.setDatasetFields(mergedFields); + } + + // ToDo - only needed if editVersion wasn't persisted + if (!dvDifference.getChangedTermsAccess().isEmpty()) { + // Update the access terms of the dataset version + if(editVersion.getTermsOfUseAndAccess().getId()==null) { + ctxt.em().persist(editVersion.getTermsOfUseAndAccess()); + editVersion = ctxt.em().merge(editVersion); + editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion); + } + editVersion.setTermsOfUseAndAccess(ctxt.em().merge(editVersion.getTermsOfUseAndAccess())); + } + /* End editVersion setup */ + registerExternalVocabValuesIfAny(ctxt, editVersion, cvocSetting); + + logger.info("locked and fields validated at: " + (System.currentTimeMillis() - startTime)); + + // Create and execute query to update the modification time on the dataset + // directly in the database + theDataset.setModificationTime(getTimestamp()); + + if (!ctxt.em().contains(theDataset)) { + logger.info("Dataset not in context"); + ctxt.em().createNativeQuery("UPDATE dvobject " + "SET modificationtime='" + 
getTimestamp() + + "' WHERE id='" + theDataset.getId() + "'").executeUpdate(); + } + // ToDO - remove + savedDataset = theDataset; + + // savedDataset = ctxt.em().merge(savedDataset); + logger.info("merge done at: " + (System.currentTimeMillis() - startTime)); + + updateDatasetUser(ctxt); + logger.info("update ds user done at: " + (System.currentTimeMillis() - startTime)); + if (clone != null) { + // DatasetVersionDifference dvd = new DatasetVersionDifference(editVersion, + // clone); + AuthenticatedUser au = (AuthenticatedUser) getUser(); + ctxt.datasetVersion().writeEditVersionLog(dvDifference, au); + logger.info("edit log written at: " + (System.currentTimeMillis() - startTime)); + } + if (savedDataset != null) { + final Dataset lockedDataset = savedDataset; + logger.info("Locks found: " + savedDataset.getLocks().size()); + new HashSet<>(lockedDataset.getLocks()).stream() + .filter(l -> l.getReason() == DatasetLock.Reason.EditInProgress).forEach(existingLock -> { + logger.info( + "Removing lock: " + existingLock.getId() + " reason: " + existingLock.getReason()); + existingLock = ctxt.em().merge(existingLock); + lockedDataset.removeLock(existingLock); + + AuthenticatedUser user = existingLock.getUser(); + user.getDatasetLocks().remove(existingLock); + + ctxt.em().remove(existingLock); + }); + + logger.info("theD locked: " + !savedDataset.getLocks().isEmpty()); + savedDataset.removeLock(savedDataset.getLockFor(DatasetLock.Reason.EditInProgress)); + logger.info("2nd time theD locked: " + !savedDataset.getLocks().isEmpty()); + } + logger.info("Done with changes at " + (System.currentTimeMillis() - startTime)); + } finally { + // We're done making changes - remove the lock... + // Only happens if an exception has caused us to miss the lock removal in this + // transaction + if (!theDataset.getLocks().isEmpty()) { + ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.EditInProgress); + } else { + logger.info("No locks to remove"); + } + } + + return savedDataset; + } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + // Async indexing significantly improves performance when updating datasets with + // thousands of files + // Indexing will be started immediately, unless an index is already busy for the + // given data + // (it will be scheduled then for later indexing of the newest version). + // See the documentation of asyncIndexDataset method for more details. + ctxt.index().asyncIndexDataset((Dataset) r, true); + return true; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java index c16a46a1765..9e7bf85e4aa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java @@ -18,7 +18,7 @@ public JsonObjectBuilder json( DatasetVersion dsv ) { return ( dsv==null ) ? 
null : jsonObjectBuilder().add("id", dsv.getId()) - .add("version", dsv.getVersion() ) + .add("version", dsv.getFriendlyVersionNumber() ) .add("versionState", dsv.getVersionState().name() ) .add("title", dsv.getTitle()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 52491a5a7e1..4d8adf8bf8b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -150,7 +150,7 @@ public static DatasetVersion updateDatasetVersionMDFromJsonLD(DatasetVersion dsv fieldByTypeMap.put(dsf.getDatasetFieldType(), dsf); } - TermsOfUseAndAccess terms = (dsv.getTermsOfUseAndAccess() != null) ? dsv.getTermsOfUseAndAccess().copyTermsOfUseAndAccess() : new TermsOfUseAndAccess(); + TermsOfUseAndAccess terms = (dsv.getTermsOfUseAndAccess() != null) ? dsv.getTermsOfUseAndAccess() : new TermsOfUseAndAccess(); for (String key : jsonld.keySet()) { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 14a23355048..92402f2ac00 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1639,6 +1639,8 @@ dataset.message.createFailure=The dataset could not be created. dataset.message.termsFailure=The dataset terms could not be updated. dataset.message.label.fileAccess=Publicly-accessible storage dataset.message.publicInstall=Files in this dataset may be readable outside Dataverse, restricted and embargoed access are disabled +dataset.message.parallelUpdateError=Changes cannot be saved. This dataset has been edited since this page was opened. To continue, copy your changes, refresh the page to see the recent updates, and re-enter any changes you want to save. +dataset.message.parallelPublishError=Publishing is blocked. This dataset has been edited since this page was opened. To publish it, refresh the page to see the recent updates, and publish again. dataset.metadata.publicationDate=Publication Date dataset.metadata.publicationDate.tip=The publication date of a Dataset. dataset.metadata.citationDate=Citation Date @@ -1941,10 +1943,10 @@ file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Any additional information that will assist the user in understanding the access and use conditions of the Dataset. file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Disclaimer file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Information regarding responsibility for uses of the Dataset. +file.dataFilesTab.terms.list.termsOfUse.addInfo.license=License file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Access file.dataFilesTab.terms.list.termsOfAccess.description=Restricting limits access to published files. People who want to use the restricted files can request access by default. If you disable request access, you must add information about access to the Terms of Access field. file.dataFilesTab.terms.list.termsOfAccess.description.line.2=Learn about restricting files and dataset access in the User Guide. - file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. 
file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. @@ -1970,6 +1972,8 @@ file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Size of Coll file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Summary of the number of physical files that exist in a Dataset, recording the number of files that contain data and noting whether the collection contains machine readable documentation and/or other supplementary files and information, such as code, data dictionaries, data definition statements, or data collection instruments. file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Study Completion file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relationship of the data collected to the amount of data coded and stored in the Dataset. Information as to why certain items of collected information were not included in the dataset or a specific data file should be provided. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.fileAccessRequest=File Access Requests Allowed + file.dataFilesTab.terms.list.guestbook=Guestbook file.dataFilesTab.terms.list.guestbook.title=User information (i.e., name, email, institution, and position) will be collected when files are downloaded. file.dataFilesTab.terms.list.guestbook.noSelected.tip=No guestbook is assigned to this dataset so users will not be prompted to provide any information when downloading files. diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index 8b07aa50929..90f6f001390 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -36,7 +36,7 @@
  • @@ -72,7 +72,7 @@
  • - +
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index e3c26284d55..43c9bc5f47b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -1014,6 +1014,7 @@ public void testRestrictFile() {
 System.out.println("Orig file id: " + origFileId);
 assertNotNull(origFileId);
 // If checkOut fails, display message
+ UtilIT.sleepForReindex(datasetId.toString(), apiToken, 5);
 //restrict file good
 Response restrictResponse = UtilIT.restrictFile(origFileId.toString(), restrict, apiToken);
 restrictResponse.prettyPrint();
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
index a2d9cdfb917..9df6f442622 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
@@ -1,11 +1,14 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
+import jakarta.json.JsonObject;
+
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.*;
 import edu.harvard.iq.dataverse.engine.TestCommandContext;
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
@@ -52,9 +55,18 @@ public void testSimpleVersionAddition() throws Exception {
 CreateDatasetVersionCommand sut = new CreateDatasetVersionCommand( makeRequest(), ds, dsvNew );
 final MockDatasetServiceBean serviceBean = new MockDatasetServiceBean();
- TestDataverseEngine testEngine = new TestDataverseEngine( new TestCommandContext(){
- @Override public DatasetServiceBean datasets() { return serviceBean; }
- } );
+ final MockDatasetFieldServiceBean dsfServiceBean = new MockDatasetFieldServiceBean();
+ TestDataverseEngine testEngine = new TestDataverseEngine(new TestCommandContext() {
+ @Override
+ public DatasetServiceBean datasets() {
+ return serviceBean;
+ }
+
+ @Override
+ public DatasetFieldServiceBean dsField() {
+ return dsfServiceBean;
+ }
+ });
 testEngine.submit(sut);
@@ -106,4 +118,15 @@ public DatasetVersion storeVersion(DatasetVersion dsv) {
 }
+ static class MockDatasetFieldServiceBean extends DatasetFieldServiceBean {
+
+ boolean storeVersionCalled = false;
+
+ @Override
+ public Map<Long, JsonObject> getCVocConf(boolean b, String s) {
+ return new HashMap<>();
+ }
+
+ }
+
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
index fc458d88acd..d5952062180 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
@@ -30,7 +30,8 @@ public void testJson_DatasetVersion() {
 DatasetVersion dsv = MocksFactory.makeDatasetVersion( ds.getCategories() );
 dsv.setId(1L);
- dsv.setVersion(2l);
+ dsv.setVersionNumber(2l);
+ dsv.setMinorVersionNumber(0l);
 dsv.setVersionState(DatasetVersion.VersionState.DEACCESSIONED);
 DatasetField titleFld = new DatasetField();
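
The new dataset.message.parallelUpdateError and dataset.message.parallelPublishError keys describe an optimistic-concurrency guard: the page notes the version's last-update time when editing starts, and a later save is refused if the stored time has since moved past that value. The sketch below is not code from this patch; the class, the method, and the openedAt parameter are invented for illustration, while BundleUtil, IllegalCommandException, and DatasetVersion.getLastUpdateTime() are existing Dataverse APIs. The commands in the patch track the stored time through the new DatasetVersionModifiedDate entity; the sketch only shows the comparison that would surface the message.

// Illustrative sketch only - not part of this patch; names and placement are hypothetical.
import java.util.Date;
import jakarta.persistence.EntityManager;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;

class ParallelEditCheckSketch {
    // Compare the last-update time captured when the edit page was opened with the
    // value currently stored in the database; a newer stored value means another
    // update landed in the meantime, so the save is refused instead of overwriting it.
    static void assertNoParallelEdit(EntityManager em, DatasetVersion edited, Date openedAt)
            throws IllegalCommandException {
        DatasetVersion stored = em.find(DatasetVersion.class, edited.getId());
        if (stored != null && stored.getLastUpdateTime() != null
                && stored.getLastUpdateTime().after(openedAt)) {
            throw new IllegalCommandException(
                    BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError"), null);
        }
    }
}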