More changes/refinements, dedicated "validation failed" lock, etc. (#…
landreev committed Apr 2, 2020
1 parent 58ac83b commit 790a5e5
Showing 6 changed files with 52 additions and 24 deletions.
6 changes: 5 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
@@ -77,7 +77,11 @@ public enum Reason {
finalizePublication,

/*Another edit is in progress*/
EditInProgress
EditInProgress,

/* Some files in the dataset failed validation */
FileValidationFailed

}

private static final long serialVersionUID = 1L;
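
The new reason is what the rest of this commit keys on. A minimal sketch, not part of the diff, of how calling code can test for it, using the isLockedFor/getLockFor accessors already used on Dataset elsewhere in this commit (the dataset and logger variables are assumed):

// assuming 'dataset' is a managed edu.harvard.iq.dataverse.Dataset instance
if (dataset.isLockedFor(DatasetLock.Reason.FileValidationFailed)) {
    DatasetLock lock = dataset.getLockFor(DatasetLock.Reason.FileValidationFailed);
    // the info field carries the FILE_VALIDATION_ERROR message that
    // FinalizeDatasetPublicationCommand sets further down in this commit
    logger.warning("Publication blocked: " + lock.getInfo());
}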
36 changes: 27 additions & 9 deletions src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -2018,17 +2018,24 @@ private String init(boolean initFull) {
datasetService.removeDatasetLocks(dataset.getId(), DatasetLock.Reason.finalizePublication);
}*/
if (dataset.isLockedFor(DatasetLock.Reason.finalizePublication)) {
// the "finalizePublication" lock is used to lock the dataset for BOTH the
// asynchronous persistent id registration for files AND (or)
// physical file validation.
if (FinalizeDatasetPublicationCommand.FILE_VALIDATION_ERROR.equals(dataset.getLockFor(DatasetLock.Reason.finalizePublication).getInfo())) {
// "finalizePublication" lock is used to lock the dataset while
// the FinalizeDatasetPublicationCommand is running asynchronously.
// the tasks currently performed by the command are the pid registration
// for files and (or) physical file validation (either or both
// of these two can be disabled via database settings). More
// such asynchronous processing tasks may be added in the future.
JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.publish.workflow.message"),
BundleUtil.getStringFromBundle("dataset.pidRegister.workflow.inprogress"));
}
if (dataset.isLockedFor(DatasetLock.Reason.FileValidationFailed)) {
// the dataset is locked, because one or more datafiles in it
// failed validation during an attempt to publish it.
if (FacesContext.getCurrentInstance().getExternalContext().getFlash().get("errorMsg") == null) {
JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.message"),
BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.details"));
} else {
JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.publish.workflow.message"),
BundleUtil.getStringFromBundle("dataset.pidRegister.workflow.inprogress"));
}
}
/* and now that we've shown the message to the user - remove the lock? */
}
if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress)) {
String rootDataverseName = dataverseService.findRootDataverse().getName();
JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"),
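
In the init() hunk above, which banner the page shows depends on whether the JSF flash scope already holds an "errorMsg" entry, presumably placed there by whatever action attempted the publish. A hedged sketch of setting that entry with the standard JSF Flash API (the key name is taken from the check above; the message text is invented):

// inside a JSF backing-bean action, after a synchronous publish attempt fails
// (javax.faces.context.FacesContext; Flash implements Map<String, Object>)
FacesContext.getCurrentInstance().getExternalContext().getFlash()
        .put("errorMsg", "Publishing failed: checksum mismatch on one or more files.");
// on the next request, DatasetPage.init() checks this key and picks
// which bundle message to display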
@@ -3639,11 +3646,22 @@ public boolean isStillLockedForIngest() {
}

public boolean isStillLockedForAnyReason() {
boolean lockedForAsyncPublish = dataset.isLockedFor(DatasetLock.Reason.finalizePublication);
if (dataset.getId() != null) {
Dataset testDataset = datasetService.find(dataset.getId());
if (testDataset != null && testDataset.getId() != null) {
logger.log(Level.FINE, "checking lock status of dataset {0}", dataset.getId());
logger.log(Level.INFO, "checking lock status of dataset {0}", dataset.getId());
if (testDataset.getLocks().size() > 0) {
if (lockedForAsyncPublish) {
if (testDataset.isLockedFor(DatasetLock.Reason.FileValidationFailed)) {
//if (FacesContext.getCurrentInstance().getExternalContext().getFlash().get("errorMsg") == null) {
JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.message"),
BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.details"));
init();
//}
/* and now that we've shown the message to the user - remove the lock? */
}
}
return true;
}
}
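
Outside the UI, the same condition can be spotted by listing the dataset's locks over the native API. A rough sketch, assuming a local installation and the usual /api/datasets/{id}/locks listing endpoint (base URL and dataset id 42 are placeholders):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class LockCheck {
    public static void main(String[] args) throws Exception {
        HttpRequest req = HttpRequest.newBuilder(
                URI.create("http://localhost:8080/api/datasets/42/locks")).GET().build();
        HttpResponse<String> resp = HttpClient.newHttpClient()
                .send(req, HttpResponse.BodyHandlers.ofString());
        // a FileValidationFailed entry means the last publish attempt aborted
        // because one or more datafiles failed checksum validation
        System.out.println(resp.body().contains("FileValidationFailed")
                ? "validation-failed lock present" : "no validation lock");
    }
}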
src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -401,7 +401,7 @@ public DatasetLock addDatasetLock(Dataset dataset, DatasetLock lock) {
dataset.addLock(lock);
lock.setStartTime( new Date() );
em.persist(lock);
em.merge(dataset);
//em.merge(dataset);
return lock;
}

src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
@@ -54,7 +54,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
String host = requestedSettings.get(DURACLOUD_HOST);
if (host != null) {
Dataset dataset = dv.getDataset();
if (dataset.getLockFor(Reason.finalizePublication) == null) {
if (dataset.getLockFor(Reason.finalizePublication) == null
&& dataset.getLockFor(Reason.FileValidationFailed) == null) {
// Use Duracloud client classes to login
ContentStoreManager storeManager = new ContentStoreManagerImpl(host, port, dpnContext);
Credential credential = new Credential(System.getProperty("duracloud.username"),
@@ -186,7 +187,7 @@ public void run() {
logger.severe("MD5 MessageDigest not available!");
}
} else {
logger.warning("DuraCloud Submission Workflow aborted: Dataset locked for finalizePublication");
logger.warning("DuraCloud Submission Workflow aborted: Dataset locked for finalizePublication, or because file validation failed");
return new Failure("Dataset locked");
}
return WorkflowStepResult.OK;
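
Both archiver steps touched by this commit (DuraCloud here, the localhost one further down) now repeat the same two-lock guard. A possible refactoring, not part of this commit, that keeps the check in one place:

// hypothetical helper; Reason is edu.harvard.iq.dataverse.DatasetLock.Reason
private static boolean readyForArchiving(Dataset dataset) {
    // skip archiving while publication is still being finalized asynchronously,
    // or after a publish attempt was aborted by failed file validation
    return dataset.getLockFor(Reason.finalizePublication) == null
            && dataset.getLockFor(Reason.FileValidationFailed) == null;
}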
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
@@ -139,9 +139,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException {

// validate the physical files (verify checksums):
validateDataFiles(theDataset, ctxt);
// (this will throw a CommandException if it fails)

if (!datasetExternallyReleased){
publicizeExternalIdentifier(theDataset, ctxt);
// (will also throw a CommandException, unless successful)
}
theDataset.getLatestVersion().setVersionState(RELEASED);
}
@@ -272,9 +274,7 @@ private void validateDataFiles(Dataset dataset, CommandContext ctxt) throws Comm

String recalculatedChecksum = null;
try {
logger.log(Level.INFO, "start: "+new Date().getTime());
recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType);
logger.log(Level.INFO, "end: "+new Date().getTime());
} catch (RuntimeException rte) {
recalculatedChecksum = null;
} finally {
@@ -300,16 +300,20 @@ private void validateDataFiles(Dataset dataset, CommandContext ctxt) throws Comm
logger.log(Level.INFO, "successfully validated DataFile {0}; checksum {1}", new Object[]{dataFile.getId(), recalculatedChecksum});
}
} catch (Throwable e) {
// Check if there is a workflow lock on the dataset - i.e., if this
// is being done asynchronously. If so, change the lock message
// to notify the user what went wrong, and leave the lock in place:

if (dataset.isLockedFor(DatasetLock.Reason.finalizePublication)) {
DatasetLock workflowLock = dataset.getLockFor(DatasetLock.Reason.finalizePublication);
workflowLock.setInfo(FILE_VALIDATION_ERROR);
ctxt.datasets().updateDatasetLock(workflowLock);
DatasetLock lock = dataset.getLockFor(DatasetLock.Reason.finalizePublication);
lock.setReason(DatasetLock.Reason.FileValidationFailed);
lock.setInfo(FILE_VALIDATION_ERROR);
ctxt.datasets().updateDatasetLock(lock);
} else {
// Lock the dataset with a new FileValidationFailed lock:
DatasetLock lock = new DatasetLock(DatasetLock.Reason.FileValidationFailed, getRequest().getAuthenticatedUser()); //(AuthenticatedUser)getUser());
lock.setDataset(dataset);
lock.setInfo(FILE_VALIDATION_ERROR);
ctxt.datasets().addDatasetLock(dataset, lock);
}

// Throw a new CommandException; if the command is being called
// synchronously, it will be intercepted and the page will display
// the error message for the user.
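
The new lock is deliberately left in place, so the dataset stays blocked until someone intervenes. A sketch of how it could later be cleared, reusing the removeDatasetLocks call already referenced in DatasetPage above (when and by whom this should happen is not decided in this commit):

// e.g. from an admin endpoint, or after the offending files are fixed or replaced;
// datasetService is the same service bean used in DatasetPage
datasetService.removeDatasetLocks(dataset.getId(), DatasetLock.Reason.FileValidationFailed);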
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
@@ -56,7 +56,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
Dataset dataset = dv.getDataset();


if (dataset.getLockFor(Reason.finalizePublication) == null) {
if (dataset.getLockFor(Reason.finalizePublication) == null
&& dataset.getLockFor(Reason.FileValidationFailed) == null) {

String spaceName = dataset.getGlobalId().asString().replace(':', '-').replace('/', '-')
.replace('.', '-').toLowerCase();
@@ -78,7 +79,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
dv.setArchivalCopyLocation(sb.toString());

} else {
logger.warning("Localhost Submission Workflow aborted: Dataset locked for finalizePublication");
logger.warning("Localhost Submission Workflow aborted: Dataset locked for finalizePublication, or because file validation failed");
return new Failure("Dataset locked");
}
} catch (Exception e) {
