diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index dc435208ae8..fc847ac0ef8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -535,7 +535,15 @@ public DatasetLock addDatasetLock(Long datasetId, DatasetLock.Reason reason, Lon
             user = em.find(AuthenticatedUser.class, userId);
         }
 
-        DatasetLock lock = new DatasetLock(reason, user);
+        // Check if the dataset is already locked for this reason:
+        // (to prevent multiple, duplicate locks on the dataset!)
+        DatasetLock lock = dataset.getLockFor(reason);
+        if (lock != null) {
+            return lock;
+        }
+
+        // Create new:
+        lock = new DatasetLock(reason, user);
         lock.setDataset(dataset);
         lock.setInfo(info);
         lock.setStartTime(new Date());
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 60630228526..2e041c590d5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -679,7 +679,14 @@ public Response getRsync(@PathParam("identifier") String id) {
         Dataset dataset = null;
         try {
             dataset = findDatasetOrDie(id);
-            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(findUserOrDie()), dataset));
+            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
+
+            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
+            if (lock == null) {
+                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
+                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
+            }
             return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN));
         } catch (WrappedResponse wr) {
             return wr.getResponse();
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
index f58f6d1f0af..4c578f00ba7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
@@ -164,32 +164,23 @@ public void beforeJob() throws Exception {
         jobLogger.log(Level.INFO, "Job Status = " + jobContext.getBatchStatus());
 
         jobParams.setProperty("datasetGlobalId", getDatasetGlobalId());
-        jobParams.setProperty("userId", getUserId());
-        jobParams.setProperty("mode", getMode());
-        uploadFolder = jobParams.getProperty("uploadFolder");
+        if (dataset == null) {
+            getJobLogger().log(Level.SEVERE, "Can't find dataset.");
+            jobContext.setExitStatus("FAILED");
+            throw new IOException("Can't find dataset.");
+        }
 
-        /*
-        // lock the dataset
-        jobLogger.log(Level.INFO, "Locking dataset");
-        String info = "Starting batch file import job.";
-        */
+        if (!dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) {
+            getJobLogger().log(Level.SEVERE, "Dataset " + dataset.getGlobalId() + " is not locked for DCM upload. Exiting");
+            jobContext.setExitStatus("FAILED");
+            throw new IOException("Dataset " + dataset.getGlobalId() + " is not locked for DCM upload");
+        }
 
-        // TODO:
-        // In the current #3348 implementation, we are no longer locking the
-        // Dataset here. Because it gets locked immediately after the user
-        // downloads the rsync scipt. Once this branch (#3561) is merged into
-        // #3348, let's revisit this. We should probably check here that
-        // the dataset is locked, and that it's locked waiting for an
-        // rsync upload to happen. And then we may want to "re-lock" it, with
-        // the info field in the new lock specifying that there is now a
-        // file crawler job in progress (to prevent another one from happening
-        // in parallel. -- L.A. Aug 31 2017
+        jobParams.setProperty("userId", getUserId());
+        jobParams.setProperty("mode", getMode());
 
-        //datasetServiceBean.addDatasetLock(dataset.getId(),
-        //        DatasetLock.Reason.Ingest,
-        //        (user!=null)?user.getId():null,
-        //        info);
+        uploadFolder = jobParams.getProperty("uploadFolder");
 
         // check constraints for running the job
         if (canRunJob()) {
@@ -214,7 +205,7 @@ public void beforeJob() throws Exception {
     }
 
     /**
-     * After the job, generate a report and remove the dataset lock
+     * After the job is done, generate a report
      */
     @Override
     public void afterJob() throws Exception {
@@ -235,15 +226,6 @@ public void afterJob() throws Exception {
                 getJobLogger().log(Level.SEVERE, "File listed in checksum manifest not found: " + key);
             }
 
-        // remove dataset lock
-        // Disabled now, see L.A.'s comment at beforeJob()
-//        if (dataset != null && dataset.getId() != null) {
-//            datasetServiceBean.removeDatasetLock(dataset.getId(), DatasetLock.Reason.Ingest);
-//        }
-
-
-        getJobLogger().log(Level.INFO, "Removing dataset lock.");
-
         // job step info
         JobOperator jobOperator = BatchRuntime.getJobOperator();
         StepExecution step = jobOperator.getStepExecutions(jobContext.getInstanceId()).get(0);
@@ -369,17 +351,21 @@ private String getDatasetGlobalId() {
             String datasetId = jobParams.getProperty("datasetId");
             dataset = datasetServiceBean.find(new Long(datasetId));
-            getJobLogger().log(Level.INFO, "Dataset Identifier (datasetId=" + datasetId + "): " + dataset.getIdentifier());
-            return dataset.getGlobalId();
+            if (dataset != null) {
+                getJobLogger().log(Level.INFO, "Dataset Identifier (datasetId=" + datasetId + "): " + dataset.getIdentifier());
+                return dataset.getGlobalId();
+            }
         }
         if (jobParams.containsKey("datasetPrimaryKey")) {
             long datasetPrimaryKey = Long.parseLong(jobParams.getProperty("datasetPrimaryKey"));
             dataset = datasetServiceBean.find(datasetPrimaryKey);
-            getJobLogger().log(Level.INFO, "Dataset Identifier (datasetPrimaryKey=" + datasetPrimaryKey + "): "
+            if (dataset != null) {
+                getJobLogger().log(Level.INFO, "Dataset Identifier (datasetPrimaryKey=" + datasetPrimaryKey + "): "
                     + dataset.getIdentifier());
-            return dataset.getGlobalId();
+                return dataset.getGlobalId();
+            }
         }
-        getJobLogger().log(Level.SEVERE, "Can't find dataset.");
+        dataset = null;
         return null;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
index 49ecce4fdbb..56e4cbcc976 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
@@ -23,6 +23,7 @@
 import edu.harvard.iq.dataverse.DataFile.ChecksumType;
 import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetLock;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
@@ -125,7 +126,7 @@ public void init() {
                 suppliedSize = new Long(jobParams.getProperty("totalSize"));
                 getJobLogger().log(Level.INFO, "Size parameter supplied: "+suppliedSize);
             } catch (NumberFormatException ex) {
-                getJobLogger().log(Level.WARNING, "Invalid file size supplied: "+jobParams.getProperty("totalSize"));
+                getJobLogger().log(Level.WARNING, "Invalid file size supplied (in FileRecordWriter.init()): "+jobParams.getProperty("totalSize"));
                 suppliedSize = null;
             }
         }
@@ -133,7 +134,7 @@ public void init() {
 
     @Override
     public void open(Serializable checkpoint) throws Exception {
-        // no-op
+        // no-op    
     }
 
     @Override
@@ -166,6 +167,13 @@ public void writeItems(List list) {
                     jobContext.setExitStatus("FAILED");
                     return;
                 }
+                DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
+                if (dcmLock == null) {
+                    getJobLogger().log(Level.WARNING, "Dataset not locked for DCM upload");
+                } else {
+                    datasetServiceBean.removeDatasetLocks(dataset.getId(), DatasetLock.Reason.DcmUpload);
+                    dataset.removeLock(dcmLock);
+                }
                 updateDatasetVersion(dataset.getLatestVersion());
             } else {
                 getJobLogger().log(Level.SEVERE, "File mode "+fileMode+" is not supported.");
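
Taken together, the hunks above give the DCM/rsync upload path a simple lock lifecycle: the DcmUpload lock is acquired when the rsync script is downloaded, verified before the filesystem import job runs, and released once the uploaded files have been written. Below is a minimal sketch of that lifecycle using only the methods that appear in this diff; the DcmLockLifecycleSketch class, its walkThroughLockLifecycle method, and the injected service bean are hypothetical scaffolding for illustration, not code from the patch.

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetLock;
import edu.harvard.iq.dataverse.DatasetServiceBean;

import javax.ejb.EJB;

// Hypothetical helper, for illustration only -- not part of this patch.
public class DcmLockLifecycleSketch {

    @EJB
    DatasetServiceBean datasetService;

    void walkThroughLockLifecycle(Dataset dataset, Long userId) {
        // 1. Lock when the rsync script is handed out (as in Datasets.getRsync()).
        //    addDatasetLock() now returns the existing lock if one is already present,
        //    so repeated script downloads no longer pile up duplicate locks.
        DatasetLock lock = datasetService.addDatasetLock(
                dataset.getId(), DatasetLock.Reason.DcmUpload, userId, "script downloaded");
        if (lock == null) {
            throw new IllegalStateException("Failed to lock the dataset");
        }

        // 2. Before the batch import runs, the job refuses to proceed unless the
        //    DcmUpload lock is in place (as in FileRecordJobListener.beforeJob()).
        if (!dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) {
            throw new IllegalStateException("Dataset is not locked for DCM upload");
        }

        // 3. Once the uploaded files have been written, release the lock
        //    (as in FileRecordWriter.writeItems()).
        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
        if (dcmLock != null) {
            datasetService.removeDatasetLocks(dataset.getId(), DatasetLock.Reason.DcmUpload);
            dataset.removeLock(dcmLock);
        }
    }
}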