Skip to content

Commit

Permalink
Merge pull request #6857 from GlobalDataverseCommunityConsortium/IQSS…
Browse files Browse the repository at this point in the history
…/6829

IQSS/6829 - account for file failures
  • Loading branch information
kcondon authored May 1, 2020
2 parents 5ffc7b0 + 35b38e0 commit b0868df
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 25 deletions.
20 changes: 17 additions & 3 deletions src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand;
Expand Down Expand Up @@ -50,6 +51,8 @@
import edu.harvard.iq.dataverse.util.FileUtil;
import edu.harvard.iq.dataverse.util.JsfHelper;
import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
import static edu.harvard.iq.dataverse.util.StringUtil.isEmpty;

import edu.harvard.iq.dataverse.util.StringUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.io.File;
Expand Down Expand Up @@ -1754,8 +1757,14 @@ public void updateOwnerDataverse() {
if (dataset.getOwner() != null && dataset.getOwner().getId() != null) {
ownerId = dataset.getOwner().getId();
logger.info("New host dataverse id: "+ownerId);
// discard the dataset already created:
// discard the dataset already created
//If a global ID was already assigned (as is true for direct upload), keep it: if files were already uploaded, they are stored at the path corresponding to the existing global id
GlobalId gid = dataset.getGlobalId();
dataset = new Dataset();
if(gid!=null) {
dataset.setGlobalId(gid);
}

// initiate from scratch: (isolate the creation of a new dataset in its own method?)
init(true);
// rebuild the breadcrumbs display:
Expand Down Expand Up @@ -1930,14 +1939,19 @@ private String init(boolean initFull) {
dataset.setOwner(dataverseService.find(ownerId));
dataset.setProtocol(protocol);
dataset.setAuthority(authority);
//Wait until the create command before actually getting an identifier

if (dataset.getOwner() == null) {
return permissionsWrapper.notFound();
} else if (!permissionService.on(dataset.getOwner()).has(Permission.AddDataset)) {
return permissionsWrapper.notAuthorized();
}

//Wait until the create command before actually getting an identifier, except if we're using directUpload
//Need to assign an identifier prior to calls to requestDirectUploadUrl if direct upload is used.
if ( isEmpty(dataset.getIdentifier()) && systemConfig.directUploadEnabled(dataset) ) {
CommandContext ctxt = commandEngine.getContext();
GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
dataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(dataset, idServiceBean));
}
dataverseTemplates.addAll(dataverseService.find(ownerId).getTemplates());
if (!dataverseService.find(ownerId).isTemplateRoot()) {
dataverseTemplates.addAll(dataverseService.find(ownerId).getParentTemplates());
Expand Down
13 changes: 3 additions & 10 deletions src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
Original file line number Diff line number Diff line change
Expand Up @@ -346,10 +346,6 @@ public boolean doesSessionUserHaveDataSetPermission(Permission permissionToCheck
return hasPermission;
}

// Returns true when direct (redirect-based) upload is enabled for this
// dataset's effective storage driver. Boolean.getBoolean reads the JVM
// system property "dataverse.files.<driverId>.upload-redirect" and yields
// true only if that property exists and equals "true" (case-insensitive).
public boolean directUploadEnabled() {
return Boolean.getBoolean("dataverse.files." + this.dataset.getDataverseContext().getEffectiveStorageDriverId() + ".upload-redirect");
}

// Intentional no-op placeholder; nothing on this page currently needs
// resetting here. NOTE(review): confirm whether any JSF lifecycle caller
// depends on this hook before removing it.
public void reset() {
// ?
}
Expand Down Expand Up @@ -480,6 +476,8 @@ public String init() {
// that the dataset id is mandatory... But 404 will do for now.
return permissionsWrapper.notFound();
}



this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getOwner().getEffectiveStorageDriverId());
this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();
Expand Down Expand Up @@ -1745,12 +1743,7 @@ public String getRsyncScriptFilename() {

public void requestDirectUploadUrl() {

//Need to assign an identifier at this point if direct upload is used.
if ( isEmpty(dataset.getIdentifier()) ) {
CommandContext ctxt = commandEngine.getContext();
GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
dataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(dataset, idServiceBean));
}


S3AccessIO<?> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
if(s3io == null) {
Expand Down
5 changes: 5 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import com.ocpsoft.pretty.PrettyContext;
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
Expand Down Expand Up @@ -1047,4 +1048,8 @@ public boolean isDatafileValidationOnPublishEnabled() {
boolean safeDefaultIfKeyNotFound = true;
return settingsService.isTrueForKey(SettingsServiceBean.Key.FileValidationOnPublishEnabled, safeDefaultIfKeyNotFound);
}

/**
 * Whether direct (redirect-based) upload is switched on for the store that
 * backs the given dataset.
 *
 * Controlled per storage driver by the JVM system property
 * {@code dataverse.files.<driverId>.upload-redirect}; Boolean.getBoolean
 * returns {@code true} only when the property is set to "true"
 * (case-insensitive) and {@code false} otherwise.
 *
 * @param dataset the dataset whose effective storage driver is consulted
 * @return true if direct upload is enabled for that driver
 */
public boolean directUploadEnabled(Dataset dataset) {
    String driverId = dataset.getDataverseContext().getEffectiveStorageDriverId();
    String propertyName = "dataverse.files." + driverId + ".upload-redirect";
    return Boolean.getBoolean(propertyName);
}
}
4 changes: 2 additions & 2 deletions src/main/webapp/editFilesFragment.xhtml
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@

$(document).ready(function () {
uploadWidgetDropMsg();
setupDirectUpload(#{EditDatafilesPage.directUploadEnabled()}, #{EditDatafilesPage.workingVersion.dataset.id});
setupDirectUpload(#{systemConfig.directUploadEnabled(EditDatafilesPage.dataset)});
});
//]]>
</script>
Expand Down Expand Up @@ -127,7 +127,7 @@

<p:fileUpload id="fileUpload"
dragDropSupport="true"
auto="#{!(EditDatafilesPage.directUploadEnabled())}"
auto="#{!(systemConfig.directUploadEnabled(EditDatafilesPage.dataset))}"
multiple="#{datasetPage || EditDatafilesPage.allowMultipleFileUpload()}"
disabled="#{lockedFromEdits || !(datasetPage || EditDatafilesPage.showFileUploadComponent()) }"
listener="#{EditDatafilesPage.handleFileUpload}"
Expand Down
28 changes: 18 additions & 10 deletions src/main/webapp/resources/js/fileupload.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
var fileList = [];
var observer2=null;
var datasetId=null;
var directUploadEnabled=false;
//How many files have started being processed but aren't yet being uploaded
var filesInProgress=0;
//The # of the current file being processed (total number of files for which upload has at least started)
Expand All @@ -17,9 +17,9 @@ var finishFile = (function () {
})();


function setupDirectUpload(enabled, theDatasetId) {
function setupDirectUpload(enabled) {
if(enabled) {
datasetId=theDatasetId;
directUploadEnabled=true;
$('.ui-fileupload-upload').hide();
$('.ui-fileupload-cancel').hide();
//Catch files entered via upload dialog box. Since this 'select' widget is replaced by PF, we need to add a listener again when it is replaced
Expand All @@ -28,7 +28,7 @@ function setupDirectUpload(enabled, theDatasetId) {
fileInput.addEventListener('change', function(event) {
fileList=[];
for(var i=0;i<fileInput.files.length;i++) {
queueFileForDirectUpload(fileInput.files[i], datasetId);
queueFileForDirectUpload(fileInput.files[i]);
}
}, {once:false});
}
Expand All @@ -37,7 +37,7 @@ function setupDirectUpload(enabled, theDatasetId) {
fileDropWidget.addEventListener('drop', function(event) {
fileList=[];
for(var i=0;i<event.dataTransfer.files.length;i++) {
queueFileForDirectUpload(event.dataTransfer.files[i], datasetId);
queueFileForDirectUpload(event.dataTransfer.files[i]);
}
}, {once:false});

Expand All @@ -50,7 +50,7 @@ function setupDirectUpload(enabled, theDatasetId) {
fileInput=mutation.addedNodes[i];
mutation.addedNodes[i].addEventListener('change', function(event) {
for(var j=0;j<mutation.addedNodes[i].files.length;j++) {
queueFileForDirectUpload(mutation.addedNodes[i].files[j], datasetId);
queueFileForDirectUpload(mutation.addedNodes[i].files[j]);
}
}, {once:false});
}
Expand All @@ -65,7 +65,7 @@ function setupDirectUpload(enabled, theDatasetId) {
} //else ?
}

function queueFileForDirectUpload(file, datasetId) {
function queueFileForDirectUpload(file) {
if(fileList.length === 0) {uploadWidgetDropRemoveMsg();}
fileList.push(file);
//Fire off the first 4 to start (0,1,2,3)
Expand Down Expand Up @@ -243,7 +243,7 @@ function uploadFailure(jqXHR, upid, filename) {
// from the call stack instead (arguments to the fail() method that calls onerror() that calls this function

//Retrieve the error number (status) and related explanation (statusText)
var status = null;
var status = 0;
var statusText =null;

// There are various metadata available about which file the error pertains to
Expand All @@ -262,10 +262,14 @@ function uploadFailure(jqXHR, upid, filename) {
id = upid;
name=filename;
} else {
status=arguments.callee.caller.caller.arguments[1].jqXHR.status;
statusText = arguments.callee.caller.caller.arguments[1].jqXHR.statusText;
try {
name = arguments.callee.caller.caller.arguments[1].files[0].name;
id = arguments.callee.caller.caller.arguments[1].files[0].row[0].attributes.upid.value;
status=arguments.callee.caller.caller.arguments[1].jqXHR.status;
statusText = arguments.callee.caller.caller.arguments[1].jqXHR.statusText;
} catch {
console.log("Unable to determine status for error - assuming network issue");
}
}

//statusText for error 0 is the unhelpful 'error'
Expand Down Expand Up @@ -295,6 +299,10 @@ function uploadFailure(jqXHR, upid, filename) {
break;
}
}
if(directUploadEnabled) {
//Mark this file as processed and keep processing further files
directUploadFinished();
}
}
//MD5 Hashing functions

Expand Down

0 comments on commit b0868df

Please sign in to comment.