Skip to content

Commit

Permalink
Removed extra logging/experimental tweaks added while investigating th…
Browse files Browse the repository at this point in the history
…e freezing file upload page issue.
landreev committed May 1, 2017

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature.
1 parent f976c09 commit 96e5762
Showing 4 changed files with 14 additions and 17 deletions.
4 changes: 2 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
Original file line number Diff line number Diff line change
@@ -2228,7 +2228,7 @@ public String save() {
userNotificationService.sendNotification((AuthenticatedUser) session.getUser(), dataset.getCreateDate(), UserNotification.Type.CREATEDS, dataset.getLatestVersion().getId());
}
}
logger.info("Successfully executed SaveDatasetCommand.");
logger.fine("Successfully executed SaveDatasetCommand.");
} catch (EJBException ex) {
StringBuilder error = new StringBuilder();
error.append(ex).append(" ");
@@ -2273,7 +2273,7 @@ public String save() {
// queue the data ingest jobs for asynchronous execution:
ingestService.startIngestJobs(dataset, (AuthenticatedUser) session.getUser());

logger.info("Redirecting to the Dataset page.");
logger.fine("Redirecting to the Dataset page.");

return returnToDraftVersion();
}
8 changes: 3 additions & 5 deletions src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
Original file line number Diff line number Diff line change
@@ -1080,10 +1080,8 @@ public String save() {
ingestService.addFiles(workingVersion, newFiles);
//boolean newDraftVersion = false;

datasetUpdateRequired = true;

if (workingVersion.getId() == null || datasetUpdateRequired) {
logger.info("issuing the dataset update command");
logger.fine("issuing the dataset update command");
// We are creating a new draft version;
// (OR, a full update of the dataset has been explicitly requested,
// because of the nature of the updates the user has made).
@@ -1176,7 +1174,7 @@ public String save() {
try {
//DataFile savedDatafile = datafileService.save(fileMetadata.getDataFile());
fileMetadata = datafileService.mergeFileMetadata(fileMetadata);
logger.info("Successfully saved DataFile "+fileMetadata.getLabel()+" in the database.");
logger.fine("Successfully saved DataFile "+fileMetadata.getLabel()+" in the database.");
} catch (EJBException ex) {
saveError.append(ex).append(" ");
saveError.append(ex.getMessage()).append(" ");
@@ -1273,7 +1271,7 @@ public String save() {
// return returnToDraftVersionById();
//}

logger.info("Redirecting to the dataset page, from the edit/upload page.");
logger.fine("Redirecting to the dataset page, from the edit/upload page.");
return returnToDraftVersion();
}

Original file line number Diff line number Diff line change
@@ -81,14 +81,13 @@ public void onMessage(Message message) {
while (iter.hasNext()) {
datafile_id = (Long) iter.next();

logger.info("Start ingest job;");
logger.fine("Start ingest job;");
try {
Thread.sleep(5000);
if (ingestService.ingestAsTabular(datafile_id)) {
//Thread.sleep(10000);
logger.info("Finished ingest job;");
logger.fine("Finished ingest job;");
} else {
logger.info("Error occurred during ingest job!");
logger.warning("Error occurred during ingest job!");
}
} catch (Exception ex) {
//ex.printStackTrace();
@@ -99,7 +98,7 @@ public void onMessage(Message message) {
// -- L.A. Aug. 13 2014;
logger.info("Unknown exception occurred during ingest (supressed stack trace); re-setting ingest status.");
if (datafile_id != null) {
logger.info("looking up datafile for id " + datafile_id);
logger.fine("looking up datafile for id " + datafile_id);
DataFile datafile = datafileService.find(datafile_id);
if (datafile != null) {
datafile.SetIngestProblem();
@@ -114,7 +113,7 @@ public void onMessage(Message message) {
datafile.setIngestReport(errorReport);
datafile.setDataTables(null);

logger.info("trying to save datafile " + datafile_id);
logger.info("trying to save datafile and the failed ingest report, id=" + datafile_id);
datafile = datafileService.save(datafile);

Dataset dataset = datafile.getOwner();
Original file line number Diff line number Diff line change
@@ -317,7 +317,7 @@ from a local InputStream (or a readChannel) into the
//
dataFile.setFilesize(dataAccess.getSize());
savedSuccess = true;
logger.info("Success: permanently saved file "+dataFile.getFileMetadata().getLabel());
logger.fine("Success: permanently saved file "+dataFile.getFileMetadata().getLabel());

} catch (IOException ioex) {
logger.warning("Failed to save the file, storage id " + dataFile.getStorageIdentifier() + " (" + ioex.getMessage() + ")");
@@ -335,7 +335,7 @@ from a local InputStream (or a readChannel) into the
if (generatedTempFiles != null) {
for (Path generated : generatedTempFiles) {
if (savedSuccess) { // && localFile) {
logger.info("(Will also try to permanently save generated thumbnail file "+generated.toString()+")");
logger.fine("(Will also try to permanently save generated thumbnail file "+generated.toString()+")");
try {
//Files.copy(generated, Paths.get(dataset.getFileSystemDirectory().toString(), generated.getFileName().toString()));
int i = generated.toString().lastIndexOf("thumb");
@@ -372,7 +372,7 @@ from a local InputStream (or a readChannel) into the
// Any necessary post-processing:
//performPostProcessingTasks(dataFile);
}
logger.info("Done! Finished saving new files in permanent storage.");
logger.fine("Done! Finished saving new files in permanent storage.");
}
}
}
@@ -434,7 +434,7 @@ public void startIngestJobs(Dataset dataset, AuthenticatedUser user) {

scheduledFiles.add(dataFile);

logger.info("Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest, for dataset: " + dataset.getGlobalId());
logger.fine("Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest, for dataset: " + dataset.getGlobalId());
count++;
} else {
dataFile.setIngestDone();

0 comments on commit 96e5762

Please sign in to comment.