From 00d4c0667ec19af9e41099eb15ea68286f816655 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 23 Apr 2019 12:45:04 -0400
Subject: [PATCH 01/32] assert bug: multiple of same username allowed (lower
 and upper) #3575

---
 .../edu/harvard/iq/dataverse/api/UsersIT.java | 22 +++++++++++++++++++
 .../edu/harvard/iq/dataverse/api/UtilIT.java  | 22 +++++++++++++++++++
 2 files changed, 44 insertions(+)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
index 6daaeeec920..395474243c6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
@@ -319,6 +319,28 @@ public void convertNonBcryptUserFromBuiltinToShib() {
 
     }
 
+    @Test
+    public void testUsernameCaseSensitivity() {
+        String randomUsername = UtilIT.getRandomIdentifier();
+        String lowercaseUsername = randomUsername.toLowerCase();
+        String uppercaseUsername = randomUsername.toUpperCase();
+        String randomEmailForLowercaseuser = UtilIT.getRandomIdentifier() + "@mailinator.com";
+        String randomEmailForUppercaseuser = UtilIT.getRandomIdentifier() + "@mailinator.com";
+
+        // Create first user (username all lower case).
+        Response createLowercaseUser = UtilIT.createUser(lowercaseUsername, randomEmailForLowercaseuser);
+        createLowercaseUser.prettyPrint();
+        createLowercaseUser.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // Attempt to create second user (same username but all UPPER CASE).
+        Response createUppercaseUser = UtilIT.createUser(uppercaseUsername, randomEmailForUppercaseuser);
+        createUppercaseUser.prettyPrint();
+        createUppercaseUser.then().assertThat()
+                // FIXME: This should not return a 200 (OK) response. It should fail and report that the username has been taken.
+                .statusCode(OK.getStatusCode());
+    }
+
     private Response convertUserFromBcryptToSha1(long idOfBcryptUserToConvert, String password) {
         JsonObjectBuilder data = Json.createObjectBuilder();
         data.add("builtinUserId", idOfBcryptUserToConvert);
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 678e88fc036..fcdb5cba5c5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -103,6 +103,28 @@ public static Response createRandomUser() {
         return createRandomUser("user");
     }
 
+    public static Response createUser(String username, String email) {
+        logger.info("Creating user " + username);
+        String userAsJson = getUserAsJsonString(username, username, username, email);
+        String password = getPassword(userAsJson);
+        Response response = given()
+                .body(userAsJson)
+                .contentType(ContentType.JSON)
+                .post("/api/builtin-users?key=" + BUILTIN_USER_KEY + "&password=" + password);
+        return response;
+    }
+
+    private static String getUserAsJsonString(String username, String firstName, String lastName, String email) {
+        JsonObjectBuilder builder = Json.createObjectBuilder();
+        builder.add(USERNAME_KEY, username);
+        builder.add("firstName", firstName);
+        builder.add("lastName", lastName);
+        builder.add(EMAIL_KEY, email);
+        String userAsJson = builder.build().toString();
+        logger.fine("User to create: " + userAsJson);
+        return userAsJson;
+    }
+
     private static String getUserAsJsonString(String username, String firstName, String lastName) {
         JsonObjectBuilder builder = Json.createObjectBuilder();
         builder.add(USERNAME_KEY, username);

From 682c4faf06154464e76fe1fd8c264584514d5f2e Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 23 Apr 2019 14:14:01 -0400
Subject: [PATCH 02/32] show database error via API/actionlogrecord, not
 "null" #3575

---
 .../java/edu/harvard/iq/dataverse/api/BuiltinUsers.java | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
index e85f4454b24..21cb97f83cc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
@@ -176,12 +176,13 @@ private Response internalSave(BuiltinUser user, String password, String key) {
 
         } catch ( EJBException ejbx ) {
             alr.setActionResult(ActionLogRecord.Result.InternalError);
-            alr.setInfo( alr.getInfo() + "// " + ejbx.getMessage());
+            String errorMessage = ejbx.getCausedByException().getLocalizedMessage();
+            alr.setInfo( alr.getInfo() + "// " + errorMessage);
             if ( ejbx.getCausedByException() instanceof IllegalArgumentException ) {
-                return error(Status.BAD_REQUEST, "Bad request: can't save user. " + ejbx.getCausedByException().getMessage());
+                return error(Status.BAD_REQUEST, "Bad request: can't save user. " + errorMessage);
             } else {
                 logger.log(Level.WARNING, "Error saving user: ", ejbx);
-                return error(Status.INTERNAL_SERVER_ERROR, "Can't save user: " + ejbx.getMessage());
+                return error(Status.INTERNAL_SERVER_ERROR, "Can't save user: " + errorMessage);
             }
 
         } catch (Exception e) {

From 60cd16939972f3522df6fc8144a9f0d2caf88c62 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 23 Apr 2019 14:20:57 -0400
Subject: [PATCH 03/32] add database constraint: unique on lowercase username
 #3575

---
 scripts/database/reference_data.sql                           | 1 +
 src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql | 1 +
 2 files changed, 2 insertions(+)
 create mode 100644 src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql

diff --git a/scripts/database/reference_data.sql b/scripts/database/reference_data.sql
index cb12db7c882..41370a22013 100644
--- a/scripts/database/reference_data.sql
+++ b/scripts/database/reference_data.sql
@@ -30,6 +30,7 @@ INSERT INTO guestbook(
 -- gets an answer. See also https://github.com/IQSS/dataverse/issues/2598#issuecomment-158219334
 CREATE UNIQUE INDEX dataverse_alias_unique_idx on dataverse (LOWER(alias));
 CREATE UNIQUE INDEX index_authenticateduser_lower_email ON authenticateduser (lower(email));
+CREATE UNIQUE INDEX index_authenticateduser_lower_useridentifier ON authenticateduser (lower(useridentifier));
 -- this field has been removed from builtinuser; CREATE UNIQUE INDEX index_builtinuser_lower_email ON builtinuser (lower(email));
 --Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132
diff --git a/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql b/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql
new file mode 100644
index 00000000000..0b1804bdfc4
--- /dev/null
+++ b/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql
@@ -0,0 +1 @@
+CREATE UNIQUE INDEX index_authenticateduser_lower_useridentifier ON authenticateduser (lower(useridentifier));

From 0a0dc255fc1311ad227e3127385b5e500d22f010 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 23 Apr 2019 14:27:10 -0400
Subject: [PATCH 04/32] assert duplicate usernames no longer created #3575

---
 src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
index 395474243c6..1e54109b8e1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
@@ -15,6 +15,7 @@
 import javax.json.JsonObjectBuilder;
 import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
 import static javax.ws.rs.core.Response.Status.CREATED;
+import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
 import static javax.ws.rs.core.Response.Status.NOT_FOUND;
 import static javax.ws.rs.core.Response.Status.OK;
 import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
@@ -337,8 +338,8 @@ public void testUsernameCaseSensitivity() {
         Response createUppercaseUser = UtilIT.createUser(uppercaseUsername, randomEmailForUppercaseuser);
         createUppercaseUser.prettyPrint();
         createUppercaseUser.then().assertThat()
-                // FIXME: This should not return a 200 (OK) response. It should fail and report that the username has been taken.
-                .statusCode(OK.getStatusCode());
+                // TODO: consider returning "BAD REQUEST" (400) instead of a 500.
+                .statusCode(INTERNAL_SERVER_ERROR.getStatusCode());
     }
 
     private Response convertUserFromBcryptToSha1(long idOfBcryptUserToConvert, String password) {

From 392d26ea1a727d7af14faa3b00dd63dfd8a1f931 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 25 Apr 2019 11:55:56 -0400
Subject: [PATCH 05/32] make countOfIdentifier case insensitive #3575

---
 .../edu/harvard/iq/dataverse/api/BuiltinUsers.java |  6 +++---
 .../authorization/users/AuthenticatedUser.java     |  2 +-
 .../java/edu/harvard/iq/dataverse/api/UsersIT.java | 11 ++++++++---
 3 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
index 21cb97f83cc..515184e50b8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
@@ -123,9 +123,9 @@ private Response internalSave(BuiltinUser user, String password, String key) {
             user.updateEncryptedPassword(PasswordEncryption.get().encrypt(password), PasswordEncryption.getLatestVersionNumber());
         }
 
-        // Make sure the identifier is unique
-        if ( (builtinUserSvc.findByUserName(user.getUserName()) != null)
-                || ( authSvc.identifierExists(user.getUserName())) ) {
+        // Make sure the identifier is unique, case insensitive. "DATAVERSEADMIN" is not allowed to be created if "dataverseAdmin" exists.
+        if ((builtinUserSvc.findByUserName(user.getUserName()) != null)
+                || (authSvc.identifierExists(user.getUserName()))) {
             return error(Status.BAD_REQUEST, "username '" + user.getUserName() + "' already exists");
         }
         user = builtinUserSvc.save(user);
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
index 287cce366b6..edf3013804c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
@@ -49,7 +49,7 @@
     @NamedQuery( name="AuthenticatedUser.findByEmail",
             query="select au from AuthenticatedUser au WHERE LOWER(au.email)=LOWER(:email)"),
     @NamedQuery( name="AuthenticatedUser.countOfIdentifier",
-            query="SELECT COUNT(a) FROM AuthenticatedUser a WHERE a.userIdentifier=:identifier"),
+            query="SELECT COUNT(a) FROM AuthenticatedUser a WHERE LOWER(a.userIdentifier)=LOWER(:identifier)"),
     @NamedQuery( name="AuthenticatedUser.filter",
             query="select au from AuthenticatedUser au WHERE ("
                 + "au.userIdentifier like :query OR "
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
index 1e54109b8e1..19ddf61f5bb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
@@ -15,7 +15,6 @@
 import javax.json.JsonObjectBuilder;
 import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
 import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
 import static javax.ws.rs.core.Response.Status.NOT_FOUND;
 import static javax.ws.rs.core.Response.Status.OK;
 import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
@@ -338,8 +337,14 @@ public void testUsernameCaseSensitivity() {
         Response createUppercaseUser = UtilIT.createUser(uppercaseUsername, randomEmailForUppercaseuser);
         createUppercaseUser.prettyPrint();
         createUppercaseUser.then().assertThat()
-                // TODO: consider returning "BAD REQUEST" (400) instead of a 500.
-                .statusCode(INTERNAL_SERVER_ERROR.getStatusCode());
+                .statusCode(BAD_REQUEST.getStatusCode())
+                /**
+                 * Technically, it's the lowercase version that exists but the
+                 * point gets across. There's currently no way to bubble up the
+                 * exact username it's in conflict with, even if we wanted to.
+                 */
+                .body("message", equalTo("username '" + uppercaseUsername + "' already exists"));
+        ;
     }
 
     private Response convertUserFromBcryptToSha1(long idOfBcryptUserToConvert, String password) {

From e046f4c7cce83382d73917d03a4b32ef91c85b1c Mon Sep 17 00:00:00 2001
From: Gustavo Durand
Date: Mon, 29 Apr 2019 16:17:14 -0400
Subject: [PATCH 06/32] Update ChangeUserIdentifierCommand.java

allow changeIdentifier to be called on same user (in order to change case of
username)
---
 .../engine/command/impl/ChangeUserIdentifierCommand.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
index 122aac00ed6..13868432ec5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
@@ -49,7 +49,7 @@ public ChangeUserIdentifierCommand(DataverseRequest aRequest, AuthenticatedUser
     public void executeImpl(CommandContext ctxt) throws CommandException {
 
         AuthenticatedUser authenticatedUserTestNewIdentifier = ctxt.authentication().getAuthenticatedUser(newIdentifier);
-        if (authenticatedUserTestNewIdentifier != null) {
+        if (authenticatedUserTestNewIdentifier != null && !authenticatedUserTestNewIdentifier.equals(au)) {
             String logMsg = " User " + newIdentifier + " already exists. Cannot use this as new identifier";
             throw new IllegalCommandException("Validation of submitted data failed. Details: " + logMsg, this);
         }

From 70eb08ad687ea49c6f845dee0140bade630b7ef7 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Wed, 1 May 2019 10:13:15 -0400
Subject: [PATCH 07/32] #3575 allow user to login with any case of their
 username

---
 src/main/java/edu/harvard/iq/dataverse/LoginPage.java          | 2 +-
 .../dataverse/authorization/providers/builtin/BuiltinUser.java | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
index d5c57311008..b8f2abadbad 100644
--- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
@@ -160,7 +160,7 @@ public String login() {
             logger.info("Credential list is null!");
             return null;
         }
-        for ( FilledCredential fc : filledCredentialsList ) {
+        for ( FilledCredential fc : filledCredentialsList ) {
             authReq.putCredential(fc.getCredential().getKey(), fc.getValue());
         }
         authReq.setIpAddress( dvRequestService.getDataverseRequest().getSourceAddress() );
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
index 142ca23052d..fd7231e827c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
@@ -27,7 +27,7 @@
     @NamedQuery( name="BuiltinUser.findAll",
             query = "SELECT u FROM BuiltinUser u ORDER BY u.userName"),
     @NamedQuery( name="BuiltinUser.findByUserName",
-            query = "SELECT u FROM BuiltinUser u WHERE u.userName=:userName"),
+            query = "SELECT u FROM BuiltinUser u WHERE LOWER(u.userName)=LOWER(:userName)"),
     @NamedQuery( name="BuiltinUser.listByUserNameLike",
             query = "SELECT u FROM BuiltinUser u WHERE u.userName LIKE :userNameLike")
 })

From 5ef2565db12bc48f6b685c3772d915e6334bc5ba Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Wed, 1 May 2019 12:19:23 -0400
Subject: [PATCH 08/32] #3575 update additional Auth user query

---
 .../iq/dataverse/authorization/users/AuthenticatedUser.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
index edf3013804c..449ca56e89e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
@@ -45,14 +45,14 @@
     @NamedQuery( name="AuthenticatedUser.findSuperUsers",
             query="SELECT au FROM AuthenticatedUser au WHERE au.superuser = TRUE"),
     @NamedQuery( name="AuthenticatedUser.findByIdentifier",
-            query="select au from AuthenticatedUser au WHERE au.userIdentifier=:identifier"),
+            query="select au from AuthenticatedUser au WHERE LOWER(au.userIdentifier)=LOWER(:identifier)"),
     @NamedQuery( name="AuthenticatedUser.findByEmail",
             query="select au from AuthenticatedUser au WHERE LOWER(au.email)=LOWER(:email)"),
     @NamedQuery( name="AuthenticatedUser.countOfIdentifier",
             query="SELECT COUNT(a) FROM AuthenticatedUser a WHERE LOWER(a.userIdentifier)=LOWER(:identifier)"),
     @NamedQuery( name="AuthenticatedUser.filter",
             query="select au from AuthenticatedUser au WHERE ("
-                + "au.userIdentifier like :query OR "
+                + "LOWER(au.userIdentifier) like LOWER(:query) OR "
                 + "lower(concat(au.firstName,' ',au.lastName)) like lower(:query))"),
     @NamedQuery( name="AuthenticatedUser.findAdminUser",
             query="select au from AuthenticatedUser au WHERE "

From e3fbaba50093848355720587bcf0775ed98ddb5e Mon Sep 17 00:00:00 2001
From: Derek Murphy
Date: Wed, 1 May 2019 14:12:19 -0400
Subject: [PATCH 09/32] Add release notes for #3575

---
 doc/release-notes/3575-usernames.md | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 doc/release-notes/3575-usernames.md

diff --git a/doc/release-notes/3575-usernames.md b/doc/release-notes/3575-usernames.md
new file mode 100644
index 00000000000..e1ea02d5210
--- /dev/null
+++ b/doc/release-notes/3575-usernames.md
@@ -0,0 +1,4 @@
+* In an effort to prevent accidental duplicate accounts, user spoofing, or other username-based confusion, this release introduces a database constraint that no longer allows usernames that are exactly the same but use different capitalization, e.g. Bob11 vs. bob11. You may need to do some cleanup before upgrading to deal with existing usernames like this.
+* To check whether you have any usernames like this that need cleaning up, run the case insensitive duplicate queries from our Useful Queries doc.
+* Once you identify the usernames that need cleaning up, you should use either merge endpoint (if it’s the same person) or rename endpoint (if they are different people).
+* After the cleanup you can safely upgrade without issue.

From 5b5af3208e7aed181a460b5e2b2bab605215daff Mon Sep 17 00:00:00 2001
From: Derek Murphy
Date: Wed, 1 May 2019 14:15:06 -0400
Subject: [PATCH 10/32] Fix link syntax in 3575 release notes

---
 doc/release-notes/3575-usernames.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/release-notes/3575-usernames.md b/doc/release-notes/3575-usernames.md
index e1ea02d5210..7100c7ba209 100644
--- a/doc/release-notes/3575-usernames.md
+++ b/doc/release-notes/3575-usernames.md
@@ -1,4 +1,4 @@
 * In an effort to prevent accidental duplicate accounts, user spoofing, or other username-based confusion, this release introduces a database constraint that no longer allows usernames that are exactly the same but use different capitalization, e.g. Bob11 vs. bob11. You may need to do some cleanup before upgrading to deal with existing usernames like this.
-* To check whether you have any usernames like this that need cleaning up, run the case insensitive duplicate queries from our Useful Queries doc.
+* To check whether you have any usernames like this that need cleaning up, run the case insensitive duplicate queries from our[Useful Queries doc](https://docs.google.com/document/d/1-Y_iUduSxdDNeK1yiGUxe7t-Md7Fy965jp4o4m1XEoE/edit?usp=sharing "Useful Queries doc").
 * Once you identify the usernames that need cleaning up, you should use either merge endpoint (if it’s the same person) or rename endpoint (if they are different people).
 * After the cleanup you can safely upgrade without issue.
From 5cdd71f614b498529d30e2d0b22c09a286e633e2 Mon Sep 17 00:00:00 2001
From: Derek Murphy
Date: Wed, 1 May 2019 14:15:31 -0400
Subject: [PATCH 11/32] Fix link syntax again for 3575 release notes

---
 doc/release-notes/3575-usernames.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/release-notes/3575-usernames.md b/doc/release-notes/3575-usernames.md
index 7100c7ba209..62734934b20 100644
--- a/doc/release-notes/3575-usernames.md
+++ b/doc/release-notes/3575-usernames.md
@@ -1,4 +1,4 @@
 * In an effort to prevent accidental duplicate accounts, user spoofing, or other username-based confusion, this release introduces a database constraint that no longer allows usernames that are exactly the same but use different capitalization, e.g. Bob11 vs. bob11. You may need to do some cleanup before upgrading to deal with existing usernames like this.
-* To check whether you have any usernames like this that need cleaning up, run the case insensitive duplicate queries from our[Useful Queries doc](https://docs.google.com/document/d/1-Y_iUduSxdDNeK1yiGUxe7t-Md7Fy965jp4o4m1XEoE/edit?usp=sharing "Useful Queries doc").
+* To check whether you have any usernames like this that need cleaning up, run the case insensitive duplicate queries from our [Useful Queries doc](https://docs.google.com/document/d/1-Y_iUduSxdDNeK1yiGUxe7t-Md7Fy965jp4o4m1XEoE/edit?usp=sharing "Useful Queries doc").
 * Once you identify the usernames that need cleaning up, you should use either merge endpoint (if it’s the same person) or rename endpoint (if they are different people).
 * After the cleanup you can safely upgrade without issue.

From 49486edf874c82d3088477f55297d28f129cc8e0 Mon Sep 17 00:00:00 2001
From: Derek Murphy
Date: Wed, 1 May 2019 15:49:02 -0400
Subject: [PATCH 12/32] Update API commands for 3575 release notes

Should be all set now.
---
 doc/release-notes/3575-usernames.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/release-notes/3575-usernames.md b/doc/release-notes/3575-usernames.md
index 62734934b20..896e9910817 100644
--- a/doc/release-notes/3575-usernames.md
+++ b/doc/release-notes/3575-usernames.md
@@ -1,4 +1,4 @@
 * In an effort to prevent accidental duplicate accounts, user spoofing, or other username-based confusion, this release introduces a database constraint that no longer allows usernames that are exactly the same but use different capitalization, e.g. Bob11 vs. bob11. You may need to do some cleanup before upgrading to deal with existing usernames like this.
 * To check whether you have any usernames like this that need cleaning up, run the case insensitive duplicate queries from our [Useful Queries doc](https://docs.google.com/document/d/1-Y_iUduSxdDNeK1yiGUxe7t-Md7Fy965jp4o4m1XEoE/edit?usp=sharing "Useful Queries doc").
-* Once you identify the usernames that need cleaning up, you should use either merge endpoint (if it’s the same person) or rename endpoint (if they are different people).
+* Once you identify the usernames that need cleaning up, you should use either [Merge User Accounts](http://guides.dataverse.org/en/latest/api/native-api.html#merge-user-accounts) (if it’s the same person) or [Change User Identifier](http://guides.dataverse.org/en/latest/api/native-api.html#change-user-identifier) (if they are different people).
 * After the cleanup you can safely upgrade without issue.
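[To see why PATCH 03's migration enforces case-insensitive uniqueness, the sketch below demonstrates the behavior of a functional unique index on lower(useridentifier). The scratch table is hypothetical — the real authenticateduser table has additional required columns — and is only meant to illustrate that the index compares lowercased values, which is also why the application-level check in PATCH 05 lowercases identifiers in its queries.]

    -- Hypothetical scratch table; the real authenticateduser table
    -- has more required columns.
    CREATE TABLE authenticateduser_demo (useridentifier text);
    CREATE UNIQUE INDEX demo_lower_useridentifier
        ON authenticateduser_demo (lower(useridentifier));
    INSERT INTO authenticateduser_demo VALUES ('bob11');  -- succeeds
    INSERT INTO authenticateduser_demo VALUES ('Bob11');  -- fails: the index
        -- sees the same lowercased value and rejects the duplicate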
From c4be5c755a0ce8f9d863dde7e8aed5573f17b117 Mon Sep 17 00:00:00 2001
From: Victoria Lubitch
Date: Thu, 9 May 2019 15:06:00 -0400
Subject: [PATCH 13/32] filemetadata export

---
 .../iq/dataverse/ConfigureFragmentBean.java   |   7 +-
 .../harvard/iq/dataverse/DatasetVersion.java  |   2 +-
 .../iq/dataverse/FileDownloadServiceBean.java |  14 +-
 .../edu/harvard/iq/dataverse/api/Access.java  |  42 +++--
 .../edu/harvard/iq/dataverse/api/Meta.java    |  10 +-
 .../datavariable/VariableMetadata.java        |  10 ++
 .../VariableMetadataDDIParser.java            |   3 +
 .../datavariable/VariableServiceBean.java     |   7 +
 .../export/DDIExportServiceBean.java          | 153 +++++++++++++++---
 .../dataverse/externaltools/ExternalTool.java |   3 +-
 .../externaltools/ExternalToolHandler.java    |  10 +-
 .../harvard/iq/dataverse/util/FileUtil.java   |  17 +-
 .../V4.13.0.1__5822-export-var-meta.sql       |   2 +
 .../file-configure-dropdown-fragment.xhtml    |   4 +-
 .../ExternalToolHandlerTest.java              |  31 +++-
 .../ExternalToolServiceBeanTest.java          |  11 +-
 .../iq/dataverse/util/FileUtilTest.java       |  21 +--
 17 files changed, 271 insertions(+), 76 deletions(-)
 create mode 100644 src/main/resources/db/migration/V4.13.0.1__5822-export-var-meta.sql

diff --git a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
index dcb5648f316..12f05cb1e12 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
@@ -39,6 +39,7 @@ public class ConfigureFragmentBean implements java.io.Serializable{
     private Long fileId = null;
     private ExternalToolHandler toolHandler = null;
     private String messageApi = "";
+    private Long fileMetadataId = null;
 
     @EJB
     DataFileServiceBean datafileService;
@@ -95,7 +96,7 @@ public ExternalToolHandler getConfigurePopupToolHandler() {
 
         }
 
-        toolHandler = new ExternalToolHandler(tool, datafileService.find(fileId), apiToken);
+        toolHandler = new ExternalToolHandler(tool, datafileService.find(fileId), apiToken, datafileService.findFileMetadata(fileMetadataId));
         return toolHandler;
     }
 
@@ -116,8 +117,10 @@ public void generateApiToken() {
 
     }
 
-    public void setConfigureFileId(Long setFileId) {
+    public void setConfigureIds(Long setFileId, Long setFileMetadataId) {
+
         fileId = setFileId;
+        fileMetadataId = setFileMetadataId;
     }
 
     public String getMessageApi() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index 0200d3258b7..ea463e21702 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -1853,7 +1853,7 @@ public String getJsonLd() {
             } else {
                 if (FileUtil.isPubliclyDownloadable(fileMetadata)) {
                     String nullDownloadType = null;
-                    fileObject.add("contentUrl", dataverseSiteUrl + FileUtil.getFileDownloadUrlPath(nullDownloadType, fileMetadata.getDataFile().getId(), false));
+                    fileObject.add("contentUrl", dataverseSiteUrl + FileUtil.getFileDownloadUrlPath(nullDownloadType, fileMetadata.getDataFile().getId(), false, fileMetadata.getId()));
                 }
             }
             fileArray.add(fileObject);
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
index 488e1372522..41d8a3afbe4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
@@ -109,7 +109,7 @@ public void writeGuestbookAndStartBatchDownload(GuestbookResponse guestbookRespo
             writeGuestbookResponseRecord(guestbookResponse);
         }
 
-        redirectToDownloadAPI(guestbookResponse.getFileFormat(), fileId, true);
+        redirectToDownloadAPI(guestbookResponse.getFileFormat(), fileId, true, null);
         return;
     }
 
@@ -138,7 +138,7 @@ public void writeGuestbookAndStartFileDownload(GuestbookResponse guestbookRespon
         }
 
         // Make sure to set the "do not write Guestbook response" flag to TRUE when calling the Access API:
-        redirectToDownloadAPI(format, fileMetadata.getDataFile().getId(), true);
+        redirectToDownloadAPI(format, fileMetadata.getDataFile().getId(), true, fileMetadata.getId());
         logger.fine("issued file download redirect for filemetadata "+fileMetadata.getId()+", datafile "+fileMetadata.getDataFile().getId());
     }
 
@@ -215,8 +215,8 @@ private void redirectToBatchDownloadAPI(String multiFileString, Boolean guestboo
 
     }
 
-    private void redirectToDownloadAPI(String downloadType, Long fileId, boolean guestBookRecordAlreadyWritten) {
-        String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten);
+    private void redirectToDownloadAPI(String downloadType, Long fileId, boolean guestBookRecordAlreadyWritten, Long fileMetadataId) {
+        String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten, fileMetadataId);
         logger.fine("Redirecting to file download url: " + fileDownloadUrl);
         try {
             FacesContext.getCurrentInstance().getExternalContext().redirect(fileDownloadUrl);
@@ -226,7 +226,7 @@ private void redirectToDownloadAPI(String downloadType, Long fileId, boolean gue
     }
 
     private void redirectToDownloadAPI(String downloadType, Long fileId) {
-        redirectToDownloadAPI(downloadType, fileId, true);
+        redirectToDownloadAPI(downloadType, fileId, true, null);
     }
 
     private void redirectToBatchDownloadAPI(String multiFileString, Boolean downloadOriginal){
@@ -258,7 +258,7 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter
         if(dataFile.getFileMetadata()==null) {
             dataFile=datafileService.find(dataFile.getId());
         }
-        ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd);
         // Back when we only had TwoRavens, the downloadType was always "Explore". Now we persist the name of the tool (i.e. "TwoRavens", "Data Explorer", etc.)
         guestbookResponse.setDownloadtype(externalTool.getDisplayName());
         String toolUrl = externalToolHandler.getToolUrlWithQueryParams();
@@ -479,4 +479,4 @@ public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, Auth
 
 
 
-}
\ No newline at end of file
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 1acad397e34..c7aa5261bf2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -182,7 +182,7 @@ public class Access extends AbstractApiBean {
     @Path("datafile/bundle/{fileId}")
     @GET
     @Produces({"application/zip"})
-    public BundleDownloadInstance datafileBundle(@PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @QueryParam("key") String apiToken, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public BundleDownloadInstance datafileBundle(@PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId,@QueryParam("gbrecs") boolean gbrecs, @QueryParam("key") String apiToken, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
 
         GuestbookResponse gbr = null;
 
@@ -207,9 +207,14 @@
 
         DownloadInfo dInfo = new DownloadInfo(df);
         BundleDownloadInstance downloadInstance = new BundleDownloadInstance(dInfo);
-
-        FileMetadata fileMetadata = df.getFileMetadata();
-        DatasetVersion datasetVersion = df.getOwner().getLatestVersion();
+
+        FileMetadata fileMetadata = null;
+
+        if (fileMetadataId == null) {
+            fileMetadata = df.getFileMetadata();
+        } else {
+            fileMetadata = dataFileService.findFileMetadata(fileMetadataId);
+        }
 
         downloadInstance.setFileCitationEndNote(new DataCitation(fileMetadata).toEndNoteString());
         downloadInstance.setFileCitationRIS(new DataCitation(fileMetadata).toRISString());
@@ -223,7 +228,8 @@
                     dfId,
                     outStream,
                     null,
-                    null);
+                    null,
+                    fileMetadataId);
 
             downloadInstance.setFileDDIXML(outStream.toString());
 
@@ -382,8 +388,8 @@ public DownloadInstance datafile(@PathParam("fileId") String fileId, @QueryParam
     @Path("datafile/{fileId}/metadata")
     @GET
     @Produces({"text/xml"})
-    public String tabularDatafileMetadata(@PathParam("fileId") String fileId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
-        return tabularDatafileMetadataDDI(fileId, exclude, include, header, response);
+    public String tabularDatafileMetadata(@PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
+        return tabularDatafileMetadataDDI(fileId, fileMetadataId, exclude, include, header, response);
     }
 
     /*
@@ -393,7 +399,7 @@ public String tabularDatafileMetadata(@PathParam("fileId") String fileId, @Query
     @Path("datafile/{fileId}/metadata/ddi")
     @GET
     @Produces({"text/xml"})
-    public String tabularDatafileMetadataDDI(@PathParam("fileId") String fileId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public String tabularDatafileMetadataDDI(@PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
         String retValue = "";
 
         DataFile dataFile = null;
@@ -406,8 +412,16 @@
         }
 
         response.setHeader("Content-disposition", "attachment; filename=\"dataverse_files.zip\"");
-
-        String fileName = dataFile.getFileMetadata().getLabel().replaceAll("\\.tab$", "-ddi.xml");
+
+        FileMetadata fm = null;
+        if (fileMetadataId == null) {
+            fm = dataFile.getFileMetadata();
+        } else {
+            fm = dataFileService.findFileMetadata(fileMetadataId);
+        }
+
+        String fileName = fm.getLabel().replaceAll("\\.tab$", "-ddi.xml");
+
         response.setHeader("Content-disposition", "attachment; filename=\""+fileName+"\"");
         response.setHeader("Content-Type", "application/xml; name=\""+fileName+"\"");
 
@@ -419,7 +433,8 @@
                     dataFileId,
                     outStream,
                     exclude,
-                    include);
+                    include,
+                    fileMetadataId);
 
             retValue = outStream.toString();
 
@@ -439,7 +454,7 @@
     @GET
     @Produces({ "application/xml" })
-    public String dataVariableMetadataDDI(@PathParam("varId") Long varId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public String dataVariableMetadataDDI(@PathParam("varId") Long varId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
         String retValue = "";
 
         ByteArrayOutputStream outStream = null;
@@ -450,7 +465,8 @@
                     varId,
                     outStream,
                     exclude,
-                    include);
+                    include,
+                    fileMetadataId);
         } catch (Exception e) {
             // For whatever reason we've failed to generate a partial
             // metadata record requested. We simply return an empty string.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
index 842a6f9676e..853340373d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
@@ -73,7 +73,7 @@ public class Meta {
 
     @GET
     @Produces({ "application/xml" })
-    public String variable(@PathParam("varId") Long varId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public String variable(@PathParam("varId") Long varId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
         String retValue = "";
 
         ByteArrayOutputStream outStream = null;
@@ -84,7 +84,8 @@ public String variable(@PathParam("varId") Long varId, @QueryParam("exclude") St
                     varId,
                     outStream,
                     exclude,
-                    include);
+                    include,
+                    fileMetadataId);
         } catch (Exception e) {
             // For whatever reason we've failed to generate a partial
             // metadata record requested. We simply return an empty string.
@@ -103,7 +104,7 @@
     @Path("datafile/{fileId}")
     @GET
     @Produces({"text/xml"})
-    public String datafile(@PathParam("fileId") Long fileId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public String datafile(@PathParam("fileId") Long fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
         String retValue = "";
 
         DataFile dataFile = null;
@@ -129,7 +130,8 @@
                     fileId,
                     outStream,
                     exclude,
-                    include);
+                    include,
+                    fileMetadataId);
 
             retValue = outStream.toString();
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
index 38bf916a431..53c3f0fe28c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
@@ -56,6 +56,12 @@ public class VariableMetadata implements Serializable {
     @Column(columnDefinition="TEXT")
     private String literalquestion;
 
+    /**
+     * postquestion: post question, metadata variable field.
+     */
+    @Column(columnDefinition="TEXT")
+    private String postquestion;
+
     /**
      * interviewinstruction: Interview Instruction, metadata variable field.
      */
@@ -147,6 +153,10 @@ public void setLiteralquestion(String literalquestion) {
         this.literalquestion = literalquestion;
     }
 
+    public String getPostquestion() {return this.postquestion;}
+
+    public void setPostquestion(String postquestion) {this.postquestion = postquestion;}
+
     public String getInterviewinstruction() {
         return this.interviewinstruction;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParser.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParser.java
index 9c3f26b0b25..71d62c132e3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParser.java
@@ -164,6 +164,9 @@ private void processQstn(XMLStreamReader xmlr, VariableMetadata newVM) throws XM
             } else if (xmlr.getLocalName().equals("ivuInstr")) {
                 String text = parseText(xmlr, false);
                 newVM.setInterviewinstruction(text);
+            } else if (xmlr.getLocalName().equals("postQTxt")) {
+                String text = parseText(xmlr, false);
+                newVM.setPostquestion(text);
             }
         } else if (event == XMLStreamConstants.END_ELEMENT) {
             if (xmlr.getLocalName().equals("qstn")) return;
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
index 004f7c1fdd1..3362b5d25e3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
@@ -60,6 +60,13 @@ public List<VariableMetadata> findByDataVarIdAndFileMetaId(Long datVarId, Long m
 
         return query.getResultList();
     }
+
+    public List<VarGroup> findAllGroupsByFileMetadata(Long fileMetaId) {
+        TypedQuery<VarGroup> query = em.createQuery("SELECT object(o) FROM VarGroup as o where o.fileMetadata.id =:fileMetaId", VarGroup.class);
+        query.setParameter("fileMetaId", fileMetaId);
+
+        return query.getResultList();
+    }
 
     /*
      * This is awful!
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
index 11b7bfe0920..6c95e138d5a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
@@ -13,9 +13,10 @@
 import edu.harvard.iq.dataverse.DataTable;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
+import edu.harvard.iq.dataverse.datavariable.CategoryMetadata;
+import edu.harvard.iq.dataverse.datavariable.VarGroup;
 import edu.harvard.iq.dataverse.dataaccess.DataConverter;
-import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator;
+
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.datavariable.VariableRange;
 import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
@@ -24,9 +25,7 @@
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.util.Collection;
-import java.util.Hashtable;
+
 import java.util.List;
 import java.util.Set;
 import java.util.HashSet;
@@ -114,27 +113,27 @@ public void ejbCreate() {
     }
 
     @TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
-    public void exportDataVariable(Long varId, OutputStream os, String partialExclude, String partialInclude) {
+    public void exportDataVariable(Long varId, OutputStream os, String partialExclude, String partialInclude, Long fileMetadataId) {
 
-        export(OBJECT_TAG_VARIABLE, varId, os, partialExclude, partialInclude);
+        export(OBJECT_TAG_VARIABLE, varId, os, partialExclude, partialInclude, fileMetadataId);
     }
 
     @TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
     public void exportDataset(Long datasetId, OutputStream os, String partialExclude, String partialInclude) {
 
-        export(OBJECT_TAG_DATASET, datasetId, os, partialExclude, partialInclude);
+        export(OBJECT_TAG_DATASET, datasetId, os, partialExclude, partialInclude, null);
     }
 
     @TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
-    public void exportDataFile(Long varId, OutputStream os, String partialExclude, String partialInclude) {
-        export(OBJECT_TAG_DATAFILE, varId, os, partialExclude, partialInclude);
+    public void exportDataFile(Long varId, OutputStream os, String partialExclude, String partialInclude, Long fileMetadataId) {
+        export(OBJECT_TAG_DATAFILE, varId, os, partialExclude, partialInclude, fileMetadataId);
     }
 
     /*
      * Workhorse methods, that do all the work:
      */
-    private void export(String objectTag, Long objectId, OutputStream os, String partialExclude, String partialInclude) {
+    private void export(String objectTag, Long objectId, OutputStream os, String partialExclude, String partialInclude, Long fileMetadataId) {
 
         /*
          * Some checks will need to be here, to see if the corresponding dataset
@@ -209,9 +208,9 @@ private void export(String objectTag, Long objectId, OutputStream os, String par
             xmlw.writeStartDocument();
 
             if (OBJECT_TAG_VARIABLE.equals(objectTag)) {
-                createVarDDI(xmlw, excludedFieldSet, includedFieldSet, (DataVariable) dataObject);
+                createVarDDI(xmlw, excludedFieldSet, includedFieldSet, (DataVariable) dataObject, fileMetadataId);
             } else if (OBJECT_TAG_DATAFILE.equals(objectTag)) {
-                createDataFileDDI(xmlw, excludedFieldSet, includedFieldSet, (DataFile) dataObject);
+                createDataFileDDI(xmlw, excludedFieldSet, includedFieldSet, (DataFile) dataObject, fileMetadataId);
             } else if (OBJECT_TAG_DATASET.equals(objectTag)) {
                 createDatasetDDI(xmlw, excludedFieldSet, includedFieldSet, releasedVersion);
             }
@@ -230,11 +229,42 @@
         }
     }
 
-    private void createVarDDI(XMLStreamWriter xmlw, Set<String> excludedFieldSet, Set<String> includedFieldSet, DataVariable dv) throws XMLStreamException {
+    private void createVarGroupDDI(XMLStreamWriter xmlw, Set<String> excludedFieldSet, Set<String> includedFieldSet, VarGroup varGrp) throws XMLStreamException{
+        xmlw.writeStartElement("varGrp");
+        writeAttribute(xmlw, "ID", "VG" + varGrp.getId().toString());
+        String vars = "";
+        Set<DataVariable> varsInGroup = varGrp.getVarsInGroup();
+        for (DataVariable var : varsInGroup) {
+            vars = vars + " v" + var.getId();
+        }
+        vars = vars.trim();
+        writeAttribute(xmlw, "var", vars );
+
+        if (checkField("labl", excludedFieldSet, includedFieldSet)) {
+            if (!StringUtilisEmpty(varGrp.getLabel())) {
+                xmlw.writeStartElement("labl");
+                xmlw.writeCharacters(varGrp.getLabel());
+                xmlw.writeEndElement(); // group label (labl)
+            }
+        }
+        xmlw.writeEndElement(); //varGrp
+    }
+
+    private void createVarDDI(XMLStreamWriter xmlw, Set<String> excludedFieldSet, Set<String> includedFieldSet, DataVariable dv, Long fileMetadataId) throws XMLStreamException {
         xmlw.writeStartElement("var");
         writeAttribute(xmlw, "ID", "v" + dv.getId().toString());
         writeAttribute(xmlw, "name", dv.getName());
+        if (fileMetadataId == null) {
+            fileMetadataId = dv.getDataTable().getDataFile().getFileMetadata().getId();
+        }
+
+        List<VariableMetadata> vmList = variableService.findByDataVarIdAndFileMetaId(dv.getId(), fileMetadataId);
+        VariableMetadata vm = null;
+        if (vmList != null && vmList.size() >0) {
+            vm = vmList.get(0);
+        }
+
         if (dv.getNumberOfDecimalPoints() != null) {
             writeAttribute(xmlw, "dcml", dv.getNumberOfDecimalPoints().toString());
         }
@@ -250,6 +280,15 @@ private void createVarDDI(XMLStreamWriter xmlw, Set excludedFieldSet, Se
             }
         }
 
+        if (vm != null) {
+            if (vm.isIsweightvar()) {
+                writeAttribute(xmlw, "wgt", "wgt");
+            }
+            if (vm.isWeighted() && vm.getWeightvariable() != null) {
+                writeAttribute(xmlw, "wgt-var", "v"+vm.getWeightvariable().getId().toString());
+            }
+        }
+
         // location
         if (checkField("location", excludedFieldSet, includedFieldSet)) {
             xmlw.writeEmptyElement("location");
@@ -268,11 +307,16 @@ private void createVarDDI(XMLStreamWriter xmlw, Set excludedFieldSet, Se
 
         // labl
         if (checkField("labl", excludedFieldSet, includedFieldSet)) {
-            if (!StringUtilisEmpty(dv.getLabel())) {
+            if (vmList.size() == 0 || StringUtilisEmpty(vmList.get(0).getLabel()) && !StringUtilisEmpty(dv.getLabel())) {
                 xmlw.writeStartElement("labl");
                 writeAttribute(xmlw, "level", "variable");
                 xmlw.writeCharacters(dv.getLabel());
                 xmlw.writeEndElement(); //labl
+            } else if (vm != null && !StringUtilisEmpty(vm.getLabel())) {
+                xmlw.writeStartElement("labl");
+                writeAttribute(xmlw, "level", "variable");
+                xmlw.writeCharacters(vmList.get(0).getLabel());
+                xmlw.writeEndElement(); //labl
             }
         }
 
@@ -313,12 +357,7 @@ private void createVarDDI(XMLStreamWriter xmlw, Set excludedFieldSet, Se
 
         //universe
         if (checkField("universe", excludedFieldSet, includedFieldSet)) {
-            FileMetadata latestFm = dv.getDataTable().getDataFile().getFileMetadata();
-
-            List<VariableMetadata> vmList = variableService.findByDataVarIdAndFileMetaId(dv.getId(),latestFm.getId());
-
-            if (vmList != null && vmList.size() >0) {
-                VariableMetadata vm = vmList.get(0);
+            if (vm != null) {
                 if (!StringUtilisEmpty(vm.getUniverse())) {
                     xmlw.writeStartElement("universe");
                     xmlw.writeCharacters(vm.getUniverse());
@@ -374,6 +413,19 @@ private void createVarDDI(XMLStreamWriter xmlw, Set excludedFieldSet, Se
                     }
                     xmlw.writeEndElement(); //catStat
                 }
+                //catStat weighted freq
+                if (vm != null && vm.isWeighted()) {
+                    for (CategoryMetadata cm : vm.getCategoriesMetadata()) {
+                        if (cm.getCategory().getValue().equals(cat.getValue())) {
+                            xmlw.writeStartElement("catStat");
+                            writeAttribute(xmlw, "wgtd", "wgtd");
+                            writeAttribute(xmlw, "type", "freq");
+                            xmlw.writeCharacters(cm.getWfreq().toString());
+                            xmlw.writeEndElement(); //catStat
+                            break;
+                        }
+                    }
+                }
 
                 xmlw.writeEndElement(); //catgry
             }
@@ -403,12 +455,45 @@ private void createVarDDI(XMLStreamWriter xmlw, Set excludedFieldSet, Se
                 xmlw.writeCharacters(dv.getUnf());
                 xmlw.writeEndElement(); //notes
             }
+            if (checkField("notes", excludedFieldSet, includedFieldSet)) {
+                if (vm != null) {
+                    if (!StringUtilisEmpty(vm.getNotes())) {
+                        xmlw.writeStartElement("notes");
+                        xmlw.writeCData(vm.getNotes());
+                        xmlw.writeEndElement(); //notes CDATA
+                    }
+                }
+            }
+
+            if (checkField("qstn", excludedFieldSet, includedFieldSet)) {
+                if (vm != null) {
+                    if (!StringUtilisEmpty(vm.getLiteralquestion()) || !StringUtilisEmpty(vm.getInterviewinstruction()) || !StringUtilisEmpty(vm.getPostquestion())) {
+                        xmlw.writeStartElement("qstn");
+                        if (!StringUtilisEmpty(vm.getLiteralquestion())) {
+                            xmlw.writeStartElement("qstnLit");
+                            xmlw.writeCharacters(vm.getLiteralquestion());
+                            xmlw.writeEndElement(); // qstnLit
+                        }
+                        if (!StringUtilisEmpty(vm.getInterviewinstruction())) {
+                            xmlw.writeStartElement("ivuInstr");
+                            xmlw.writeCharacters(vm.getInterviewinstruction());
+                            xmlw.writeEndElement(); //ivuInstr
+                        }
+                        if (!StringUtilisEmpty(vm.getPostquestion())) {
+                            xmlw.writeStartElement("postQTxt");
+                            xmlw.writeCharacters(vm.getPostquestion());
+                            xmlw.writeEndElement(); //ivuInstr
+                        }
+                        xmlw.writeEndElement(); //qstn
+                    }
+                }
+            }
 
         xmlw.writeEndElement(); //var
 
     }
 
-    private void createDataFileDDI(XMLStreamWriter xmlw, Set<String> excludedFieldSet, Set<String> includedFieldSet, DataFile df) throws XMLStreamException {
+    private void createDataFileDDI(XMLStreamWriter xmlw, Set<String> excludedFieldSet, Set<String> includedFieldSet, DataFile df, Long fileMetadataId) throws XMLStreamException {
         /* This method will create both the <fileDscr> and <dataDscr><var>
          * portions of the DDI that describe the tabular data contained in
         * the file, the file-, datatable- and variable-level metadata; or
@@ -430,6 +515,11 @@ private void createDataFileDDI(XMLStreamWriter xmlw, Set excludedFieldSe
 
         DataTable dt = fileService.findDataTableByFileId(df.getId());
 
+        FileMetadata latestFm = df.getFileMetadata();
+        if (fileMetadataId == null) {
+            fileMetadataId = latestFm.getId();
+        }
+
         if (checkField("fileDscr", excludedFieldSet, includedFieldSet)) {
             createFileDscr(xmlw, excludedFieldSet, null, df, dt);
         }
@@ -437,6 +527,15 @@ private void createDataFileDDI(XMLStreamWriter xmlw, Set excludedFieldSe
         // And now, the variables:
         xmlw.writeStartElement("dataDscr");
 
+        if (checkField("varGrp", excludedFieldSet, includedFieldSet)) {
+
+            List<VarGroup> varGroups = variableService.findAllGroupsByFileMetadata(fileMetadataId);
+
+            for (VarGroup varGrp : varGroups) {
+                createVarGroupDDI(xmlw, excludedFieldSet, null, varGrp);
+            }
+        }
+
         if (checkField("var", excludedFieldSet, includedFieldSet)) {
             List<DataVariable> vars = variableService.findByDataTableId(dt.getId());
             if (checkField("catgry", excludedFieldSet, includedFieldSet)) {
@@ -446,7 +545,7 @@ private void createDataFileDDI(XMLStreamWriter xmlw, Set excludedFieldSe
             }
 
             for (DataVariable var : vars) {
-                createVarDDI(xmlw, excludedFieldSet, null, var);
+                createVarDDI(xmlw, excludedFieldSet, null, var, fileMetadataId);
             }
         }
 
@@ -530,8 +629,14 @@ private void createDatasetDDI(XMLStreamWriter xmlw, Set excludedFieldSet
                 DataTable dt = fileService.findDataTableByFileId(fileMetadata.getDataFile().getId());
                 List<DataVariable> vars = variableService.findByDataTableId(dt.getId());
 
+                List<VarGroup> varGroups = variableService.findAllGroupsByFileMetadata(fileMetadata.getId());
+
+                for (VarGroup varGrp : varGroups) {
+                    createVarGroupDDI(xmlw, excludedFieldSet, null, varGrp);
+                }
+
                 for (DataVariable var : vars) {
-                    createVarDDI(xmlw, excludedFieldSet, null, var);
+                    createVarDDI(xmlw, excludedFieldSet, null, var, fileMetadata.getId());
                 }
             }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
index d3c36675c2e..005f61d6d38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
@@ -194,7 +194,8 @@ public enum ReservedWord {
         SITE_URL("siteUrl"),
         API_TOKEN("apiToken"),
         DATASET_ID("datasetId"),
-        DATASET_VERSION("datasetVersion");
+        DATASET_VERSION("datasetVersion"),
+        FILE_METADATA_ID("fileMetadataId");
 
         private final String text;
         private final String START = "{";
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
index ca784f36697..c177442ee81 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
@@ -28,6 +28,7 @@ public class ExternalToolHandler {
     private final ExternalTool externalTool;
     private final DataFile dataFile;
     private final Dataset dataset;
+    private final FileMetadata fileMetadata;
 
     private ApiToken apiToken;
 
@@ -37,7 +38,7 @@ public class ExternalToolHandler {
      * @param apiToken The apiToken can be null because "explore" tools can be
      * used anonymously.
      */
-    public ExternalToolHandler(ExternalTool externalTool, DataFile dataFile, ApiToken apiToken) {
+    public ExternalToolHandler(ExternalTool externalTool, DataFile dataFile, ApiToken apiToken, FileMetadata fileMetadata) {
         this.externalTool = externalTool;
         if (dataFile == null) {
             String error = "A DataFile is required.";
@@ -47,12 +48,17 @@ public ExternalToolHandler(ExternalTool externalTool, DataFile dataFile, ApiToke
         this.dataFile = dataFile;
         this.apiToken = apiToken;
         dataset = getDataFile().getFileMetadata().getDatasetVersion().getDataset();
+        this.fileMetadata = fileMetadata;
     }
 
     public DataFile getDataFile() {
         return dataFile;
     }
 
+    public FileMetadata getFileMetadata() {
+        return fileMetadata;
+    }
+
     public ApiToken getApiToken() {
         return apiToken;
     }
@@ -109,6 +115,8 @@ private String getQueryParam(String key, String value) {
                 // version.
                 }
                 return key + "=" + version;
+            case FILE_METADATA_ID:
+                return key + "=" + fileMetadata.getId();
             default:
                 break;
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index 0725fafd39a..f4342f6ab7b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -1364,10 +1364,14 @@ public static String getPublicDownloadUrl(String dataverseSiteUrl, String persis
     /**
     * The FileDownloadServiceBean operates on file IDs, not DOIs.
     */
-    public static String getFileDownloadUrlPath(String downloadType, Long fileId, boolean gbRecordsWritten) {
+    public static String getFileDownloadUrlPath(String downloadType, Long fileId, boolean gbRecordsWritten, Long fileMetadataId) {
         String fileDownloadUrl = "/api/access/datafile/" + fileId;
         if (downloadType != null && downloadType.equals("bundle")) {
-            fileDownloadUrl = "/api/access/datafile/bundle/" + fileId;
+            if (fileMetadataId == null) {
+                fileDownloadUrl = "/api/access/datafile/bundle/" + fileId;
+            } else {
+                fileDownloadUrl = "/api/access/datafile/bundle/" + fileId + "?fileMetadataId=" + fileMetadataId;
+            }
         }
         if (downloadType != null && downloadType.equals("original")) {
             fileDownloadUrl = "/api/access/datafile/" + fileId + "?format=original";
@@ -1376,13 +1380,18 @@ public static String getFileDownloadUrlPath(String downloadType, Long fileId, bo
             fileDownloadUrl = "/api/access/datafile/" + fileId + "?format=RData";
         }
         if (downloadType != null && downloadType.equals("var")) {
-            fileDownloadUrl = "/api/access/datafile/" + fileId + "/metadata";
+            if (fileMetadataId == null) {
+                fileDownloadUrl = "/api/access/datafile/" + fileId + "/metadata";
+            } else {
+                fileDownloadUrl = "/api/access/datafile/" + fileId + "/metadata?fileMetadataId=" + fileMetadataId;
+            }
         }
         if (downloadType != null && downloadType.equals("tab")) {
             fileDownloadUrl = "/api/access/datafile/" + fileId + "?format=tab";
         }
         if (gbRecordsWritten) {
-            if (downloadType != null && (downloadType.equals("original") || downloadType.equals("RData") || downloadType.equals("tab"))) {
+            if (downloadType != null && ((downloadType.equals("original") || downloadType.equals("RData") || downloadType.equals("tab")) ||
+                    ((downloadType.equals("var") || downloadType.equals("bundle") ) && fileMetadataId != null))) {
                 fileDownloadUrl += "&gbrecs=true";
             } else {
                 fileDownloadUrl += "?gbrecs=true";
diff --git a/src/main/resources/db/migration/V4.13.0.1__5822-export-var-meta.sql b/src/main/resources/db/migration/V4.13.0.1__5822-export-var-meta.sql
new file mode 100644
index 00000000000..7c43ac29d4d
--- /dev/null
+++ b/src/main/resources/db/migration/V4.13.0.1__5822-export-var-meta.sql
@@ -0,0 +1,2 @@
+ALTER TABLE variablemetadata
+ADD COLUMN postquestion text;
diff --git a/src/main/webapp/file-configure-dropdown-fragment.xhtml b/src/main/webapp/file-configure-dropdown-fragment.xhtml
index d23e39bf38c..9a5bd19506e 100644
--- a/src/main/webapp/file-configure-dropdown-fragment.xhtml
+++ b/src/main/webapp/file-configure-dropdown-fragment.xhtml
@@ -30,7 +30,7 @@
                         update="@([id$=configureToolPopup])"
                         action="#{configureFragmentBean.setConfigurePopupTool(tool)}"
                         oncomplete="PF('configureToolPopup').show()">
-
+
                         #{tool.displayName}
@@ -51,4 +51,4 @@
 
 
 
-
\ No newline at end of file
+
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
index d05f6e75d8d..bfa00aac677 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
@@ -34,9 +34,10 @@ public void testGetToolUrlWithOptionalQueryParameters() {
                 .build().toString());
         DataFile nullDataFile = null;
         ApiToken nullApiToken = null;
+        FileMetadata nullFileMetadata = null;
         Exception expectedException1 = null;
         try {
-            ExternalToolHandler externalToolHandler1 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken);
+            ExternalToolHandler externalToolHandler1 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata);
         } catch (Exception ex) {
             expectedException1 = ex;
         }
@@ -56,7 +57,7 @@ public void testGetToolUrlWithOptionalQueryParameters() {
                 .build().toString());
         Exception expectedException2 = null;
         try {
-            ExternalToolHandler externalToolHandler2 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken);
+            ExternalToolHandler externalToolHandler2 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata);
         } catch (Exception ex) {
             expectedException2 = ex;
         }
@@ -86,11 +87,31 @@ public void testGetToolUrlWithOptionalQueryParameters() {
         dataFile.setFileMetadatas(fmdl);
         ApiToken apiToken = new ApiToken();
         apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");
-        ExternalToolHandler externalToolHandler3 = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        ExternalToolHandler externalToolHandler3 = new ExternalToolHandler(externalTool, dataFile, apiToken, nullFileMetadata);
         String result3 = externalToolHandler3.getQueryParametersForUrl();
         System.out.println("result3: " + result3);
         assertEquals("?key1=42&key2=7196b5ce-f200-4286-8809-03ffdbc255d7", result3);
 
+        // Three query parameters, all reserved words, one is {fileId} which is required.
+        fmd.setId(2L);
+        externalTool.setToolParameters(Json.createObjectBuilder()
+                .add("queryParameters", Json.createArrayBuilder()
+                        .add(Json.createObjectBuilder()
+                                .add("key1", "{fileId}")
+                        )
+                        .add(Json.createObjectBuilder()
+                                .add("key2", "{apiToken}")
+                        )
+                        .add(Json.createObjectBuilder()
+                                .add("key3", "{fileMetadataId}")
+                        )
+                )
+                .build().toString());
+        ExternalToolHandler externalToolHandler6 = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd);
+        String result6 = externalToolHandler6.getQueryParametersForUrl();
+        System.out.println("result6: " + result6);
+        assertEquals("?key1=42&key2=7196b5ce-f200-4286-8809-03ffdbc255d7&key3=2", result6);
+
         // Two query parameters, both reserved words, no apiToken
         externalTool.setToolParameters(Json.createObjectBuilder()
                 .add("queryParameters", Json.createArrayBuilder()
@@ -102,7 +123,7 @@ public void testGetToolUrlWithOptionalQueryParameters() {
                         )
                 )
                 .build().toString());
-        ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, nullApiToken);
+        ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, nullApiToken, nullFileMetadata);
         String result4 = externalToolHandler4.getQueryParametersForUrl();
         System.out.println("result4: " + result4);
         assertEquals("?key1=42", result4);
@@ -120,7 +141,7 @@ public void testGetToolUrlWithOptionalQueryParameters() {
                 .build().toString());
         Exception expectedException = null;
         try {
-            ExternalToolHandler externalToolHandler5 = new ExternalToolHandler(externalTool, dataFile, nullApiToken);
+            ExternalToolHandler externalToolHandler5 = new ExternalToolHandler(externalTool, dataFile, nullApiToken, nullFileMetadata);
             String result5 = externalToolHandler5.getQueryParametersForUrl();
             System.out.println("result5: " + result5);
         } catch (Exception ex) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
index 9212aff2fe4..54462b3ca0b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
@@ -25,6 +25,7 @@ public void testfindAll() {
         DataFile dataFile = new DataFile();
         dataFile.setId(42l);
         FileMetadata fmd = new FileMetadata();
+        fmd.setId(2L);
         DatasetVersion dv = new DatasetVersion();
         Dataset ds = new Dataset();
         dv.setDataset(ds);
@@ -39,7 +40,7 @@ public void testfindAll() {
         apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");
         ExternalTool.Type type = ExternalTool.Type.EXPLORE;
         ExternalTool externalTool = new ExternalTool("displayName", "description", type, "http://foo.com", "{}", DataFileServiceBean.MIME_TYPE_TSV_ALT);
-        ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd);
         List<ExternalTool> externalTools = new ArrayList<>();
         externalTools.add(externalTool);
         List<ExternalTool> availableExternalTools = ExternalToolServiceBean.findExternalToolsByFile(externalTools, dataFile);
@@ -61,6 +62,9 @@ public void testParseAddExternalToolInput() {
                 .add(Json.createObjectBuilder()
                         .add("key", "{apiToken}")
                         .build())
+                .add(Json.createObjectBuilder()
+                        .add("fileMetadataId", "{fileMetadataId}")
+                        .build())
                 .build())
                 .build());
         job.add(ExternalTool.CONTENT_TYPE, DataFileServiceBean.MIME_TYPE_TSV_ALT);
@@ -71,6 +75,7 @@ public void testParseAddExternalToolInput() {
         DataFile dataFile = new DataFile();
         dataFile.setId(42l);
         FileMetadata fmd = new FileMetadata();
+        fmd.setId(2L);
         DatasetVersion dv = new DatasetVersion();
         Dataset ds = new Dataset();
         dv.setDataset(ds);
@@ -80,10 +85,10 @@ public void testParseAddExternalToolInput() {
         dataFile.setFileMetadatas(fmdl);
         ApiToken apiToken = new ApiToken();
         apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");
-        ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken);
+        ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken,fmd);
         String toolUrl = externalToolHandler.getToolUrlWithQueryParams();
         System.out.println("result: " + toolUrl);
-        assertEquals("http://awesometool.com?fileid=42&key=7196b5ce-f200-4286-8809-03ffdbc255d7", toolUrl);
+        assertEquals("http://awesometool.com?fileid=42&key=7196b5ce-f200-4286-8809-03ffdbc255d7&fileMetadataId=2", toolUrl);
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
index d2f4a636845..ced06f74d6e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
@@ -219,15 +219,18 @@ public void testIsPubliclyDownloadable2() {
     @Test
     public void testgetFileDownloadUrl() {
         Long fileId = 42l;
-        assertEquals("/api/access/datafile/42", FileUtil.getFileDownloadUrlPath(null, fileId, false));
-        assertEquals("/api/access/datafile/42", FileUtil.getFileDownloadUrlPath("", fileId, false));
-        assertEquals("/api/access/datafile/bundle/42", FileUtil.getFileDownloadUrlPath("bundle", fileId, false));
-        assertEquals("/api/access/datafile/42?format=original", FileUtil.getFileDownloadUrlPath("original", fileId, false));
-        assertEquals("/api/access/datafile/42?format=RData", FileUtil.getFileDownloadUrlPath("RData", fileId, false));
-        assertEquals("/api/access/datafile/42/metadata", FileUtil.getFileDownloadUrlPath("var", fileId, false));
-        assertEquals("/api/access/datafile/42?format=tab", FileUtil.getFileDownloadUrlPath("tab", fileId, false));
-        assertEquals("/api/access/datafile/42?format=tab&gbrecs=true", FileUtil.getFileDownloadUrlPath("tab", fileId,
true)); - assertEquals("/api/access/datafile/42?gbrecs=true", FileUtil.getFileDownloadUrlPath(null, fileId, true)); + Long fileMetadataId = 2L; + assertEquals("/api/access/datafile/42", FileUtil.getFileDownloadUrlPath(null, fileId, false, null)); + assertEquals("/api/access/datafile/42", FileUtil.getFileDownloadUrlPath("", fileId, false, null)); + assertEquals("/api/access/datafile/bundle/42", FileUtil.getFileDownloadUrlPath("bundle", fileId, false, null)); + assertEquals("/api/access/datafile/bundle/42?fileMetadataId=2", FileUtil.getFileDownloadUrlPath("bundle", fileId, false, fileMetadataId)); + assertEquals("/api/access/datafile/42?format=original", FileUtil.getFileDownloadUrlPath("original", fileId, false, null)); + assertEquals("/api/access/datafile/42?format=RData", FileUtil.getFileDownloadUrlPath("RData", fileId, false, null)); + assertEquals("/api/access/datafile/42/metadata", FileUtil.getFileDownloadUrlPath("var", fileId, false, null)); + assertEquals("/api/access/datafile/42/metadata?fileMetadataId=2", FileUtil.getFileDownloadUrlPath("var", fileId, false, fileMetadataId)); + assertEquals("/api/access/datafile/42?format=tab", FileUtil.getFileDownloadUrlPath("tab", fileId, false, null)); + assertEquals("/api/access/datafile/42?format=tab&gbrecs=true", FileUtil.getFileDownloadUrlPath("tab", fileId, true, null)); + assertEquals("/api/access/datafile/42?gbrecs=true", FileUtil.getFileDownloadUrlPath(null, fileId, true, null)); } @Test From 9e072ad3e8cb888cd909f1b465eefb97bcb1a9d0 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 13 May 2019 11:07:19 -0400 Subject: [PATCH 14/32] API call to add language zip file --- .../dataverse/api/DatasetFieldServiceApi.java | 91 +++++++++++++++++++ 1 file changed, 91 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index a33d3ee1ea6..f8e43077bec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -17,6 +17,9 @@ import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; import javax.ejb.EJB; @@ -31,6 +34,8 @@ import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.core.Response; + +import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.asJsonArray; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -40,6 +45,17 @@ import javax.persistence.TypedQuery; import javax.ws.rs.core.Response.Status; +import java.io.BufferedInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.util.Enumeration; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; + @Path("admin/datasetfield") public class DatasetFieldServiceApi extends AbstractApiBean { @@ -403,4 +419,79 @@ private String parseControlledVocabulary(String[] values) { datasetFieldService.save(cvv); return cvv.getStrValue(); } + + + @POST + @Consumes("application/zip") + @Path("loadpropertyfiles") + public Response loadLanguagePropertyFile(File inputFile) { + try + { + ZipFile file = new ZipFile(inputFile); + 
FileSystem fileSystem = FileSystems.getDefault(); + //Get file entries + Enumeration entries = file.entries(); + + //We will unzip files in this folder + String dataverseLangDirectory = getDataverseLangDirectory(); + + System.out.println(" ==== dataverseLangDirectory ===== " + dataverseLangDirectory); + + //Iterate over entries + while (entries.hasMoreElements()) + { + ZipEntry entry = entries.nextElement(); + System.out.println(" ==== entry.getName() ===== " + entry.getName()); + if (entry.isDirectory()) + { + System.out.println("Creating Directory:" + dataverseLangDirectory + entry.getName()); + Files.createDirectories(fileSystem.getPath(dataverseLangDirectory + "/" + entry.getName())); + } + //Else create the file + else { + String dataverseLangFileName = dataverseLangDirectory + "/" + entry.getName(); + java.nio.file.Path uncompressedFilePath = fileSystem.getPath(dataverseLangFileName); + + System.out.println("======Creating file:" + uncompressedFilePath ); + + + Files.createFile(uncompressedFilePath); + FileOutputStream fileOutput = new FileOutputStream(dataverseLangFileName); + + InputStream is = file.getInputStream(entry); + BufferedInputStream bis = new BufferedInputStream(is); + + while (bis.available() > 0) { + fileOutput.write(bis.read()); + } + fileOutput.close(); + } + } + } + catch(IOException e) + { + e.printStackTrace(); + } + + return Response.status(200).entity("Uploaded the file successfully ").build(); + } + + public static String getDataverseLangDirectory() { + String dataverseLangDirectory = System.getProperty("dataverse.lang.directory"); + if (dataverseLangDirectory == null || dataverseLangDirectory.equals("")) { + dataverseLangDirectory = "/tmp/files"; + } + + if (!Files.exists(Paths.get(dataverseLangDirectory))) { + try { + Files.createDirectories(Paths.get(dataverseLangDirectory)); + } catch (IOException ex) { + logger.severe("Failed to create dataverseLangDirectory: " + dataverseLangDirectory ); + return null; + } + } + + return dataverseLangDirectory; + } + } From b3a4ae2554cde253db25db6894137a41a3294ce4 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 14 May 2019 12:36:22 -0400 Subject: [PATCH 15/32] removed unwanted comments --- .../dataverse/api/DatasetFieldServiceApi.java | 40 +++++-------------- 1 file changed, 9 insertions(+), 31 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index f8e43077bec..8882742d20c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -17,9 +17,6 @@ import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; import javax.ejb.EJB; @@ -47,11 +44,9 @@ import java.io.BufferedInputStream; import java.io.FileOutputStream; -import java.io.IOException; import java.io.InputStream; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; import java.nio.file.Files; +import java.nio.file.Paths; import java.util.Enumeration; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; @@ -428,49 +423,32 @@ public Response loadLanguagePropertyFile(File inputFile) { try { ZipFile file = new ZipFile(inputFile); - FileSystem fileSystem = FileSystems.getDefault(); //Get file entries Enumeration entries = file.entries(); //We 
will unzip files in this folder String dataverseLangDirectory = getDataverseLangDirectory(); - System.out.println(" ==== dataverseLangDirectory ===== " + dataverseLangDirectory); - //Iterate over entries while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); - System.out.println(" ==== entry.getName() ===== " + entry.getName()); - if (entry.isDirectory()) - { - System.out.println("Creating Directory:" + dataverseLangDirectory + entry.getName()); - Files.createDirectories(fileSystem.getPath(dataverseLangDirectory + "/" + entry.getName())); - } - //Else create the file - else { - String dataverseLangFileName = dataverseLangDirectory + "/" + entry.getName(); - java.nio.file.Path uncompressedFilePath = fileSystem.getPath(dataverseLangFileName); - - System.out.println("======Creating file:" + uncompressedFilePath ); - + String dataverseLangFileName = dataverseLangDirectory + "/" + entry.getName(); + FileOutputStream fileOutput = new FileOutputStream(dataverseLangFileName); - Files.createFile(uncompressedFilePath); - FileOutputStream fileOutput = new FileOutputStream(dataverseLangFileName); + InputStream is = file.getInputStream(entry); + BufferedInputStream bis = new BufferedInputStream(is); - InputStream is = file.getInputStream(entry); - BufferedInputStream bis = new BufferedInputStream(is); - - while (bis.available() > 0) { - fileOutput.write(bis.read()); - } - fileOutput.close(); + while (bis.available() > 0) { + fileOutput.write(bis.read()); } + fileOutput.close(); } } catch(IOException e) { e.printStackTrace(); + return Response.status(500).entity("Internal server error. More details available at the server logs.").build(); } return Response.status(200).entity("Uploaded the file successfully ").build(); From 793d9f068be1ad1537a2d33bff5b77e0c8972dac Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Tue, 14 May 2019 13:51:58 -0400 Subject: [PATCH 16/32] Out with the Waffle, in with the Github Projects We mention Waffle in a few places in our docs. Since it's going away (#5651) we need to remove the references and (where appropriate) replace with a new process powered by Github Projects. 
--- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 3e1c61a52df..1046cef3b8c 100644 --- a/README.md +++ b/README.md @@ -17,8 +17,6 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi [![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg?raw=true "Dataverse Project")](http://dataverse.org) -[![Waffle.io - Columns and their card count](https://badge.waffle.io/IQSS/dataverse.svg?columns=all)](https://waffle.io/IQSS/dataverse) - [![Build Status](https://travis-ci.org/IQSS/dataverse.svg?branch=develop)](https://travis-ci.org/IQSS/dataverse) [![Coverage Status](https://coveralls.io/repos/IQSS/dataverse/badge.svg?branch=develop&service=github)](https://coveralls.io/github/IQSS/dataverse?branch=develop) [dataverse.org]: https://dataverse.org From fddd18a337c6bcc0d92ac608235b1f3db0fe828b Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Tue, 14 May 2019 13:59:35 -0400 Subject: [PATCH 17/32] Update version-control.rst --- doc/sphinx-guides/source/developers/version-control.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index b6a81ad7676..c6b19885f27 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -62,7 +62,7 @@ For guidance on which issue to work on, please ask! Also, see https://github.com Let's say you want to tackle https://github.com/IQSS/dataverse/issues/3728 which points out a typo in a page of Dataverse's documentation. -If you tell us your GitHub username we are happy to add you to the "read only" team at https://github.com/orgs/IQSS/teams/dataverse-readonly/members so that we can assign the issue to you while you're working on it. You can also tell us if you'd like to be added to the `Dataverse Community Contributors spreadsheet `_ and the `Dev Efforts by the Dataverse Community spreadsheet `_. +If you tell us your GitHub username we are happy to add you to the "read only" team at https://github.com/orgs/IQSS/teams/dataverse-readonly/members so that we can assign the issue to you while you're working on it. You can also tell us if you'd like to be added to the `Dataverse Community Contributors spreadsheet `_. Create a New Branch off the develop Branch ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -82,14 +82,14 @@ Push your feature branch to your fork of Dataverse. Your git command may look so Make a Pull Request ~~~~~~~~~~~~~~~~~~~ -Make a pull request to get approval to merge your changes into the develop branch. Feedback on the pull request template we use is welcome! The "connects to #3728" syntax is important because it's used at https://waffle.io/IQSS/dataverse to associate pull requests with issues. +Make a pull request to get approval to merge your changes into the develop branch. Feedback on the pull request template we use is welcome! The "connects to #3728" syntax is important because it's used at https://github.com/orgs/IQSS/projects/2 to associate pull requests with issues. 
Here's an example of a pull request for issue #3728: https://github.com/IQSS/dataverse/pull/3827 Make Sure Your Pull Request Has Been Advanced to Code Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Now that you've made your pull request, your goal is to make sure it appears in the "Code Review" column at https://waffle.io/IQSS/dataverse +Now that you've made your pull request, your goal is to make sure it appears in the "Code Review" column at https://github.com/orgs/IQSS/projects/2 Look at https://github.com/IQSS/dataverse/blob/master/CONTRIBUTING.md for various ways to reach out to developers who have enough access to the GitHub repo to move your issue and pull request to the "Code Review" column. From ecb6b091d3c9a97847bae8ae7b64e8ebc418e79b Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Tue, 14 May 2019 14:01:09 -0400 Subject: [PATCH 18/32] Replacing waffle in the dev guide intro --- doc/sphinx-guides/source/developers/intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index 98ccc61285e..e82f44cae27 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -34,7 +34,7 @@ For the Dataverse development roadmap, please see https://dataverse.org/goals-ro Kanban Board ------------ -You can get a sense of what's currently in flight (in dev, in QA, etc.) by looking at https://waffle.io/IQSS/dataverse +You can get a sense of what's currently in flight (in dev, in QA, etc.) by looking at https://github.com/orgs/IQSS/projects/2 Issue Tracker ------------- From 24237afd738474bf67fe6fb8012e860cc8d40414 Mon Sep 17 00:00:00 2001 From: Danny Brooke Date: Tue, 14 May 2019 14:02:29 -0400 Subject: [PATCH 19/32] replacing waffle with github projects --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 83b7c2d0cea..a747d384f83 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -57,7 +57,7 @@ If you are interested in working on the main Dataverse code, great! Before you s Please read http://guides.dataverse.org/en/latest/developers/version-control.html to understand how we use the "git flow" model of development and how we will encourage you to create a GitHub issue (if it doesn't exist already) to associate with your pull request. That page also includes tips on making a pull request. -After making your pull request, your goal should be to help it advance through our kanban board at https://waffle.io/IQSS/dataverse . If no one has moved your pull request to the code review column in a timely manner, please reach out. Thanks! +After making your pull request, your goal should be to help it advance through our kanban board at https://github.com/orgs/IQSS/projects/2 . If no one has moved your pull request to the code review column in a timely manner, please reach out. Thanks! 
[dataverse-community Google Group]: https://groups.google.com/group/dataverse-community [Community Call]: https://dataverse.org/community-calls From ef4793c22e0094bcab6e82489bb637a18697cee2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 14 May 2019 14:23:38 -0400 Subject: [PATCH 20/32] We don't need "connects to" anymore #5651 --- PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md index 2c834088c1f..d8c6871a0c3 100644 --- a/PULL_REQUEST_TEMPLATE.md +++ b/PULL_REQUEST_TEMPLATE.md @@ -4,7 +4,7 @@ Welcome! New contributors should at least glance at [CONTRIBUTING.md](/CONTRIBUT ## Related Issues -- connects to #ISSUE_NUMBER: ISSUE_TITLE +- #ISSUE_NUMBER: ISSUE_TITLE ## Pull Request Checklist From f2ee26ca2fd16ac7548bf72af327a35e079edadd Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 14 May 2019 14:26:03 -0400 Subject: [PATCH 21/32] remove another reference to "connects to" #5651 --- doc/sphinx-guides/source/developers/version-control.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index c6b19885f27..f3611cb3f48 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -82,7 +82,7 @@ Push your feature branch to your fork of Dataverse. Your git command may look so Make a Pull Request ~~~~~~~~~~~~~~~~~~~ -Make a pull request to get approval to merge your changes into the develop branch. Feedback on the pull request template we use is welcome! The "connects to #3728" syntax is important because it's used at https://github.com/orgs/IQSS/projects/2 to associate pull requests with issues. +Make a pull request to get approval to merge your changes into the develop branch. Feedback on the pull request template we use is welcome! Here's an example of a pull request for issue #3728: https://github.com/IQSS/dataverse/pull/3827 From 5a8c18df1d4f1ca1a8c8b7eaf1f530d35bccbb2a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 15 May 2019 15:14:50 -0400 Subject: [PATCH 22/32] add Internationalization section to guides #5806 --- .../source/installation/config.rst | 87 +++++++++++++++++-- 1 file changed, 81 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index c19bfde3b75..b7afad3f2af 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -540,6 +540,81 @@ Once you have the location of your custom CSS file, run this curl command to add ``curl -X PUT -d '/var/www/dataverse/branding/custom-stylesheet.css' http://localhost:8080/api/admin/settings/:StyleCustomizationFile`` +.. _i18n: + +Internationalization +-------------------- + +Dataverse is being translated into multiple languages by the Dataverse community! Please see below for how to help with this effort! + +Adding Multiple Languages to the Dropdown in the Header +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +The presence of the :ref:`:Languages` database setting adds a dropdown in the header for multiple languages. 
For example, to add English and French to the dropdown:
+
+``curl http://localhost:8080/api/admin/settings/:Languages -X PUT -d '[{"locale":"en","title":"English"},{"locale":"fr","title":"Français"}]'``
+
+Configuring the "lang" Directory
+++++++++++++++++++++++++++++++++
+
+Translations for Dataverse are stored in "properties" files in a directory on disk (e.g. ``/home/glassfish/langBundles``) that you specify with the :ref:`dataverse.lang.directory` ``dataverse.lang.directory`` JVM option, like this:
+
+``./asadmin create-jvm-options '-Ddataverse.lang.directory=/home/glassfish/langBundles'``
+
+Go ahead and create the directory you specified.
+
+``mkdir /home/glassfish/langBundles``
+
+Creating a languages.zip File
++++++++++++++++++++++++++++++
+
+Dataverse provides an API endpoint for adding languages using a zip file.
+
+First, clone the "dataverse-language-packs" git repo.
+
+``git clone https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs.git``
+
+Take a look at https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs/branches to see if the version of Dataverse you're running has translations.
+
+Change to the directory for the git repo you just cloned.
+
+``cd dataverse-language-packs``
+
+Switch (``git checkout``) to the branch based on the Dataverse version you are running. The branch "dataverse-v4.13" is used in the example below.
+
+``export BRANCH_NAME=dataverse-v4.13``
+
+``git checkout $BRANCH_NAME``
+
+Create a "languages" directory in "/tmp".
+
+``mkdir /tmp/languages``
+
+Copy the properties files into the "languages" directory.
+
+``cp -R en_US/*.properties /tmp/languages``
+
+``cp -R fr_CA/*.properties /tmp/languages``
+
+Create the zip file
+
+``cd /tmp/languages``
+
+``zip languages.zip *.properties``
+
+Load the languages.zip file into Dataverse.
+
+``curl http://localhost:8080/api/admin/datasetfield/loadpropertyfiles -X POST --upload-file /tmp/languages/languages.zip -H "Content-Type: application/zip"``
+
+Click on the languages using the drop down in the header to try them out.
+
+How to Help Translate Dataverse Into Your Language
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Please join the `dataverse-internationalization-wg`_ mailing list and contribute to https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs to help translate Dataverse into various languages!
+
+.. _dataverse-internationalization-wg: https://groups.google.com/forum/#!forum/dataverse-internationalization-wg
+
 .. _Web-Analytics-Code:
 
 Web Analytics Code
@@ -897,13 +972,13 @@ This JVM option is only relevant if you plan to run multiple Glassfish servers f
 dataverse.lang.directory
 ++++++++++++++++++++++++
 
-This JVM option is used to configure the path where all the language specific property files are to be stored. If this option is set then the english property file must be present in the path along with any other language property file. You can download language property files from https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs
+This JVM option is used to configure the path where all the language specific property files are to be stored. If this option is set, then the English property file must be present in the path along with any other language property file. 
You can download language property files from https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs
 
 ``./asadmin create-jvm-options '-Ddataverse.lang.directory=PATH_LOCATION_HERE'``
 
 If this value is not set, by default, a Dataverse installation will read the English language property files from the Java Application.
 
-See also the ``:Languages`` setting below.
+See also :ref:`i18n`.
 
 dataverse.files.hide-schema-dot-org-download-urls
 +++++++++++++++++++++++++++++++++++++++++++++++++
@@ -1633,15 +1708,15 @@ Sets the path where the raw Make Data Count logs are stored before being process
 
 ``curl -X PUT -d '/usr/local/glassfish4/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath``
 
+.. _:Languages:
+
 :Languages
 ++++++++++
 
 Sets which languages should be available. If there is more than one, a dropdown is displayed
-in the header. This should be formated as a JSON array as shown below.
-
-``curl http://localhost:8080/api/admin/settings/:Languages -X PUT -d '[{ "locale":"en", "title":"English"}, { "locale":"fr", "title":"Français"}]'``
+in the header.
 
-See also the ``dataverse.lang.directory`` JVM option above.
+See :ref:`i18n` for a curl example and related settings.
 
 :InheritParentRoleAssignments
 +++++++++++++++++++++++++++++

From 06580afb016c1c327ce47f0a1ec3be42014c4d19 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 16 May 2019 10:31:59 -0400
Subject: [PATCH 23/32] separate header for loading lang zip #5806

---
 doc/sphinx-guides/source/installation/config.rst | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index b7afad3f2af..9f860cd0e1e 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -602,7 +602,10 @@ Create the zip file
 
 ``zip languages.zip *.properties``
 
-Load the languages.zip file into Dataverse.
+Load the languages.zip file into Dataverse
+++++++++++++++++++++++++++++++++++++++++++
+
+Now that you have a "languages.zip" file, you can load it into Dataverse with the command below.
 
 ``curl http://localhost:8080/api/admin/datasetfield/loadpropertyfiles -X POST --upload-file /tmp/languages/languages.zip -H "Content-Type: application/zip"``
 
 Click on the languages using the drop down in the header to try them out.

From 4024c4ebedc0afff0d30c9cd2737c22ec26bc774 Mon Sep 17 00:00:00 2001
From: Victoria Lubitch
Date: Thu, 16 May 2019 12:11:00 -0400
Subject: [PATCH 24/32] sphinx-guides dataaccess api

---
 doc/sphinx-guides/source/api/dataaccess.rst | 26 ++++++++++++++++++++-
 1 file changed, 25 insertions(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst
index 3a91dca5024..e8d7fb8d827 100755
--- a/doc/sphinx-guides/source/api/dataaccess.rst
+++ b/doc/sphinx-guides/source/api/dataaccess.rst
@@ -100,9 +100,19 @@ It returns a zipped bundle that contains the data in the following formats:
 * Data (Variable) metadata record, in DDI XML;
 * File citation, in Endnote and RIS formats.
 
+
 Parameters:
 ~~~~~~~~~~~
-none.
+
+``fileMetadataId``
+
+============== ===========
+Value Description
+============== ===========
+ID Exports file with specific file metadata ``ID``. 
+ For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` +============== =========== + Data Variable Metadata Access ----------------------------- @@ -177,6 +187,20 @@ Example: + +Parameters: +~~~~~~~~~~~ + +``fileMetadataId`` + +============== =========== +Value Description +============== =========== +ID Exports file with specific file metadata ``ID``. + For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` +============== =========== + + More information on DDI is available in the :doc:`/user/tabulardataingest/ingestprocess` section of the User Guide. Advanced options/Parameters: From 73e36c623069f4273f3bc781420dc554e786e9b7 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch Date: Thu, 16 May 2019 12:20:56 -0400 Subject: [PATCH 25/32] sphinx-guides dataaccess api edit --- doc/sphinx-guides/source/api/dataaccess.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index e8d7fb8d827..e7688d52078 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -109,8 +109,7 @@ Parameters: ============== =========== Value Description ============== =========== -ID Exports file with specific file metadata ``ID``. - For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` +ID Exports file with specific file metadata ``ID``. For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` ============== =========== @@ -196,8 +195,7 @@ Parameters: ============== =========== Value Description ============== =========== -ID Exports file with specific file metadata ``ID``. - For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` +ID Exports file with specific file metadata ``ID``. For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` ============== =========== From a94a0f4bdd1e7341bfc5bcfd779cb052c15fe202 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch Date: Thu, 16 May 2019 12:36:05 -0400 Subject: [PATCH 26/32] sphinx-guides dataaccess api fix --- doc/sphinx-guides/source/api/dataaccess.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index e7688d52078..eca43ba1c5e 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -109,7 +109,7 @@ Parameters: ============== =========== Value Description ============== =========== -ID Exports file with specific file metadata ``ID``. For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` +ID Exports file with specific file metadata ``ID``. ============== =========== @@ -195,7 +195,7 @@ Parameters: ============== =========== Value Description ============== =========== -ID Exports file with specific file metadata ``ID``. For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6?fileMetadataId=2'`` +ID Exports file with specific file metadata ``ID``. 
For example for data file with id 6 and file metadata id 2: ``curl 'http://localhost:8080/api/access/datafile/6/metadata/ddi?fileMetadataId=2'`` ============== =========== From 49f74df0307fd980760c67b0916cce7940eda7bf Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 16 May 2019 15:12:52 -0400 Subject: [PATCH 27/32] rename flyway script since we're post 4.14 #5822 --- ...22-export-var-meta.sql => V4.14.0.1__5822-export-var-meta.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V4.13.0.1__5822-export-var-meta.sql => V4.14.0.1__5822-export-var-meta.sql} (100%) diff --git a/src/main/resources/db/migration/V4.13.0.1__5822-export-var-meta.sql b/src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql similarity index 100% rename from src/main/resources/db/migration/V4.13.0.1__5822-export-var-meta.sql rename to src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql From 0dc7c91f43907c5d07f23b3b7ce498f449fb98e4 Mon Sep 17 00:00:00 2001 From: Merce Crosas Date: Thu, 16 May 2019 15:32:51 -0400 Subject: [PATCH 28/32] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1046cef3b8c..737cf1acd17 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Dataverse® =============== -Dataverse is an [open source][] web application for sharing, citing, analyzing, and preserving research data (developed by the [Data Science and Products team](http://www.iq.harvard.edu/people/people/data-science-products) at the [Institute for Quantitative Social Science](http://iq.harvard.edu/) and the [Dataverse community][]). +Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Data Science and Products team](http://www.iq.harvard.edu/people/people/data-science-products) at the [Institute for Quantitative Social Science](http://iq.harvard.edu/) and the [Dataverse community][]). [dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our development [roadmap][], and more. From ebd7ad41b70919fe0eda6c3b27ef0c1ca62c73c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marie-H=C3=A9l=C3=A8ne=20V=C3=A9zina?= Date: Fri, 17 May 2019 09:42:42 -0400 Subject: [PATCH 29/32] Typo correction in English ValidationMessage file Corrected a typo in the English ValidationMessage file introduced here: https://github.com/IQSS/dataverse/commit/ada7863c558c72cac241efe8bfe1ba7157f0e3b7 from https://github.com/IQSS/dataverse/commit/74738731503dee46a533f20903b0c563ceb5d0f1#diff-85c3748e9fba660ae3ddb5785284c40dR78 --- src/main/java/ValidationMessages.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/ValidationMessages.properties b/src/main/java/ValidationMessages.properties index 5744b6c4dbd..9c4f69252cf 100644 --- a/src/main/java/ValidationMessages.properties +++ b/src/main/java/ValidationMessages.properties @@ -3,7 +3,7 @@ user.lastName=Please enter your last name. user.invalidEmail=Please enter a valid email address. user.enterUsername=Please enter a username. user.usernameLength=Username must be between 2 and 60 characters. -user.illegalCharacters=Found an illegal character(s). Valid characters are a-Z, 0-9, '_', '-', and '.'." +user.illegalCharacters=Found an illegal character(s). Valid characters are a-Z, 0-9, '_', '-', and '.'. 
user.enterNickname=Please enter a nickname.
 user.nicknameLength=Nickname must be at most 30 characters.
 
@@ -46,4 +46,4 @@ password.retype=The new password is blank: re-type it again.
 password.current=Please enter your current password.
 password.validate=Password reset page default email message.
 
-guestbook.name=Enter a name for the guestbook
\ No newline at end of file
+guestbook.name=Enter a name for the guestbook

From bb689938dbcc5fe4f012d420d338f9abe03356f4 Mon Sep 17 00:00:00 2001
From: Thanh-Thanh <39987392+Thanh-Thanh@users.noreply.github.com>
Date: Fri, 17 May 2019 15:53:10 +0200
Subject: [PATCH 30/32] 5855 Typo in admin document

---
 doc/sphinx-guides/source/admin/dataverses-datasets.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
index 2bc809c4910..7b5c5fbd4a0 100644
--- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst
+++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
@@ -103,5 +103,5 @@ As a superuser, click "Update Current Version" when publishing. (This option is
 Diagnose Constraint Violations Issues in Datasets
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-To identifiy invalid data values in specific datasets (if, for example, an attempt to edit a dataset results in a ConstraintViolationException in the server log), or to check all the datasets in the Dataverse for constraint violations, see :ref:`Dataset Validation ` in the :doc:`/api/native-api` section of the User Guide.
+To identify invalid data values in specific datasets (if, for example, an attempt to edit a dataset results in a ConstraintViolationException in the server log), or to check all the datasets in the Dataverse for constraint violations, see :ref:`Dataset Validation ` in the :doc:`/api/native-api` section of the User Guide. 
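The ``fileMetadataId`` parameter documented in the Data Access API patches above is generated on the application side by the ``FileUtil.getFileDownloadUrlPath`` change at the start of this series. The sketch below is illustrative only and is not part of any patch: it assumes the patched ``FileUtil`` is on the classpath, and the expected URLs are copied from the ``FileUtilTest`` assertions, except for the final ``gbrecs`` case, which follows from the patched branch logic rather than from an existing test.

    import edu.harvard.iq.dataverse.util.FileUtil;

    public class FileDownloadUrlDemo {
        public static void main(String[] args) {
            Long fileId = 42L;
            Long fileMetadataId = 2L;

            // A plain download ignores fileMetadataId entirely.
            System.out.println(FileUtil.getFileDownloadUrlPath(null, fileId, false, null));
            // -> /api/access/datafile/42

            // "bundle" and "var" are the only download types that append it.
            System.out.println(FileUtil.getFileDownloadUrlPath("bundle", fileId, false, fileMetadataId));
            // -> /api/access/datafile/bundle/42?fileMetadataId=2
            System.out.println(FileUtil.getFileDownloadUrlPath("var", fileId, false, fileMetadataId));
            // -> /api/access/datafile/42/metadata?fileMetadataId=2

            // With guestbook records already written, gbrecs=true is joined with
            // "&" because fileMetadataId already opened the query string.
            // (Derived from the patched logic; not covered by FileUtilTest.)
            System.out.println(FileUtil.getFileDownloadUrlPath("var", fileId, true, fileMetadataId));
            // -> /api/access/datafile/42/metadata?fileMetadataId=2&gbrecs=true
        }
    }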
From 747bc518b8f2af945bfe74beb2feab2691300a7f Mon Sep 17 00:00:00 2001 From: Victoria Lubitch Date: Fri, 17 May 2019 11:16:28 -0400 Subject: [PATCH 31/32] fileDscr and BiblCit fix --- .../dataverse/export/DDIExportServiceBean.java | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java index 6c95e138d5a..7dd9b1e293c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java @@ -511,17 +511,20 @@ private void createDataFileDDI(XMLStreamWriter xmlw, Set excludedFieldSe xmlw.writeDefaultNamespace("http://www.icpsr.umich.edu/DDI"); writeAttribute(xmlw, "version", "2.0"); - createStdyDscr(xmlw, excludedFieldSet, includedFieldSet, df.getOwner().getLatestVersion()); - - DataTable dt = fileService.findDataTableByFileId(df.getId()); FileMetadata latestFm = df.getFileMetadata(); if (fileMetadataId == null) { fileMetadataId = latestFm.getId(); } + FileMetadata fm = fileService.findFileMetadata(fileMetadataId); + + createStdyDscr(xmlw, excludedFieldSet, includedFieldSet, fm.getDatasetVersion()); + + DataTable dt = fileService.findDataTableByFileId(df.getId()); + if (checkField("fileDscr", excludedFieldSet, includedFieldSet)) { - createFileDscr(xmlw, excludedFieldSet, null, df, dt); + createFileDscr(xmlw, excludedFieldSet, null, df, dt, fm); } // And now, the variables: @@ -619,7 +622,7 @@ private void createDatasetDDI(XMLStreamWriter xmlw, Set excludedFieldSet if (checkField("fileDscr", excludedFieldSet, includedFieldSet)) { for (FileMetadata fileMetadata : tabularDataFiles) { DataTable dt = fileService.findDataTableByFileId(fileMetadata.getDataFile().getId()); - createFileDscr(xmlw, excludedFieldSet, includedFieldSet, fileMetadata.getDataFile(),dt); + createFileDscr(xmlw, excludedFieldSet, includedFieldSet, fileMetadata.getDataFile(),dt, fileMetadata); } // 2nd pass, to create data (variable) description sections: @@ -710,7 +713,7 @@ private void createOtherMat(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // otherMat } - private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, Set includedFieldSet, DataFile df, DataTable dt) throws XMLStreamException { + private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, Set includedFieldSet, DataFile df, DataTable dt, FileMetadata fm) throws XMLStreamException { xmlw.writeStartElement("fileDscr"); writeAttribute(xmlw, "ID", "f" + df.getId().toString()); @@ -722,7 +725,7 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, if (checkField("fileTxt", excludedFieldSet, includedFieldSet)) { xmlw.writeStartElement("fileName"); - xmlw.writeCharacters(df.getFileMetadata().getLabel()); + xmlw.writeCharacters(fm.getLabel()); xmlw.writeEndElement(); // fileName } From 240e959e3a825a51726c87265007ca5c6731d07e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 17 May 2019 13:32:21 -0400 Subject: [PATCH 32/32] no need for SQL create scripts with flyway #5854 --- .../source/developers/making-releases.rst | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 5db32efb9c9..f37ae84b7f6 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ 
b/doc/sphinx-guides/source/developers/making-releases.rst @@ -5,7 +5,7 @@ Making Releases .. contents:: |toctitle| :local: -Use the number of the milestone with a "v" in front for the relase tag. For example: ``v4.6.2``. +Use the number of the milestone with a "v" in front for the release tag. For example: ``v4.6.2``. Create the release GitHub issue and branch ------------------------------------------ @@ -32,18 +32,7 @@ Add the version being released to the lists in the following two files: Here's an example commit where three of the four files above were updated at once: https://github.com/IQSS/dataverse/commit/99e23f96ec362ac2f524cb5cd80ca375fa13f196 -2. Save the EJB Database Create Script -====================================== - -Save the script ``domains/domain1/generated/ejb/dataverse/dataverse_VDCNet-ejbPU_createDDL.jdbc`` created by EJB during the deployment of the release candidate. **Important:** add semicolons to the ends of the SQL commands in the EJB-generated file (see below)! Save the resulting file as ``scripts/database/create/create_v{VERSION_TAG}.sql`` using the version number tag for the release. For example: - -.. code-block:: none - - sed 's/$/;/' dataverse_VDCNet-ejbPU_createDDL.jdbc > scripts/database/create/create_v4.10.sql - -(We are saving the script above to support the new experimental process for updating the database across multiple versions; see ``scripts/database/README_upgrade_across_versions.txt`` for more information.) - -3. Check in the Changes Above... +2. Check in the Changes Above... ================================ ... into the release branch, make a pull request and merge the release branch into develop.
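To close out the series, the sketch below shows how the new ``{fileMetadataId}`` reserved word from the external-tools patch resolves end to end. It is illustrative only and is not part of any patch: it mirrors the fixtures in ``ExternalToolServiceBeanTest`` above, and the package locations (in particular ``ApiToken``'s) are assumed from the test imports rather than confirmed by these diffs.

    import edu.harvard.iq.dataverse.DataFile;
    import edu.harvard.iq.dataverse.DataFileServiceBean;
    import edu.harvard.iq.dataverse.Dataset;
    import edu.harvard.iq.dataverse.DatasetVersion;
    import edu.harvard.iq.dataverse.FileMetadata;
    import edu.harvard.iq.dataverse.authorization.users.ApiToken; // package assumed
    import edu.harvard.iq.dataverse.externaltools.ExternalTool;
    import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
    import java.util.ArrayList;
    import java.util.List;
    import javax.json.Json;

    public class ExternalToolUrlDemo {
        public static void main(String[] args) {
            // Same fixture the tests build: data file id 42, file metadata id 2.
            DataFile dataFile = new DataFile();
            dataFile.setId(42L);
            FileMetadata fmd = new FileMetadata();
            fmd.setId(2L);
            DatasetVersion dv = new DatasetVersion();
            Dataset ds = new Dataset();
            dv.setDataset(ds);
            fmd.setDatasetVersion(dv);
            List<FileMetadata> fmdl = new ArrayList<>();
            fmdl.add(fmd);
            dataFile.setFileMetadatas(fmdl);
            ApiToken apiToken = new ApiToken();
            apiToken.setTokenString("7196b5ce-f200-4286-8809-03ffdbc255d7");

            // A tool manifest that uses all three reserved words.
            String toolParameters = Json.createObjectBuilder()
                    .add("queryParameters", Json.createArrayBuilder()
                            .add(Json.createObjectBuilder().add("fileid", "{fileId}"))
                            .add(Json.createObjectBuilder().add("key", "{apiToken}"))
                            .add(Json.createObjectBuilder().add("fileMetadataId", "{fileMetadataId}")))
                    .build().toString();
            ExternalTool tool = new ExternalTool("displayName", "description",
                    ExternalTool.Type.EXPLORE, "http://awesometool.com",
                    toolParameters, DataFileServiceBean.MIME_TYPE_TSV_ALT);

            // The handler substitutes each reserved word; {fileMetadataId} is
            // resolved from the new fourth constructor argument.
            ExternalToolHandler handler = new ExternalToolHandler(tool, dataFile, apiToken, fmd);
            System.out.println(handler.getToolUrlWithQueryParams());
            // Expected, per ExternalToolServiceBeanTest above:
            // http://awesometool.com?fileid=42&key=7196b5ce-f200-4286-8809-03ffdbc255d7&fileMetadataId=2
        }
    }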