From bc34031660316c7afde7b121c7fd32603c0d710f Mon Sep 17 00:00:00 2001 From: jingma Date: Tue, 16 Mar 2021 11:39:06 +0100 Subject: [PATCH 001/183] First db table and api. --- .../edu/harvard/iq/dataverse/License.java | 135 ++++++++++++++++++ .../iq/dataverse/LicenseServiceBean.java | 80 +++++++++++ .../edu/harvard/iq/dataverse/api/Admin.java | 56 ++++++++ .../iq/dataverse/util/json/JsonPrinter.java | 11 ++ 4 files changed, 282 insertions(+) create mode 100644 src/main/java/edu/harvard/iq/dataverse/License.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java new file mode 100644 index 00000000000..713ac218222 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -0,0 +1,135 @@ +package edu.harvard.iq.dataverse; + +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; + +/** + * @author Jing Ma + */ +// @NamedQueries({ +// @NamedQuery( name="License.findAll", +// query="SELECT l FROM License l"), +// @NamedQuery( name="Setting.findById", +// query = "SELECT l FROM License l WHERE l.id=:id"), +// @NamedQuery( name="License.deleteById", +// query="DELETE FROM License l WHERE l.id=:id") +// +//}) +//@Entity +public class License { + +// @Id +// @GeneratedValue(strategy = GenerationType.IDENTITY) +// private Long id; +// +// @Column(columnDefinition="TEXT", nullable = false, unique = true) +// private String name; +// +// @Column(columnDefinition="TEXT") +// private String shortDescription; +// +// @Column(columnDefinition="TEXT", nullable = false) +// private String uri; +// +// @Column(columnDefinition="TEXT") +// private String iconUrl; +// +// @Column(nullable = false) 
+// private boolean active; +// +// public License() { +// } +// +// public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { +// this.name = name; +// this.shortDescription = shortDescription; +// this.uri = uri; +// this.iconUrl = iconUrl; +// this.active = active; +// } +// +// public Long getId() { +// return id; +// } +// +// public void setId(Long id) { +// this.id = id; +// } +// +// public String getName() { +// return name; +// } +// +// public void setName(String name) { +// this.name = name; +// } +// +// public String getShortDescription() { +// return shortDescription; +// } +// +// public void setShortDescription(String shortDescription) { +// this.shortDescription = shortDescription; +// } +// +// public String getUri() { +// return uri; +// } +// +// public void setUri(String uri) { +// this.uri = uri; +// } +// +// public String getIconUrl() { +// return iconUrl; +// } +// +// public void setIconUrl(String iconUrl) { +// this.iconUrl = iconUrl; +// } +// +// public boolean isActive() { +// return active; +// } +// +// public void setActive(boolean active) { +// this.active = active; +// } +// +// @Override +// public boolean equals(Object o) { +// if (this == o) return true; +// if (o == null || getClass() != o.getClass()) return false; +// License license = (License) o; +// return active == license.active && +// Objects.equals(id, license.id) && +// Objects.equals(name, license.name) && +// Objects.equals(shortDescription, license.shortDescription) && +// Objects.equals(uri, license.uri) && +// Objects.equals(iconUrl, license.iconUrl); +// } +// +// @Override +// public int hashCode() { +// return Objects.hash(id, name, shortDescription, uri, iconUrl, active); +// } +// +// @Override +// public String toString() { +// return "License{" + +// "id=" + id + +// ", name='" + name + '\'' + +// ", shortDescription='" + shortDescription + '\'' + +// ", uri='" + uri + '\'' + +// ", iconUrl='" + iconUrl + '\'' + 
+// ", active=" + active + +// '}'; +// } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java new file mode 100644 index 00000000000..7caa5b4bdc8 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -0,0 +1,80 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import edu.harvard.iq.dataverse.search.IndexResponse; +import edu.harvard.iq.dataverse.settings.Setting; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import javax.ejb.EJB; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceException; + +/** + * @author Jing Ma + */ +//@Stateless +//@Named +public class LicenseServiceBean { + +// @PersistenceContext +// EntityManager em; +// +// @EJB +// ActionLogServiceBean actionLogSvc; +// +// public List listAll() { +// return em.createNamedQuery("License.findAll", License.class).getResultList(); +// } +// +// public License get( long id ) { +// List tokens = em.createNamedQuery("License.findById", License.class) +// .setParameter("id", id ) +// .getResultList(); +// return tokens.isEmpty() ? 
null : tokens.get(0); +// } +// +// public License save(License l) throws PersistenceException { +// if (l.getId() == null) { +// em.persist(l); +// return l; +// } else { +// return null; +// } +// } +// +// public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { +// List tokens = em.createNamedQuery("License.findById", License.class) +// .setParameter("id", Long.toString(id) ) +// .getResultList(); +// +// if(tokens.size() > 0) { +// License l = tokens.get(0); +// l.setName(name); +// l.setShortDescription(shortDescription); +// l.setUri(uri); +// l.setIconUrl(iconUrl); +// l.setActive(active); +// em.merge(l); +// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") +// .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); +// return l; +// } else { +// return null; +// } +// } +// +// public void delete( long id ) throws PersistenceException { +// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") +// .setInfo(Long.toString(id))); +// em.createNamedQuery("License.deleteById") +// .setParameter("id", id) +// .executeUpdate(); +// } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b52665a7747..ce248d97946 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -16,6 +16,8 @@ import edu.harvard.iq.dataverse.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.License; +import edu.harvard.iq.dataverse.LicenseServiceBean; import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; @@ -42,9 +44,11 @@ import 
edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; +import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -152,6 +156,8 @@ public class Admin extends AbstractApiBean { ExplicitGroupServiceBean explicitGroupService; @EJB BannerMessageServiceBean bannerMessageService; + @EJB + LicenseServiceBean licenseService; // Make the session available @@ -1920,4 +1926,54 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } +// @GET +// @Path("/licenses") +// public Response getLicenses() { +// return ok(licenseService.listAll().stream() +// .map(JsonPrinter::json) +// .collect(toJsonArray())); +// } +// +// @GET +// @Path("/licenses/{id}") +// public Response getLicense(@PathParam("id") long id) { +// License l = licenseService.get(id); +// if (l == null) { +// return error(Response.Status.NOT_FOUND, "Not Found."); +// } +// return ok(json(l)); +// } +// +// @POST +// @Path("/licenses") +// public Response addLicense(License l) { +// try { +// licenseService.save(l); +// return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); +// } catch(PersistenceException e) { +// return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); +// } +// } +// +// @PUT +// @Path("/licenses/{id}") +// public Response putLicense(@PathParam("id") long id, License l) { +// License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); +// if (updated == null) { +// return error(Response.Status.BAD_REQUEST, "Bad Request. 
There is no existing LicenseInfo with that ID. To add a license use POST."); +// } +// return ok("License with ID " + id + " was replaced."); +// } +// +// @DELETE +// @Path("/licenses/{id}") +// public Response deleteLicense(@PathParam("id") long id) { +// try { +// licenseService.delete(id); +// return ok("OK. License with ID " + id + " was deleted."); +// } catch (PersistenceException e) { +// return error(Response.Status.BAD_REQUEST, "The license is still in used and cannot be deleted."); +// } +// } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index c37efc3178f..f43860df23f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.DataverseContact; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseTheme; +import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; @@ -775,6 +776,16 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("id", String.valueOf(aFacet.getId())) // TODO should just be id I think .add("name", aFacet.getDatasetFieldType().getDisplayName()); } + +// public static JsonObjectBuilder json(License l) { +// return jsonObjectBuilder() +// .add("id", l.getId()) +// .add("name", l.getName()) +// .add("shortDescription", l.getShortDescription()) +// .add("uri", l.getUri()) +// .add("iconUrl", l.getIconUrl()) +// .add("active", l.isActive()); +// } public static Collector stringsToJsonArray() { return new Collector() { From 5d08b0e0d3b0d5064e94fc9156ad40d8e050407b Mon Sep 17 00:00:00 2001 From: jingma Date: Tue, 16 Mar 2021 17:11:50 +0100 
Subject: [PATCH 002/183] Final changes for prototype. --- .../edu/harvard/iq/dataverse/License.java | 240 +++++++++--------- .../iq/dataverse/LicenseServiceBean.java | 112 ++++---- .../edu/harvard/iq/dataverse/api/Admin.java | 100 ++++---- .../iq/dataverse/util/json/JsonPrinter.java | 18 +- 4 files changed, 239 insertions(+), 231 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 713ac218222..56742f76042 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -8,128 +8,134 @@ import javax.persistence.Id; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; +import javax.persistence.Table; +import javax.persistence.UniqueConstraint; /** * @author Jing Ma */ -// @NamedQueries({ -// @NamedQuery( name="License.findAll", -// query="SELECT l FROM License l"), -// @NamedQuery( name="Setting.findById", -// query = "SELECT l FROM License l WHERE l.id=:id"), -// @NamedQuery( name="License.deleteById", -// query="DELETE FROM License l WHERE l.id=:id") -// -//}) -//@Entity + @NamedQueries({ + @NamedQuery( name="License.findAll", + query="SELECT l FROM License l"), + @NamedQuery( name="License.findById", + query = "SELECT l FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.deleteById", + query="DELETE FROM License l WHERE l.id=:id") + +}) +@Entity +@Table(uniqueConstraints = { + @UniqueConstraint(columnNames = "name"), + @UniqueConstraint(columnNames = "uri")} +) public class License { -// @Id -// @GeneratedValue(strategy = GenerationType.IDENTITY) -// private Long id; -// -// @Column(columnDefinition="TEXT", nullable = false, unique = true) -// private String name; -// -// @Column(columnDefinition="TEXT") -// private String shortDescription; -// -// @Column(columnDefinition="TEXT", nullable = false) -// private String uri; -// -// @Column(columnDefinition="TEXT") -// private String 
iconUrl; -// -// @Column(nullable = false) -// private boolean active; -// -// public License() { -// } -// -// public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { -// this.name = name; -// this.shortDescription = shortDescription; -// this.uri = uri; -// this.iconUrl = iconUrl; -// this.active = active; -// } -// -// public Long getId() { -// return id; -// } -// -// public void setId(Long id) { -// this.id = id; -// } -// -// public String getName() { -// return name; -// } -// -// public void setName(String name) { -// this.name = name; -// } -// -// public String getShortDescription() { -// return shortDescription; -// } -// -// public void setShortDescription(String shortDescription) { -// this.shortDescription = shortDescription; -// } -// -// public String getUri() { -// return uri; -// } -// -// public void setUri(String uri) { -// this.uri = uri; -// } -// -// public String getIconUrl() { -// return iconUrl; -// } -// -// public void setIconUrl(String iconUrl) { -// this.iconUrl = iconUrl; -// } -// -// public boolean isActive() { -// return active; -// } -// -// public void setActive(boolean active) { -// this.active = active; -// } -// -// @Override -// public boolean equals(Object o) { -// if (this == o) return true; -// if (o == null || getClass() != o.getClass()) return false; -// License license = (License) o; -// return active == license.active && -// Objects.equals(id, license.id) && -// Objects.equals(name, license.name) && -// Objects.equals(shortDescription, license.shortDescription) && -// Objects.equals(uri, license.uri) && -// Objects.equals(iconUrl, license.iconUrl); -// } -// -// @Override -// public int hashCode() { -// return Objects.hash(id, name, shortDescription, uri, iconUrl, active); -// } -// -// @Override -// public String toString() { -// return "License{" + -// "id=" + id + -// ", name='" + name + '\'' + -// ", shortDescription='" + shortDescription + '\'' + -// ", uri='" + uri + 
'\'' + -// ", iconUrl='" + iconUrl + '\'' + -// ", active=" + active + -// '}'; -// } + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(columnDefinition="TEXT", nullable = false) + private String name; + + @Column(columnDefinition="TEXT") + private String shortDescription; + + @Column(columnDefinition="TEXT", nullable = false) + private String uri; + + @Column(columnDefinition="TEXT") + private String iconUrl; + + @Column(nullable = false) + private boolean active; + + public License() { + } + + public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { + this.name = name; + this.shortDescription = shortDescription; + this.uri = uri; + this.iconUrl = iconUrl; + this.active = active; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getShortDescription() { + return shortDescription; + } + + public void setShortDescription(String shortDescription) { + this.shortDescription = shortDescription; + } + + public String getUri() { + return uri; + } + + public void setUri(String uri) { + this.uri = uri; + } + + public String getIconUrl() { + return iconUrl; + } + + public void setIconUrl(String iconUrl) { + this.iconUrl = iconUrl; + } + + public boolean isActive() { + return active; + } + + public void setActive(boolean active) { + this.active = active; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + License license = (License) o; + return active == license.active && + Objects.equals(id, license.id) && + Objects.equals(name, license.name) && + Objects.equals(shortDescription, license.shortDescription) && + Objects.equals(uri, license.uri) && + Objects.equals(iconUrl, license.iconUrl); + } + + @Override + public int 
hashCode() { + return Objects.hash(id, name, shortDescription, uri, iconUrl, active); + } + + @Override + public String toString() { + return "License{" + + "id=" + id + + ", name='" + name + '\'' + + ", shortDescription='" + shortDescription + '\'' + + ", uri='" + uri + '\'' + + ", iconUrl='" + iconUrl + '\'' + + ", active=" + active + + '}'; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index 7caa5b4bdc8..0604e51ae3d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -18,63 +18,63 @@ /** * @author Jing Ma */ -//@Stateless -//@Named +@Stateless +@Named public class LicenseServiceBean { -// @PersistenceContext -// EntityManager em; -// -// @EJB -// ActionLogServiceBean actionLogSvc; -// -// public List listAll() { -// return em.createNamedQuery("License.findAll", License.class).getResultList(); -// } -// -// public License get( long id ) { -// List tokens = em.createNamedQuery("License.findById", License.class) -// .setParameter("id", id ) -// .getResultList(); -// return tokens.isEmpty() ? 
null : tokens.get(0); -// } -// -// public License save(License l) throws PersistenceException { -// if (l.getId() == null) { -// em.persist(l); -// return l; -// } else { -// return null; -// } -// } -// -// public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { -// List tokens = em.createNamedQuery("License.findById", License.class) -// .setParameter("id", Long.toString(id) ) -// .getResultList(); -// -// if(tokens.size() > 0) { -// License l = tokens.get(0); -// l.setName(name); -// l.setShortDescription(shortDescription); -// l.setUri(uri); -// l.setIconUrl(iconUrl); -// l.setActive(active); -// em.merge(l); -// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") -// .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); -// return l; -// } else { -// return null; -// } -// } -// -// public void delete( long id ) throws PersistenceException { -// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") -// .setInfo(Long.toString(id))); -// em.createNamedQuery("License.deleteById") -// .setParameter("id", id) -// .executeUpdate(); -// } + @PersistenceContext + EntityManager em; + + @EJB + ActionLogServiceBean actionLogSvc; + + public List listAll() { + return em.createNamedQuery("License.findAll", License.class).getResultList(); + } + + public License get( long id ) { + List tokens = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + return tokens.isEmpty() ? 
null : tokens.get(0); + } + + public License save(License l) throws PersistenceException { + if (l.getId() == null) { + em.persist(l); + return l; + } else { + return null; + } + } + + public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { + List tokens = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + + if(tokens.size() > 0) { + License l = tokens.get(0); + l.setName(name); + l.setShortDescription(shortDescription); + l.setUri(uri); + l.setIconUrl(iconUrl); + l.setActive(active); + em.merge(l); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + return l; + } else { + return null; + } + } + + public int delete( long id ) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(Long.toString(id))); + return em.createNamedQuery("License.deleteById") + .setParameter("id", id) + .executeUpdate(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index ce248d97946..0e7c8dd32de 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1926,54 +1926,56 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } -// @GET -// @Path("/licenses") -// public Response getLicenses() { -// return ok(licenseService.listAll().stream() -// .map(JsonPrinter::json) -// .collect(toJsonArray())); -// } -// -// @GET -// @Path("/licenses/{id}") -// public Response getLicense(@PathParam("id") long id) { -// License l = licenseService.get(id); -// if (l == null) { -// return error(Response.Status.NOT_FOUND, "Not Found."); -// } -// return ok(json(l)); -// } -// -// @POST -// @Path("/licenses") -// public Response 
addLicense(License l) { -// try { -// licenseService.save(l); -// return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); -// } catch(PersistenceException e) { -// return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); -// } -// } -// -// @PUT -// @Path("/licenses/{id}") -// public Response putLicense(@PathParam("id") long id, License l) { -// License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); -// if (updated == null) { -// return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); -// } -// return ok("License with ID " + id + " was replaced."); -// } -// -// @DELETE -// @Path("/licenses/{id}") -// public Response deleteLicense(@PathParam("id") long id) { -// try { -// licenseService.delete(id); -// return ok("OK. License with ID " + id + " was deleted."); -// } catch (PersistenceException e) { -// return error(Response.Status.BAD_REQUEST, "The license is still in used and cannot be deleted."); -// } -// } + @GET + @Path("/licenses") + public Response getLicenses() { + return ok(licenseService.listAll().stream() + .map(JsonPrinter::json) + .collect(toJsonArray())); + } + + @GET + @Path("/licenses/{id}") + public Response getLicense(@PathParam("id") long id) { + License l = licenseService.get(id); + if (l == null) { + return error(Response.Status.NOT_FOUND, "Not Found."); + } + return ok(json(l)); + } + + @POST + @Path("/licenses") + public Response addLicense(License l) { + try { + License added = licenseService.save(l); + if (added == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request."); + } + return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + } catch(PersistenceException e) { + return error(Response.Status.CONFLICT, "A license with the same URI or name is already 
present."); + } + } + + @PUT + @Path("/licenses/{id}") + public Response putLicense(@PathParam("id") long id, License l) { + License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); + if (updated == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); + } + return ok("License with ID " + id + " was replaced."); + } + + @DELETE + @Path("/licenses/{id}") + public Response deleteLicense(@PathParam("id") long id) { + int result = licenseService.delete(id); + if (result == 1) { + return ok("OK. License with ID " + id + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index f43860df23f..3cbe8da8717 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -777,15 +777,15 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("name", aFacet.getDatasetFieldType().getDisplayName()); } -// public static JsonObjectBuilder json(License l) { -// return jsonObjectBuilder() -// .add("id", l.getId()) -// .add("name", l.getName()) -// .add("shortDescription", l.getShortDescription()) -// .add("uri", l.getUri()) -// .add("iconUrl", l.getIconUrl()) -// .add("active", l.isActive()); -// } + public static JsonObjectBuilder json(License l) { + return jsonObjectBuilder() + .add("id", l.getId()) + .add("name", l.getName()) + .add("shortDescription", l.getShortDescription()) + .add("uri", l.getUri()) + .add("iconUrl", l.getIconUrl()) + .add("active", l.isActive()); + } public static Collector stringsToJsonArray() { return new Collector() { From f9e3a3ef212171c0756ed4adf1ef196faf80e702 Mon Sep 17 00:00:00 2001 From: 
jingma Date: Thu, 18 Mar 2021 20:27:12 +0100 Subject: [PATCH 003/183] Add integration tests. --- scripts/api/data/license.json | 7 +++ scripts/api/data/licenseError.json | 8 +++ scripts/api/data/licenseUpdate.json | 7 +++ .../edu/harvard/iq/dataverse/api/AdminIT.java | 62 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 41 ++++++++++++ 5 files changed, 125 insertions(+) create mode 100644 scripts/api/data/license.json create mode 100644 scripts/api/data/licenseError.json create mode 100644 scripts/api/data/licenseUpdate.json diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json new file mode 100644 index 00000000000..f891d84dd33 --- /dev/null +++ b/scripts/api/data/license.json @@ -0,0 +1,7 @@ +{ + "name": "Apache License", + "shortDescription": "License description", + "uri": "www.apache.com", + "iconUrl": "www.icon.com", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json new file mode 100644 index 00000000000..51eb31ecc0c --- /dev/null +++ b/scripts/api/data/licenseError.json @@ -0,0 +1,8 @@ +{ + "id": 6, + "name": "Apache License", + "shortDescription": "License description", + "uri": "www.apache.com", + "iconUrl": "www.icon.com", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json new file mode 100644 index 00000000000..aed1cb0ae26 --- /dev/null +++ b/scripts/api/data/licenseUpdate.json @@ -0,0 +1,7 @@ +{ + "name": "Updated Apache License", + "shortDescription": "Updated license description", + "uri": "www.update-apache.com", + "iconUrl": "www.update-icon.com", + "active": true +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 84da33cd3ee..84ec9defdec 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -785,4 +785,66 @@ public void testBannerMessages(){ assertEquals("OK", status); } + + @Test + public void testLicenses(){ + + String pathToJsonFile = "scripts/api/data/license.json"; + Response addLicenseResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseResponse.prettyPrint(); + String body = addLicenseResponse.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseError.json"; + Response addLicenseErrorResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseErrorResponse.prettyPrint(); + body = addLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response getLicensesResponse = UtilIT.getLicenses(); + getLicensesResponse.prettyPrint(); + body = getLicensesResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseResponse = UtilIT.getLicense(1L); + getLicenseResponse.prettyPrint(); + body = getLicenseResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseErrorResponse = UtilIT.getLicense(10L); + getLicenseErrorResponse.prettyPrint(); + body = getLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseResponse = UtilIT.updateLicense(pathToJsonFile, 1L); + updateLicenseResponse.prettyPrint(); + body = updateLicenseResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response updateLicenseErrorResponse = UtilIT.updateLicense(pathToJsonFile, 10L); + updateLicenseErrorResponse.prettyPrint(); + body = updateLicenseErrorResponse.getBody().asString(); 
+ status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response deleteLicenseResponse = UtilIT.deleteLicense(1L); + deleteLicenseResponse.prettyPrint(); + body = deleteLicenseResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response deleteLicenseErrorResponse = UtilIT.deleteLicense(10L); + deleteLicenseErrorResponse.prettyPrint(); + body = deleteLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f3ff8f8fae4..51a0cdae93e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2523,5 +2523,46 @@ static String getBannerMessageIdFromResponse(String getBannerMessagesResponse) { return "0"; } + static Response addLicense(String pathToJsonFile) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .post("/api/admin/licenses"); + return addLicenseResponse; + } + + static Response getLicenses() { + + Response getLicensesResponse = given() + .get("/api/admin/licenses"); + return getLicensesResponse; + } + + static Response getLicense(Long id) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/"+id.toString()); + return getLicenseResponse; + } + + static Response updateLicense(String pathToJsonFile, Long id) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/"+id.toString()); + return updateLicenseResponse; + } + + static Response deleteLicense(Long id) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/"+id.toString()); + 
return deleteLicenseResponse; + } + } From 5e3fb88bc2d67bbdf96012f69cb4e0ed307ab914 Mon Sep 17 00:00:00 2001 From: jingma Date: Mon, 22 Mar 2021 12:08:33 +0100 Subject: [PATCH 004/183] Fix indentation. --- .../edu/harvard/iq/dataverse/api/Admin.java | 68 +++++++++---------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 0e7c8dd32de..e77ac08ef83 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1930,52 +1930,52 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon @Path("/licenses") public Response getLicenses() { return ok(licenseService.listAll().stream() - .map(JsonPrinter::json) - .collect(toJsonArray())); + .map(JsonPrinter::json) + .collect(toJsonArray())); } @GET @Path("/licenses/{id}") public Response getLicense(@PathParam("id") long id) { - License l = licenseService.get(id); - if (l == null) { - return error(Response.Status.NOT_FOUND, "Not Found."); - } - return ok(json(l)); + License l = licenseService.get(id); + if (l == null) { + return error(Response.Status.NOT_FOUND, "Not Found."); + } + return ok(json(l)); } @POST @Path("/licenses") public Response addLicense(License l) { - try { - License added = licenseService.save(l); - if (added == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request."); - } - return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); - } catch(PersistenceException e) { - return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); - } + try { + License added = licenseService.save(l); + if (added == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request."); + } + return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + } 
catch(PersistenceException e) { + return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } } - @PUT - @Path("/licenses/{id}") - public Response putLicense(@PathParam("id") long id, License l) { - License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); - if (updated == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); - } - return ok("License with ID " + id + " was replaced."); - } + @PUT + @Path("/licenses/{id}") + public Response putLicense(@PathParam("id") long id, License l) { + License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); + if (updated == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); + } + return ok("License with ID " + id + " was replaced."); + } - @DELETE - @Path("/licenses/{id}") - public Response deleteLicense(@PathParam("id") long id) { - int result = licenseService.delete(id); - if (result == 1) { - return ok("OK. License with ID " + id + " was deleted."); - } - return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); - } + @DELETE + @Path("/licenses/{id}") + public Response deleteLicense(@PathParam("id") long id) { + int result = licenseService.delete(id); + if (result == 1) { + return ok("OK. License with ID " + id + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); + } } From 03946521f684f647f99418e01a98089ae8804983 Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 24 Mar 2021 15:04:46 +0100 Subject: [PATCH 005/183] Add prototype of newest changes. 
--- scripts/api/data/license.json | 8 +- scripts/api/data/licenseError.json | 8 +- scripts/api/data/licenseUpdate.json | 8 +- .../edu/harvard/iq/dataverse/License.java | 20 +++-- .../iq/dataverse/LicenseServiceBean.java | 87 ++++++++++++++----- .../edu/harvard/iq/dataverse/api/Admin.java | 84 ++++++++++++------ .../iq/dataverse/api/FetchException.java | 17 ++++ .../dataverse/api/RequestBodyException.java | 17 ++++ .../iq/dataverse/api/UpdateException.java | 17 ++++ .../iq/dataverse/util/json/JsonPrinter.java | 4 +- .../edu/harvard/iq/dataverse/api/AdminIT.java | 43 ++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 36 ++++++-- 12 files changed, 257 insertions(+), 92 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/FetchException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json index f891d84dd33..3b56b7dbc16 100644 --- a/scripts/api/data/license.json +++ b/scripts/api/data/license.json @@ -1,7 +1,7 @@ { - "name": "Apache License", - "shortDescription": "License description", - "uri": "www.apache.com", - "iconUrl": "www.icon.com", + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json index 51eb31ecc0c..63f7a0f700a 100644 --- a/scripts/api/data/licenseError.json +++ b/scripts/api/data/licenseError.json @@ -1,8 +1,8 @@ { "id": 6, - "name": "Apache License", - "shortDescription": "License description", - "uri": 
"www.apache.com", - "iconUrl": "www.icon.com", + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json index aed1cb0ae26..7fc89d19058 100644 --- a/scripts/api/data/licenseUpdate.json +++ b/scripts/api/data/licenseUpdate.json @@ -1,7 +1,7 @@ { - "name": "Updated Apache License", - "shortDescription": "Updated license description", - "uri": "www.update-apache.com", - "iconUrl": "www.update-icon.com", + "name": "Apache License 2.0", + "shortDescription": "The 2.0 version of the Apache License, approved by the ASF in 2004.", + "uri": "https://www.apache.org/licenses/LICENSE-2.0", + "iconUrl": "https://yt3.ggpht.com/ytc/AAUvwni36SveDisR-vOAmmklBfJxnnjuRG3ihzfrwEfORA=s900-c-k-c0x00ffffff-no-rj", "active": true } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 56742f76042..c046b6b373f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse; +import java.net.URI; +import java.net.URL; import java.util.Objects; import javax.persistence.Column; import javax.persistence.Entity; @@ -41,10 +43,10 @@ public class License { private String shortDescription; @Column(columnDefinition="TEXT", nullable = false) - private String uri; + private URI uri; @Column(columnDefinition="TEXT") - private String iconUrl; + private URL iconUrl; @Column(nullable = false) private boolean active; @@ -52,7 +54,7 @@ public class License { public License() { } - 
public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { + public License(String name, String shortDescription, URI uri, URL iconUrl, boolean active) { this.name = name; this.shortDescription = shortDescription; this.uri = uri; @@ -84,19 +86,19 @@ public void setShortDescription(String shortDescription) { this.shortDescription = shortDescription; } - public String getUri() { + public URI getUri() { return uri; } - public void setUri(String uri) { + public void setUri(URI uri) { this.uri = uri; } - public String getIconUrl() { + public URL getIconUrl() { return iconUrl; } - public void setIconUrl(String iconUrl) { + public void setIconUrl(URL iconUrl) { this.iconUrl = iconUrl; } @@ -132,8 +134,8 @@ public String toString() { "id=" + id + ", name='" + name + '\'' + ", shortDescription='" + shortDescription + '\'' + - ", uri='" + uri + '\'' + - ", iconUrl='" + iconUrl + '\'' + + ", uri=" + uri + + ", iconUrl=" + iconUrl + ", active=" + active + '}'; } diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index 0604e51ae3d..af2cfd1328e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -2,12 +2,12 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; -import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.search.IndexResponse; -import edu.harvard.iq.dataverse.settings.Setting; -import java.util.HashSet; +import edu.harvard.iq.dataverse.api.FetchException; +import edu.harvard.iq.dataverse.api.RequestBodyException; +import edu.harvard.iq.dataverse.api.UpdateException; +import java.net.URI; +import java.net.URL; import java.util.List; -import java.util.Set; import javax.ejb.EJB; import javax.ejb.Stateless; import javax.inject.Named; 
@@ -32,44 +32,77 @@ public List listAll() { return em.createNamedQuery("License.findAll", License.class).getResultList(); } - public License get( long id ) { + public License getById(long id) throws FetchException { List tokens = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); - return tokens.isEmpty() ? null : tokens.get(0); + if (tokens.isEmpty()) { + throw new FetchException("License with that ID doesn't exist."); + } + return tokens.get(0); + } + + public License getByName(String name) throws FetchException { + List tokens = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + if (tokens.isEmpty()) { + throw new FetchException("License with that name doesn't exist."); + } + return tokens.get(0); } - public License save(License l) throws PersistenceException { - if (l.getId() == null) { - em.persist(l); - return l; + public License save(License license) throws PersistenceException, RequestBodyException { + if (license.getId() == null) { + em.persist(license); + return license; } else { - return null; + throw new RequestBodyException("There shouldn't be an ID in the request body"); } } - public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { + public License setById(long id, String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { List tokens = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); if(tokens.size() > 0) { - License l = tokens.get(0); - l.setName(name); - l.setShortDescription(shortDescription); - l.setUri(uri); - l.setIconUrl(iconUrl); - l.setActive(active); - em.merge(l); + License license = tokens.get(0); + license.setName(name); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); + 
actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + return license; + } else { + throw new UpdateException("There is no existing License with that ID. To add a license use POST."); + } + } + + public License setByName(String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { + List tokens = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + + if(tokens.size() > 0) { + License license = tokens.get(0); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); - return l; + return license; } else { - return null; + throw new UpdateException("There is no existing License with that name. 
To add a license use POST."); } } - public int delete( long id ) throws PersistenceException { + public int deleteById(long id) throws PersistenceException { actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") .setInfo(Long.toString(id))); return em.createNamedQuery("License.deleteById") @@ -77,4 +110,12 @@ public int delete( long id ) throws PersistenceException { .executeUpdate(); } + public int deleteByName(String name) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(name)); + return em.createNamedQuery("License.deleteByName") + .setParameter("name", name) + .executeUpdate(); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index e77ac08ef83..396ef05aea8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -18,7 +18,6 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.LicenseServiceBean; -import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; @@ -75,7 +74,6 @@ import javax.ws.rs.core.Response.Status; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; @@ -89,8 +87,6 @@ import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; import 
edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -105,7 +101,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import java.util.function.Consumer; import javax.inject.Inject; import javax.json.JsonArray; import javax.persistence.Query; @@ -1935,47 +1930,80 @@ public Response getLicenses() { } @GET - @Path("/licenses/{id}") - public Response getLicense(@PathParam("id") long id) { - License l = licenseService.get(id); - if (l == null) { - return error(Response.Status.NOT_FOUND, "Not Found."); - } - return ok(json(l)); + @Path("/licenses/id/{id}") + public Response getLicenseById(@PathParam("id") long id) { + try { + License license = licenseService.getById(id); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } + } + + @GET + @Path("/licenses/name/{name}") + public Response getLicenseByName(@PathParam("name") String name) { + try { + License license = licenseService.getByName(name); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } } @POST @Path("/licenses") - public Response addLicense(License l) { + public Response addLicense(License license) { try { - License added = licenseService.save(l); - if (added == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request."); - } + licenseService.save(license); return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); - } catch(PersistenceException e) { + } catch (RequestBodyException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch(PersistenceException e) { return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); } - } + } @PUT - @Path("/licenses/{id}") - public Response 
putLicense(@PathParam("id") long id, License l) { - License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); - if (updated == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); - } + @Path("/licenses/id/{id}") + public Response putLicenseById(@PathParam("id") long id, License license) { + try { + licenseService.setById(id, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } return ok("License with ID " + id + " was replaced."); } + @PUT + @Path("/licenses/name/{name}") + public Response putLicenseByName(@PathParam("name") String name, License license) { + try { + licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } + return ok("License with name " + name + " was replaced."); + } + @DELETE - @Path("/licenses/{id}") - public Response deleteLicense(@PathParam("id") long id) { - int result = licenseService.delete(id); + @Path("/licenses/id/{id}") + public Response deleteLicenseById(@PathParam("id") long id) { + int result = licenseService.deleteById(id); if (result == 1) { return ok("OK. License with ID " + id + " was deleted."); } return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); } + + @DELETE + @Path("/licenses/name/{name}") + public Response deleteLicenseByName(@PathParam("name") String name) { + int result = licenseService.deleteByName(name); + if (result == 1) { + return ok("OK. 
License with name " + name + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with name " + name + " doesn't exist."); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java new file mode 100644 index 00000000000..a9c77c7a4c5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class FetchException extends Exception { + + public FetchException(String message) { + super(message); + } + + public FetchException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java new file mode 100644 index 00000000000..e78c87abdfa --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class RequestBodyException extends Exception { + + public RequestBodyException(String message) { + super(message); + } + + public RequestBodyException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java new file mode 100644 index 00000000000..4dbd3ab19a3 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class UpdateException extends Exception { + + public UpdateException(String message) { + super(message); + } + + public UpdateException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git 
a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 3cbe8da8717..1827a5e1d34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -782,8 +782,8 @@ public static JsonObjectBuilder json(License l) { .add("id", l.getId()) .add("name", l.getName()) .add("shortDescription", l.getShortDescription()) - .add("uri", l.getUri()) - .add("iconUrl", l.getIconUrl()) + .add("uri", l.getUri().toString()) + .add("iconUrl", l.getIconUrl().toString()) .add("active", l.isActive()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 84ec9defdec..b14ef12d93a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -809,38 +809,57 @@ public void testLicenses(){ status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response getLicenseResponse = UtilIT.getLicense(1L); - getLicenseResponse.prettyPrint(); - body = getLicenseResponse.getBody().asString(); + Response getLicenseByIdResponse = UtilIT.getLicenseById(1L); + getLicenseByIdResponse.prettyPrint(); + body = getLicenseByIdResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response getLicenseErrorResponse = UtilIT.getLicense(10L); + Response getLicenseByNameResponse = UtilIT.getLicenseByName(""); + getLicenseByNameResponse.prettyPrint(); + body = getLicenseByNameResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseErrorResponse = UtilIT.getLicenseById(10L); getLicenseErrorResponse.prettyPrint(); body = getLicenseErrorResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("ERROR", 
status); pathToJsonFile = "scripts/api/data/licenseUpdate.json"; - Response updateLicenseResponse = UtilIT.updateLicense(pathToJsonFile, 1L); - updateLicenseResponse.prettyPrint(); - body = updateLicenseResponse.getBody().asString(); + Response updateLicenseByIdResponse = UtilIT.updateLicenseById(pathToJsonFile, 1L); + updateLicenseByIdResponse.prettyPrint(); + body = updateLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseByNameResponse = UtilIT.updateLicenseByName(pathToJsonFile, ""); + updateLicenseByNameResponse.prettyPrint(); + body = updateLicenseByNameResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response updateLicenseErrorResponse = UtilIT.updateLicense(pathToJsonFile, 10L); + Response updateLicenseErrorResponse = UtilIT.updateLicenseById(pathToJsonFile, 10L); updateLicenseErrorResponse.prettyPrint(); body = updateLicenseErrorResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("ERROR", status); - Response deleteLicenseResponse = UtilIT.deleteLicense(1L); - deleteLicenseResponse.prettyPrint(); - body = deleteLicenseResponse.getBody().asString(); + Response deleteLicenseByIdResponse = UtilIT.deleteLicenseById(1L); + deleteLicenseByIdResponse.prettyPrint(); + body = deleteLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response deleteLicenseByNameResponse = UtilIT.deleteLicenseByName(""); + deleteLicenseByNameResponse.prettyPrint(); + body = deleteLicenseByNameResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response deleteLicenseErrorResponse = UtilIT.deleteLicense(10L); + Response deleteLicenseErrorResponse = UtilIT.deleteLicenseById(10L); 
deleteLicenseErrorResponse.prettyPrint(); body = deleteLicenseErrorResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 51a0cdae93e..c5f4da033d1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2540,27 +2540,51 @@ static Response getLicenses() { return getLicensesResponse; } - static Response getLicense(Long id) { + static Response getLicenseById(Long id) { Response getLicenseResponse = given() - .get("/api/admin/licenses/"+id.toString()); + .get("/api/admin/licenses/id/"+id.toString()); return getLicenseResponse; } - static Response updateLicense(String pathToJsonFile, Long id) { + static Response getLicenseByName(String name) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/name/"+name); + return getLicenseResponse; + } + + static Response updateLicenseById(String pathToJsonFile, Long id) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/id/"+id.toString()); + return updateLicenseResponse; + } + + static Response updateLicenseByName(String pathToJsonFile, String name) { String jsonIn = getDatasetJson(pathToJsonFile); Response updateLicenseResponse = given() .body(jsonIn) .contentType("application/json") - .put("/api/admin/licenses/"+id.toString()); + .put("/api/admin/licenses/name/"+name); return updateLicenseResponse; } - static Response deleteLicense(Long id) { + static Response deleteLicenseById(Long id) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/id/"+id.toString()); + return deleteLicenseResponse; + } + + static Response deleteLicenseByName(String name) { Response deleteLicenseResponse = given() - 
.delete("/api/admin/licenses/"+id.toString()); + .delete("/api/admin/licenses/name/"+name); return deleteLicenseResponse; } From cb5863720b181c688058caa26c9fc010fcbe188e Mon Sep 17 00:00:00 2001 From: jingma Date: Thu, 25 Mar 2021 11:52:23 +0100 Subject: [PATCH 006/183] Add URI and URL objects, and new endpoints. --- .../edu/harvard/iq/dataverse/License.java | 33 ++++++++++--------- .../iq/dataverse/LicenseServiceBean.java | 6 ++-- .../edu/harvard/iq/dataverse/api/Admin.java | 29 ++++++++++++---- .../iq/dataverse/util/json/JsonPrinter.java | 15 +++++---- 4 files changed, 50 insertions(+), 33 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index c046b6b373f..957a0a0529f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse; import java.net.URI; -import java.net.URL; +import java.net.URISyntaxException; import java.util.Objects; import javax.persistence.Column; import javax.persistence.Entity; @@ -21,9 +21,12 @@ query="SELECT l FROM License l"), @NamedQuery( name="License.findById", query = "SELECT l FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.findByName", + query = "SELECT l FROM License l WHERE l.name=:name"), @NamedQuery( name="License.deleteById", - query="DELETE FROM License l WHERE l.id=:id") - + query="DELETE FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.deleteByName", + query="DELETE FROM License l WHERE l.name=:name") }) @Entity @Table(uniqueConstraints = { @@ -43,10 +46,10 @@ public class License { private String shortDescription; @Column(columnDefinition="TEXT", nullable = false) - private URI uri; + private String uri; @Column(columnDefinition="TEXT") - private URL iconUrl; + private String iconUrl; @Column(nullable = false) private boolean active; @@ -54,11 +57,11 @@ public class License { public License() { } - public 
License(String name, String shortDescription, URI uri, URL iconUrl, boolean active) { + public License(String name, String shortDescription, URI uri, URI iconUrl, boolean active) { this.name = name; this.shortDescription = shortDescription; - this.uri = uri; - this.iconUrl = iconUrl; + this.uri = uri.toASCIIString(); + this.iconUrl = iconUrl.toASCIIString(); this.active = active; } @@ -86,20 +89,20 @@ public void setShortDescription(String shortDescription) { this.shortDescription = shortDescription; } - public URI getUri() { - return uri; + public URI getUri() throws URISyntaxException { + return new URI(uri); } public void setUri(URI uri) { - this.uri = uri; + this.uri = uri.toASCIIString(); } - public URL getIconUrl() { - return iconUrl; + public URI getIconUrl() throws URISyntaxException { + return new URI(iconUrl); } - public void setIconUrl(URL iconUrl) { - this.iconUrl = iconUrl; + public void setIconUrl(URI iconUrl) { + this.iconUrl = iconUrl.toASCIIString(); } public boolean isActive() { diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index af2cfd1328e..0c6828fabd0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -61,7 +61,7 @@ public License save(License license) throws PersistenceException, RequestBodyExc } } - public License setById(long id, String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { + public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { List tokens = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); @@ -76,13 +76,12 @@ public License setById(long id, String name, String shortDescription, URI uri, U em.merge(license); actionLogSvc.log( new 
ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); - return license; } else { throw new UpdateException("There is no existing License with that ID. To add a license use POST."); } } - public License setByName(String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { + public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { List tokens = em.createNamedQuery("License.findByName", License.class) .setParameter("name", name ) .getResultList(); @@ -96,7 +95,6 @@ public License setByName(String name, String shortDescription, URI uri, URL icon em.merge(license); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); - return license; } else { throw new UpdateException("There is no existing License with that name. 
To add a license use POST."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 396ef05aea8..74a1e47c1ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -44,6 +44,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import java.net.URISyntaxException; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; @@ -1924,9 +1925,15 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon @GET @Path("/licenses") public Response getLicenses() { - return ok(licenseService.listAll().stream() - .map(JsonPrinter::json) - .collect(toJsonArray())); + JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); + for(License license : licenseService.listAll()) { + try { + arrayBuilder.add(JsonPrinter.json(license)); + } catch (URISyntaxException e) { + return error(Status.INTERNAL_SERVER_ERROR, "Incorrect URI in JSON"); + } + } + return ok(arrayBuilder); } @GET @@ -1937,8 +1944,10 @@ public Response getLicenseById(@PathParam("id") long id) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - } + } @GET @Path("/licenses/name/{name}") @@ -1948,8 +1957,10 @@ public Response getLicenseByName(@PathParam("name") String name) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - } + } @POST @Path("/licenses") @@ -1971,8 +1982,10 @@ public Response putLicenseById(@PathParam("id") long id, License 
license) { licenseService.setById(id, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - return ok("License with ID " + id + " was replaced."); + return ok("License with ID " + id + " was replaced."); } @PUT @@ -1982,8 +1995,10 @@ public Response putLicenseByName(@PathParam("name") String name, License license licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - return ok("License with name " + name + " was replaced."); + return ok("License with name " + name + " was replaced."); } @DELETE diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1827a5e1d34..9b243397cfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -45,6 +45,7 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; +import java.net.URISyntaxException; import java.util.*; import javax.json.Json; import javax.json.JsonArrayBuilder; @@ -777,14 +778,14 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("name", aFacet.getDatasetFieldType().getDisplayName()); } - public static JsonObjectBuilder json(License l) { + public static JsonObjectBuilder json(License license) throws URISyntaxException { return jsonObjectBuilder() - .add("id", l.getId()) - .add("name", l.getName()) - 
.add("shortDescription", l.getShortDescription()) - .add("uri", l.getUri().toString()) - .add("iconUrl", l.getIconUrl().toString()) - .add("active", l.isActive()); + .add("id", license.getId()) + .add("name", license.getName()) + .add("shortDescription", license.getShortDescription()) + .add("uri", license.getUri().toString()) + .add("iconUrl", license.getIconUrl().toString()) + .add("active", license.isActive()); } public static Collector stringsToJsonArray() { From dda1335bedc45f4e2cdef93d2d36e15879c9dfe7 Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 00:47:13 +0200 Subject: [PATCH 007/183] Add Apache icons. --- scripts/api/data/license.json | 2 +- scripts/api/data/licenseError.json | 2 +- scripts/api/data/licenseUpdate.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json index 3b56b7dbc16..11e0d44c14b 100644 --- a/scripts/api/data/license.json +++ b/scripts/api/data/license.json @@ -2,6 +2,6 @@ "name": "Apache License 1.0", "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", "uri": "https://www.apache.org/licenses/LICENSE-1.0", - "iconUrl": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json index 63f7a0f700a..d6b1dbbd01b 100644 --- a/scripts/api/data/licenseError.json +++ b/scripts/api/data/licenseError.json @@ -3,6 +3,6 @@ "name": "Apache License 1.0", "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", "uri": "https://www.apache.org/licenses/LICENSE-1.0", - "iconUrl": 
"https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json index 7fc89d19058..eefc4e6f16f 100644 --- a/scripts/api/data/licenseUpdate.json +++ b/scripts/api/data/licenseUpdate.json @@ -2,6 +2,6 @@ "name": "Apache License 2.0", "shortDescription": "The 2.0 version of the Apache License, approved by the ASF in 2004.", "uri": "https://www.apache.org/licenses/LICENSE-2.0", - "iconUrl": "https://yt3.ggpht.com/ytc/AAUvwni36SveDisR-vOAmmklBfJxnnjuRG3ihzfrwEfORA=s900-c-k-c0x00ffffff-no-rj", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", "active": true } \ No newline at end of file From c87c3675bed413621a6af0e9b7e05212520beedd Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 14:31:57 +0200 Subject: [PATCH 008/183] Change tokens to licenses. 
--- .../iq/dataverse/LicenseServiceBean.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index 0c6828fabd0..c49ebd9659e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -33,23 +33,23 @@ public List listAll() { } public License getById(long id) throws FetchException { - List tokens = em.createNamedQuery("License.findById", License.class) + List licenses = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); - if (tokens.isEmpty()) { + if (licenses.isEmpty()) { throw new FetchException("License with that ID doesn't exist."); } - return tokens.get(0); + return licenses.get(0); } public License getByName(String name) throws FetchException { - List tokens = em.createNamedQuery("License.findByName", License.class) + List licenses = em.createNamedQuery("License.findByName", License.class) .setParameter("name", name ) .getResultList(); - if (tokens.isEmpty()) { + if (licenses.isEmpty()) { throw new FetchException("License with that name doesn't exist."); } - return tokens.get(0); + return licenses.get(0); } public License save(License license) throws PersistenceException, RequestBodyException { @@ -62,12 +62,12 @@ public License save(License license) throws PersistenceException, RequestBodyExc } public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { - List tokens = em.createNamedQuery("License.findById", License.class) + List licenses = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); - if(tokens.size() > 0) { - License license = tokens.get(0); + if(licenses.size() > 0) { + License license = licenses.get(0); license.setName(name); 
license.setShortDescription(shortDescription); license.setUri(uri); @@ -82,12 +82,12 @@ public void setById(long id, String name, String shortDescription, URI uri, URI } public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { - List tokens = em.createNamedQuery("License.findByName", License.class) + List licenses = em.createNamedQuery("License.findByName", License.class) .setParameter("name", name ) .getResultList(); - if(tokens.size() > 0) { - License license = tokens.get(0); + if(licenses.size() > 0) { + License license = licenses.get(0); license.setShortDescription(shortDescription); license.setUri(uri); license.setIconUrl(iconUrl); From 67b7471e0ea127d66ba660f4631461650d2f139e Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 14:38:25 +0200 Subject: [PATCH 009/183] Change URIException to IllegalStateException. --- .../java/edu/harvard/iq/dataverse/License.java | 16 ++++++++++++---- .../java/edu/harvard/iq/dataverse/api/Admin.java | 15 +-------------- .../iq/dataverse/util/json/JsonPrinter.java | 2 +- 3 files changed, 14 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 957a0a0529f..29653271e01 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -89,16 +89,24 @@ public void setShortDescription(String shortDescription) { this.shortDescription = shortDescription; } - public URI getUri() throws URISyntaxException { - return new URI(uri); + public URI getUri() { + try { + return new URI(uri); + } catch (URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } } public void setUri(URI uri) { this.uri = uri.toASCIIString(); } - public URI getIconUrl() throws URISyntaxException { - return new URI(iconUrl); + public URI getIconUrl() { + try { + return new URI(iconUrl); + } catch 
(URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } } public void setIconUrl(URI iconUrl) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 74a1e47c1ae..fc7cf73d505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -44,7 +44,6 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.util.json.JsonPrinter; -import java.net.URISyntaxException; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; @@ -1927,11 +1926,7 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon public Response getLicenses() { JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); for(License license : licenseService.listAll()) { - try { - arrayBuilder.add(JsonPrinter.json(license)); - } catch (URISyntaxException e) { - return error(Status.INTERNAL_SERVER_ERROR, "Incorrect URI in JSON"); - } + arrayBuilder.add(JsonPrinter.json(license)); } return ok(arrayBuilder); } @@ -1944,8 +1939,6 @@ public Response getLicenseById(@PathParam("id") long id) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } } @@ -1957,8 +1950,6 @@ public Response getLicenseByName(@PathParam("name") String name) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } } @@ -1982,8 +1973,6 @@ public Response putLicenseById(@PathParam("id") long id, License license) { licenseService.setById(id, license.getName(), 
license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } return ok("License with ID " + id + " was replaced."); } @@ -1995,8 +1984,6 @@ public Response putLicenseByName(@PathParam("name") String name, License license licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } return ok("License with name " + name + " was replaced."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 9b243397cfa..7a5334114e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -778,7 +778,7 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("name", aFacet.getDatasetFieldType().getDisplayName()); } - public static JsonObjectBuilder json(License license) throws URISyntaxException { + public static JsonObjectBuilder json(License license) { return jsonObjectBuilder() .add("id", license.getId()) .add("name", license.getName()) From 4b6c367857e7afa4270281bf48652f0160dc079a Mon Sep 17 00:00:00 2001 From: Jing Ma Date: Wed, 31 Mar 2021 14:41:59 +0200 Subject: [PATCH 010/183] DD-387 Add license entity and api (#57) * First db table and api. * Final changes for prototype. * Add integration tests. * Fix indentation. * Add prototype of newest changes. * Add URI and URL objects, and new endpoints. * Add Apache icons. * Change tokens to licenses. 
* Change URIException to IllegalStateException. --- scripts/api/data/license.json | 7 + scripts/api/data/licenseError.json | 8 + scripts/api/data/licenseUpdate.json | 7 + .../edu/harvard/iq/dataverse/License.java | 154 ++++++++++++++++++ .../iq/dataverse/LicenseServiceBean.java | 119 ++++++++++++++ .../edu/harvard/iq/dataverse/api/Admin.java | 98 ++++++++++- .../iq/dataverse/api/FetchException.java | 17 ++ .../dataverse/api/RequestBodyException.java | 17 ++ .../iq/dataverse/api/UpdateException.java | 17 ++ .../iq/dataverse/util/json/JsonPrinter.java | 12 ++ .../edu/harvard/iq/dataverse/api/AdminIT.java | 81 +++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 65 ++++++++ 12 files changed, 597 insertions(+), 5 deletions(-) create mode 100644 scripts/api/data/license.json create mode 100644 scripts/api/data/licenseError.json create mode 100644 scripts/api/data/licenseUpdate.json create mode 100644 src/main/java/edu/harvard/iq/dataverse/License.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/FetchException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json new file mode 100644 index 00000000000..11e0d44c14b --- /dev/null +++ b/scripts/api/data/license.json @@ -0,0 +1,7 @@ +{ + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json new file mode 100644 index 
00000000000..d6b1dbbd01b --- /dev/null +++ b/scripts/api/data/licenseError.json @@ -0,0 +1,8 @@ +{ + "id": 6, + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json new file mode 100644 index 00000000000..eefc4e6f16f --- /dev/null +++ b/scripts/api/data/licenseUpdate.json @@ -0,0 +1,7 @@ +{ + "name": "Apache License 2.0", + "shortDescription": "The 2.0 version of the Apache License, approved by the ASF in 2004.", + "uri": "https://www.apache.org/licenses/LICENSE-2.0", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", + "active": true +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java new file mode 100644 index 00000000000..29653271e01 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -0,0 +1,154 @@ +package edu.harvard.iq.dataverse; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.Table; +import javax.persistence.UniqueConstraint; + +/** + * @author Jing Ma + */ + @NamedQueries({ + @NamedQuery( name="License.findAll", + query="SELECT l FROM License l"), + @NamedQuery( name="License.findById", + query = "SELECT l FROM License l WHERE l.id=:id"), + @NamedQuery( 
name="License.findByName", + query = "SELECT l FROM License l WHERE l.name=:name"), + @NamedQuery( name="License.deleteById", + query="DELETE FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.deleteByName", + query="DELETE FROM License l WHERE l.name=:name") +}) +@Entity +@Table(uniqueConstraints = { + @UniqueConstraint(columnNames = "name"), + @UniqueConstraint(columnNames = "uri")} +) +public class License { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(columnDefinition="TEXT", nullable = false) + private String name; + + @Column(columnDefinition="TEXT") + private String shortDescription; + + @Column(columnDefinition="TEXT", nullable = false) + private String uri; + + @Column(columnDefinition="TEXT") + private String iconUrl; + + @Column(nullable = false) + private boolean active; + + public License() { + } + + public License(String name, String shortDescription, URI uri, URI iconUrl, boolean active) { + this.name = name; + this.shortDescription = shortDescription; + this.uri = uri.toASCIIString(); + this.iconUrl = iconUrl.toASCIIString(); + this.active = active; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getShortDescription() { + return shortDescription; + } + + public void setShortDescription(String shortDescription) { + this.shortDescription = shortDescription; + } + + public URI getUri() { + try { + return new URI(uri); + } catch (URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } + } + + public void setUri(URI uri) { + this.uri = uri.toASCIIString(); + } + + public URI getIconUrl() { + try { + return new URI(iconUrl); + } catch (URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } + } + + public void setIconUrl(URI iconUrl) { + this.iconUrl = 
iconUrl.toASCIIString(); + } + + public boolean isActive() { + return active; + } + + public void setActive(boolean active) { + this.active = active; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + License license = (License) o; + return active == license.active && + Objects.equals(id, license.id) && + Objects.equals(name, license.name) && + Objects.equals(shortDescription, license.shortDescription) && + Objects.equals(uri, license.uri) && + Objects.equals(iconUrl, license.iconUrl); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, shortDescription, uri, iconUrl, active); + } + + @Override + public String toString() { + return "License{" + + "id=" + id + + ", name='" + name + '\'' + + ", shortDescription='" + shortDescription + '\'' + + ", uri=" + uri + + ", iconUrl=" + iconUrl + + ", active=" + active + + '}'; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java new file mode 100644 index 00000000000..c49ebd9659e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -0,0 +1,119 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.api.FetchException; +import edu.harvard.iq.dataverse.api.RequestBodyException; +import edu.harvard.iq.dataverse.api.UpdateException; +import java.net.URI; +import java.net.URL; +import java.util.List; +import javax.ejb.EJB; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceException; + +/** + * @author Jing Ma + */ +@Stateless +@Named +public class LicenseServiceBean { + + @PersistenceContext + EntityManager 
em; + + @EJB + ActionLogServiceBean actionLogSvc; + + public List listAll() { + return em.createNamedQuery("License.findAll", License.class).getResultList(); + } + + public License getById(long id) throws FetchException { + List licenses = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + if (licenses.isEmpty()) { + throw new FetchException("License with that ID doesn't exist."); + } + return licenses.get(0); + } + + public License getByName(String name) throws FetchException { + List licenses = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + if (licenses.isEmpty()) { + throw new FetchException("License with that name doesn't exist."); + } + return licenses.get(0); + } + + public License save(License license) throws PersistenceException, RequestBodyException { + if (license.getId() == null) { + em.persist(license); + return license; + } else { + throw new RequestBodyException("There shouldn't be an ID in the request body"); + } + } + + public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { + List licenses = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + + if(licenses.size() > 0) { + License license = licenses.get(0); + license.setName(name); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + } else { + throw new UpdateException("There is no existing License with that ID. 
To add a license use POST."); + } + } + + public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { + List licenses = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + + if(licenses.size() > 0) { + License license = licenses.get(0); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + } else { + throw new UpdateException("There is no existing License with that name. To add a license use POST."); + } + } + + public int deleteById(long id) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(Long.toString(id))); + return em.createNamedQuery("License.deleteById") + .setParameter("id", id) + .executeUpdate(); + } + + public int deleteByName(String name) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(name)); + return em.createNamedQuery("License.deleteByName") + .setParameter("name", name) + .executeUpdate(); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b52665a7747..fc7cf73d505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -16,7 +16,8 @@ import edu.harvard.iq.dataverse.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.RoleAssignment; +import edu.harvard.iq.dataverse.License; +import edu.harvard.iq.dataverse.LicenseServiceBean; import 
edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; @@ -42,9 +43,11 @@ import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; +import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -71,7 +74,6 @@ import javax.ws.rs.core.Response.Status; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; @@ -85,8 +87,6 @@ import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -101,7 +101,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import java.util.function.Consumer; import javax.inject.Inject; import javax.json.JsonArray; import javax.persistence.Query; @@ -152,6 +151,8 @@ public class Admin extends AbstractApiBean { ExplicitGroupServiceBean explicitGroupService; @EJB BannerMessageServiceBean bannerMessageService; + @EJB + LicenseServiceBean licenseService; // Make the session available @@ -1920,4 +1921,91 @@ public Response getBannerMessages(@PathParam("id") 
Long id) throws WrappedRespon } + @GET + @Path("/licenses") + public Response getLicenses() { + JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); + for(License license : licenseService.listAll()) { + arrayBuilder.add(JsonPrinter.json(license)); + } + return ok(arrayBuilder); + } + + @GET + @Path("/licenses/id/{id}") + public Response getLicenseById(@PathParam("id") long id) { + try { + License license = licenseService.getById(id); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } + } + + @GET + @Path("/licenses/name/{name}") + public Response getLicenseByName(@PathParam("name") String name) { + try { + License license = licenseService.getByName(name); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } + } + + @POST + @Path("/licenses") + public Response addLicense(License license) { + try { + licenseService.save(license); + return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + } catch (RequestBodyException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch(PersistenceException e) { + return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } + } + + @PUT + @Path("/licenses/id/{id}") + public Response putLicenseById(@PathParam("id") long id, License license) { + try { + licenseService.setById(id, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } + return ok("License with ID " + id + " was replaced."); + } + + @PUT + @Path("/licenses/name/{name}") + public Response putLicenseByName(@PathParam("name") String name, License license) { + try { + licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), 
license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } + return ok("License with name " + name + " was replaced."); + } + + @DELETE + @Path("/licenses/id/{id}") + public Response deleteLicenseById(@PathParam("id") long id) { + int result = licenseService.deleteById(id); + if (result == 1) { + return ok("OK. License with ID " + id + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); + } + + @DELETE + @Path("/licenses/name/{name}") + public Response deleteLicenseByName(@PathParam("name") String name) { + int result = licenseService.deleteByName(name); + if (result == 1) { + return ok("OK. License with name " + name + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with name " + name + " doesn't exist."); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java new file mode 100644 index 00000000000..a9c77c7a4c5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class FetchException extends Exception { + + public FetchException(String message) { + super(message); + } + + public FetchException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java new file mode 100644 index 00000000000..e78c87abdfa --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class RequestBodyException extends Exception { + + public RequestBodyException(String message) { + super(message); + } + + public 
RequestBodyException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java new file mode 100644 index 00000000000..4dbd3ab19a3 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class UpdateException extends Exception { + + public UpdateException(String message) { + super(message); + } + + public UpdateException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index c37efc3178f..7a5334114e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.DataverseContact; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseTheme; +import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; @@ -44,6 +45,7 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; +import java.net.URISyntaxException; import java.util.*; import javax.json.Json; import javax.json.JsonArrayBuilder; @@ -775,6 +777,16 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("id", String.valueOf(aFacet.getId())) // TODO should just be id I think .add("name", aFacet.getDatasetFieldType().getDisplayName()); } + + public static JsonObjectBuilder json(License license) { + return jsonObjectBuilder() + 
.add("id", license.getId()) + .add("name", license.getName()) + .add("shortDescription", license.getShortDescription()) + .add("uri", license.getUri().toString()) + .add("iconUrl", license.getIconUrl().toString()) + .add("active", license.isActive()); + } public static Collector stringsToJsonArray() { return new Collector() { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index f83e9d9c839..a1bcc0b08fd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -786,4 +786,85 @@ public void testBannerMessages(){ assertEquals("OK", status); } + + @Test + public void testLicenses(){ + + String pathToJsonFile = "scripts/api/data/license.json"; + Response addLicenseResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseResponse.prettyPrint(); + String body = addLicenseResponse.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseError.json"; + Response addLicenseErrorResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseErrorResponse.prettyPrint(); + body = addLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response getLicensesResponse = UtilIT.getLicenses(); + getLicensesResponse.prettyPrint(); + body = getLicensesResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseByIdResponse = UtilIT.getLicenseById(1L); + getLicenseByIdResponse.prettyPrint(); + body = getLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseByNameResponse = UtilIT.getLicenseByName(""); + getLicenseByNameResponse.prettyPrint(); + body = getLicenseByNameResponse.getBody().asString(); 
+ status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseErrorResponse = UtilIT.getLicenseById(10L); + getLicenseErrorResponse.prettyPrint(); + body = getLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseByIdResponse = UtilIT.updateLicenseById(pathToJsonFile, 1L); + updateLicenseByIdResponse.prettyPrint(); + body = updateLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseByNameResponse = UtilIT.updateLicenseByName(pathToJsonFile, ""); + updateLicenseByNameResponse.prettyPrint(); + body = updateLicenseByNameResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response updateLicenseErrorResponse = UtilIT.updateLicenseById(pathToJsonFile, 10L); + updateLicenseErrorResponse.prettyPrint(); + body = updateLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response deleteLicenseByIdResponse = UtilIT.deleteLicenseById(1L); + deleteLicenseByIdResponse.prettyPrint(); + body = deleteLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response deleteLicenseByNameResponse = UtilIT.deleteLicenseByName(""); + deleteLicenseByNameResponse.prettyPrint(); + body = deleteLicenseByNameResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response deleteLicenseErrorResponse = UtilIT.deleteLicenseById(10L); + deleteLicenseErrorResponse.prettyPrint(); + body = deleteLicenseErrorResponse.getBody().asString(); + status = 
JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f3ff8f8fae4..c5f4da033d1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2523,5 +2523,70 @@ static String getBannerMessageIdFromResponse(String getBannerMessagesResponse) { return "0"; } + static Response addLicense(String pathToJsonFile) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .post("/api/admin/licenses"); + return addLicenseResponse; + } + + static Response getLicenses() { + + Response getLicensesResponse = given() + .get("/api/admin/licenses"); + return getLicensesResponse; + } + + static Response getLicenseById(Long id) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/id/"+id.toString()); + return getLicenseResponse; + } + + static Response getLicenseByName(String name) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/name/"+name); + return getLicenseResponse; + } + + static Response updateLicenseById(String pathToJsonFile, Long id) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/id/"+id.toString()); + return updateLicenseResponse; + } + + static Response updateLicenseByName(String pathToJsonFile, String name) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/name/"+name); + return updateLicenseResponse; + } + + static Response deleteLicenseById(Long id) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/id/"+id.toString()); + return 
deleteLicenseResponse; + } + + static Response deleteLicenseByName(String name) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/name/"+name); + return deleteLicenseResponse; + } + } From 784fdb0879b167bd3c209dea6447a256357db64d Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 22:25:25 +0200 Subject: [PATCH 011/183] Handle POST request HTTP 409 conflict error and introduce bug fixes. --- .../edu/harvard/iq/dataverse/License.java | 2 ++ .../iq/dataverse/LicenseServiceBean.java | 23 ++++++++++++------- .../edu/harvard/iq/dataverse/api/Admin.java | 11 ++++----- .../iq/dataverse/api/ConflictException.java | 17 ++++++++++++++ 4 files changed, 39 insertions(+), 14 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 29653271e01..60f2805e072 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -23,6 +23,8 @@ query = "SELECT l FROM License l WHERE l.id=:id"), @NamedQuery( name="License.findByName", query = "SELECT l FROM License l WHERE l.name=:name"), + @NamedQuery( name="License.findByNameOrUri", + query = "SELECT l FROM License l WHERE l.name=:name OR l.uri=:uri"), @NamedQuery( name="License.deleteById", query="DELETE FROM License l WHERE l.id=:id"), @NamedQuery( name="License.deleteByName", diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index c49ebd9659e..7e760bb9bf7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -2,11 +2,11 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import 
edu.harvard.iq.dataverse.api.ConflictException; import edu.harvard.iq.dataverse.api.FetchException; import edu.harvard.iq.dataverse.api.RequestBodyException; import edu.harvard.iq.dataverse.api.UpdateException; import java.net.URI; -import java.net.URL; import java.util.List; import javax.ejb.EJB; import javax.ejb.Stateless; @@ -52,13 +52,19 @@ public License getByName(String name) throws FetchException { return licenses.get(0); } - public License save(License license) throws PersistenceException, RequestBodyException { - if (license.getId() == null) { - em.persist(license); - return license; - } else { + public License save(License license) throws RequestBodyException, ConflictException { + if (license.getId() != null) { throw new RequestBodyException("There shouldn't be an ID in the request body"); } + List licenses = em.createNamedQuery("License.findByNameOrUri", License.class) + .setParameter("name", license.getName() ) + .setParameter("uri", license.getUri().toASCIIString() ) + .getResultList(); + if (!licenses.isEmpty()) { + throw new ConflictException("A license with the same URI or name is already present."); + } + em.persist(license); + return license; } public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { @@ -81,13 +87,14 @@ public void setById(long id, String name, String shortDescription, URI uri, URI } } - public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { + public void setByName(String nameArg, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { List licenses = em.createNamedQuery("License.findByName", License.class) - .setParameter("name", name ) + .setParameter("name", nameArg ) .getResultList(); if(licenses.size() > 0) { License license = licenses.get(0); + license.setName(name); license.setShortDescription(shortDescription); license.setUri(uri); 
license.setIconUrl(iconUrl); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..f57fa1e9746 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -47,7 +47,6 @@ import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; -import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -1961,8 +1960,8 @@ public Response addLicense(License license) { return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch(PersistenceException e) { - return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } catch(ConflictException e) { + return error(Response.Status.CONFLICT, e.getMessage()); } } @@ -1979,13 +1978,13 @@ public Response putLicenseById(@PathParam("id") long id, License license) { @PUT @Path("/licenses/name/{name}") - public Response putLicenseByName(@PathParam("name") String name, License license) { + public Response putLicenseByName(@PathParam("name") String nameArg, License license) { try { - licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + licenseService.setByName(nameArg, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); } - return ok("License with name " + name + " was replaced."); + return ok("License with name " + nameArg + " was replaced."); } @DELETE diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java 
b/src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java new file mode 100644 index 00000000000..60e60ed41a4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class ConflictException extends Exception { + + public ConflictException(String message) { + super(message); + } + + public ConflictException(String message, Throwable cause) { + super(message, cause); + } + +} From 8a9974d6ac2f3158fa3b8b7d86a3de4a8f94484c Mon Sep 17 00:00:00 2001 From: jingma Date: Thu, 1 Apr 2021 13:22:55 +0200 Subject: [PATCH 012/183] Change location to correct URL. --- .../java/edu/harvard/iq/dataverse/LicenseServiceBean.java | 3 +-- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 +++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index c49ebd9659e..26689d1c085 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -52,10 +52,9 @@ public License getByName(String name) throws FetchException { return licenses.get(0); } - public License save(License license) throws PersistenceException, RequestBodyException { + public void save(License license) throws PersistenceException, RequestBodyException { if (license.getId() == null) { em.persist(license); - return license; } else { throw new RequestBodyException("There shouldn't be an ID in the request body"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..dea054579a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import 
com.google.common.net.UrlEscapers; import edu.harvard.iq.dataverse.BannerMessage; import edu.harvard.iq.dataverse.BannerMessageServiceBean; import edu.harvard.iq.dataverse.BannerMessageText; @@ -1958,7 +1959,8 @@ public Response getLicenseByName(@PathParam("name") String name) { public Response addLicense(License license) { try { licenseService.save(license); - return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + String location = "/api/admin/licenses/name/" + UrlEscapers.urlFragmentEscaper().escape(license.getName()); + return created(location, Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); } catch(PersistenceException e) { From 67178c964b698d35074ec32fb8906f0347aae3cc Mon Sep 17 00:00:00 2001 From: jingma Date: Fri, 9 Apr 2021 13:29:59 +0200 Subject: [PATCH 013/183] Handle wrong JSON. --- .../java/edu/harvard/iq/dataverse/License.java | 7 +------ .../java/edu/harvard/iq/dataverse/api/Admin.java | 14 ++++++++++---- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 29653271e01..2b78ab46278 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -126,12 +126,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; License license = (License) o; - return active == license.active && - Objects.equals(id, license.id) && - Objects.equals(name, license.name) && - Objects.equals(shortDescription, license.shortDescription) && - Objects.equals(uri, license.uri) && - Objects.equals(iconUrl, license.iconUrl); + return active == license.active && id.equals(license.id) && name.equals(license.name) && shortDescription.equals(license.shortDescription) && 
uri.equals(license.uri) && iconUrl.equals(license.iconUrl); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..3940a35d35c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -44,10 +44,10 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import java.net.URI; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; -import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -1955,14 +1955,20 @@ public Response getLicenseByName(@PathParam("name") String name) { @POST @Path("/licenses") - public Response addLicense(License license) { + public Response addLicense(JsonObject jsonObject) { try { + License license = new License(); + license.setName(jsonObject.getString("name")); + license.setShortDescription(jsonObject.getString("shortDescription")); + license.setUri(new URI(jsonObject.getString("uri"))); + license.setIconUrl(new URI(jsonObject.getString("iconUrl"))); + license.setActive(jsonObject.getBoolean("active")); licenseService.save(license); return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch(PersistenceException e) { - return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } catch (Exception e) { + return error(Response.Status.BAD_REQUEST, "Something went wrong."); } } From c9d33e263d0cc8c82b3d009dabdde18ca110ccf8 Mon Sep 17 00:00:00 2001 From: jingma Date: Fri, 9 Apr 2021 17:43:15 +0200 Subject: [PATCH 014/183] Handle posting same JSON twice. 
--- src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index c49ebd9659e..37fae3501be 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -55,6 +55,7 @@ public License getByName(String name) throws FetchException { public License save(License license) throws PersistenceException, RequestBodyException { if (license.getId() == null) { em.persist(license); + em.flush(); return license; } else { throw new RequestBodyException("There shouldn't be an ID in the request body"); From f6c99ebd9514aa6d2099cba19c1cc8eea4678ff6 Mon Sep 17 00:00:00 2001 From: jingma Date: Fri, 9 Apr 2021 18:01:44 +0200 Subject: [PATCH 015/183] Reformat tabs. --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 3940a35d35c..e9a54c7cb72 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1957,8 +1957,8 @@ public Response getLicenseByName(@PathParam("name") String name) { @Path("/licenses") public Response addLicense(JsonObject jsonObject) { try { - License license = new License(); - license.setName(jsonObject.getString("name")); + License license = new License(); + license.setName(jsonObject.getString("name")); license.setShortDescription(jsonObject.getString("shortDescription")); license.setUri(new URI(jsonObject.getString("uri"))); license.setIconUrl(new URI(jsonObject.getString("iconUrl"))); @@ -1966,8 +1966,8 @@ public Response addLicense(JsonObject jsonObject) { licenseService.save(license); return created("/api/admin/licenses", 
Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { - return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch (Exception e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch (Exception e) { return error(Response.Status.BAD_REQUEST, "Something went wrong."); } } From 878ef6fff6a27510b39cffb058b26afca31301c3 Mon Sep 17 00:00:00 2001 From: Jan van Mansum Date: Wed, 14 Apr 2021 12:09:46 +0200 Subject: [PATCH 016/183] Merged back develop --- .gitignore | 4 + conf/docker-aio/run-test-suite.sh | 2 +- doc/release-notes/4259-java-11.md | 111 --- doc/release-notes/5.4-release-notes.md | 337 +++++++++ doc/release-notes/5.4.1-release-notes.md | 46 ++ .../7084-crawlable-file-access.md | 29 - doc/release-notes/7188-utf-8-filenames.md | 9 - doc/release-notes/7205-orig-file-size.md | 11 - doc/release-notes/7373-solr-upgrade.md | 22 - doc/release-notes/7374-postgresql-update.md | 9 - .../7398-saved-search-performance.md | 30 - doc/release-notes/7409-remove-worldmap.md | 9 - doc/release-notes/7501-guides-updates.md | 5 - doc/release-notes/7502-more-mime-types.md | 7 - ...panded-compound-datasetfield-validation.md | 36 - .../7619-restricted-summary-starts.md | 14 - .../820-non-ascii-chars-in-search.md | 10 - .../source/admin/harvestserver.rst | 8 + .../source/admin/make-data-count.rst | 2 +- .../source/admin/solr-search-index.rst | 4 +- .../source/admin/user-administration.rst | 10 + .../source/api/client-libraries.rst | 7 + doc/sphinx-guides/source/api/dataaccess.rst | 2 +- doc/sphinx-guides/source/api/native-api.rst | 221 +++++- doc/sphinx-guides/source/conf.py | 4 +- doc/sphinx-guides/source/developers/index.rst | 1 + .../source/developers/remote-users.rst | 2 +- .../developers/s3-direct-upload-api.rst | 146 ++++ .../source/developers/workflows.rst | 38 +- .../source/installation/config.rst | 28 +- doc/sphinx-guides/source/user/account.rst | 4 + doc/sphinx-guides/source/versions.rst | 4 +- pom.xml | 
2 +- scripts/api/data/metadatablocks/citation.tsv | 280 +++---- scripts/api/data/role-test-addRole.json | 10 + scripts/dev/dev-rebuild.sh | 3 - .../7687/file_access_flag_update_bug.txt | 11 + .../AbstractGlobalIdServiceBean.java | 4 +- .../dataverse/ControlledVocabularyValue.java | 2 +- .../dataverse/DOIDataCiteRegisterService.java | 4 +- .../harvard/iq/dataverse/DataCitation.java | 3 +- .../edu/harvard/iq/dataverse/DataFile.java | 10 +- .../iq/dataverse/DataFileServiceBean.java | 59 +- .../edu/harvard/iq/dataverse/Dataset.java | 4 + .../edu/harvard/iq/dataverse/DatasetPage.java | 70 +- .../iq/dataverse/DatasetServiceBean.java | 38 +- .../harvard/iq/dataverse/DatasetVersion.java | 19 +- .../edu/harvard/iq/dataverse/Dataverse.java | 2 + .../iq/dataverse/DataverseServiceBean.java | 18 + .../iq/dataverse/DataverseSession.java | 66 +- .../edu/harvard/iq/dataverse/DvObject.java | 2 +- .../iq/dataverse/EditDatafilesPage.java | 88 ++- .../iq/dataverse/EjbDataverseEngine.java | 19 +- .../edu/harvard/iq/dataverse/FilePage.java | 56 +- .../iq/dataverse/HarvestingClientsPage.java | 7 + .../edu/harvard/iq/dataverse/LoginPage.java | 1 - .../harvard/iq/dataverse/MailServiceBean.java | 35 +- .../iq/dataverse/ManagePermissionsPage.java | 2 +- .../iq/dataverse/RoleAssigneeServiceBean.java | 4 +- .../harvard/iq/dataverse/RoleAssignment.java | 2 +- .../iq/dataverse/SendFeedbackDialog.java | 10 +- .../harvard/iq/dataverse/SettingsWrapper.java | 4 +- .../java/edu/harvard/iq/dataverse/Shib.java | 6 +- .../iq/dataverse/UserNotification.java | 2 +- .../harvard/iq/dataverse/UserServiceBean.java | 8 +- .../iq/dataverse/api/AbstractApiBean.java | 35 + .../edu/harvard/iq/dataverse/api/Access.java | 7 +- .../edu/harvard/iq/dataverse/api/Admin.java | 69 +- .../dataverse/api/DatasetFieldServiceApi.java | 26 +- .../harvard/iq/dataverse/api/Datasets.java | 16 +- .../harvard/iq/dataverse/api/FeedbackApi.java | 5 +- .../edu/harvard/iq/dataverse/api/Files.java | 36 +- 
.../edu/harvard/iq/dataverse/api/Roles.java | 33 +- .../edu/harvard/iq/dataverse/api/Users.java | 30 + .../ServiceDocumentManagerImpl.java | 11 +- .../dataverse/api/datadeposit/SwordAuth.java | 1 + .../AuthenticationServiceBean.java | 27 +- .../authorization/DataverseRole.java | 24 +- .../providers/builtin/BuiltinUser.java | 7 + .../providers/builtin/DataverseUserPage.java | 11 +- .../oauth2/OAuth2FirstLoginPage.java | 6 +- .../oauth2/OAuth2LoginBackingBean.java | 2 +- .../users/AuthenticatedUser.java | 32 +- .../authorization/users/GuestUser.java | 2 +- .../dataverse/authorization/users/User.java | 4 + .../iq/dataverse/branding/BrandingUtil.java | 36 +- .../branding/BrandingUtilHelper.java | 28 + .../confirmemail/ConfirmEmailPage.java | 1 - .../confirmemail/ConfirmEmailServiceBean.java | 26 +- .../datasetutility/AddReplaceFileHelper.java | 207 +++--- .../datasetutility/FileReplacePageHelper.java | 9 +- .../datasetutility/OptionalFileParams.java | 26 +- ...ddRoleAssigneesToExplicitGroupCommand.java | 7 + .../command/impl/AssignRoleCommand.java | 8 + .../command/impl/CreateRoleCommand.java | 17 +- .../CuratePublishedDatasetVersionCommand.java | 9 +- .../DeaccessionDatasetVersionCommand.java | 2 +- .../command/impl/DeactivateUserCommand.java | 44 ++ .../engine/command/impl/DeletePidCommand.java | 8 +- .../FinalizeDatasetPublicationCommand.java | 8 +- .../command/impl/GetUserTracesCommand.java | 228 ++++++ .../impl/GrantSuperuserStatusCommand.java | 4 + .../command/impl/MergeInAccountCommand.java | 15 +- .../command/impl/PublishDatasetCommand.java | 2 +- .../command/impl/RedetectFileTypeCommand.java | 2 +- .../impl/UpdateDatasetVersionCommand.java | 89 ++- .../iq/dataverse/export/ExportService.java | 18 +- .../export/HtmlCodeBookExporter.java | 2 +- .../iq/dataverse/export/OAI_OREExporter.java | 2 +- .../dataverse/export/ddi/DdiExportUtil.java | 28 +- .../export/ddi/DdiExportUtilHelper.java | 25 + .../harvest/server/OAIRecordServiceBean.java | 4 +- 
.../server/web/servlet/OAIServlet.java | 6 +- .../passwordreset/PasswordResetData.java | 4 +- .../passwordreset/PasswordResetPage.java | 15 +- .../PasswordResetServiceBean.java | 18 +- .../dataverse/privateurl/PrivateUrlPage.java | 1 - .../iq/dataverse/search/IndexServiceBean.java | 32 +- .../settings/SettingsServiceBean.java | 13 +- .../source/DbSettingConfigSource.java | 11 +- .../iq/dataverse/util/FileMetadataUtil.java | 95 +++ .../harvard/iq/dataverse/util/MailUtil.java | 8 +- .../iq/dataverse/util/SystemConfig.java | 2 +- .../iq/dataverse/util/bagit/BagGenerator.java | 4 +- .../iq/dataverse/util/bagit/OREMap.java | 29 +- .../iq/dataverse/util/bagit/OREMapHelper.java | 25 + .../iq/dataverse/util/json/JsonPrinter.java | 28 +- .../util/json/JsonPrinterHelper.java | 25 + .../workflow/WorkflowServiceBean.java | 115 ++- .../internalspi/AuthorizedExternalStep.java | 26 +- .../internalspi/InternalWorkflowStepSP.java | 2 + .../internalspi/PauseWithMessageStep.java | 48 ++ .../iq/dataverse/workflow/step/Failure.java | 2 +- .../iq/dataverse/workflow/step/Success.java | 28 +- .../dataverse/workflows/WorkflowComment.java | 20 +- .../iq/dataverse/workflows/WorkflowUtil.java | 60 ++ src/main/java/propertyFiles/Bundle.properties | 53 +- .../META-INF/microprofile-aliases.properties | 3 +- .../db/migration/V5.3.0.5__7564-workflow.sql | 2 + .../V5.3.0.6__2419-deactivate-users.sql | 6 + src/main/webapp/dashboard-users.xhtml | 4 +- src/main/webapp/dataverseuser.xhtml | 22 + src/main/webapp/editdatafiles.xhtml | 16 +- .../webapp/file-edit-button-fragment.xhtml | 26 +- .../webapp/file-edit-popup-fragment.xhtml | 20 +- src/main/webapp/file.xhtml | 43 +- src/main/webapp/filesFragment.xhtml | 13 +- src/main/webapp/passwordreset.xhtml | 1 - src/main/webapp/roles-edit.xhtml | 28 +- .../edu/harvard/iq/dataverse/api/AdminIT.java | 57 ++ .../iq/dataverse/api/DeactivateUsersIT.java | 282 +++++++ .../iq/dataverse/api/DeleteUsersIT.java | 701 ++++++++++++++++++ 
.../edu/harvard/iq/dataverse/api/FilesIT.java | 52 +- .../edu/harvard/iq/dataverse/api/RolesIT.java | 101 +++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 89 ++- .../dataverse/branding/BrandingUtilTest.java | 57 +- .../command/impl/CreateRoleCommandTest.java | 41 + .../iq/dataverse/export/DDIExporterTest.java | 8 +- .../export/ddi/DdiExportUtilTest.java | 22 +- .../source/DbSettingConfigSourceTest.java | 2 +- .../iq/dataverse/util/MailUtilTest.java | 59 +- .../dataverse/util/json/JsonPrinterTest.java | 4 +- tests/jenkins/ec2/Jenkinsfile | 4 +- 163 files changed, 4302 insertions(+), 1293 deletions(-) delete mode 100644 doc/release-notes/4259-java-11.md create mode 100644 doc/release-notes/5.4-release-notes.md create mode 100644 doc/release-notes/5.4.1-release-notes.md delete mode 100644 doc/release-notes/7084-crawlable-file-access.md delete mode 100644 doc/release-notes/7188-utf-8-filenames.md delete mode 100644 doc/release-notes/7205-orig-file-size.md delete mode 100644 doc/release-notes/7373-solr-upgrade.md delete mode 100644 doc/release-notes/7374-postgresql-update.md delete mode 100644 doc/release-notes/7398-saved-search-performance.md delete mode 100644 doc/release-notes/7409-remove-worldmap.md delete mode 100644 doc/release-notes/7501-guides-updates.md delete mode 100644 doc/release-notes/7502-more-mime-types.md delete mode 100644 doc/release-notes/7551-expanded-compound-datasetfield-validation.md delete mode 100644 doc/release-notes/7619-restricted-summary-starts.md delete mode 100644 doc/release-notes/820-non-ascii-chars-in-search.md create mode 100644 doc/sphinx-guides/source/developers/s3-direct-upload-api.rst create mode 100644 scripts/api/data/role-test-addRole.json create mode 100644 scripts/issues/7687/file_access_flag_update_bug.txt create mode 100644 src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java create mode 100644 
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java create mode 100644 src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql create mode 100644 src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java diff --git a/.gitignore b/.gitignore index 4d08cfb2257..7be8263f483 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,10 @@ conf/docker-aio/dv/deps/ conf/docker-aio/dv/install/dvinstall.zip # or copy of test data conf/docker-aio/testdata/ + +# docker-aio creates maven/ which reports 86 new files. ignore this wd. +maven/ + scripts/installer/default.config *.pem diff --git a/conf/docker-aio/run-test-suite.sh b/conf/docker-aio/run-test-suite.sh index 2b24f6c90b2..47a4c3b9576 100755 --- a/conf/docker-aio/run-test-suite.sh +++ b/conf/docker-aio/run-test-suite.sh @@ -8,4 +8,4 @@ fi # Please note the "dataverse.test.baseurl" is set to run for "all-in-one" Docker environment. # TODO: Rather than hard-coding the list of "IT" classes here, add a profile to pom.xml. 
-source maven/maven.sh && mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT -Ddataverse.test.baseurl=$dvurl +source maven/maven.sh && mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT -Ddataverse.test.baseurl=$dvurl diff --git a/doc/release-notes/4259-java-11.md b/doc/release-notes/4259-java-11.md deleted file mode 100644 index 1e5ee19230a..00000000000 --- a/doc/release-notes/4259-java-11.md +++ /dev/null @@ -1,111 +0,0 @@ -## Release Highlights - -### Java 11 - -The Dataverse software now runs and requires Java 11. This will provide performance and security enhancements, allows developers to take advantage of new and updated Java features, and moves the project to a platform with better longer term support. This upgrade requires a few extra steps in the release process, outlined below. - -## Notes to Admins - -### Java 11 Upgrade - -There are some things to note and keep in mind regarding the move to Java 11: - -- You should install the JDK/JRE following your usual methods, depending on your operating system. An example of this on a RHEL/CentOS 7 or RHEL/CentOS 8 system is: - - `$ sudo yum remove java-1.8.0-openjdk java-1.8.0-openjdk-devel java-1.8.0-openjdk-headless` - - `$ sudo yum install java-11-openjdk-devel` - - The `remove` command may provide an error message if -headless isn't installed. - -- We targeted and tested Java 11, but 11+ will likely work. Java 11 was targeted because of its long term support. 
-- If you're moving from a Dataverse installation that was previously running Glassfish 4.x (typically this would be Dataverse Software 4.x), you will need to adjust some JVM options in domain.xml as part of the upgrade process. We've provided these optional steps below. These steps are not required if your first installed Dataverse version was running Payara 5.x (typically Dataverse Software 5.x). - -### New JVM and DB Options - -#### New JVM Options - -For installations that were previously running on Dataverse Software 4.x, the following JVM Options have been added in support of Java 11: - -- `[9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED` -- `[9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED` -- `[9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.lang=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.net=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.nio=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.util=ALL-UNNAMED` -- `[9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED` -- `[9|]--add-opens=java.management/sun.management=ALL-UNNAMED` -- `[9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED` -- `[9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED` -- `[9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED` -- `[9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED` -- `[9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED` - -## Upgrade Instructions - -1\. Undeploy the previous version. - -- `/bin/asadmin list-applications` -- `/bin/asadmin undeploy dataverse<-version>` - -(where `` is where Payara 5 is installed, for example: `/usr/local/payara5`) - -2\. Stop Payara and remove the generated directory - -- `service payara stop` -- remove the generated directory: -`rm -rf /payara/domains/domain1/generated` - -3\. 
(only required for installations previously running on Dataverse Software 4.x) - -In domain.xml: - -Remove the following JVM options from the domain> element: - -`-Djava.endorsed.dirs=/usr/local/payara5/glassfish/modules/endorsed:/usr/local/payara5/glassfish/lib/endorsed` - -`-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext` - -Add the following JVM options to the domain> element: - -`[9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED` - -`[9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED` - -`[9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED` - -`[9|]--add-opens=java.base/java.lang=ALL-UNNAMED` - -`[9|]--add-opens=java.base/java.net=ALL-UNNAMED` - -`[9|]--add-opens=java.base/java.nio=ALL-UNNAMED` - -`[9|]--add-opens=java.base/java.util=ALL-UNNAMED` - -`[9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED` - -`[9|]--add-opens=java.management/sun.management=ALL-UNNAMED` - -`[9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED` - -`[9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED` - -`[9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED` - -`[9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED` - -`[9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED` - -4\. Start Payara - -- `service payara start` - -5\. Deploy this version. - -- `/bin/asadmin deploy dataverse-5.4.war` - -6\. Restart payara - -- `service payara stop` -- `service payara start` diff --git a/doc/release-notes/5.4-release-notes.md b/doc/release-notes/5.4-release-notes.md new file mode 100644 index 00000000000..0f34db254fa --- /dev/null +++ b/doc/release-notes/5.4-release-notes.md @@ -0,0 +1,337 @@ +# Dataverse Software 5.4 + +This release brings new features, enhancements, and bug fixes to the Dataverse Software. 
Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. + +## Release Highlights + +### Deactivate Users API, Get User Traces API, Revoke Roles API + +A new API has been added to deactivate users to prevent them from logging in, receiving communications, or otherwise being active in the system. Deactivating a user is an alternative to deleting a user, especially when the latter is not possible due to the amount of interaction the user has had with the Dataverse installation. In order to learn more about a user before deleting, deactivating, or merging, a new "get user traces" API is available that will show objects created, roles, group memberships, and more. Finally, the "remove all roles" button available in the superuser dashboard is now also available via API. + +### New File Access API + +A new API offers *crawlable* access view of the folders and files within a dataset: + +`/api/datasets//dirindex/` + +will output a simple html listing, based on the standard Apache directory index, with Access API download links for individual files, and recursive calls to the API above for sub-folders. Please see the [Native API Guide](https://guides.dataverse.org/en/5.4/api/native-api.html) for more information. + +Using this API, ``wget --recursive`` (or similar crawling client) can be used to download all the files in a dataset, preserving the file names and folder structure; without having to use the download-as-zip API. In addition to being faster (zipping is a relatively resource-intensive operation on the server side), this process can be restarted if interrupted (with ``wget --continue`` or equivalent) - unlike zipped multi-file downloads that always have to start from the beginning. 
+ +On a system that uses S3 with download redirects, the individual file downloads will be handled by S3 directly (with the exception of tabular files), without having to be proxied through the Dataverse application. + +### Restricted Files and DDI "dataDscr" Information (Summary Statistics, Variable Names, Variable Labels) + +In previous releases, DDI "dataDscr" information (summary statistics, variable names, and variable labels, sometimes known as "variable metadata") for tabular files that were ingested successfully were available even if files were restricted. This has been changed in the following ways: + +- At the dataset level, DDI exports no longer show "dataDscr" information for restricted files. There is only one version of this export and it is the version that's suitable for public consumption with the "dataDscr" information hidden for restricted files. +- Similarly, at the dataset level, the DDI HTML Codebook no longer shows "dataDscr" information for restricted files. +- At the file level, "dataDscr" information is no longer publicly available for restricted files. In practice, it was only possible to get this publicly via API (the download/access button was hidden). +- At the file level, "dataDscr" (variable metadata) information can still be downloaded for restricted files if you have access to download the file. + +### Search with Accented Characters + +Many languages include characters that have close analogs in ascii, e.g. (á, à, â, ç, é, è, ê, ë, í, ó, ö, ú, ù, û, ü…). This release changes the default Solr configuration to allow search to match words based on these associations, e.g. a search for Mercè would match the word Merce in a Dataset, and vice versa. This should generally be helpful, but can result in false positives, e.g. "canon" will be found searching for "cañon". + +### Java 11, PostgreSQL 13, and Solr 8 Support/Upgrades + +Several of the core components of the Dataverse Software have been upgraded. 
Specifically: + +- The Dataverse Software now runs on and requires Java 11. This will provide performance and security enhancements, allow developers to take advantage of new and updated Java features, and move the project to a platform with better long-term support. This upgrade requires a few extra steps in the release process, outlined below. +- The Dataverse Software has now been tested with PostgreSQL versions up to 13. Versions 9.6+ will still work, but this update is necessary to support the software beyond PostgreSQL EOL later in 2021. +- The Dataverse Software now runs on Solr 8.8.1, the latest available stable release in the Solr 8.x series. + +### Saved Search Performance Improvements + +A refactoring has greatly improved Saved Search performance in the application. If your installation has multiple, potentially long-running Saved Searches in place, this greatly improves the probability that those search jobs will complete without timing out. + +### Worldmap/Geoconnect Integration Now Obsolete + +As of this release, the Geoconnect/Worldmap integration is no longer available. The Harvard University Worldmap is going through a migration process, and instead of updating this code to work with the new infrastructure, the decision was made to pursue future Geospatial exploration/analysis through other tools, following the External Tools Framework in the Dataverse Software. + +### Guides Updates + +The Dataverse Software Guides have been updated to follow recent changes to how different terms are used across the Dataverse Project. For more information, see Mercè's note to the community: + + + +### Conditionally Required Metadata Fields + +Prior to this release, when defining metadata for compound fields (via their dataset field types), fields could either be optional or required, i.e. if required you must always have (at least one) value for that field.
For example, Author Name being required means you must have at least one Author with a nonempty Author name. + +In order to support more robust metadata (and specifically to resolve #7551), we need to allow a third case: Conditionally Required, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. + +## Major Use Cases + +Newly-supported major use cases in this release include: + +- Dataverse Installation Administrators can now deactivate users using a new API. (Issue #2419, PR #7629) +- Superusers can remove all of a user's assigned roles using a new API. (Issue #2419, PR #7629) +- Superusers can use an API to gather more information about actions a user has taken in the system in order to make an informed decision about whether or not to deactivate or delete a user. (Issue #2419, PR #7629) +- Superusers will now be able to harvest from installations using ISO-639-3 language codes. (Issue #7638, PR #7690) +- Users interacting with the workflow system will receive status messages (Issue #7564, PR #7635) +- Users interacting with prepublication workflows will see speed improvements (Issue #7681, PR #7682) +- API Users will receive Dataverse collection API responses in a deterministic order. (Issue #7634, PR #7708) +- API Users will be able to access a list of crawlable URLs for file download, allowing for faster and easily resumable transfers. (Issue #7084, PR #7579) +- Users will no longer be able to access summary stats for restricted files.
(Issue #7619, PR #7642) +- Users will now see truncated versions of long strings (primarily checksums) throughout the application (Issue #6685, PR #7312) +- Users will now be able to easily copy checksums, API tokens, and private URLs with a single click (Issue #6039, Issue #6685, PR #7539, PR #7312) +- Users uploading data through the Direct Upload API will now be able to use additional checksums (Issue #7600, PR #7602) +- Users searching for content will now be able to search using non-ascii characters. (Issue #820, PR #7378) +- Users can now replace files in draft datasets, a functionality previously only available on published datasets. (Issue #7149, PR #7337) +- Dataverse Installation Administrators can now set subfields of compound fields as **conditionally required**, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. (Issue #7606, PR #7608) + +## Notes for Dataverse Installation Administrators + +### Java 11 Upgrade + +There are some things to note and keep in mind regarding the move to Java 11: + +- You should install the JDK/JRE following your usual methods, depending on your operating system. An example of this on a RHEL/CentOS 7 or RHEL/CentOS 8 system is: + + `$ sudo yum remove java-1.8.0-openjdk java-1.8.0-openjdk-devel java-1.8.0-openjdk-headless` + + `$ sudo yum install java-11-openjdk-devel` + + The `remove` command may provide an error message if -headless isn't installed. + +- We targeted and tested Java 11, but 11+ will likely work. Java 11 was targeted because of its long term support. +- If you're moving from a Dataverse installation that was previously running Glassfish 4.x (typically this would be Dataverse Software 4.x), you will need to adjust some JVM options in domain.xml as part of the upgrade process. 
We've provided these optional steps below. These steps are not required if your first installed Dataverse Software version was running Payara 5.x (typically Dataverse Software 5.x). + +### PostgreSQL Versions Up To 13 Supported + +Up until this release our installation guide "strongly recommended" to install PostgreSQL v. 9.6. While that version is known to be very stable, it is nearing its end-of-life (in Nov. 2021). Dataverse Software has now been tested with versions up to 13. If you decide to upgrade PostgreSQL, the tested and recommended way of doing that is as follows: + +- Export your current database with ``pg_dumpall``; +- Install the new version of PostgreSQL; (make sure it's running on the same port, etc. so that no changes are needed in the Payara configuration) +- Re-import the database with ``psql``, as the postgres user. + +Consult the PostgreSQL upgrade documentation for more information, for example . + +### Solr Upgrade + +With this release we upgrade to the latest available stable release in the Solr 8.x branch. We recommend a fresh installation of Solr 8.8.1 (the index will be empty) followed by an "index all". + +Before you start the "index all", the Dataverse installation will appear to be empty because the search results come from Solr. As indexing progresses, partial results will appear until indexing is complete. + +See for more information. + +### Managing Conditionally Required Metadata Fields + +Prior to this release, when defining metadata for compound fields (via their dataset field types), fields could either be optional or required, i.e. if required you must always have (at least one) value for that field. For example, Author Name being required means you must have at least one Author with a nonempty Author name.
+ +In order to support more robust metadata (and specifically to resolve #7551), we need to allow a third case: Conditionally Required, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. + +This change required some modifications to how "required" is defined in the metadata .tsv files (for compound fields). + +Prior to this release, the value of required for the parent compound field did not matter and so was set to false. + +Going forward: + +- For optional, the parent compound field would be required = false and all children would be required = false. +- For required, the parent compound field would be required = true and at least one child would be required = true. +- For conditionally required, the parent compound field would be required = false and at least one child would be required = true. + +This release updates the citation .tsv file that is distributed with the software for the required parent compound fields (e.g. author), as well as sets Producer Name to be conditionally required. No other distributed .tsv files were updated, as they did not have any required compound values. + +**If you have created any custom metadata .tsv files**, you will need to make the same (type of) changes there. + +### Citation Metadata Block Update + +Due to the changes for Conditionally Required Metadata Fields, and a minor update in the citation metadata block to support extra ISO-639-3 language codes, a block upgrade is required. Instructions are provided below. + +### Retroactively Store Original File Size + +Beginning in Dataverse Software 4.10, the size of the saved original file (for an ingested tabular datafile) was stored in the database. 
For files added before this change, we provide an API that retrieves and permanently stores the sizes for any already existing saved originals. See [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) for more information. + +This was documented as a step in previous release notes, but we are noting it in these release notes to give it more visibility. + +### DB Cleanup for Saved Searches + +A previous version of the Dataverse Software changed the indexing logic so that when a user links a Dataverse collection, its children are also indexed as linked. This means that the children do not need to be separately linked, and in this version we removed the logic that creates a saved search to create those links when a Dataverse collection is linked. + +We recommend cleaning up the db to a) remove these saved searches and b) remove the links for the objects. We can do this via a few queries, which are available in the folder here: + + + +There are four sets of queries available, and they should be run in this order: + +- ss_for_deletion.txt to identify the Saved Searches to be deleted +- delete_ss.txt to delete the Saved Searches identified in the previous query +- dld_for_deletion.txt to identify the linked datasets and Dataverse collections to be deleted +- delete_dld.txt to delete the linked datasets and Dataverse collections identified in the previous queries + +Note: removing these saved searches and links should not affect what users will see as linked due to the aforementioned indexing change. Similarly, not removing these saved searches and links should not affect anything, but is a cleanup of unnecessary rows in the database. + +### DB Cleanup for Superusers Releasing without Version Updates + +In datasets where a superuser has run the Curate command and the update included a change to the fileaccessrequest flag, those changes would not be reflected appropriately in the published version. 
This should be a rare occurrence. + +Instead of an automated solution, we recommend inspecting the affected datasets and correcting the fileaccessrequest flag as appropriate. You can identify the affected datasets this via a query, which is available in the folder here: + + + +## New JVM Options and Database Settings + +For installations that were previously running on Dataverse Software 4.x, a number of new JVM options need to be added as part of the upgrade. The JVM Options are enumerated in the detailed upgrade instructions below. + +Two new Database settings were added: + +- :InstallationName +- :ExportInstallationAsDistributorOnlyWhenNotSet + +For an overview of these new options, please see the +[Installation Guide](https://guides.dataverse.org/en/5.4/installation/config.html#database-settings) + +## Notes for Tool Developers and Integrators + +### UTF-8 Characters and Spaces in File Names + +UTF-8 characters in filenames are now preserved when downloaded. + +Dataverse installations will no longer replace spaces in file names of downloaded files with the + character. If your tool or integration has any special handling around this, you may need to make further adjustments to maintain backwards compatibility while also supporting Dataverse installations on 5.4+. + +Note that this follows a change from 5.1 that only corrected this for installations running with S3 storage. This makes the behavior consistent across installations running all types of file storage. + +## Complete List of Changes + +For the complete list of code changes in this release, see the [5.4 Milestone](https://github.com/IQSS/dataverse/milestone/94?closed=1) in Github. + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. 
+ +## Installation + +If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.4/installation/). + +## Upgrade Instructions + +0\. These instructions assume that you've already successfully upgraded from Dataverse Software 4.x to Dataverse Software 5 following the instructions in the [Dataverse Software 5 Release Notes](https://github.com/IQSS/dataverse/releases/tag/v5.0). After upgrading from the 4.x series to 5.0, you should progress through the other 5.x releases before attempting the upgrade to 5.4. + +1\. Upgrade to Java 11. + +2\. Upgrade to Solr 8.8.1. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands we assume that Payara 5 is installed in `/usr/local/payara5`. If not, adjust as needed. + +`export PAYARA=/usr/local/payara5` + +(or `setenv PAYARA /usr/local/payara5` if you are using a `csh`-like shell) + +3\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin list-applications` +- `$PAYARA/bin/asadmin undeploy dataverse<-version>` + +4\. Stop Payara and remove the generated directory + +- `service payara stop` +- `rm -rf $PAYARA/glassfish/domains/domain1/generated` + +5\. **(only required for installations previously running Dataverse Software 4.x!)** In other words, if you have a domain.xml that **originated under Glassfish 4**, the below JVM Options need to be added. If your Dataverse installation was first installed on the 5.x series, these JVM options should already be present. 
+ +In domain.xml: + +Remove the following JVM options from the `` section: + +`-Djava.endorsed.dirs=/usr/local/payara5/glassfish/modules/endorsed:/usr/local/payara5/glassfish/lib/endorsed` + +`-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext` + +Add the following JVM options to the `` section: + +`[9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED` + +`[9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED` + +`[9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED` + +`[9|]--add-opens=java.base/java.lang=ALL-UNNAMED` + +`[9|]--add-opens=java.base/java.net=ALL-UNNAMED` + +`[9|]--add-opens=java.base/java.nio=ALL-UNNAMED` + +`[9|]--add-opens=java.base/java.util=ALL-UNNAMED` + +`[9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED` + +`[9|]--add-opens=java.management/sun.management=ALL-UNNAMED` + +`[9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED` + +`[9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED` + +`[9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED` + +`[9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED` + +`[9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED` + +6\. Start Payara + +- `service payara start` + +7\. Deploy this version. + +- `$PAYARA/bin/asadmin deploy dataverse-5.4.war` + +8\. Restart payara + +- `service payara stop` +- `service payara start` + +9\. Reload Citation Metadata Block: + + `wget https://github.com/IQSS/dataverse/releases/download/v5.4/citation.tsv` + `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` + +## Additional Release Steps + +1\. Confirm that the schema.xml was updated with the new v5.4 version when you updated Solr. + +2\. Run the script updateSchemaMDB.sh to generate updated solr schema files and preserve any other custom fields in your Solr configuration. 
+ +For example: (modify the path names as needed) + +cd /usr/local/solr-8.8.1/server/solr/collection1/conf +wget https://github.com/IQSS/dataverse/releases/download/v5.4/updateSchemaMDB.sh +chmod +x updateSchemaMDB.sh +./updateSchemaMDB.sh -t . + +See for more information. + +3\. Do a clean reindex by first clearing then indexing. Re-indexing is required to get full-functionality from this change. Please refer to the guides on how to clear and index if needed. + +4\. Upgrade Postgres. + +- Export your current database with ``pg_dumpall``; +- Install the new version of PostgreSQL; (make sure it's running on the same port, etc. so that no changes are needed in the Payara configuration) +- Re-import the database with ``psql``, as the postgres user. + +Consult the PostgreSQL upgrade documentation for more information, for example . + +5\. Retroactively store original file size + +Use the [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) to ensure that the sizes of all original files are stored in the database. + +6\. DB Cleanup for Superusers Releasing without Version Updates + +In datasets where a superuser has run the Curate command and the update included a change to the fileaccessrequest flag, those changes would not be reflected appropriately in the published version. This should be a rare occurrence. + +Instead of an automated solution, we recommend inspecting the affected datasets and correcting the fileaccessrequest flag as appropriate. You can identify the affected datasets this via a query, which is available in the folder here: + + + +7\. (Optional, but recommended) DB Cleanup for Saved Searches and Linked Objects + +Perform the DB Cleanup for Saved Searches and Linked Objects, summarized in the "Notes for Dataverse Installation Administrators" section above. + +8\. Take a backup of the Worldmap links, if any. + +9\. 
(Only required if custom metadata blocks are used in your Dataverse installation) Update any custom metadata blocks: + +In the .tsv for any custom metadata blocks, for any subfield that has a required value of TRUE, find the corresponding parent field and change its required value to TRUE. + +Note: As there is an accompanying Flyway script that updates the values directly in the database, you do not need to reload these metadata .tsv files via API, unless you make additional changes, e.g set some compound fields to be conditionally required. diff --git a/doc/release-notes/5.4.1-release-notes.md b/doc/release-notes/5.4.1-release-notes.md new file mode 100644 index 00000000000..13433ac12d2 --- /dev/null +++ b/doc/release-notes/5.4.1-release-notes.md @@ -0,0 +1,46 @@ +# Dataverse Software 5.4.1 + +This release provides a fix for a regression introduced in 5.4 and implements a few other small changes. Please use 5.4.1 for production deployments instead of 5.4. + +## Release Highlights + +### API Backwards Compatibility Maintained + +The syntax in the example in the [Basic File Access](https://guides.dataverse.org/en/5.4.1/api/dataaccess.html#basic-file-access) section of the Dataverse Software Guides will continue to work. + +## Complete List of Changes + +For the complete list of code changes in this release, see the [5.4.1 Milestone](https://github.com/IQSS/dataverse/milestone/95?closed=1) in Github. + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. + +## Installation + +If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.4.1/installation/). + +## Upgrade Instructions + +0\. 
These instructions assume that you've already successfully upgraded from Dataverse Software 4.x to Dataverse Software 5 following the instructions in the [Dataverse Software 5 Release Notes](https://github.com/IQSS/dataverse/releases/tag/v5.0). After upgrading from the 4.x series to 5.0, you should progress through the other 5.x releases before attempting the upgrade to 5.4.1. + +1\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin list-applications` +- `$PAYARA/bin/asadmin undeploy dataverse<-version>` + +2\. Stop Payara and remove the generated directory + +- `service payara stop` +- `rm -rf $PAYARA/glassfish/domains/domain1/generated` + +3\. Start Payara + +- `service payara start` + +4\. Deploy this version. + +- `$PAYARA/bin/asadmin deploy dataverse-5.4.1.war` + +5\. Restart payara + +- `service payara stop` +- `service payara start` diff --git a/doc/release-notes/7084-crawlable-file-access.md b/doc/release-notes/7084-crawlable-file-access.md deleted file mode 100644 index 0e721728e28..00000000000 --- a/doc/release-notes/7084-crawlable-file-access.md +++ /dev/null @@ -1,29 +0,0 @@ - ## Release Highlights - -### A new file access API - -A new api offers *crawlable* access view of the folders and files within a datset: - -``` - /api/datasets//dirindex/ -``` - -will output a simple html listing, based on the standard Apache -directory index, with Access API download links for individual files, -and recursive calls to the API above for sub-folders. (See the -documentation entry in the guides for more information). - -Using this API, ``wget --recursive`` (or similar crawling client) can -be used to download all the files in a dataset, preserving the file -names and folder structure; without having to use the download-as-zip -API. 
In addition to being faster (zipping is a relatively -resource-intensive operation on the server side), this process can be -restarted if interrupted (with ``wget --continue`` or equivalent) - -unlike zipped multi-file downloads that always have to start from the -beginning. - -On a system that uses S3 with download redirects, the individual file -downloads will be handled by S3 directly, without having to be proxied -through the Dataverse application. - - diff --git a/doc/release-notes/7188-utf-8-filenames.md b/doc/release-notes/7188-utf-8-filenames.md deleted file mode 100644 index 014ea4b4566..00000000000 --- a/doc/release-notes/7188-utf-8-filenames.md +++ /dev/null @@ -1,9 +0,0 @@ -## Notes for Tool Developers and Integrators - -### UTF-8 Characters and Spaces in File Names - -UTF-8 characters in filenames are now preserved when downloaded. - -Dataverse Installations will no longer replace spaces in file names of downloaded files with the + character. If your tool or integration has any special handling around this, you may need to make further adjustments to maintain backwards compatibility while also supporting Dataverse installations on 5.4+. - -Note that this follows a change from 5.1 that only corrected this for installations running with S3 storage. This makes the behavior consistent across installations running all types of file storage. diff --git a/doc/release-notes/7205-orig-file-size.md b/doc/release-notes/7205-orig-file-size.md deleted file mode 100644 index 2f7af23b6f3..00000000000 --- a/doc/release-notes/7205-orig-file-size.md +++ /dev/null @@ -1,11 +0,0 @@ -## Notes to Admins - -Beginning in Dataverse Software 4.10, the size of the saved original file (for an ingested tabular datafile) was stored in the database. For files added before this change, we provide an API that retrieves and permanently stores the sizes for any already existing saved originals. 
See [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) for more information. - -This was documented as a step in previous release notes, but we are noting it in these release notes to give it more visibility. - -## Upgrade Instructions - -X./ Retroactively store original file size - -Use the [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) to ensure that the sizes of all original files are stored in the database. \ No newline at end of file diff --git a/doc/release-notes/7373-solr-upgrade.md b/doc/release-notes/7373-solr-upgrade.md deleted file mode 100644 index 06e7bc62e5a..00000000000 --- a/doc/release-notes/7373-solr-upgrade.md +++ /dev/null @@ -1,22 +0,0 @@ -### Solr Update - -With this release we upgrade to the latest available stable release in the Solr 8.x branch. We recommend a fresh installation of Solr 8.8.1 (the index will be empty) -followed by an "index all". - -Before you start the "index all", Dataverse will appear to be empty because -the search results come from Solr. As indexing progresses, partial results will -appear until indexing is complete. - - -See http://guides.dataverse.org/installation/prerequisites.html#installing-solr - -[for the additional upgrade steps section] - -Run the script updateSchemaMDB.sh to generate updated solr schema files and preserve any other custom fields in your Solr configuration. -For example: (modify the path names as needed) -cd /usr/local/solr-8.8.1/server/solr/collection1/conf -wget https://github.com/IQSS/dataverse/releases/download/v5.4/updateSchemaMDB.sh -chmod +x updateSchemaMDB.sh -./updateSchemaMDB.sh -t . - -See http://guides.dataverse.org/en/5.4/admin/metadatacustomization.html?highlight=updateschemamdb for more information. 
diff --git a/doc/release-notes/7374-postgresql-update.md b/doc/release-notes/7374-postgresql-update.md deleted file mode 100644 index 3ac74ad70a1..00000000000 --- a/doc/release-notes/7374-postgresql-update.md +++ /dev/null @@ -1,9 +0,0 @@ -## Notes for Dataverse Administrators - -Up until this release our installation guide "strongly recommended" to install PostgreSQL v. 9.6. While that version is known to be very stable, it is nearing its end-of-life (in Nov. 2021). Dataverse Software has now been tested with versions up to 13. If you decide to upgrade PostgreSQL, the tested and recommended way of doing that is as follows: - -- Export your current database with ``pg_dumpall``; -- Install the new version of PostgreSQL; (make sure it's running on the same port, etc. so that no changes are needed in the Payara configuration) -- Re-import the database with ``psql``, as the postgres user. - -Consult the PostgreSQL upgrade documentation for more information, for example https://www.postgresql.org/docs/13/upgrading.html#UPGRADING-VIA-PGDUMPALL. diff --git a/doc/release-notes/7398-saved-search-performance.md b/doc/release-notes/7398-saved-search-performance.md deleted file mode 100644 index 4986524ed4f..00000000000 --- a/doc/release-notes/7398-saved-search-performance.md +++ /dev/null @@ -1,30 +0,0 @@ -## Release Highlights - -### Saved Search Performance Improvements - -A refactoring has greatly improved Saved Search performance in the application. If your installation has multiple, potentially long-running Saved Searches in place, this greatly improves the probability that those search jobs will complete without timing out. - -## Notes for Dataverse Installation Administrators - -### DB Cleanup for Saved Searches - -A previous version of dataverse changed the indexing logic so that when a user links a dataverse, its children are also indexed as linked. 
This means that the children do not need to be separately linked, and in this version we removed the logic that creates a saved search to create those links when a dataverse is linked. - -We recommend cleaning up the db to a) remove these saved searches and b) remove the links for the objects. We can do this via a few queries, which are available in the folder here: - -https://github.com/IQSS/dataverse/raw/develop/scripts/issues/7398/ - -There are four sets of queries available, and they should be run in this order: - -- ss_for_deletion.txt to identify the Saved Searches to be deleted -- delete_ss.txt to delete the Saved Searches identified in the previous query -- dld_for_deletion.txt to identify the linked datasets and dataverses to be deleted -- delete_dld.txt to delete the linked datasets and dataverses identified in the previous queries - -Note: removing these saved searches and links should not affect what users will see as linked due to the aforementioned indexing change. Similarly, not removing these saved searches and links should not affect anything, but is a cleanup of unnecessary rows in the database. - -## Additional Upgrade Instructions - -X\. (Optional, but recommended) DB Cleanup - -Perform the DB Cleanup for Saved Searches and Linked Objects, summarized in the "Notes for Dataverse Installation Administrators" section above. diff --git a/doc/release-notes/7409-remove-worldmap.md b/doc/release-notes/7409-remove-worldmap.md deleted file mode 100644 index 3e6ea3ed06e..00000000000 --- a/doc/release-notes/7409-remove-worldmap.md +++ /dev/null @@ -1,9 +0,0 @@ -## Release Highlights - -### Worldmap/Geoconnect Integration Now Obsolete - -As of this release, the Geoconnect/Worldmap integration is no longer available. 
The Harvard University Worldmap is going through a migration process, and instead of updating this code to work with the new infrastructure, the decision was made to pursue future Geospatial exploration/analysis through other tools, following the External Tools Framework in the Dataverse Software. - -## Notes to Admins - -Take a backup of the Worldmap links, if any. \ No newline at end of file diff --git a/doc/release-notes/7501-guides-updates.md b/doc/release-notes/7501-guides-updates.md deleted file mode 100644 index 29bc7557f05..00000000000 --- a/doc/release-notes/7501-guides-updates.md +++ /dev/null @@ -1,5 +0,0 @@ -## Guides Updates - -The Dataverse Guides have been updated to follow recent changes to how different terms are used across the Dataverse Project. For more information, see Mercè's note to the community: - - diff --git a/doc/release-notes/7502-more-mime-types.md b/doc/release-notes/7502-more-mime-types.md deleted file mode 100644 index 0c57dc3e389..00000000000 --- a/doc/release-notes/7502-more-mime-types.md +++ /dev/null @@ -1,7 +0,0 @@ -## Upgrade Steps - -In addition to mapping friendly names to these file types, the types are further mapped to aggregate file types facets on the homepage. A full reindex is required for the facets to be refreshed. - -Kick off full reindex - -http://guides.dataverse.org/en/4.20/admin/solr-search-index.html diff --git a/doc/release-notes/7551-expanded-compound-datasetfield-validation.md b/doc/release-notes/7551-expanded-compound-datasetfield-validation.md deleted file mode 100644 index 19d6d573699..00000000000 --- a/doc/release-notes/7551-expanded-compound-datasetfield-validation.md +++ /dev/null @@ -1,36 +0,0 @@ -## Notes for Dataverse Administrators - -Prior to this release, when defining metadata for compound fields (via their dataset field types), fields could be either be optional or required, i.e. if required you must always have (at least one) value for that field. 
For example, Author Name being required means you must have at least one Author with an nonempty Author name. - -In order to support more robust metadata (and specifically to resolve #7551), we need to allow a third case: Conditionally Required, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. - -This change required some modifications to how "required" is defined in the metadata .tsv files (for compound fields). - -Prior to this release, the value of required for the parent compound field did not matter and so was set to false. - -Going forward: - -- For optional, the parent compound field would be required = false and all children would be required = false. -- For required, the parent compound field would be required = true and at least one child would be required = true. -- For conditionally required, the parent compound field would be required = false and at least one child would be required = true. - -This release updates the citation .tsv file that is distributed with the software for the required parent compound fields (e.g. author), as well as sets Producer Name to be conditionally required. No other distributed .tsv files were updated, as they did not have any required compound values. - -**If you have created any custom metadata .tsv files**, you will need to make the same (type of) changes there. - -### Additional Upgrade Steps - -1. Reload Citation Metadata Block: - - `wget https://github.com/IQSS/dataverse/releases/download/v5.4/citation.tsv` - `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` - -2. 
Update any custom metadata blocks (if used): - - For any subfield that has a required value of TRUE, find the corresponding parent field and change its required value to TRUE. - - Note: As there is an accompanying Flyway script that updates the values directly in the database, you do not need to reload these metadata .tsv files via API, unless you make additional changes, e.g set some compound fields to be conditionally required. - -### Use Case - -Metadata designers can now set subfields of compound fields as **conditionally required**, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. diff --git a/doc/release-notes/7619-restricted-summary-starts.md b/doc/release-notes/7619-restricted-summary-starts.md deleted file mode 100644 index e1c20f3bde2..00000000000 --- a/doc/release-notes/7619-restricted-summary-starts.md +++ /dev/null @@ -1,14 +0,0 @@ -Restricted Files and DDI "dataDscr" Information (Summary Statistics, Variable Names, Variable Labels) - -In previous releases, DDI "dataDscr" information (summary statistics, variable names, and variable labels, sometimes known as "variable metadata") for tabular files that were ingested successfully were available even if files were restricted. This has been changed in the following ways: - -- At the dataset level, DDI exports no longer show "dataDscr" information for restricted files. There is only one version of this export and it is the version that's suitable for public consumption with the "dataDscr" information hidden for restricted files. -- Similarly, at the dataset level, the DDI HTML Codebook no longer shows "dataDscr" information for restricted files. -- At the file level, "dataDscr" information is no longer publicly available for restricted files. 
In practice, it was only possible to get this publicly via API (the download/access button was hidden). -- At the file level, "dataDscr" (variable metadata) information can still be downloaded for restricted files if you have access to download the file. - -After upgrading, you should re-export to replace cached DDI exports with restricted summary stats with DDI exports fit for public consumption: - - curl http://localhost:8080/api/admin/metadata/reExportAll - -For details on this operation, see https://guides.dataverse.org/en/5.4/admin/metadataexport.html diff --git a/doc/release-notes/820-non-ascii-chars-in-search.md b/doc/release-notes/820-non-ascii-chars-in-search.md deleted file mode 100644 index 9e21dd83694..00000000000 --- a/doc/release-notes/820-non-ascii-chars-in-search.md +++ /dev/null @@ -1,10 +0,0 @@ -(review these notes if this gets into the same release as #7645 as the steps are included there - we expect to include this in the same release) - -### Search with non-ascii characters - -Many languages include characters that have close analogs in ascii, e.g. (á, à, â, ç, é, è, ê, ë, í, ó, ö, ú, ù, û, ü…). This release changes the default Solr configuration to allow search to match words based on these associations, e.g. a search for Mercè would match the word Merce in a Dataset, and vice versa. This should generally be helpful, but can result in false positives.,e.g. "canon" will be found searching for "cañon". - -## Upgrade Instructions - -1. You will need to replace or modify your `schema.xml` and restart solr. Re-indexing is required to get full-functionality from this change - the standard instructions for an incremental reindex could be added here. 
- \ No newline at end of file diff --git a/doc/sphinx-guides/source/admin/harvestserver.rst b/doc/sphinx-guides/source/admin/harvestserver.rst index 73b458ee84b..95e68168893 100644 --- a/doc/sphinx-guides/source/admin/harvestserver.rst +++ b/doc/sphinx-guides/source/admin/harvestserver.rst @@ -73,6 +73,14 @@ Some useful examples of search queries to define OAI sets: ``parentId:NNN`` + where NNN is the database id of the Dataverse collection object (consult the Dataverse table of the SQL database used by the application to verify the database id). + + Note that this query does **not** provide datasets that are linked into the specified Dataverse collection. + +- A query to create a set to include the datasets from a specific Dataverse collection including datasets that have been deposited into other Dataverse collections but linked into the specified Dataverse collection: + + ``subtreePaths:"/NNN"`` + where NNN is the database id of the Dataverse collection object (consult the Dataverse table of the SQL database used by the application to verify the database id). - A query to find all the dataset by a certain author: diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst index 7a0840c2216..8d3dbba5127 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -129,7 +129,7 @@ Populate Views and Downloads Nightly Running ``main.py`` to create the SUSHI JSON file and the subsequent calling of the Dataverse Software API to process it should be added as a cron job. -The Dataverse Software provides example scripts that run the steps to process new accesses and uploads and update your Dataverse installation's database (`counter_daily.sh`) and to retrieve citations for all Datasets from DataCite (`counter_weekly.sh`). These scripts should be configured for your environment and can be run manually or as cron jobs. 
+The Dataverse Software provides example scripts that run the steps to process new accesses and uploads and update your Dataverse installation's database :download:`counter_daily.sh <../_static/util/counter_daily.sh>` and to retrieve citations for all Datasets from DataCite :download:`counter_weekly.sh <../_static/util/counter_weekly.sh>`. These scripts should be configured for your environment and can be run manually or as cron jobs. Sending Usage Metrics to the DataCite Hub ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/admin/solr-search-index.rst b/doc/sphinx-guides/source/admin/solr-search-index.rst index adf30b90425..5685672eceb 100644 --- a/doc/sphinx-guides/source/admin/solr-search-index.rst +++ b/doc/sphinx-guides/source/admin/solr-search-index.rst @@ -67,13 +67,13 @@ Reindexing Dataverse Collections Dataverse collections must be referenced by database object ID. If you have direct database access an SQL query such as -``select id from dataverse where alias='datavarsealias';`` +``select id from dataverse where alias='dataversealias';`` should work, or you may click the Dataverse Software's "Edit" menu and look for dataverseId= in the URLs produced by the drop-down. 
Then, to re-index: ``curl http://localhost:8080/api/admin/index/dataverses/135`` -which should return: _{"status":"OK","data":{"message":"starting reindex of dataverse 135"}}_ +which should return: *{"status":"OK","data":{"message":"starting reindex of dataverse 135"}}* Reindexing Datasets ++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/admin/user-administration.rst b/doc/sphinx-guides/source/admin/user-administration.rst index bc9be64775f..867f06bde8e 100644 --- a/doc/sphinx-guides/source/admin/user-administration.rst +++ b/doc/sphinx-guides/source/admin/user-administration.rst @@ -44,6 +44,16 @@ Change User Identifier See :ref:`change-identifier-label` +Delete a User +------------- + +See :ref:`delete-a-user` + +Deactivate a User +----------------- + +See :ref:`deactivate-a-user` + Confirm Email ------------- diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index 62c8046ad3f..af8b2f19015 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -37,3 +37,10 @@ Java https://github.com/IQSS/dataverse-client-java is the official Java library for Dataverse Software APIs. `Richard Adams `_ from `ResearchSpace `_ created and maintains this library. + +Ruby +---- + +https://github.com/libis/dataverse_api is a Ruby gem for Dataverse Software APIs. It is registered as a library on Rubygems (https://rubygems.org/search?query=dataverse). + +The gem is created and maintained by the LIBIS team (https://www.libis.be) at the University of Leuven (https://www.kuleuven.be). 
diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 9bafde1d819..5ee086382c0 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -87,7 +87,7 @@ Basic access URI: Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* :: - GET http://$SERVER/api/access/datafile/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + GET http://$SERVER/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/J8SJZB Parameters: diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 94cb6c720c6..9f422f83c72 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -130,7 +130,7 @@ The fully expanded example above (without environment variables) looks like this Show Contents of a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Lists all the Dataverse collections and datasets directly under a Dataverse collection (direct children only, not recursive) specified by database id or alias. If you pass your API token and have access, unpublished Dataverse collections and datasets will be included in the response. +|CORS| Lists all the Dataverse collections and datasets directly under a Dataverse collection (direct children only, not recursive) specified by database id or alias. If you pass your API token and have access, unpublished Dataverse collections and datasets will be included in the response. The list will be ordered by database id within type of object. That is, all Dataverse collections will be listed first and ordered by database id, then all datasets will be listed ordered by database id. .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. @@ -246,7 +246,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json Where ``roles.json`` looks like this:: @@ -259,6 +259,8 @@ Where ``roles.json`` looks like this:: ] } +.. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection. + .. _list-role-assignments-on-a-dataverse-api: List Role Assignments in a Dataverse Collection @@ -1244,6 +1246,8 @@ When adding a file to a dataset, you can optionally specify the following: - The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset. - Whether or not the file is restricted. +Note that when a Dataverse instance is configured to use S3 storage with direct upload enabled, there is API support to send a file directly to S3. This is more complex and is described in the :doc:`/developers/s3-direct-upload-api` guide. + In the curl example below, all of the above are specified but they are optional. .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. @@ -1520,7 +1524,7 @@ The API will output the list of locks, for example:: If the dataset is not locked (or if there is no lock of the requested type), the API will return an empty list. -The following API end point will lock a Dataset with a lock of specified type: +The following API end point will lock a Dataset with a lock of specified type. Note that this requires “superuser” credentials: .. 
code-block:: bash @@ -1537,7 +1541,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/lock/Ingest -Use the following API to unlock the dataset, by deleting all the locks currently on the dataset: +Use the following API to unlock the dataset, by deleting all the locks currently on the dataset. Note that this requires “superuser” credentials: .. code-block:: bash @@ -1553,7 +1557,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks -Or, to delete a lock of the type specified only: +Or, to delete a lock of the type specified only. Note that this requires “superuser” credentials: .. code-block:: bash @@ -1957,6 +1961,8 @@ Replacing Files Replace an existing file where ``ID`` is the database id of the file to replace or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires the ``file`` to be passed as well as a ``jsonString`` expressing the new metadata. Note that metadata such as description, directoryLabel (File Path) and tags are not carried over from the file being replaced. +Note that when a Dataverse instance is configured to use S3 storage with direct upload enabled, there is API support to send a replacement file directly to S3. This is more complex and is described in the :doc:`/developers/s3-direct-upload-api` guide. + A curl example using an ``ID`` .. code-block:: bash @@ -2418,9 +2424,35 @@ Roles Create a New Role in a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Creates a new role in Dataverse collection object whose Id is ``dataverseIdtf`` (that's an id/alias):: +Creates a new role under Dataverse collection ``id``. Needs a json file with the role description: + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/roles --upload-file roles.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json + +Where ``roles.json`` looks like this:: + + { + "alias": "sys1", + "name": "Restricted System Role", + "description": "A person who may only add datasets.", + "permissions": [ + "AddDataset" + ] + } + +.. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection. - POST http://$SERVER/api/roles?dvo=$dataverseIdtf&key=$apiKey Show Role ~~~~~~~~~ @@ -2432,9 +2464,38 @@ Shows the role with ``id``:: Delete Role ~~~~~~~~~~~ -Deletes the role with ``id``:: +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/roles/$ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/24 + +A curl example using a Role alias ``ALIAS`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=roleAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/roles/:alias?alias=$ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/:alias?alias=roleAlias - DELETE http://$SERVER/api/roles/$id Explicit Groups --------------- @@ -2874,6 +2935,41 @@ Create Global Role Creates a global role in the Dataverse installation. The data POSTed are assumed to be a role JSON. :: POST http://$SERVER/api/admin/roles + +Delete Global Role +~~~~~~~~~~~~~~~~~~ + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/admin/roles/$ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/24 + +A curl example using a Role alias ``ALIAS`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=roleAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/admin/roles/:alias?alias=$ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/:alias?alias=roleAlias List Users ~~~~~~~~~~ @@ -3055,6 +3151,8 @@ Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST http://demo.dataverse.o This action moves account data from jsmith2 into the account jsmith and deletes the account of jsmith2. +Note: User accounts can only be merged if they are either both active or both deactivated. See :ref:`deactivate a user`. + .. 
_change-identifier-label: Change User Identifier @@ -3074,7 +3172,9 @@ Make User a SuperUser Toggles superuser mode on the ``AuthenticatedUser`` whose ``identifier`` (without the ``@`` sign) is passed. :: POST http://$SERVER/api/admin/superuser/$identifier - + +.. _delete-a-user: + Delete a User ~~~~~~~~~~~~~ @@ -3086,9 +3186,104 @@ Deletes an ``AuthenticatedUser`` whose ``id`` is passed. :: DELETE http://$SERVER/api/admin/authenticatedUsers/id/$id -Note: If the user has performed certain actions such as creating or contributing to a Dataset or downloading a file they cannot be deleted. - - +Note: If the user has performed certain actions such as creating or contributing to a Dataset or downloading a file they cannot be deleted. To see where in the database these actions are stored you can use the :ref:`show-user-traces-api` API. If a user cannot be deleted for this reason, you can choose to :ref:`deactivate-a-user`. + +.. _deactivate-a-user: + +Deactivate a User +~~~~~~~~~~~~~~~~~ + +Deactivates a user. A superuser API token is not required but the command will operate using the first superuser it finds. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + +.. code-block:: bash + + export SERVER_URL=http://localhost:8080 + export USERNAME=jdoe + + curl -X POST $SERVER_URL/api/admin/authenticatedUsers/$USERNAME/deactivate + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -X POST http://localhost:8080/api/admin/authenticatedUsers/jdoe/deactivate + +The database ID of the user can be passed instead of the username. + +.. code-block:: bash + + export SERVER_URL=http://localhost:8080 + export USERID=42 + + curl -X POST $SERVER_URL/api/admin/authenticatedUsers/id/$USERID/deactivate + +Note: A primary purpose of most Dataverse installations is to serve an archive. 
In the archival space, there are best practices around the tracking of data access and the tracking of modifications to data and metadata. In support of these key workflows, a simple mechanism to delete users that have performed edit or access actions in the system is not provided. Providing a Deactivate User endpoint for users who have taken certain actions in the system alongside a Delete User endpoint to remove users that haven't taken certain actions in the system is by design. + +This is an irreversible action. There is no option to undeactivate a user. + +Deactivating a user with this endpoint will: + +- Deactivate the user's ability to log in to the Dataverse installation. A message will be shown, stating that the account has been deactivated. The user will not be able to create a new account with the same email address, ORCID, Shibboleth, or other login type. +- Deactivate the user's ability to use the API +- Remove the user's access from all Dataverse collections, datasets and files +- Prevent a user from being assigned any roles +- Cancel any pending file access requests generated by the user +- Remove the user from all groups +- No longer have notifications generated or sent by the Dataverse installation +- Prevent the account from being converted into an OAuth or Shibboleth account. +- Prevent the user from becoming a superuser. + +Deactivating a user with this endpoint will keep: + +- The user's contributions to datasets, including dataset creation, file uploads, and publishing. +- The user's access history to datafiles in the Dataverse installation, including guestbook records. +- The user's account information (specifically name, email, affiliation, and position) + +.. _show-user-traces-api: + +Show User Traces +~~~~~~~~~~~~~~~~ + +Show the traces that the user has left in the system, such as datasets created, guestbooks filled out, etc. This can be useful for understanding why a user cannot be deleted. A superuser API token is required. + +.. 
note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export USERNAME=jdoe + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET $SERVER_URL/api/users/$USERNAME/traces + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X GET https://demo.dataverse.org/api/users/jdoe/traces + +Remove All Roles from a User +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Removes all roles from the user. This is the equivalent of clicking the "Remove All Roles" button in the superuser dashboard. Note that you can preview the roles that will be removed with the :ref:`show-user-traces-api` API. A superuser API token is required. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export USERNAME=jdoe + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/users/$USERNAME/removeRoles + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/users/jdoe/removeRoles List Role Assignments of a Role Assignee ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index dfae614bf14..661285dff73 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -65,9 +65,9 @@ # built documents. # # The short X.Y version. -version = '5.3' +version = '5.4.1' # The full version, including alpha/beta/rc tags. 
-release = '5.3' +release = '5.4.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index 184d8aff85a..eebfd50ba35 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst @@ -34,4 +34,5 @@ Developer Guide selinux big-data-support aux-file-support + s3-direct-upload-api workflows diff --git a/doc/sphinx-guides/source/developers/remote-users.rst b/doc/sphinx-guides/source/developers/remote-users.rst index c85571a55c0..3f8dd836661 100755 --- a/doc/sphinx-guides/source/developers/remote-users.rst +++ b/doc/sphinx-guides/source/developers/remote-users.rst @@ -10,7 +10,7 @@ Shibboleth and OAuth If you are working on anything related to users, please keep in mind that your changes will likely affect Shibboleth and OAuth users. For some background on user accounts in the Dataverse Software, see :ref:`auth-modes` section of Configuration in the Installation Guide. 
-Rather than setting up Shibboleth on your laptop, developers are advised to simply add a value to their database to enable Shibboleth "dev mode" like this: +Rather than setting up Shibboleth on your laptop, developers are advised to add the Shibboleth auth provider (see "Add the Shibboleth Authentication Provider to Your Dataverse Installation" at :doc:`/installation/shibboleth`) and add a value to their database to enable Shibboleth "dev mode" like this: ``curl http://localhost:8080/api/admin/settings/:DebugShibAccountType -X PUT -d RANDOM`` diff --git a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst new file mode 100644 index 00000000000..9f2386facb1 --- /dev/null +++ b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst @@ -0,0 +1,146 @@ +Direct DataFile Upload/Replace API +================================== + +The direct Datafile Upload API is used internally to support direct upload of files to S3 storage and by tools such as the DVUploader. + +Direct upload involves a series of three activities, each involving interacting with the server for a Dataverse installation: + +* Requesting initiation of a transfer from the server +* Use of the pre-signed URL(s) returned in that call to perform an upload/multipart-upload of the file to S3 +* A call to the server to register the file as part of the dataset/replace a file in the dataset or to cancel the transfer + +This API is only enabled when a Dataset is configured with a data store supporting direct S3 upload. +Administrators should be aware that partial transfers, where a client starts uploading the file/parts of the file and does not contact the server to complete/cancel the transfer, will result in data stored in S3 that is not referenced in the Dataverse installation (e.g. should be considered temporary and deleted.) 
+ + +Requesting Direct Upload of a DataFile +-------------------------------------- +To initiate a transfer of a file to S3, make a call to the Dataverse installation indicating the size of the file to upload. The response will include a pre-signed URL(s) that allow the client to transfer the file. Pre-signed URLs include a short-lived token authorizing the action represented by the URL. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV + export SIZE=1000000000 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/uploadurls?persistentId=$PERSISTENT_IDENTIFIER&size=$SIZE" + +The response to this call, assuming direct uploads are enabled, will be one of two forms: + +Single URL: when the file is smaller than the size at which uploads must be broken into multiple parts + +.. code-block:: bash + + { + "status":"OK", + "data":{ + "url":"...", + "partSize":1073741824, + "storageIdentifier":"s3://demo-dataverse-bucket:177883619b8-892ca9f7112e" + } + } + +Multiple URLs: when the file must be uploaded in multiple parts. The part size is set by the Dataverse installation and, for AWS-based storage, range from 5 MB to 5 GB + +.. code-block:: bash + + { + "status":"OK", + "data":{ + "urls":{ + "1":"...", + "2":"...", + "3":"...", + "4":"...", + "5":"..." + } + "abort":"/api/datasets/mpupload?...", + "complete":"/api/datasets/mpupload?..." + "partSize":1073741824, + "storageIdentifier":"s3://demo-dataverse-bucket:177883b000e-49cedef268ac" + } + +In the example responses above, the URLs, which are very long, have been omitted. These URLs reference the S3 server and the specific object identifier that will be used, starting with, for example, https://demo-dataverse-bucket.s3.amazonaws.com/10.5072/FK2FOQPJS/177883b000e-49cedef268ac?... 
+
+The client must then use the URL(s) to PUT the file, or if the file is larger than the specified partSize, parts of the file.
+
+In the single part case, only one call to the supplied URL is required:
+
+.. code-block:: bash
+
+  curl -H 'x-amz-tagging:dv-state=temp' -X PUT -T <filename> "<url>"
+
+
+In the multipart case, the client must send each part and collect the 'eTag' responses from the server. The calls for this are the same as the one for the single part case except that each call should send a slice of the total file, with the last part containing the remaining bytes.
+The responses from the S3 server for these calls will include the 'eTag' for the uploaded part.
+
+To successfully conclude the multipart upload, the client must call the 'complete' URI, sending a json object including the part eTags:
+
+.. code-block:: bash
+
+  curl -X PUT "$SERVER_URL/api/datasets/mpupload?..." -d '{"1":"<eTag1>","2":"<eTag2>","3":"<eTag3>","4":"<eTag4>","5":"<eTag5>"}'
+
+If the client is unable to complete the multipart upload, it should call the abort URL:
+
+.. code-block:: bash
+
+  curl -X DELETE "$SERVER_URL/api/datasets/mpupload?..."
+
+
+Adding the Uploaded file to the Dataset
+---------------------------------------
+
+Once the file exists in the s3 bucket, a final API call is needed to add it to the Dataset. This call is the same call used to upload a file to a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter.
+jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, etc.
For direct uploads, the jsonData object must also include values for:
+
+* "storageIdentifier" - String, as specified in prior calls
+* "fileName" - String
+* "mimeType" - String
+* fixity/checksum: either:
+
+  * "md5Hash" - String with MD5 hash value, or
+  * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings
+
+The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512.
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+  export JSON_DATA="{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}"
+
+  curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
+
+Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method.
+With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
+
+Replacing an existing file in the Dataset
+-----------------------------------------
+
+Once the file exists in the s3 bucket, a final API call is needed to register it as a replacement of an existing file.
This call is the same call used to replace a file to a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter. +jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, whether to allow the mimetype to change (forceReplace=true), etc. For direct uploads, the jsonData object must also include values for: + +* "storageIdentifier" - String, as specified in prior calls +* "fileName" - String +* "mimeType" - String +* fixity/checksum: either: + + * "md5Hash" - String with MD5 hash value, or + * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings + +The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512. +Note that the API call does not validate that the file matches the hash value supplied. If a Dataverse instance is configured to validate file fixity hashes at publication time, a mismatch would be caught at that time and cause publication to fail. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export FILE_IDENTIFIER=5072 + export JSON_DATA="{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'forceReplace':'true', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}" + + curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA" + +Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. 
+With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. diff --git a/doc/sphinx-guides/source/developers/workflows.rst b/doc/sphinx-guides/source/developers/workflows.rst index 0550aa5653d..c982edc08bb 100644 --- a/doc/sphinx-guides/source/developers/workflows.rst +++ b/doc/sphinx-guides/source/developers/workflows.rst @@ -18,12 +18,14 @@ Steps can be internal (say, writing some data to the log) or external. External The external system reports the step result back to the Dataverse installation, by sending a HTTP ``POST`` command to ``api/workflows/{invocation-id}`` with Content-Type: text/plain. The body of the request is passed to the paused step for further processing. +Steps can define messages to send to the log and to users. If defined, the message to users is sent as a user notification (creating an email and showing in the user notification tab) and will show once for the given user if/when they view the relevant dataset page. The latter provides a means for the asynchronous workflow execution to report success or failure analogous to the way the publication and other processes report on the page. + If a step in a workflow fails, the Dataverse installation makes an effort to roll back all the steps that preceded it. Some actions, such as writing to the log, cannot be rolled back. If such an action has a public external effect (e.g. send an EMail to a mailing list) it is advisable to put it in the post-release workflow. .. tip:: For invoking external systems using a REST api, the Dataverse Software's internal step provider offers two steps for sending and receiving customizable HTTP requests. 
- *http/sr* and *http/authExt*, detailed below, with the latter able to use the API to make changes to the dataset being processed. Both lock the dataset to prevent other processes from changing the dataset between the time the step is launched to when the external process responds to the Dataverse instance. + *http/sr* and *http/authExt*, detailed below, with the latter able to use the API to make changes to the dataset being processed. (Both lock the dataset to prevent other processes from changing the dataset between the time the step is launched to when the external process responds to the Dataverse instance.) Administration ~~~~~~~~~~~~~~ @@ -70,6 +72,23 @@ The pause step is intended for testing - the invocationId required to end the pa "stepType":"pause" } +pause/message ++++++++++++++ + +A variant of the pause step that pauses the workflow and allows the external process to send a success/failure message. The workflow is paused until a POST request is sent to ``/api/workflows/{invocation-id}``. +The response in the POST body (Content-type:application/json) should be a json object (the same as for the http/extauth step) containing: +- "status" - can be "success" or "failure" +- "reason" - a message that will be logged +- "message" - a message to send to the user that will be sent as a notification and as a banner on the relevant dataset page. +An unparsable reponse will be considered a Failure that will be logged with no user message. (See the http/authext step for an example POST call) + +.. code:: json + + { + "provider":":internal", + "stepType":"pause/message" + } + http/sr +++++++ @@ -113,11 +132,20 @@ The invocationId must be sent as an 'X-Dataverse-invocationId' HTTP Header or as Once this step completes and responds, the invocationId is invalidated and will not allow further access. The url, content type, and message body can use data from the workflow context, using a simple markup language. This step has specific parameters for rollback. 
-The workflow is restarted when the external system replies with a POST request to ``/api/workflows/{invocation-id}``. +The workflow is restarted when the external system replies with a POST request to ``/api/workflows/{invocation-id}`` (Content-Type: application/json). + The response has is expected to be a json object with three keys: -- "Status" - can be "Success" or "Failure" -- "Reason" - a message that will be logged -- "Message" - a message to send to the user (message sending is not yet implemented). +- "status" - can be "success" or "failure" +- "reason" - a message that will be logged +- "message" - a message to send to the user that will be sent as a notification and as a banner on the relevant dataset page. + +.. code-block:: bash + + export INVOCATION_ID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export MESSAGE={"status":"success", "reason":"Workflow completed in 10 seconds", "message":"An external workflow to virus check your data was successfully run prior to publication of your data"} + + curl -H 'Content-Type:application/json' -X POST -d $MESSAGE "$SERVER_URL/api/workflows/$INVOCATION_ID" .. code:: json diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index bc038489c42..b8571cd06c5 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -594,7 +594,7 @@ You may also want to look at samples at https://github.com/shlake/LibraDataHomep A simpler option to brand and customize your installation is to utilize the Dataverse collection theme, which each Dataverse collection has, that allows you to change colors, add a logo, tagline or website link to the Dataverse collection header section of the page. Those options are outlined in the :doc:`/user/dataverse-management` section of the User Guide. 
Custom Homepage -++++++++++++++++ ++++++++++++++++ The Dataverse Software allows you to use a custom homepage or welcome page in place of the default root Dataverse collection page. This allows for complete control over the look and feel of your installation's homepage. @@ -613,7 +613,7 @@ For more background on what this curl command above is doing, see the "Database ``curl -X DELETE http://localhost:8080/api/admin/settings/:HomePageCustomizationFile`` Custom Navbar Logo -+++++++++++++++++++ +++++++++++++++++++ The Dataverse Software allows you to replace the default Dataverse Project icon and name branding in the navbar with your own custom logo. Note that this logo is separate from the *root dataverse theme* logo. @@ -667,7 +667,7 @@ Internationalization The Dataverse Software is being translated into multiple languages by the Dataverse Project Community! Please see below for how to help with this effort! Adding Multiple Languages to the Dropdown in the Header -++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ The presence of the :ref:`:Languages` database setting adds a dropdown in the header for multiple languages. For example to add English and French to the dropdown: @@ -1465,7 +1465,7 @@ Please note that the authority cannot have a slash ("/") in it. .. _:Shoulder: :Shoulder -++++++++++++ ++++++++++ Out of the box, the DOI shoulder is set to "FK2/" but this is for testing only! When you apply for your DOI namespace, you may have requested a shoulder. The following is only an example and a trailing slash is optional. 
@@ -1509,9 +1509,9 @@ The chart below shows examples from each possible combination of parameters from | | randomString | sequentialNumber | | | | | +=================+===============+==================+ -| **DEPENDENT** | TJCLKP/MLGWJO | 100001/1 | +| **DEPENDENT** | TJCLKP/MLGWJO | 100001/1 | +-----------------+---------------+------------------+ -| **INDEPENDENT** | MLGWJO | 100002 | +| **INDEPENDENT** | MLGWJO | 100002 | +-----------------+---------------+------------------+ As seen above, in cases where ``:IdentifierGenerationStyle`` is set to *sequentialNumber* and ``:DataFilePIDFormat`` is set to *DEPENDENT*, each file within a dataset will be assigned a number *within* that dataset starting with "1". @@ -1536,7 +1536,7 @@ Note: File-level PID registration was added in Dataverse Software 4.9; it could .. _:IndependentHandleService: :IndependentHandleService -+++++++++++++++++++++++++++ ++++++++++++++++++++++++++ Specific for Handle PIDs. Set this setting to true if you want to use a Handle service which is setup to work 'independently' (No communication with the Global Handle Registry). By default this setting is absent and the Dataverse Software assumes it to be false. @@ -2035,7 +2035,7 @@ Set the name of the cloud environment you've integrated with your Dataverse inst .. _:PublicInstall: :PublicInstall -+++++++++++++++++++++ +++++++++++++++ Setting an installation to public will remove the ability to restrict data files or datasets. This functionality of the Dataverse Software will be disabled from your installation. @@ -2224,3 +2224,15 @@ This is the local file system path to be used with the LocalSubmitToArchiveComma +++++++++++++++++++ These are the bucket and project names to be used with the GoogleCloudSubmitToArchiveCommand class. Further information is in the :ref:`Google Cloud Configuration` section above. + +.. 
_:InstallationName: + +:InstallationName ++++++++++++++++++ + +By default, the name of the root Dataverse collection is used as the 'brandname' of the repository, i.e. in emails and metadata exports. If set, :InstallationName overrides this default, allowing the root collection name and brandname to be set independently. (Note that, since metadata export files are cached, they will have to be reexported (see :doc:`/admin/metadataexport`) before they incorporate a change in this setting.) + +:ExportInstallationAsDistributorOnlyWhenNotSet +++++++++++++++++++++++++++++++++++++++++++++++ + +In the DDI metadata exports, the default behavior is to always add the repository (using its brandname - the root collection name or the value of :ref:`:InstallationName <:InstallationName>`) to the stdyDscr/distStmt/distrbtr element. If this setting is true, this will only be done when a Distributor is not already defined in the Dataset metadata. (Note that, since metadata export files are cached, they will have to be reexported (see :doc:`/admin/metadataexport`) before they incorporate a change in this setting.) \ No newline at end of file diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst index 18a44bcb85d..4c343ff85d4 100755 --- a/doc/sphinx-guides/source/user/account.rst +++ b/doc/sphinx-guides/source/user/account.rst @@ -99,6 +99,8 @@ If you already have a Dataverse installation account associated with the Usernam #. Enter your current password for your Dataverse installation account and click "Convert Account". #. Now you have finished converting your Dataverse installation account to use your institutional log in. +Note that you cannot go through this conversion process if your Dataverse installation account associated with the Username/Email log in option has been deactivated. 
+ Convert your Dataverse installation account away from your Institutional Log In ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -132,6 +134,8 @@ If you already have a Dataverse installation account associated with the Usernam #. Enter your username and password for your Dataverse installation account and click "Convert Account". #. Now you have finished converting your Dataverse installation account to use ORCID for log in. +Note that you cannot go through this conversion process if your Dataverse installation account associated with the Username/Email log in option has been deactivated. + Convert your Dataverse installation account away from ORCID for log in ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index f64ed23e776..a74192cd02e 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -6,8 +6,10 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. 
-- 5.3 +- 5.4.1 +- `5.4 `__ +- `5.3 `__ - `5.2 `__ - `5.1.1 `__ - `5.1 `__ diff --git a/pom.xml b/pom.xml index ed7e800f797..f33ba575009 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ --> edu.harvard.iq dataverse - 5.3 + 5.4.1 war dataverse diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index d63390e9866..375a8c67cec 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -137,188 +137,188 @@ authorIdentifierScheme ResearcherID 6 authorIdentifierScheme ScopusID 7 language Abkhaz 0 - language Afar 1 - language Afrikaans 2 - language Akan 3 - language Albanian 4 - language Amharic 5 - language Arabic 6 - language Aragonese 7 - language Armenian 8 - language Assamese 9 - language Avaric 10 - language Avestan 11 - language Aymara 12 - language Azerbaijani 13 - language Bambara 14 - language Bashkir 15 - language Basque 16 - language Belarusian 17 + language Afar 1 aar + language Afrikaans 2 afr + language Akan 3 aka + language Albanian 4 sqi + language Amharic 5 amh + language Arabic 6 ara + language Aragonese 7 arg + language Armenian 8 hye + language Assamese 9 asm + language Avaric 10 ava + language Avestan 11 ave + language Aymara 12 aym + language Azerbaijani 13 aze + language Bambara 14 bam + language Bashkir 15 bak + language Basque 16 eus + language Belarusian 17 bel language Bengali, Bangla 18 language Bihari 19 - language Bislama 20 - language Bosnian 21 - language Breton 22 - language Bulgarian 23 - language Burmese 24 + language Bislama 20 bis + language Bosnian 21 bos + language Breton 22 bre + language Bulgarian 23 bul + language Burmese 24 mya language Catalan,Valencian 25 - language Chamorro 26 - language Chechen 27 + language Chamorro 26 cha + language Chechen 27 che language Chichewa, Chewa, Nyanja 28 - language Chinese 29 - language Chuvash 30 - language Cornish 31 - language Corsican 32 - language Cree 33 - language Croatian 34 - 
language Czech 35 - language Danish 36 + language Chinese 29 zho + language Chuvash 30 chv + language Cornish 31 cor + language Corsican 32 cos + language Cree 33 cre + language Croatian 34 hrv + language Czech 35 ces + language Danish 36 dan language Divehi, Dhivehi, Maldivian 37 - language Dutch 38 - language Dzongkha 39 - language English 40 - language Esperanto 41 - language Estonian 42 - language Ewe 43 - language Faroese 44 - language Fijian 45 - language Finnish 46 - language French 47 + language Dutch 38 nld + language Dzongkha 39 dzo + language English 40 eng + language Esperanto 41 epo + language Estonian 42 est + language Ewe 43 ewe + language Faroese 44 fao + language Fijian 45 fij + language Finnish 46 fin + language French 47 fra language Fula, Fulah, Pulaar, Pular 48 - language Galician 49 - language Georgian 50 - language German 51 + language Galician 49 glg + language Georgian 50 kat + language German 51 deu language Greek (modern) 52 language Guaraní 53 - language Gujarati 54 + language Gujarati 54 guj language Haitian, Haitian Creole 55 - language Hausa 56 + language Hausa 56 hau language Hebrew (modern) 57 - language Herero 58 - language Hindi 59 - language Hiri Motu 60 - language Hungarian 61 + language Herero 58 her + language Hindi 59 hin + language Hiri Motu 60 hmo + language Hungarian 61 hun language Interlingua 62 - language Indonesian 63 - language Interlingue 64 - language Irish 65 - language Igbo 66 - language Inupiaq 67 - language Ido 68 - language Icelandic 69 - language Italian 70 - language Inuktitut 71 - language Japanese 72 - language Javanese 73 + language Indonesian 63 ind + language Interlingue 64 ile + language Irish 65 gle + language Igbo 66 ibo + language Inupiaq 67 ipk + language Ido 68 ido + language Icelandic 69 isl + language Italian 70 ita + language Inuktitut 71 iku + language Japanese 72 jpn + language Javanese 73 jav language Kalaallisut, Greenlandic 74 - language Kannada 75 - language Kanuri 76 - language Kashmiri 
77 - language Kazakh 78 - language Khmer 79 + language Kannada 75 kan + language Kanuri 76 kau + language Kashmiri 77 kas + language Kazakh 78 kaz + language Khmer 79 khm language Kikuyu, Gikuyu 80 - language Kinyarwanda 81 + language Kinyarwanda 81 kin language Kyrgyz 82 - language Komi 83 - language Kongo 84 - language Korean 85 - language Kurdish 86 + language Komi 83 kom + language Kongo 84 kon + language Korean 85 kor + language Kurdish 86 kur language Kwanyama, Kuanyama 87 - language Latin 88 + language Latin 88 lat language Luxembourgish, Letzeburgesch 89 - language Ganda 90 + language Ganda 90 lug language Limburgish, Limburgan, Limburger 91 - language Lingala 92 - language Lao 93 - language Lithuanian 94 - language Luba-Katanga 95 - language Latvian 96 - language Manx 97 - language Macedonian 98 - language Malagasy 99 + language Lingala 92 lin + language Lao 93 lao + language Lithuanian 94 lit + language Luba-Katanga 95 lub + language Latvian 96 lav + language Manx 97 glv + language Macedonian 98 mkd + language Malagasy 99 mlg language Malay 100 - language Malayalam 101 - language Maltese 102 + language Malayalam 101 mal + language Maltese 102 mlt language Māori 103 language Marathi (Marāṭhī) 104 - language Marshallese 105 - language Mixtepec Mixtec 106 - language Mongolian 107 - language Nauru 108 + language Marshallese 105 mah + language Mixtepec Mixtec 106 mix + language Mongolian 107 mon + language Nauru 108 nau language Navajo, Navaho 109 language Northern Ndebele 110 language Nepali 111 - language Ndonga 112 - language Norwegian Bokmål 113 - language Norwegian Nynorsk 114 - language Norwegian 115 + language Ndonga 112 ndo + language Norwegian Bokmål 113 nob + language Norwegian Nynorsk 114 nno + language Norwegian 115 nor language Nuosu 116 language Southern Ndebele 117 language Occitan 118 language Ojibwe, Ojibwa 119 language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 - language Oromo 121 + language Oromo 121 orm language Oriya 122 
language Ossetian, Ossetic 123 language Panjabi, Punjabi 124 language Pāli 125 language Persian (Farsi) 126 - language Polish 127 + language Polish 127 pol language Pashto, Pushto 128 - language Portuguese 129 - language Quechua 130 - language Romansh 131 + language Portuguese 129 por + language Quechua 130 que + language Romansh 131 roh language Kirundi 132 - language Romanian 133 - language Russian 134 + language Romanian 133 ron + language Russian 134 rus language Sanskrit (Saṁskṛta) 135 - language Sardinian 136 - language Sindhi 137 - language Northern Sami 138 - language Samoan 139 - language Sango 140 - language Serbian 141 + language Sardinian 136 srd + language Sindhi 137 snd + language Northern Sami 138 sme + language Samoan 139 smo + language Sango 140 sag + language Serbian 141 srp language Scottish Gaelic, Gaelic 142 - language Shona 143 + language Shona 143 sna language Sinhala, Sinhalese 144 - language Slovak 145 + language Slovak 145 slk language Slovene 146 - language Somali 147 - language Southern Sotho 148 + language Somali 147 som + language Southern Sotho 148 sot language Spanish, Castilian 149 - language Sundanese 150 + language Sundanese 150 sun language Swahili 151 - language Swati 152 - language Swedish 153 - language Tamil 154 - language Telugu 155 - language Tajik 156 - language Thai 157 - language Tigrinya 158 + language Swati 152 ssw + language Swedish 153 swe + language Tamil 154 tam + language Telugu 155 tel + language Tajik 156 tgk + language Thai 157 tha + language Tigrinya 158 tir language Tibetan Standard, Tibetan, Central 159 - language Turkmen 160 - language Tagalog 161 - language Tswana 162 - language Tonga (Tonga Islands) 163 - language Turkish 164 - language Tsonga 165 - language Tatar 166 - language Twi 167 - language Tahitian 168 + language Turkmen 160 tuk + language Tagalog 161 tgl + language Tswana 162 tsn + language Tonga (Tonga Islands) 163 ton + language Turkish 164 tur + language Tsonga 165 tso + language Tatar 166 tat 
+ language Twi 167 twi + language Tahitian 168 tah language Uyghur, Uighur 169 - language Ukrainian 170 - language Urdu 171 - language Uzbek 172 - language Venda 173 - language Vietnamese 174 - language Volapük 175 - language Walloon 176 - language Welsh 177 - language Wolof 178 - language Western Frisian 179 - language Xhosa 180 - language Yiddish 181 - language Yoruba 182 + language Ukrainian 170 ukr + language Urdu 171 urd + language Uzbek 172 uzb + language Venda 173 ven + language Vietnamese 174 vie + language Volapük 175 vol + language Walloon 176 wln + language Welsh 177 cym + language Wolof 178 wol + language Western Frisian 179 fry + language Xhosa 180 xho + language Yiddish 181 yid + language Yoruba 182 yor language Zhuang, Chuang 183 - language Zulu 184 + language Zulu 184 zul language Not applicable 185 diff --git a/scripts/api/data/role-test-addRole.json b/scripts/api/data/role-test-addRole.json new file mode 100644 index 00000000000..7923eed916b --- /dev/null +++ b/scripts/api/data/role-test-addRole.json @@ -0,0 +1,10 @@ +{ + "alias":"testRole", + "name":"Test Role", + "description":"Test Role for adding/deleting.", + "permissions":[ + "ViewUnpublishedDataset", + "ViewUnpublishedDataverse", + "DownloadFile" + ] +} diff --git a/scripts/dev/dev-rebuild.sh b/scripts/dev/dev-rebuild.sh index 0a053c1d91d..f98edc8d938 100755 --- a/scripts/dev/dev-rebuild.sh +++ b/scripts/dev/dev-rebuild.sh @@ -52,9 +52,6 @@ cd scripts/api ./setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out cd ../.. -echo "Loading SQL reference data..." -psql -U $DB_USER $DB_NAME -f scripts/database/reference_data.sql - echo "Creating SQL sequence..." 
psql -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql diff --git a/scripts/issues/7687/file_access_flag_update_bug.txt b/scripts/issues/7687/file_access_flag_update_bug.txt new file mode 100644 index 00000000000..a96b9dfab1b --- /dev/null +++ b/scripts/issues/7687/file_access_flag_update_bug.txt @@ -0,0 +1,11 @@ +-- this query will identify datasets where a superuser has run the Curate command and the update included a change to the fileaccessrequest flag, resulting in the file access request updates not being reflected in the published version + +select da.id, dv.id, ta.id, da.fileaccessrequest, ta.fileaccessrequest, dv.releasetime +from datasetversion dv, termsofuseandaccess ta, dataset da +where dv.dataset_id=da.id +and dv.termsofuseandaccess_id=ta.id +and ta.fileaccessrequest != da.fileaccessrequest +and dv.versionstate='RELEASED' +and dv.releasetime in (select max(releasetime) +from datasetversion +where dataset_id=da.id); \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java index 6305b871cc0..f6cbd01ece0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java @@ -62,7 +62,7 @@ protected Map addBasicMetadata(DvObject dvObjectIn, Map met metadataTemplate.setContacts(dataset.getLatestVersion().getDatasetContacts()); metadataTemplate.setProducers(dataset.getLatestVersion().getDatasetProducers()); metadataTemplate.setTitle(dvObject.getCurrentName()); - String producerString = dataverseService.findRootDataverse().getName(); + String producerString = dataverseService.getRootDataverseName(); if (producerString.isEmpty() || producerString.equals(DatasetField.NA_VALUE) ) { producerString = UNAVAILABLE; } diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java 
b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java index 0b868c73c42..c88fac00c27 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java @@ -100,7 +100,7 @@ public void setDatasetFieldType(DatasetFieldType datasetFieldType) { this.datasetFieldType = datasetFieldType; } - @OneToMany(mappedBy = "controlledVocabularyValue", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + @OneToMany(mappedBy = "controlledVocabularyValue", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}, orphanRemoval=true) private Collection controlledVocabAlternates = new ArrayList<>(); public Collection getControlledVocabAlternates() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java index ba503a18d22..815733f1b7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java @@ -6,6 +6,8 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.AbstractGlobalIdServiceBean.GlobalIdMetadataTemplate; +import edu.harvard.iq.dataverse.branding.BrandingUtil; + import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; @@ -205,7 +207,7 @@ public static String getMetadataFromDvObject(String identifier, Map findByCreatorId(Long creatorId) { + return em.createNamedQuery("DataFile.findByCreatorId").setParameter("creatorId", creatorId).getResultList(); + } + + public List findByReleaseUserId(Long releaseUserId) { + return em.createNamedQuery("DataFile.findByReleaseUserId").setParameter("releaseUserId", releaseUserId).getResultList(); + } + public DataFile findReplacementFile(Long previousFileId){ Query query = em.createQuery("select object(o) from DataFile as o where o.previousDataFileId = 
:previousFileId"); query.setParameter("previousFileId", previousFileId); @@ -1363,55 +1371,6 @@ public boolean isFileClassPackage (DataFile file) { public void populateFileSearchCard(SolrSearchResult solrSearchResult) { solrSearchResult.setEntity(this.findCheapAndEasy(solrSearchResult.getEntityId())); } - - - /** - * Does this file have a replacement. - * Any file should have AT MOST 1 replacement - * - * @param df - * @return - * @throws java.lang.Exception if a DataFile has more than 1 replacement - * or is unpublished and has a replacement. - */ - public boolean hasReplacement(DataFile df) throws Exception{ - - if (df.getId() == null){ - // An unsaved file cannot have a replacment - return false; - } - - - List dataFiles = em.createQuery("select o from DataFile o" + - " WHERE o.previousDataFileId = :dataFileId", DataFile.class) - .setParameter("dataFileId", df.getId()) - .getResultList(); - - if (dataFiles.isEmpty()){ - return false; - } - - if (!df.isReleased()){ - // An unpublished SHOULD NOT have a replacment - String errMsg = "DataFile with id: [" + df.getId() + "] is UNPUBLISHED with a REPLACEMENT. 
This should NOT happen."; - logger.severe(errMsg); - - throw new Exception(errMsg); - } - - - - else if (dataFiles.size() == 1){ - return true; - }else{ - - String errMsg = "DataFile with id: [" + df.getId() + "] has more than one replacment!"; - logger.severe(errMsg); - - throw new Exception(errMsg); - } - - } public boolean hasBeenDeleted(DataFile df){ Dataset dataset = df.getOwner(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 4cf95dda250..cd40e76a304 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -53,6 +53,10 @@ query = "SELECT o.id FROM Dataset o WHERE o.owner.id=:ownerId"), @NamedQuery(name = "Dataset.findByOwnerId", query = "SELECT o FROM Dataset o WHERE o.owner.id=:ownerId"), + @NamedQuery(name = "Dataset.findByCreatorId", + query = "SELECT o FROM Dataset o WHERE o.creator.id=:creatorId"), + @NamedQuery(name = "Dataset.findByReleaseUserId", + query = "SELECT o FROM Dataset o WHERE o.releaseUser.id=:releaseUserId"), }) /* diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 5f596d1b556..b9cc26ab89b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -54,6 +54,8 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.workflows.WorkflowComment; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -1280,6 +1282,10 @@ public boolean canUpdateDataset() { public boolean canPublishDataverse() { return permissionsWrapper.canIssuePublishDataverseCommand(dataset.getOwner()); } + + public boolean canPublishDataset(){ + return permissionsWrapper.canIssuePublishDatasetCommand(dataset); + } public boolean canViewUnpublishedDataset() { return 
permissionsWrapper.canViewUnpublishedDataset( dvRequestService.getDataverseRequest(), dataset); @@ -1856,7 +1862,9 @@ private String init(boolean initFull) { MakeDataCountEntry entry = new MakeDataCountEntry(FacesContext.getCurrentInstance(), dvRequestService, workingVersion); mdcLogService.logEntry(entry); } - + displayWorkflowComments(); + + if (initFull) { // init the list of FileMetadatas if (workingVersion.isDraft() && canUpdateDataset()) { @@ -2016,6 +2024,22 @@ private String init(boolean initFull) { return null; } + private void displayWorkflowComments() { + List comments = workingVersion.getWorkflowComments(); + for (WorkflowComment wfc : comments) { + if (wfc.isToBeShown() && wfc.getDatasetVersion().equals(workingVersion) + && wfc.getAuthenticatedUser().equals(session.getUser())) { + if (wfc.getType() == WorkflowComment.Type.WORKFLOW_SUCCESS) { + JsfHelper.addSuccessMessage(wfc.getMessage()); + + } else if (wfc.getType() == WorkflowComment.Type.WORKFLOW_FAILURE) { + JsfHelper.addWarningMessage(wfc.getMessage()); + } + datasetService.markWorkflowCommentAsRead(wfc); + } + } + } + private void displayLockInfo(Dataset dataset) { // Various info messages, when the dataset is locked (for various reasons): if (dataset.isLocked() && canUpdateDataset()) { @@ -2055,9 +2079,8 @@ private void displayLockInfo(Dataset dataset) { BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.contactSupport")); } if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress)) { - String rootDataverseName = dataverseService.findRootDataverse().getName(); JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"), - BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null, rootDataverseName)))); + BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); } } @@ 
-2703,6 +2726,8 @@ public String refresh() { } } + displayWorkflowComments(); + return ""; } @@ -3347,6 +3372,8 @@ public String save() { return ""; } + + // Use the Create or Update command to save the dataset: Command cmd; Map deleteStorageLocations = null; @@ -3371,8 +3398,7 @@ public String save() { if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress) || lockTest.isLockedFor(DatasetLock.Reason.EditInProgress)) { logger.log(Level.INFO, "Couldn''t save dataset: {0}", "It is locked." + ""); - String rootDataverseName = dataverseService.findRootDataverse().getName(); - JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null, rootDataverseName)))); + JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); return returnToDraftVersion(); } } @@ -3450,7 +3476,7 @@ public String save() { } if (addFilesSuccess && dataset.getFiles().size() > 0) { if (nNewFiles == dataset.getFiles().size()) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } else { String partialSuccessMessage = BundleUtil.getStringFromBundle("dataset.message.createSuccess.partialSuccessSavingFiles"); partialSuccessMessage = partialSuccessMessage.replace("{0}", "" + dataset.getFiles().size() + ""); @@ -3460,26 +3486,26 @@ public String save() { } else { JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess.failedToSaveFiles")); } - } else { - 
JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess")); + } else { + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } } if (editMode.equals(EditMode.METADATA)) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.metadataSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.metadataSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } if (editMode.equals(EditMode.LICENSE)) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.termsSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.termsSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } if (editMode.equals(EditMode.FILE)) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.filesSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.filesSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } } else { // must have been a bulk file update or delete: if (bulkFileDeleteInProgress) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileDeleteSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileDeleteSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } else { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileUpdateSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileUpdateSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } } @@ -4200,13 +4226,13 @@ public String getTabularDataFileURL(Long fileid) { 
public List< String[]> getExporters(){ List retList = new ArrayList<>(); String myHostURL = getDataverseSiteUrl(); - for (String [] provider : ExportService.getInstance(settingsService).getExportersLabels() ){ + for (String [] provider : ExportService.getInstance().getExportersLabels() ){ String formatName = provider[1]; String formatDisplayName = provider[0]; Exporter exporter = null; try { - exporter = ExportService.getInstance(settingsService).getExporter(formatName); + exporter = ExportService.getInstance().getExporter(formatName); } catch (ExportException ex) { exporter = null; } @@ -5150,8 +5176,7 @@ public List getDatasetAuthors() { * @return the publisher of the version */ public String getPublisher() { - assert (null != workingVersion); - return workingVersion.getRootDataverseNameforCitation(); + return dataverseService.getRootDataverseName(); } public void downloadRsyncScript() { @@ -5296,7 +5321,7 @@ public boolean isThisLatestReleasedVersion() { public String getJsonLd() { if (isThisLatestReleasedVersion()) { - ExportService instance = ExportService.getInstance(settingsService); + ExportService instance = ExportService.getInstance(); String jsonLd = instance.getExportAsString(dataset, SchemaDotOrgExporter.NAME); if (jsonLd != null) { logger.fine("Returning cached schema.org JSON-LD."); @@ -5462,4 +5487,13 @@ public boolean isFileAccessRequest() { public void setFileAccessRequest(boolean fileAccessRequest) { this.fileAccessRequest = fileAccessRequest; } + + // wrapper method to see if the file has been deleted (or replaced) in the current version + public boolean isFileDeleted (DataFile dataFile) { + if (dataFile.getDeleted() == null) { + dataFile.setDeleted(datafileService.hasBeenDeleted(dataFile)); + } + + return dataFile.getDeleted(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index c1efe119fd2..224ccfd22f3 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflows.WorkflowComment; import java.io.File; @@ -150,6 +151,14 @@ private List findIdsByOwnerId(Long ownerId, boolean onlyPublished) { } } + public List findByCreatorId(Long creatorId) { + return em.createNamedQuery("Dataset.findByCreatorId").setParameter("creatorId", creatorId).getResultList(); + } + + public List findByReleaseUserId(Long releaseUserId) { + return em.createNamedQuery("Dataset.findByReleaseUserId").setParameter("releaseUserId", releaseUserId).getResultList(); + } + public List filterByPidQuery(String filterQuery) { // finds only exact matches Dataset ds = findByGlobalId(filterQuery); @@ -719,6 +728,27 @@ public void exportAllDatasets(boolean forceReExport) { } + //get a string to add to save success message + //depends on dataset state and user privleges + public String getReminderString(Dataset dataset, boolean canPublishDataset) { + + if(!dataset.isReleased() ){ + //messages for draft state. 
+ if (canPublishDataset){ + return BundleUtil.getStringFromBundle("dataset.message.publish.remind.draft"); + } else { + return BundleUtil.getStringFromBundle("dataset.message.submit.remind.draft"); + } + } else{ + //messages for new version - post-publish + if (canPublishDataset){ + return BundleUtil.getStringFromBundle("dataset.message.publish.remind.version"); + } else { + return BundleUtil.getStringFromBundle("dataset.message.submit.remind.version"); + } + } + } + public void updateLastExportTimeStamp(Long datasetId) { Date now = new Date(); em.createNativeQuery("UPDATE Dataset SET lastExportTime='"+now.toString()+"' WHERE id="+datasetId).executeUpdate(); @@ -787,6 +817,12 @@ public WorkflowComment addWorkflowComment(WorkflowComment workflowComment) { return workflowComment; } + public void markWorkflowCommentAsRead(WorkflowComment workflowComment) { + workflowComment.setToBeShown(false); + em.merge(workflowComment); + } + + /** * This method used to throw CommandException, which was pretty pointless * seeing how it's called asynchronously. 
As of v5.0 any CommanExceptiom @@ -963,7 +999,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatas // (i.e., the metadata exports): StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - for (String[] exportProvider : ExportService.getInstance(settingsService).getExportersLabels()) { + for (String[] exportProvider : ExportService.getInstance().getExportersLabels()) { String exportLabel = "export_" + exportProvider[1] + ".cached"; try { total += datasetSIO.getAuxObjectSize(exportLabel); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 067cba23d04..d28ce5175d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -1391,21 +1391,6 @@ public String getDistributorName() { } return null; } - - // TODO: Consider renaming this method since it's also used for getting the "provider" for Schema.org JSON-LD. 
- public String getRootDataverseNameforCitation(){ - //Get root dataverse name for Citation - Dataverse root = this.getDataset().getOwner(); - while (root.getOwner() != null) { - root = root.getOwner(); - } - String rootDataverseName = root.getName(); - if (!StringUtil.isEmpty(rootDataverseName)) { - return rootDataverseName; - } else { - return ""; - } - } public List getDatasetDistributors() { //todo get distributors from DatasetfieldValues @@ -1888,11 +1873,11 @@ public String getJsonLd() { job.add("includedInDataCatalog", Json.createObjectBuilder() .add("@type", "DataCatalog") - .add("name", this.getRootDataverseNameforCitation()) + .add("name", BrandingUtil.getRootDataverseCollectionName()) .add("url", SystemConfig.getDataverseSiteUrlStatic()) ); - String installationBrandName = BrandingUtil.getInstallationBrandName(getRootDataverseNameforCitation()); + String installationBrandName = BrandingUtil.getInstallationBrandName(); /** * Both "publisher" and "provider" are included but they have the same * values. Some services seem to prefer one over the other. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 5aab9ef9a9e..b46333a4287 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -50,6 +50,8 @@ @NamedQuery(name = "Dataverse.findRoot", query = "SELECT d FROM Dataverse d where d.owner.id=null"), @NamedQuery(name = "Dataverse.findByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias)=:alias"), @NamedQuery(name = "Dataverse.findByOwnerId", query="select object(o) from Dataverse as o where o.owner.id =:ownerId order by o.name"), + @NamedQuery(name = "Dataverse.findByCreatorId", query="select object(o) from Dataverse as o where o.creator.id =:creatorId order by o.name"), + @NamedQuery(name = "Dataverse.findByReleaseUserId", query="select object(o) from Dataverse as o where o.releaseUser.id =:releaseUserId order by o.name"), @NamedQuery(name = "Dataverse.filterByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias) LIKE :alias order by dv.alias"), @NamedQuery(name = "Dataverse.filterByAliasNameAffiliation", query="SELECT dv FROM Dataverse dv WHERE (LOWER(dv.alias) LIKE :alias) OR (LOWER(dv.name) LIKE :name) OR (LOWER(dv.affiliation) LIKE :affiliation) order by dv.alias"), @NamedQuery(name = "Dataverse.filterByName", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.name) LIKE :name order by dv.alias") diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 96963bb9cb4..520c3ff14df 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.search.SolrSearchResult; +import 
edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.File; import java.io.IOException; @@ -172,6 +173,14 @@ public List findDataverseIdsForIndexing(boolean skipIndexed) { } + public List findByCreatorId(Long creatorId) { + return em.createNamedQuery("Dataverse.findByCreatorId").setParameter("creatorId", creatorId).getResultList(); + } + + public List findByReleaseUserId(Long releaseUserId) { + return em.createNamedQuery("Dataverse.findByReleaseUserId").setParameter("releaseUserId", releaseUserId).getResultList(); + } + public List findByOwnerId(Long ownerId) { return em.createNamedQuery("Dataverse.findByOwnerId").setParameter("ownerId", ownerId).getResultList(); } @@ -195,6 +204,15 @@ public Dataverse findRootDataverse() { return em.createNamedQuery("Dataverse.findRoot", Dataverse.class).getSingleResult(); } + + //Similarly - if the above throws that exception, do we need to catch it here? + //ToDo - consider caching? + public String getRootDataverseName() { + Dataverse root = findRootDataverse(); + String rootDataverseName=root.getName(); + return StringUtil.isEmpty(rootDataverseName) ? 
"" : rootDataverseName; + } + public List findAllPublishedByOwnerId(Long ownerId) { List retVal = new ArrayList<>(); List previousLevel = findPublishedByOwnerId(ownerId); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java index 2a2b02c5b18..c6016939c08 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java @@ -4,9 +4,12 @@ import edu.harvard.iq.dataverse.PermissionServiceBean.StaticPermissionQuery; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.JsfHelper; import edu.harvard.iq.dataverse.util.SessionUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; @@ -54,7 +57,10 @@ public class DataverseSession implements Serializable{ @EJB BannerMessageServiceBean bannerMessageService; - + + @EJB + AuthenticationServiceBean authenticationService; + private static final Logger logger = Logger.getLogger(DataverseSession.class.getCanonicalName()); private boolean statusDismissed = false; @@ -84,19 +90,57 @@ public void setDismissedMessages(List dismissedMessages) { private Boolean debug; public User getUser() { + return getUser(false); + } + + /** + * For performance reasons, we only lookup the authenticated user again (to + * check if it has been deleted or deactivated, for example) when we have + * to. 
+ * + * @param lookupAuthenticatedUserAgain A boolean to indicate if we should go + * to the database again to lookup the user to get the latest values that + * may have been updated outside the session. + */ + public User getUser(boolean lookupAuthenticatedUserAgain) { if ( user == null ) { user = GuestUser.get(); } - + if (lookupAuthenticatedUserAgain && user instanceof AuthenticatedUser) { + AuthenticatedUser auFromSession = (AuthenticatedUser) user; + AuthenticatedUser auFreshLookup = authenticationService.findByID(auFromSession.getId()); + if (auFreshLookup == null) { + logger.fine("getUser found user no longer exists (was deleted). Returning GuestUser."); + user = GuestUser.get(); + } else { + if (auFreshLookup.isDeactivated()) { + logger.fine("getUser found user is deactivated. Returning GuestUser."); + user = GuestUser.get(); + } + } + } return user; } + /** + * Sets the user and configures the session timeout. + */ public void setUser(User aUser) { - + // We check for deactivated status here in "setUser" to ensure a common user + // experience across Builtin, Shib, OAuth, and OIDC users. + // If we want a different user experience for Builtin users, we can + // modify getUpdateAuthenticatedUser in AuthenticationServiceBean + // (and probably other places). + if (aUser instanceof AuthenticatedUser && aUser.isDeactivated()) { + logger.info("Login attempt by deactivated user " + aUser.getIdentifier() + "."); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("deactivated.error")); + return; + } FacesContext context = FacesContext.getCurrentInstance(); // Log the login/logout and Change the session id if we're using the UI and have // a session, versus an API call with no session - (i.e. /admin/submitToArchive() // which sets the user in the session to pass it through to the underlying command) + // TODO: reformat to remove tabs etc. if(context != null) { logSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.SessionManagement,(aUser==null) ? 
"logout" : "login") @@ -104,6 +148,12 @@ public void setUser(User aUser) { //#3254 - change session id when user changes SessionUtil.changeSessionId((HttpServletRequest) context.getExternalContext().getRequest()); + HttpSession httpSession = (HttpSession) context.getExternalContext().getSession(false); + if (httpSession != null) { + // Configure session timeout. + logger.fine("jsession: " + httpSession.getId() + " setting the lifespan of the session to " + systemConfig.getLoginSessionTimeout() + " minutes"); + httpSession.setMaxInactiveInterval(systemConfig.getLoginSessionTimeout() * 60); // session timeout, in seconds + } } this.user = aUser; } @@ -208,15 +258,5 @@ public void dismissMessage(BannerMessage message){ } } - - public void configureSessionTimeout() { - HttpSession httpSession = (HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false); - - if (httpSession != null) { - logger.fine("jsession: "+httpSession.getId()+" setting the lifespan of the session to " + systemConfig.getLoginSessionTimeout() + " minutes"); - httpSession.setMaxInactiveInterval(systemConfig.getLoginSessionTimeout() * 60); // session timeout, in seconds - } - - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index f1041303fdd..09a2ef85893 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -33,7 +33,7 @@ @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority", query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol"), @NamedQuery(name = "DvObject.findByOwnerId", - query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId"), + query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId order by o.dtype desc, o.id"), @NamedQuery(name = "DvObject.findByAuthenticatedUserId", query = "SELECT o FROM DvObject o WHERE o.creator.id=:ownerId or 
o.releaseUser.id=:releaseUserId") }) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 89b21d84856..aef3f7d3446 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -36,6 +36,8 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; +import edu.harvard.iq.dataverse.util.FileMetadataUtil; + import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import java.io.File; import java.io.FileOutputStream; @@ -90,7 +92,11 @@ public class EditDatafilesPage implements java.io.Serializable { public enum FileEditMode { - EDIT, UPLOAD, CREATE, SINGLE, SINGLE_REPLACE + EDIT, UPLOAD, CREATE, REPLACE + }; + + public enum Referrer { + DATASET, FILE }; @EJB @@ -136,6 +142,7 @@ public enum FileEditMode { private String selectedFileIdsString = null; private FileEditMode mode; + private Referrer referrer = Referrer.DATASET; private List selectedFileIdsList = new ArrayList<>(); private List fileMetadatas = new ArrayList<>();; @@ -205,6 +212,16 @@ public FileEditMode getMode() { public void setMode(FileEditMode mode) { this.mode = mode; } + + public Referrer getReferrer() { + return referrer; + } + + public void setReferrer(Referrer referrer) { + this.referrer = referrer; + } + + public List getFileMetadatas() { @@ -501,9 +518,9 @@ public String init() { // ------------------------------------------- // Is this a file replacement operation? 
// ------------------------------------------- - if (mode == FileEditMode.SINGLE_REPLACE){ + if (mode == FileEditMode.REPLACE){ /* - http://localhost:8080/editdatafiles.xhtml?mode=SINGLE_REPLACE&datasetId=26&fid=726 + http://localhost:8080/editdatafiles.xhtml?mode=REPLACE&datasetId=26&fid=726 */ DataFile fileToReplace = loadFileToReplace(); if (fileToReplace == null){ @@ -525,7 +542,7 @@ public String init() { populateFileMetadatas(); singleFile = getFileToReplace(); - }else if (mode == FileEditMode.EDIT || mode == FileEditMode.SINGLE) { + }else if (mode == FileEditMode.EDIT) { if (selectedFileIdsString != null) { String[] ids = selectedFileIdsString.split(","); @@ -539,7 +556,7 @@ public String init() { test = null; } if (test != null) { - if (mode == FileEditMode.SINGLE) { + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer) { singleFile = datafileService.find(test); } selectedFileIdsList.add(test); @@ -567,7 +584,7 @@ public String init() { return permissionsWrapper.notFound(); } - if (FileEditMode.SINGLE == mode){ + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer){ if (fileMetadatas.get(0).getDatasetVersion().getId() != null){ versionString = "DRAFT"; } @@ -796,8 +813,12 @@ private void deleteFiles(List filesForDelete) { // and let the delete be handled in the command (by adding it to the // filesToBeDeleted list): + // ToDo - FileMetadataUtil.removeFileMetadataFromList should handle these two + // removes so they could be put after this if clause and the else clause could + // be removed. dataset.getEditVersion().getFileMetadatas().remove(markedForDelete); fileMetadatas.remove(markedForDelete); + filesToBeDeleted.add(markedForDelete); } else { logger.fine("this is a brand-new (unsaved) filemetadata"); @@ -810,9 +831,9 @@ private void deleteFiles(List filesForDelete) { // fileMetadatas list. (but doing both just adds a no-op and won't cause an // error) // 1. 
delete the filemetadata from the local display list: - removeFileMetadataFromList(fileMetadatas, markedForDelete); + FileMetadataUtil.removeFileMetadataFromList(fileMetadatas, markedForDelete); // 2. delete the filemetadata from the version: - removeFileMetadataFromList(dataset.getEditVersion().getFileMetadatas(), markedForDelete); + FileMetadataUtil.removeFileMetadataFromList(dataset.getEditVersion().getFileMetadatas(), markedForDelete); } if (markedForDelete.getDataFile().getId() == null) { @@ -821,8 +842,8 @@ private void deleteFiles(List filesForDelete) { // removing it from the fileMetadatas lists (above), we also remove it from // the newFiles list and the dataset's files, so it never gets saved. - removeDataFileFromList(dataset.getFiles(), markedForDelete.getDataFile()); - removeDataFileFromList(newFiles, markedForDelete.getDataFile()); + FileMetadataUtil.removeDataFileFromList(dataset.getFiles(), markedForDelete.getDataFile()); + FileMetadataUtil.removeDataFileFromList(newFiles, markedForDelete.getDataFile()); FileUtil.deleteTempFile(markedForDelete.getDataFile(), dataset, ingestService); // Also remove checksum from the list of newly uploaded checksums (perhaps odd // to delete and then try uploading the same file again, but it seems like it @@ -851,28 +872,6 @@ private void deleteFiles(List filesForDelete) { } - private void removeFileMetadataFromList(List fmds, FileMetadata fmToDelete) { - Iterator fmit = fmds.iterator(); - while (fmit.hasNext()) { - FileMetadata fmd = fmit.next(); - if (fmToDelete.getDataFile().getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) { - fmit.remove(); - break; - } - } - } - - private void removeDataFileFromList(List dfs, DataFile dfToDelete) { - Iterator dfit = dfs.iterator(); - while (dfit.hasNext()) { - DataFile df = dfit.next(); - if (dfToDelete.getStorageIdentifier().equals(df.getStorageIdentifier())) { - dfit.remove(); - break; - } - } - } - /** @@ -912,7 +911,11 @@ public String 
saveReplacementFile() throws FileReplaceException{ if (fileReplacePageHelper.runSaveReplacementFile_Phase2()){ JsfHelper.addSuccessMessage(getBundleString("file.message.replaceSuccess")); // It worked!!! Go to page of new file!! - return returnToFileLandingPageAfterReplace(fileReplacePageHelper.getFirstNewlyAddedFile()); + if (Referrer.FILE == referrer) { + return returnToFileLandingPageAfterReplace(fileReplacePageHelper.getFirstNewlyAddedFile()); + } else { + return returnToDraftVersion(); + } }else{ // Uh oh. String errMsg = fileReplacePageHelper.getErrorMessages(); @@ -941,8 +944,7 @@ public String save() { if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress) || lockTest.isLockedFor(DatasetLock.Reason.EditInProgress)) { logger.log(Level.INFO, "Couldn''t save dataset: {0}", "It is locked." + ""); - String rootDataverseName = dataverseService.findRootDataverse().getName(); - JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null, rootDataverseName)))); + JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); return null; } } @@ -1131,13 +1133,13 @@ public String save() { workingVersion = dataset.getEditVersion(); logger.fine("working version id: "+workingVersion.getId()); - if (mode == FileEditMode.SINGLE){ + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer){ JsfHelper.addSuccessMessage(getBundleString("file.message.editSuccess")); } else { int nFilesTotal = workingVersion.getFileMetadatas().size(); if (nNewFiles == 0 || nFilesTotal == nExpectedFilesTotal) { - JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess")); + 
JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } else if (nFilesTotal == nOldFiles) { JsfHelper.addErrorMessage(getBundleString("dataset.message.addFiles.Failure")); } else { @@ -1154,7 +1156,7 @@ public String save() { ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) session.getUser()); } - if (mode == FileEditMode.SINGLE && fileMetadatas.size() > 0) { + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer && fileMetadatas.size() > 0) { // If this was a "single file edit", i.e. an edit request sent from // the individual File Landing page, we want to redirect back to // the landing page. BUT ONLY if the file still exists - i.e., if @@ -1167,6 +1169,10 @@ public String save() { return returnToDraftVersion(); } + public boolean canPublishDataset(){ + return permissionsWrapper.canIssuePublishDatasetCommand(dataset); + } + private void populateDatasetUpdateFailureMessage(){ JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.message.filesFailure")); @@ -1214,7 +1220,7 @@ public String cancel() { FileUtil.deleteTempFile(newFile, dataset, ingestService); } - if (mode == FileEditMode.SINGLE || mode == FileEditMode.SINGLE_REPLACE ) { + if (Referrer.FILE == referrer) { return returnToFileLandingPage(); } if (workingVersion.getId() != null) { @@ -1271,7 +1277,7 @@ private HttpClient getClient() { * @return */ public boolean isFileReplaceOperation(){ - return (mode == FileEditMode.SINGLE_REPLACE)&&(fileReplacePageHelper!= null); + return (mode == FileEditMode.REPLACE)&&(fileReplacePageHelper!= null); } public boolean allowMultipleFileUpload(){ @@ -1280,7 +1286,7 @@ public boolean allowMultipleFileUpload(){ } public boolean showFileUploadFragment(){ - return mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE || mode == FileEditMode.SINGLE_REPLACE; + return mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE 
|| mode == FileEditMode.REPLACE; } diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index da9aba43498..0937f6f6cf7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; @@ -30,8 +31,10 @@ import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; +import java.util.Arrays; import java.util.EnumSet; import java.util.Stack; import java.util.logging.Level; @@ -215,7 +218,21 @@ public R submit(Command aCommand) throws CommandException { } DataverseRequest dvReq = aCommand.getRequest(); - + + AuthenticatedUser authenticatedUser = dvReq.getAuthenticatedUser(); + if (authenticatedUser != null) { + AuthenticatedUser auFreshLookup = authentication.findByID(authenticatedUser.getId()); + if (auFreshLookup == null) { + logger.fine("submit method found user no longer exists (was deleted)."); + throw new CommandException(BundleUtil.getStringFromBundle("command.exception.user.deleted", Arrays.asList(aCommand.getClass().getSimpleName())), aCommand); + } else { + if (auFreshLookup.isDeactivated()) { + 
logger.fine("submit method found user is deactivated."); + throw new CommandException(BundleUtil.getStringFromBundle("command.exception.user.deactivated", Arrays.asList(aCommand.getClass().getSimpleName())), aCommand); + } + } + } + Map affectedDvObjects = aCommand.getAffectedDvObjects(); logRec.setInfo(aCommand.describe()); for (Map.Entry> pair : requiredMap.entrySet()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index da315424220..d863500d137 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -304,13 +304,13 @@ public void setVersion(String version) { public List< String[]> getExporters(){ List retList = new ArrayList<>(); String myHostURL = systemConfig.getDataverseSiteUrl(); - for (String [] provider : ExportService.getInstance(settingsService).getExportersLabels() ){ + for (String [] provider : ExportService.getInstance().getExportersLabels() ){ String formatName = provider[1]; String formatDisplayName = provider[0]; Exporter exporter = null; try { - exporter = ExportService.getInstance(settingsService).getExporter(formatName); + exporter = ExportService.getInstance().getExporter(formatName); } catch (ExportException ex) { exporter = null; } @@ -786,57 +786,15 @@ private List allRelatedFiles() { return dataFiles; } - public boolean isDraftReplacementFile(){ - /* - This method tests to see if the file has been replaced in a draft version of the dataset - Since it must must work when you are on prior versions of the dataset - it must accrue all replacement files that may have been created - */ - if(null == dataset) { - dataset = fileMetadata.getDataFile().getOwner(); - } - - DataFile dataFileToTest = fileMetadata.getDataFile(); - - DatasetVersion currentVersion = dataset.getLatestVersion(); - - if (!currentVersion.isDraft()){ - return false; - } - - if (dataset.getReleasedVersion() == null){ - return false; - 
} - - List dataFiles = new ArrayList<>(); - - dataFiles.add(dataFileToTest); - - while (datafileService.findReplacementFile(dataFileToTest.getId()) != null ){ - dataFiles.add(datafileService.findReplacementFile(dataFileToTest.getId())); - dataFileToTest = datafileService.findReplacementFile(dataFileToTest.getId()); - } - - if(dataFiles.size() <2){ - return false; - } - - int numFiles = dataFiles.size(); - - DataFile current = dataFiles.get(numFiles - 1 ); - - DatasetVersion publishedVersion = dataset.getReleasedVersion(); - - if( datafileService.findFileMetadataByDatasetVersionIdAndDataFileId(publishedVersion.getId(), current.getId()) == null){ - return true; + // wrappermethod to see if the file has been deleted (or replaced) in the current version + public boolean isDeletedFile () { + if (file.getDeleted() == null) { + file.setDeleted(datafileService.hasBeenDeleted(file)); } - return false; + return file.getDeleted(); } - - - /** * To help with replace development * @return diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java index 3cc581ee93c..a88c4833f54 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java @@ -25,6 +25,7 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Collections; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; @@ -915,6 +916,12 @@ public void setOaiSetsSelectItems(List oaiSetsSelectItems) { private void createOaiSetsSelectItems(List setNames) { setOaiSetsSelectItems(new ArrayList<>()); if (setNames != null) { + + // Let's sort the list - otherwise, if the list is long enough, + // using this pulldown menu may be very difficult: + + Collections.sort(setNames, String.CASE_INSENSITIVE_ORDER); + for (String set: setNames) { if (!StringUtils.isEmpty(set)) { 
getOaiSetsSelectItems().add(new SelectItem(set, set)); diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java index 38376fa84c0..166c0c081d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java @@ -171,7 +171,6 @@ public String login() { AuthenticatedUser r = authSvc.getUpdateAuthenticatedUser(credentialsAuthProviderId, authReq); logger.log(Level.FINE, "User authenticated: {0}", r.getEmail()); session.setUser(r); - session.configureSessionTimeout(); if ("dataverse.xhtml".equals(redirectPage)) { redirectPage = redirectToRoot(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 13a92c9cd27..a0a91e22c32 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -121,12 +121,11 @@ public boolean sendSystemEmail(String to, String subject, String messageText, bo boolean sent = false; - String rootDataverseName = dataverseService.findRootDataverse().getName(); InternetAddress systemAddress = getSystemAddress(); String body = messageText - + (isHtmlContent ? BundleUtil.getStringFromBundle("notification.email.closing.html", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName))) - : BundleUtil.getStringFromBundle("notification.email.closing", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName)))); + + (isHtmlContent ? 
BundleUtil.getStringFromBundle("notification.email.closing.html", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress))) + : BundleUtil.getStringFromBundle("notification.email.closing", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress)))); logger.fine("Sending email to " + to + ". Subject: <<<" + subject + ">>>. Body: " + body); try { @@ -245,8 +244,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme Object objectOfNotification = getObjectOfNotification(notification); if (objectOfNotification != null){ String messageText = getMessageTextBasedOnNotification(notification, objectOfNotification, comment, requestor); - String rootDataverseName = dataverseService.findRootDataverse().getName(); - String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, rootDataverseName, objectOfNotification); + String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, objectOfNotification); if (!(messageText.isEmpty() || subjectText.isEmpty())){ retval = sendSystemEmail(emailAddress, subjectText, messageText, isHtmlContent); } else { @@ -496,14 +494,33 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner()), optionalReturnReason}; messageText += MessageFormat.format(pattern, paramArrayReturnedDataset); return messageText; + + case WORKFLOW_SUCCESS: + version = (DatasetVersion) targetObject; + pattern = BundleUtil.getStringFromBundle("notification.email.workflow.success"); + + if (comment == null) { + comment = BundleUtil.getStringFromBundle("notification.email.workflow.nullMessage"); + } + String[] paramArrayWorkflowSuccess = {version.getDataset().getDisplayName(), getDatasetLink(version.getDataset()), comment}; + messageText += MessageFormat.format(pattern, 
paramArrayWorkflowSuccess); + return messageText; + case WORKFLOW_FAILURE: + version = (DatasetVersion) targetObject; + pattern = BundleUtil.getStringFromBundle("notification.email.workflow.failure"); + if (comment == null) { + comment = BundleUtil.getStringFromBundle("notification.email.workflow.nullMessage"); + } + String[] paramArrayWorkflowFailure = {version.getDataset().getDisplayName(), getDatasetLink(version.getDataset()), comment}; + messageText += MessageFormat.format(pattern, paramArrayWorkflowFailure); + return messageText; case CREATEACC: - String rootDataverseName = dataverseService.findRootDataverse().getName(); InternetAddress systemAddress = getSystemAddress(); String accountCreatedMessage = BundleUtil.getStringFromBundle("notification.email.welcome", Arrays.asList( - BrandingUtil.getInstallationBrandName(rootDataverseName), + BrandingUtil.getInstallationBrandName(), systemConfig.getGuidesBaseUrl(), systemConfig.getGuidesVersion(), - BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName), + BrandingUtil.getSupportTeamName(systemAddress), BrandingUtil.getSupportTeamEmailAddress(systemAddress) )); String optionalConfirmEmailAddon = confirmEmailService.optionalConfirmEmailAddonMsg(userNotification.getUser()); @@ -594,6 +611,8 @@ private Object getObjectOfNotification (UserNotification userNotification){ case PUBLISHEDDS: case PUBLISHFAILED_PIDREG: case RETURNEDDS: + case WORKFLOW_SUCCESS: + case WORKFLOW_FAILURE: return versionService.find(userNotification.getObjectId()); case CREATEACC: return userNotification.getUser(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index d73aaeb8dbd..79a3ca800e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -590,7 +590,7 @@ public void updateRole(ActionEvent e) { } catch (PermissionException ex) { 
JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("permission.roleNotSaved"), BundleUtil.getStringFromBundle("permission.permissionsMissing", Arrays.asList(ex.getRequiredPermissions().toString()))); } catch (CommandException ex) { - JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("permission.roleNotSaved")); + JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("permission.roleNotSaved").concat(" " + ex.getMessage()) ); logger.log(Level.SEVERE, "Error saving role: " + ex.getMessage(), ex); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java index b31b55b2e4f..6b207ed0e75 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java @@ -376,7 +376,9 @@ public List filterRoleAssignees(String query, DvObject dvObject, L .getResultList().stream() .filter(ra -> roleAssignSelectedRoleAssignees == null || !roleAssignSelectedRoleAssignees.contains(ra)) .forEach((ra) -> { - roleAssigneeList.add(ra); + if (!ra.isDeactivated()) { + roleAssigneeList.add(ra); + } }); // now we add groups to the list, both global and explicit diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java index 6241f120f80..22d2679efb6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java @@ -42,7 +42,7 @@ @NamedQuery( name = "RoleAssignment.listByDefinitionPointId", query = "SELECT r FROM RoleAssignment r WHERE r.definitionPoint.id=:definitionPointId" ), @NamedQuery( name = "RoleAssignment.listByRoleId", - query = "SELECT r FROM RoleAssignment r WHERE r.role=:roleId" ), + query = "SELECT r FROM RoleAssignment r WHERE r.role.id=:roleId" ), @NamedQuery( name = 
"RoleAssignment.listByPrivateUrlToken", query = "SELECT r FROM RoleAssignment r WHERE r.privateUrlToken=:privateUrlToken" ), @NamedQuery( name = "RoleAssignment.deleteByAssigneeIdentifier_RoleIdDefinition_PointId", diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java index dd27842573f..363972b48c3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java +++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java @@ -130,7 +130,7 @@ public void setUserSum(Long userSum) { public String getMessageTo() { if (recipient == null) { - return BrandingUtil.getSupportTeamName(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getSupportTeamName(systemAddress); } else if (recipient.isInstanceofDataverse()) { return ((Dataverse) recipient).getDisplayName() + " " + BundleUtil.getStringFromBundle("contact.contact"); } else { @@ -140,7 +140,7 @@ public String getMessageTo() { public String getFormHeader() { if (recipient == null) { - return BrandingUtil.getContactHeader(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getContactHeader(systemAddress); } else if (recipient.isInstanceofDataverse()) { return BundleUtil.getStringFromBundle("contact.dataverse.header"); } else { @@ -198,10 +198,8 @@ public void validateUserEmail(FacesContext context, UIComponent component, Objec } public String sendMessage() { - // FIXME: move dataverseService.findRootDataverse() to init - String rootDataverseName = dataverseService.findRootDataverse().getName(); - String installationBrandName = BrandingUtil.getInstallationBrandName(rootDataverseName); - String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName); + String installationBrandName = BrandingUtil.getInstallationBrandName(); + String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress); List feedbacks = 
FeedbackUtil.gatherFeedback(recipient, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, systemConfig.getDataverseSiteUrl(), installationBrandName, supportTeamName); if (feedbacks.isEmpty()) { logger.warning("No feedback has been sent!"); diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index f27c7c712dc..d7512dd5cf0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -173,13 +173,13 @@ public boolean isDataFilePIDSequentialDependent(){ public String getSupportTeamName() { String systemEmail = getValueForKey(SettingsServiceBean.Key.SystemEmail); InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail); - return BrandingUtil.getSupportTeamName(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getSupportTeamName(systemAddress); } public String getSupportTeamEmail() { String systemEmail = getValueForKey(SettingsServiceBean.Key.SystemEmail); InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail); - return BrandingUtil.getSupportTeamEmailAddress(systemAddress) != null ? BrandingUtil.getSupportTeamEmailAddress(systemAddress) : BrandingUtil.getSupportTeamName(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getSupportTeamEmailAddress(systemAddress) != null ? 
BrandingUtil.getSupportTeamEmailAddress(systemAddress) : BrandingUtil.getSupportTeamName(systemAddress); } public Integer getUploadMethodsCount() { diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 889bdaff03a..4ad50320f23 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -338,6 +338,10 @@ public String confirmAndConvertAccount() { logger.fine("builtin username: " + builtinUsername); AuthenticatedUser builtInUserToConvert = authSvc.canLogInAsBuiltinUser(builtinUsername, builtinPassword); if (builtInUserToConvert != null) { + if (builtInUserToConvert.isDeactivated()) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("shib.convert.fail.deactivated")); + return null; + } // TODO: Switch from authSvc.convertBuiltInToShib to authSvc.convertBuiltInUserToRemoteUser AuthenticatedUser au = authSvc.convertBuiltInToShib(builtInUserToConvert, shibAuthProvider.getId(), userIdentifier); if (au != null) { @@ -358,8 +362,8 @@ public String confirmAndConvertAccount() { private void logInUserAndSetShibAttributes(AuthenticatedUser au) { au.setShibIdentityProvider(shibIdp); + // setUser checks for deactivated users. 
session.setUser(au); - session.configureSessionTimeout(); logger.fine("Groups for user " + au.getId() + " (" + au.getIdentifier() + "): " + getGroups(au)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index e44c5f6333e..78f8f38206b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -30,7 +30,7 @@ public enum Type { ASSIGNROLE, REVOKEROLE, CREATEDV, CREATEDS, CREATEACC, SUBMITTEDDS, RETURNEDDS, PUBLISHEDDS, REQUESTFILEACCESS, GRANTFILEACCESS, REJECTFILEACCESS, FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, - PUBLISHFAILED_PIDREG + PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE }; private static final long serialVersionUID = 1L; diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java index e395e9a90ec..5707f477a87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java @@ -140,7 +140,10 @@ private AuthenticatedUser createAuthenticatedUserForView (Object[] dbRowValues, user.setAuthProviderId(UserUtil.getStringOrNull(dbRowValues[11])); user.setAuthProviderFactoryAlias(UserUtil.getStringOrNull(dbRowValues[12])); - + + user.setDeactivated((Boolean)(dbRowValues[13])); + user.setDeactivatedTime(UserUtil.getTimestampOrNull(dbRowValues[14])); + user.setRoles(roles); return user; } @@ -417,7 +420,8 @@ private List getUserListCore(String searchTerm, qstr += " u.affiliation, u.superuser,"; qstr += " u.position,"; qstr += " u.createdtime, u.lastlogintime, u.lastapiusetime, "; - qstr += " prov.id, prov.factoryalias"; + qstr += " prov.id, prov.factoryalias, "; + qstr += " u.deactivated, u.deactivatedtime "; qstr += " FROM authenticateduser u,"; qstr += " 
authenticateduserlookup prov_lookup,"; qstr += " authenticationproviderrow prov"; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index db0958800f8..6b84a883287 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -26,6 +26,7 @@ import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -69,6 +70,7 @@ import javax.json.JsonValue; import javax.json.JsonValue.ValueType; import javax.persistence.EntityManager; +import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Context; @@ -86,6 +88,7 @@ public abstract class AbstractApiBean { private static final Logger logger = Logger.getLogger(AbstractApiBean.class.getName()); private static final String DATAVERSE_KEY_HEADER_NAME = "X-Dataverse-key"; private static final String PERSISTENT_ID_KEY=":persistentId"; + private static final String ALIAS_KEY=":alias"; public static final String STATUS_ERROR = "ERROR"; public static final String STATUS_OK = "OK"; public static final String STATUS_WF_IN_PROGRESS = "WORKFLOW_IN_PROGRESS"; @@ -382,6 +385,7 @@ protected AuthenticatedUser findAuthenticatedUserOrDie() throws WrappedResponse private AuthenticatedUser findAuthenticatedUserOrDie( String key, String wfid ) throws WrappedResponse { if (key != null) { + // No check for deactivated user because it's done in authSvc.lookupUser. 
AuthenticatedUser authUser = authSvc.lookupUser(key); if (authUser != null) { @@ -482,6 +486,37 @@ protected DataFile findDataFileOrDie(String id) throws WrappedResponse { } } } + + protected DataverseRole findRoleOrDie(String id) throws WrappedResponse { + DataverseRole role; + if (id.equals(ALIAS_KEY)) { + String alias = getRequestParameter(ALIAS_KEY.substring(1)); + try { + return em.createNamedQuery("DataverseRole.findDataverseRoleByAlias", DataverseRole.class) + .setParameter("alias", alias) + .getSingleResult(); + + //Should not be a multiple result exception due to table constraint + } catch (NoResultException nre) { + throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.not.found.alias", Collections.singletonList(alias)))); + } + + } else { + + try { + role = rolesSvc.find(Long.parseLong(id)); + if (role == null) { + throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.not.found.id", Collections.singletonList(id)))); + } else { + return role; + } + + } catch (NumberFormatException nfe) { + throw new WrappedResponse( + badRequest(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.not.found.bad.id", Collections.singletonList(id)))); + } + } + } protected DatasetLinkingDataverse findDatasetLinkingDataverseOrDie(String datasetId, String linkingDataverseId) throws WrappedResponse { DatasetLinkingDataverse dsld; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 2cd7e0dd122..6fc2f066b36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -275,13 +275,18 @@ private DataFile findDataFileOrDieWrapper(String fileId){ @Produces({"application/xml"}) public DownloadInstance datafile(@PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @QueryParam("key") String apiToken, @Context UriInfo 
uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ { + // check first if there's a trailing slash, and chop it: + while (fileId.lastIndexOf('/') == fileId.length() - 1) { + fileId = fileId.substring(0, fileId.length() - 1); + } + if (fileId.indexOf('/') > -1) { // This is for embedding folder names into the Access API URLs; // something like /api/access/datafile/folder/subfolder/1234 // instead of the normal /api/access/datafile/1234 notation. // this is supported only for recreating folders during recursive downloads - // i.e. they are embedded into the URL for the remote client like wget, - // but can be safely ignored here. + // but can be safely ignored here. fileId = fileId.substring(fileId.lastIndexOf('/') + 1); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..52d7f0280dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.LicenseServiceBean; import edu.harvard.iq.dataverse.UserServiceBean; @@ -74,10 +75,12 @@ import javax.ws.rs.core.Response.Status; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; +import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import 
edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; @@ -87,6 +90,10 @@ import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; +import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeactivateUserCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -101,6 +108,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; +import java.util.function.Consumer; import javax.inject.Inject; import javax.json.JsonArray; import javax.persistence.Query; @@ -152,8 +160,8 @@ public class Admin extends AbstractApiBean { @EJB BannerMessageServiceBean bannerMessageService; @EJB - LicenseServiceBean licenseService; - + LicenseServiceBean licenseService; + // Make the session available @Inject @@ -382,9 +390,40 @@ private Response deleteAuthenticatedUser(AuthenticatedUser au) { return ok("AuthenticatedUser " + au.getIdentifier() + " deleted. 
"); } - - + @POST + @Path("authenticatedUsers/{identifier}/deactivate") + public Response deactivateAuthenticatedUser(@PathParam("identifier") String identifier) { + AuthenticatedUser user = authSvc.getAuthenticatedUser(identifier); + if (user != null) { + return deactivateAuthenticatedUser(user); + } + return error(Response.Status.BAD_REQUEST, "User " + identifier + " not found."); + } + + @POST + @Path("authenticatedUsers/id/{id}/deactivate") + public Response deactivateAuthenticatedUserById(@PathParam("id") Long id) { + AuthenticatedUser user = authSvc.findByID(id); + if (user != null) { + return deactivateAuthenticatedUser(user); + } + return error(Response.Status.BAD_REQUEST, "User " + id + " not found."); + } + + private Response deactivateAuthenticatedUser(AuthenticatedUser userToDisable) { + AuthenticatedUser superuser = authSvc.getAdminUser(); + if (superuser == null) { + return error(Response.Status.INTERNAL_SERVER_ERROR, "Cannot find superuser to execute DeactivateUserCommand."); + } + try { + execCommand(new DeactivateUserCommand(createDataverseRequest(superuser), userToDisable)); + return ok("User " + userToDisable.getIdentifier() + " deactivated."); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + @POST @Path("publishDataverseAsCreator/{id}") public Response publishDataverseAsCreator(@PathParam("id") long id) { @@ -658,6 +697,10 @@ public Response builtin2shib(String content) { boolean knowsExistingPassword = false; BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); if (oldBuiltInUser != null) { + if (builtInUserToConvert.isDeactivated()) { + problems.add("builtin account has been deactivated"); + return error(Status.BAD_REQUEST, problems.build().toString()); + } String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); response.add("old username", usernameOfBuiltinAccountToConvert); AuthenticatedUser authenticatedUser = 
authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, @@ -892,6 +935,17 @@ public Response listBuiltinRoles() { } } + @DELETE + @Path("roles/{id}") + public Response deleteRole(@PathParam("id") String id) { + + return response(req -> { + DataverseRole doomed = findRoleOrDie(id); + execCommand(new DeleteRoleCommand(req, doomed)); + return ok("role " + doomed.getName() + " deleted."); + }); + } + @Path("superuser/{identifier}") @POST public Response toggleSuperuser(@PathParam("identifier") String identifier) { @@ -899,6 +953,9 @@ public Response toggleSuperuser(@PathParam("identifier") String identifier) { .setInfo(identifier); try { AuthenticatedUser user = authSvc.getAuthenticatedUser(identifier); + if (user.isDeactivated()) { + return error(Status.BAD_REQUEST, "You cannot make a deactivated user a superuser."); + } user.setSuperuser(!user.isSuperuser()); @@ -1687,7 +1744,7 @@ public Response submitDatasetVersionToArchive(@PathParam("id") String dsid, @Pat // DataverseRequest and is sent to the back-end command where it is used to get // the API Token which is then used to retrieve files (e.g. via S3 direct // downloads) to create the Bag - session.setUser(au); + session.setUser(au); // TODO: Stop using session. Use createDataverseRequest instead. 
Dataset ds = findDatasetOrDie(dsid); DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); @@ -2007,5 +2064,5 @@ public Response deleteLicenseByName(@PathParam("name") String name) { } return error(Response.Status.NOT_FOUND, "A license with name " + name + " doesn't exist."); } - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 3cb5f900cd6..646abf51b6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -470,15 +470,23 @@ private String parseControlledVocabulary(String[] values) { if (cvv == null) { cvv = new ControlledVocabularyValue(); cvv.setDatasetFieldType(dsv); - //Alt is only for dataload so only add to new - for (int i = 5; i < values.length; i++) { - ControlledVocabAlternate alt = new ControlledVocabAlternate(); - alt.setDatasetFieldType(dsv); - alt.setControlledVocabularyValue(cvv); - alt.setStrValue(values[i]); - cvv.getControlledVocabAlternates().add(alt); - } - } + } + + // Alternate variants for this controlled vocab. value: + + // Note that these are overwritten every time: + cvv.getControlledVocabAlternates().clear(); + // - meaning, if an alternate has been removed from the tsv file, + // it will be removed from the database! -- L.A. 
5.4 + + for (int i = 5; i < values.length; i++) { + ControlledVocabAlternate alt = new ControlledVocabAlternate(); + alt.setDatasetFieldType(dsv); + alt.setControlledVocabularyValue(cvv); + alt.setStrValue(values[i]); + cvv.getControlledVocabAlternates().add(alt); + } + cvv.setStrValue(values[2]); cvv.setIdentifier(values[3]); cvv.setDisplayOrder(Integer.parseInt(values[4])); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 69769b8172c..15865766976 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -267,7 +267,7 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @ return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found."); } - ExportService instance = ExportService.getInstance(settingsSvc); + ExportService instance = ExportService.getInstance(); InputStream is = instance.getExport(dataset, exporter); @@ -807,11 +807,16 @@ private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){ @PUT @Path("{id}/editMetadata") - public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) throws WrappedResponse{ + public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) { Boolean replaceData = replace != null; - - DataverseRequest req = createDataverseRequest(findUserOrDie()); + DataverseRequest req = null; + try { + req = createDataverseRequest(findUserOrDie()); + } catch (WrappedResponse ex) { + logger.log(Level.SEVERE, "Edit metdata error: " + ex.getMessage(), ex); + return ex.getResponse(); + } return processDatasetUpdate(jsonBody, id, req, replaceData); } @@ -1834,6 +1839,9 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } catch (DataFileTagException ex) { return 
error( Response.Status.BAD_REQUEST, ex.getMessage()); } + catch (ClassCastException | com.google.gson.JsonParseException ex) { + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing")); + } // ------------------------------------- // (3) Get the file name and content type diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java index 7c81fe1a892..d9a94ee340b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java @@ -38,9 +38,8 @@ public Response submitFeedback(JsonObject jsonObject) throws AddressException { String userEmail = jsonObject.getString("fromEmail"); String messageSubject = jsonObject.getString("subject"); String baseUrl = systemConfig.getDataverseSiteUrl(); - String rootDataverseName = dataverseSvc.findRootDataverse().getName(); - String installationBrandName = BrandingUtil.getInstallationBrandName(rootDataverseName); - String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName); + String installationBrandName = BrandingUtil.getInstallationBrandName(); + String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress); JsonArrayBuilder jab = Json.createArrayBuilder(); List feedbacks = FeedbackUtil.gatherFeedback(recipient, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName); feedbacks.forEach((feedback) -> { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index defc2c4d9ab..818df72c54e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -212,17 +212,33 @@ public Response replaceFileInDataset( } catch (DataFileTagException ex) { return error(Response.Status.BAD_REQUEST, ex.getMessage()); } 
- } catch (ClassCastException ex) { - logger.info("Exception parsing string '" + jsonData + "': " + ex); + } catch (ClassCastException | com.google.gson.JsonParseException ex) { + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing")); } } // (3) Get the file name and content type - if(null == contentDispositionHeader) { - return error(BAD_REQUEST, "You must upload a file."); + String newFilename = null; + String newFileContentType = null; + String newStorageIdentifier = null; + if (null == contentDispositionHeader) { + if (optionalFileParams.hasStorageIdentifier()) { + newStorageIdentifier = optionalFileParams.getStorageIdentifier(); + // ToDo - check that storageIdentifier is valid + if (optionalFileParams.hasFileName()) { + newFilename = optionalFileParams.getFileName(); + if (optionalFileParams.hasMimetype()) { + newFileContentType = optionalFileParams.getMimeType(); + } + } + } else { + return error(BAD_REQUEST, + "You must upload a file or provide a storageidentifier, filename, and mimetype."); + } + } else { + newFilename = contentDispositionHeader.getFileName(); + newFileContentType = formDataBodyPart.getMediaType().toString(); } - String newFilename = contentDispositionHeader.getFileName(); - String newFileContentType = formDataBodyPart.getMediaType().toString(); // (4) Create the AddReplaceFileHelper object msg("REPLACE!"); @@ -254,14 +270,16 @@ public Response replaceFileInDataset( addFileHelper.runForceReplaceFile(fileToReplaceId, newFilename, newFileContentType, + newStorageIdentifier, testFileInputStream, optionalFileParams); }else{ addFileHelper.runReplaceFile(fileToReplaceId, newFilename, newFileContentType, + newStorageIdentifier, testFileInputStream, - optionalFileParams); + optionalFileParams); } msg("we're back....."); @@ -357,7 +375,7 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData, return error(Response.Status.BAD_REQUEST, ex.getMessage()); } } catch 
(ClassCastException | com.google.gson.JsonParseException ex) { - return error(Response.Status.BAD_REQUEST, "Exception parsing provided json"); + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing")); } } @@ -601,7 +619,7 @@ public Response redetectDatafile(@PathParam("id") String id, @QueryParam("dryRun private void exportDatasetMetadata(SettingsServiceBean settingsServiceBean, Dataset theDataset) { try { - ExportService instance = ExportService.getInstance(settingsServiceBean); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(theDataset); } catch (ExportException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java index b3f75e00c5a..72add184a24 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java @@ -1,8 +1,10 @@ package edu.harvard.iq.dataverse.api; +import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -11,6 +13,9 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand; +import edu.harvard.iq.dataverse.util.BundleUtil; +import java.util.Arrays; +import java.util.List; import javax.ejb.Stateless; import javax.ws.rs.DELETE; import javax.ws.rs.QueryParam; @@ -26,7 +31,7 @@ public class Roles extends AbstractApiBean { @GET @Path("{id}") - public Response viewRole( @PathParam("id") Long id) { + public Response viewRole( 
@PathParam("id") String id) { return response( ()-> { final User user = findUserOrDie(); final DataverseRole role = findRoleOrDie(id); @@ -35,14 +40,19 @@ public Response viewRole( @PathParam("id") Long id) { }); } - @DELETE - @Path("{id}") - public Response deleteRole( @PathParam("id") Long id ) { - return response( req -> { - execCommand( new DeleteRoleCommand(req, findRoleOrDie(id)) ); - return ok("role " + id + " deleted."); + @DELETE + @Path("{id}") + public Response deleteRole(@PathParam("id") String id) { + return response(req -> { + DataverseRole role = findRoleOrDie(id); + List args = Arrays.asList(role.getName()); + if (role.getOwner() == null) { + throw new WrappedResponse(forbidden(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.builtin.not.allowed", args))); + } + execCommand(new DeleteRoleCommand(req, role)); + return ok("role " + role.getName() + " deleted."); }); - } + } @POST public Response createNewRole( RoleDTO roleDto, @@ -52,11 +62,4 @@ public Response createNewRole( RoleDTO roleDto, req,findDataverseOrDie(dvoIdtf)))))); } - private DataverseRole findRoleOrDie( long id ) throws WrappedResponse { - DataverseRole role = rolesSvc.find(id); - if ( role != null ) { - return role; - } - throw new WrappedResponse(notFound( "role with id " + id + " not found")); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index 37eedbe7714..ce226ea14b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -10,10 +10,13 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetUserTracesCommand; import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; +import 
edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import java.util.logging.Logger; import javax.ejb.Stateless; +import javax.json.JsonObjectBuilder; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -191,4 +194,31 @@ public Response getAuthenticatedUserByToken() { } + @POST + @Path("{identifier}/removeRoles") + public Response removeUserRoles(@PathParam("identifier") String identifier) { + try { + AuthenticatedUser userToModify = authSvc.getAuthenticatedUser(identifier); + if (userToModify == null) { + return error(Response.Status.BAD_REQUEST, "Cannot find user based on " + identifier + "."); + } + execCommand(new RevokeAllRolesCommand(userToModify, createDataverseRequest(findUserOrDie()))); + return ok("Roles removed for user " + identifier + "."); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @GET + @Path("{identifier}/traces") + public Response getTraces(@PathParam("identifier") String identifier) { + try { + AuthenticatedUser userToQuery = authSvc.getAuthenticatedUser(identifier); + JsonObjectBuilder jsonObj = execCommand(new GetUserTracesCommand(createDataverseRequest(findUserOrDie()), userToQuery)); + return ok(jsonObj); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java index 7c80fe810d2..049b20f605b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java @@ -10,6 +10,8 @@ import java.util.logging.Logger; import javax.ejb.EJB; import javax.inject.Inject; + +import org.apache.commons.lang3.StringUtils; import org.swordapp.server.AuthCredentials; import 
org.swordapp.server.ServiceDocument; import org.swordapp.server.ServiceDocumentManager; @@ -43,12 +45,9 @@ public ServiceDocument getServiceDocument(String sdUri, AuthCredentials authCred String warning = urlManager.processUrl(sdUri); ServiceDocument service = new ServiceDocument(); SwordWorkspace swordWorkspace = new SwordWorkspace(); - Dataverse rootDataverse = dataverseService.findRootDataverse(); - if (rootDataverse != null) { - String name = rootDataverse.getName(); - if (name != null) { - swordWorkspace.setTitle(name); - } + String name = dataverseService.getRootDataverseName(); + if (!StringUtils.isEmpty(name)) { + swordWorkspace.setTitle(name); } if (warning != null) { swordWorkspace.getWrappedWorkspace().setAttributeValue("warning", warning); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java index 4a474f62894..13fc37bdc40 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java @@ -36,6 +36,7 @@ public AuthenticatedUser auth(AuthCredentials authCredentials) throws SwordAuthE throw new SwordAuthException(msg); } + // Checking if the user is deactivated is done inside findUserByApiToken. 
AuthenticatedUser authenticatedUserFromToken = findUserByApiToken(username); if (authenticatedUserFromToken == null) { String msg = "User not found based on API token."; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 3d0ad99d062..349a86301a6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -30,8 +30,10 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; +import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand; import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean; +import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; import edu.harvard.iq.dataverse.workflow.PendingWorkflowInvocation; @@ -121,7 +123,10 @@ public class AuthenticationServiceBean { @EJB ExplicitGroupServiceBean explicitGroupService; - + + @EJB + SavedSearchServiceBean savedSearchService; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -194,10 +199,7 @@ public boolean isOrcidEnabled() { * * Before calling this method, make sure you've deleted all the stuff tied * to the user, including stuff they've created, role assignments, group - * assignments, etc. - * - * Longer term, the intention is to have a "disableAuthenticatedUser" - * method/command. See https://github.com/IQSS/dataverse/issues/2419 + * assignments, etc. See the "removeAuthentictedUserItems" (sic) method. 
*/ public void deleteAuthenticatedUser(Object pk) { AuthenticatedUser user = em.find(AuthenticatedUser.class, pk); @@ -304,7 +306,7 @@ public AuthenticatedUser getUpdateAuthenticatedUser( String authenticationProvid // yay! see if we already have this user. AuthenticatedUser user = lookupUser(authenticationProviderId, resp.getUserId()); - if (user != null){ + if (user != null && !user.isDeactivated()) { user = userService.updateLastLogin(user); } @@ -448,7 +450,13 @@ public AuthenticatedUser lookupUser( String apiToken ) { } } - return tkn.getAuthenticatedUser(); + AuthenticatedUser user = tkn.getAuthenticatedUser(); + if (!user.isDeactivated()) { + return user; + } else { + logger.info("attempted access with token from deactivated user: " + apiToken); + return null; + } } public AuthenticatedUser lookupUserForWorkflowInvocationID(String wfId) { @@ -498,6 +506,10 @@ public String getDeleteUserErrorMessages(AuthenticatedUser au) { if (!datasetVersionService.getDatasetVersionUsersByAuthenticatedUser(au).isEmpty()) { reasons.add(BundleUtil.getStringFromBundle("admin.api.deleteUser.failure.versionUser")); } + + if (!savedSearchService.findByAuthenticatedUser(au).isEmpty()) { + reasons.add(BundleUtil.getStringFromBundle("admin.api.deleteUser.failure.savedSearches")); + } if (!reasons.isEmpty()) { retVal = BundleUtil.getStringFromBundle("admin.api.deleteUser.failure.prefix", Arrays.asList(au.getIdentifier())); @@ -538,7 +550,6 @@ private void deletePendingAccessRequests(AuthenticatedUser au){ } - public AuthenticatedUser save( AuthenticatedUser user ) { em.persist(user); em.flush(); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java index 8ca72e7e9bc..12ddf817221 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java @@ -33,18 +33,20 @@ * @author michael */ 
@NamedQueries({ - @NamedQuery(name = "DataverseRole.findByOwnerId", - query= "SELECT r FROM DataverseRole r WHERE r.owner.id=:ownerId ORDER BY r.name"), - @NamedQuery(name = "DataverseRole.findBuiltinRoles", - query= "SELECT r FROM DataverseRole r WHERE r.owner is null ORDER BY r.name"), + @NamedQuery(name = "DataverseRole.findByOwnerId", + query = "SELECT r FROM DataverseRole r WHERE r.owner.id=:ownerId ORDER BY r.name"), + @NamedQuery(name = "DataverseRole.findBuiltinRoles", + query = "SELECT r FROM DataverseRole r WHERE r.owner is null ORDER BY r.name"), @NamedQuery(name = "DataverseRole.findBuiltinRoleByAlias", - query= "SELECT r FROM DataverseRole r WHERE r.alias=:alias AND r.owner is null"), - @NamedQuery(name = "DataverseRole.findCustomRoleByAliasAndOwner", - query= "SELECT r FROM DataverseRole r WHERE r.alias=:alias and (r.owner is null or r.owner.id=:ownerId)"), - @NamedQuery(name = "DataverseRole.listAll", - query= "SELECT r FROM DataverseRole r"), - @NamedQuery(name = "DataverseRole.deleteById", - query= "DELETE FROM DataverseRole r WHERE r.id=:id") + query = "SELECT r FROM DataverseRole r WHERE r.alias=:alias AND r.owner is null"), + @NamedQuery(name = "DataverseRole.findDataverseRoleByAlias", + query = "SELECT r FROM DataverseRole r WHERE r.alias=:alias"), + @NamedQuery(name = "DataverseRole.findCustomRoleByAliasAndOwner", + query = "SELECT r FROM DataverseRole r WHERE r.alias=:alias and (r.owner is null or r.owner.id=:ownerId)"), + @NamedQuery(name = "DataverseRole.listAll", + query = "SELECT r FROM DataverseRole r"), + @NamedQuery(name = "DataverseRole.deleteById", + query = "DELETE FROM DataverseRole r WHERE r.id=:id") }) @Entity @Table(indexes = {@Index(columnList="owner_id") diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java index fd7231e827c..9ae4e4b0e87 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java @@ -3,8 +3,10 @@ import edu.harvard.iq.dataverse.ValidateEmail; import edu.harvard.iq.dataverse.ValidateUserName; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; +import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; import java.io.Serializable; +import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; @@ -13,6 +15,7 @@ import javax.persistence.Index; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; +import javax.persistence.OneToOne; import javax.persistence.Table; import javax.persistence.Transient; import javax.validation.constraints.Size; @@ -47,6 +50,10 @@ public class BuiltinUser implements Serializable { private String userName; private int passwordEncryptionVersion; + + @OneToOne(mappedBy = "builtinUser", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + private PasswordResetData passwordResetData; + private String encryptedPassword; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java index 204d93b5b8f..673839450d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java @@ -162,7 +162,7 @@ public String init() { } } - if ( session.getUser().isAuthenticated() ) { + if (session.getUser(true).isAuthenticated()) { setCurrentUser((AuthenticatedUser) session.getUser()); userAuthProvider = authenticationService.lookupProvider(currentUser); 
notificationsList = userNotificationService.findByUser(currentUser.getId()); @@ -284,6 +284,12 @@ public void validateNewPassword(FacesContext context, UIComponent toValidate, Ob public String save() { boolean passwordChanged = false; + + //First reget user to make sure they weren't deactivated or deleted + if (session.getUser().isAuthenticated() && !session.getUser(true).isAuthenticated()) { + return "dataverse.xhtml?alias=" + dataverseService.findRootDataverse().getAlias() + "&faces-redirect=true"; + } + if (editMode == EditMode.CHANGE_PASSWORD) { final AuthenticationProvider prv = getUserAuthProvider(); if (prv.isPasswordUpdateAllowed()) { @@ -327,7 +333,6 @@ public String save() { // Authenticated user registered. Save the new bulitin, and log in. builtinUserService.save(builtinUser); session.setUser(au); - session.configureSessionTimeout(); /** * @todo Move this to * AuthenticationServiceBean.createAuthenticatedUser @@ -485,6 +490,8 @@ public void displayNotification() { case PUBLISHEDDS: case PUBLISHFAILED_PIDREG: case RETURNEDDS: + case WORKFLOW_SUCCESS: + case WORKFLOW_FAILURE: userNotification.setTheObject(datasetVersionService.find(userNotification.getObjectId())); break; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java index a3ce3c5bdf7..44f00f797a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java @@ -183,7 +183,6 @@ public String createNewAccount() { newUser.getDisplayInfo().getPosition()); final AuthenticatedUser user = authenticationSvc.createAuthenticatedUser(newUser.getUserRecordIdentifier(), getUsername(), newAud, true); session.setUser(user); - session.configureSessionTimeout(); /** * @todo Move this to 
AuthenticationServiceBean.createAuthenticatedUser */ @@ -209,11 +208,14 @@ public String convertExistingAccount() { auReq.putCredential(creds.get(1).getKey(), getPassword()); try { AuthenticatedUser existingUser = authenticationSvc.getUpdateAuthenticatedUser(BuiltinAuthenticationProvider.PROVIDER_ID, auReq); + if (existingUser.isDeactivated()) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("oauth2.convertAccount.failedDeactivated")); + return null; + } authenticationSvc.updateProvider(existingUser, newUser.getServiceId(), newUser.getIdInService()); builtinUserSvc.removeUser(existingUser.getUserIdentifier()); session.setUser(existingUser); - session.configureSessionTimeout(); AuthenticationProvider newUserAuthProvider = authenticationSvc.getAuthenticationProvider(newUser.getServiceId()); JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("oauth2.convertAccount.success", Arrays.asList(newUserAuthProvider.getInfo().getTitle()))); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index e42f82d48d8..225352dec43 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -106,8 +106,8 @@ public void exchangeCodeForToken() throws IOException { } else { // login the user and redirect to HOME of intended page (if any). + // setUser checks for deactivated users. 
session.setUser(dvUser); - session.configureSessionTimeout(); final OAuth2TokenData tokenData = oauthUser.getTokenData(); if (tokenData != null) { tokenData.setUser(dvUser); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 12161eb1a59..db6164e0ac7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.authorization.AccessRequest; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserLookup; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2TokenData; import edu.harvard.iq.dataverse.userdata.UserUtil; import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -114,6 +115,12 @@ public class AuthenticatedUser implements User, Serializable { private boolean superuser; + @Column(nullable=true) + private boolean deactivated; + + @Column(nullable=true) + private Timestamp deactivatedTime; + /** * @todo Consider storing a hash of *all* potentially interesting Shibboleth * attribute key/value pairs, not just the Identity Provider (IdP). 
@@ -159,7 +166,10 @@ public List getDatasetLocks() { public void setDatasetLocks(List datasetLocks) { this.datasetLocks = datasetLocks; } - + + @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + private List oAuth2TokenDatas; + @Override public AuthenticatedUserDisplayInfo getDisplayInfo() { return new AuthenticatedUserDisplayInfo(firstName, lastName, email, affiliation, position); @@ -303,6 +313,23 @@ public void setSuperuser(boolean superuser) { this.superuser = superuser; } + @Override + public boolean isDeactivated() { + return deactivated; + } + + public void setDeactivated(boolean deactivated) { + this.deactivated = deactivated; + } + + public Timestamp getDeactivatedTime() { + return deactivatedTime; + } + + public void setDeactivatedTime(Timestamp deactivatedTime) { + this.deactivatedTime = deactivatedTime; + } + @OneToOne(mappedBy = "authenticatedUser") private AuthenticatedUserLookup authenticatedUserLookup; @@ -360,6 +387,9 @@ public JsonObjectBuilder toJson() { authenicatedUserJson.add("lastLoginTime", UserUtil.getTimestampStringOrNull(this.lastLoginTime)); authenicatedUserJson.add("lastApiUseTime", UserUtil.getTimestampStringOrNull(this.lastApiUseTime)); + authenicatedUserJson.add("deactivated", this.deactivated); + authenicatedUserJson.add("deactivatedTime", UserUtil.getTimestampStringOrNull(this.deactivatedTime)); + return authenicatedUserJson; } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java index f16fa5afe36..16de1b2eaff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java @@ -32,7 +32,7 @@ public RoleAssigneeDisplayInfo getDisplayInfo() { public boolean isSuperuser() { return false; } - + @Override public boolean equals( Object o ) { return (o instanceof GuestUser); diff 
--git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java index ea35f87d178..4655c9c9f0d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java @@ -14,4 +14,8 @@ public interface User extends RoleAssignee, Serializable { public boolean isSuperuser(); + default boolean isDeactivated() { + return false; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java index ea83fc15ebc..50661ee97fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java @@ -1,22 +1,44 @@ package edu.harvard.iq.dataverse.branding; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; +import java.util.logging.Logger; + import javax.mail.internet.InternetAddress; public class BrandingUtil { - public static String getInstallationBrandName(String rootDataverseName) { - return rootDataverseName; + private static final Logger logger = Logger.getLogger(BrandingUtil.class.getCanonicalName()); + + private static DataverseServiceBean dataverseService; + private static SettingsServiceBean settingsService; + + public static String getInstallationBrandName() { + + String brandName = settingsService.getValueForKey(SettingsServiceBean.Key.InstallationName); + //Separate if statement simplifies test setup, otherwise could use the getValueForKey method with a default param + if(brandName==null) { + brandName = dataverseService.getRootDataverseName(); + } + return brandName; + } + + // Convenience to access root name without injecting dataverseService (e.g. 
in + // DatasetVersion) + public static String getRootDataverseCollectionName() { + return dataverseService.getRootDataverseName(); } - public static String getSupportTeamName(InternetAddress systemAddress, String rootDataverseName) { + public static String getSupportTeamName(InternetAddress systemAddress) { if (systemAddress != null) { String personalName = systemAddress.getPersonal(); if (personalName != null) { return personalName; } } + String rootDataverseName=dataverseService.getRootDataverseName(); if (rootDataverseName != null && !rootDataverseName.isEmpty()) { return rootDataverseName + " " + BundleUtil.getStringFromBundle("contact.support"); } @@ -31,8 +53,12 @@ public static String getSupportTeamEmailAddress(InternetAddress systemAddress) { return systemAddress.getAddress(); } - public static String getContactHeader(InternetAddress systemAddress, String rootDataverseName) { - return BundleUtil.getStringFromBundle("contact.header", Arrays.asList(getSupportTeamName(systemAddress, rootDataverseName))); + public static String getContactHeader(InternetAddress systemAddress) { + return BundleUtil.getStringFromBundle("contact.header", Arrays.asList(getSupportTeamName(systemAddress))); } + public static void injectServices(DataverseServiceBean dataverseSvc, SettingsServiceBean settingsSvc) { + dataverseService = dataverseSvc; + settingsService = settingsSvc; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java new file mode 100644 index 00000000000..274970f8b8e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java @@ -0,0 +1,28 @@ +package edu.harvard.iq.dataverse.branding; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + + 
/** + * This is a small helper bean + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * dataverse service into the BrandingUtil once it's ready. + */ + @Singleton + @Startup + public class BrandingUtilHelper { + + @EJB + DataverseServiceBean dataverseSvc; + @EJB SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + } + } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java index 45a04ba4185..823d2c111f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java @@ -55,7 +55,6 @@ public String init() { if (confirmEmailData != null) { user = confirmEmailData.getAuthenticatedUser(); session.setUser(user); - session.configureSessionTimeout(); // TODO: is this needed here? 
(it can't hurt, but still) JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("confirmEmail.details.success")); return "/dataverse.xhtml?faces-redirect=true"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java index bc3b70326f1..e8748f1e158 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java @@ -121,20 +121,14 @@ private void sendLinkOnEmailChange(AuthenticatedUser aUser, String confirmationU try { String toAddress = aUser.getEmail(); - try { - Dataverse rootDataverse = dataverseService.findRootDataverse(); - if (rootDataverse != null) { - String rootDataverseName = rootDataverse.getName(); - // FIXME: consider refactoring this into MailServiceBean.sendNotificationEmail. CONFIRMEMAIL may be the only type where we don't want an in-app notification. - UserNotification userNotification = new UserNotification(); - userNotification.setType(UserNotification.Type.CONFIRMEMAIL); - String subject = MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null); - logger.fine("sending email to " + toAddress + " with this subject: " + subject); - mailService.sendSystemEmail(toAddress, subject, messageBody); - } - } catch (Exception e) { - logger.info("The root dataverse is not present. Don't send a notification to dataverseAdmin."); - } + + // FIXME: consider refactoring this into MailServiceBean.sendNotificationEmail. + // CONFIRMEMAIL may be the only type where we don't want an in-app notification. 
+ UserNotification userNotification = new UserNotification(); + userNotification.setType(UserNotification.Type.CONFIRMEMAIL); + String subject = MailUtil.getSubjectTextBasedOnNotification(userNotification, null); + logger.fine("sending email to " + toAddress + " with this subject: " + subject); + mailService.sendSystemEmail(toAddress, subject, messageBody); } catch (Exception ex) { /** * @todo get more specific about the exception that's thrown when @@ -169,6 +163,10 @@ public ConfirmEmailExecResponse processToken(String tokenQueried) { long nowInMilliseconds = new Date().getTime(); Timestamp emailConfirmed = new Timestamp(nowInMilliseconds); AuthenticatedUser authenticatedUser = confirmEmailData.getAuthenticatedUser(); + if (authenticatedUser.isDeactivated()) { + logger.fine("User is deactivated."); + return null; + } authenticatedUser.setEmailConfirmed(emailConfirmed); em.remove(confirmEmailData); return goodTokenCanProceed; diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index a8c52414e56..e82e6d7ae37 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -21,6 +21,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.AbstractCreateDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; @@ -365,12 +366,14 @@ public boolean runAddFile(Dataset dataset, * @param dataset * @param newFileName * @param newFileContentType + 
* @param newStorageIdentifier2 * @param newFileInputStream * @return */ public boolean runForceReplaceFile(Long oldFileId, String newFileName, String newFileContentType, + String newStorageIdentifier, InputStream newFileInputStream, OptionalFileParams optionalFileParams){ @@ -392,13 +395,14 @@ public boolean runForceReplaceFile(Long oldFileId, } - return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams); + return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newStorageIdentifier, newFileInputStream, optionalFileParams); } public boolean runReplaceFile(Long oldFileId, String newFileName, String newFileContentType, + String newStorageIdentifier, InputStream newFileInputStream, OptionalFileParams optionalFileParams){ @@ -418,7 +422,7 @@ public boolean runReplaceFile(Long oldFileId, if (!this.step_005_loadFileToReplaceById(oldFileId)){ return false; } - return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams); + return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newStorageIdentifier, newFileInputStream, optionalFileParams); } @@ -442,10 +446,6 @@ public boolean runReplaceFile(Long oldFileId, * * @return */ - private boolean runAddReplaceFile(Dataset owner, String newFileName, String newFileContentType, - InputStream newFileInputStream, OptionalFileParams optionalFileParams) { - return runAddReplaceFile(owner,newFileName, newFileContentType, null, newFileInputStream, optionalFileParams); - } private boolean runAddReplaceFile(Dataset owner, String newFileName, String newFileContentType, @@ -504,6 +504,18 @@ public boolean runReplaceFromUI_Phase1(Long oldFileId, if (!this.step_005_loadFileToReplaceById(oldFileId)){ return false; } + //Update params to match existing file (except checksum, which should match the new file) + if(fileToReplace != null) { + 
String checksum = optionalFileParams.getCheckSum(); + ChecksumType checkSumType = optionalFileParams.getCheckSumType(); + try { + optionalFileParams = new OptionalFileParams(fileToReplace); + optionalFileParams.setCheckSum(checksum, checkSumType); + } catch (DataFileTagException e) { + // Shouldn't happen since fileToReplace should have valid tags + e.printStackTrace(); + } + } return this.runAddReplacePhase1(fileToReplace.getOwner(), newFileName, @@ -574,6 +586,26 @@ private boolean runAddReplacePhase1(Dataset owner, return false; } + // if the fileToReplace hasn't been released, + if (fileToReplace != null && !fileToReplace.isReleased()) { + DataFile df = finalFileList.get(0); // step_055 uses a loop and assumes only one file + // set the replacement file's previous and root datafileIds to match (unless + // they are the defaults) + if (fileToReplace.getPreviousDataFileId() != null) { + df.setPreviousDataFileId(fileToReplace.getPreviousDataFileId()); + df.setRootDataFileId(fileToReplace.getRootDataFileId()); + } + // Reuse any file PID during a replace operation (if File PIDs are in use) + if (systemConfig.isFilePIDsEnabled()) { + df.setGlobalId(fileToReplace.getGlobalId()); + df.setGlobalIdCreateTime(fileToReplace.getGlobalIdCreateTime()); + // Should be true or fileToReplace wouldn't have an identifier (since it's not + // yet released in this if statement) + df.setIdentifierRegistered(fileToReplace.isIdentifierRegistered()); + fileToReplace.setGlobalId(null); + } + } + return true; } @@ -1061,16 +1093,6 @@ private boolean step_005_loadFileToReplaceById(Long dataFileId){ if (!step_015_auto_check_permissions(existingFile.getOwner())){ return false; }; - - - - // Is the file published? - // - if (!existingFile.isReleased()){ - addError(getBundleErr("unpublished_file_cannot_be_replaced")); - return false; - } - // Is the file in the latest dataset version? 
// if (!step_007_auto_isReplacementInLatestVersion(existingFile)){ @@ -1532,7 +1554,22 @@ private boolean step_070_run_update_dataset_command(){ } Command update_cmd; - update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, clone); + String deleteStorageLocation = null; + long deleteFileId=-1; + if(isFileReplaceOperation()) { + List filesToDelete = new ArrayList(); + filesToDelete.add(fileToReplace.getFileMetadata()); + + if(!fileToReplace.isReleased()) { + //If file is only in draft version, also need to delete the physical file + deleteStorageLocation = fileService.getPhysicalFileToDelete(fileToReplace); + deleteFileId=fileToReplace.getId(); + } + //Adding the file to the delete list for the command will delete this filemetadata and, if the file hasn't been released, the datafile itself. + update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, filesToDelete, clone); + } else { + update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, clone); + } ((UpdateDatasetVersionCommand) update_cmd).setValidateLenient(true); try { @@ -1554,89 +1591,23 @@ private boolean step_070_run_update_dataset_command(){ this.addErrorSevere("add.add_file_error (see logs)"); logger.severe(ex.getMessage()); return false; - } - return true; - } - - - /** - * Go through the working DatasetVersion and remove the - * FileMetadata of the file to replace - * - * @return - */ - private boolean step_085_auto_remove_filemetadata_to_replace_from_working_version(){ - - msgt("step_085_auto_remove_filemetadata_to_replace_from_working_version 1"); - - if (!isFileReplaceOperation()){ - // Shouldn't happen! 
- this.addErrorSevere(getBundleErr("only_replace_operation") + " (step_085_auto_remove_filemetadata_to_replace_from_working_version"); - return false; - } - msg("step_085_auto_remove_filemetadata_to_replace_from_working_version 2"); - - if (this.hasError()){ - return false; } - - - msgt("File to replace getId: " + fileToReplace.getId()); - - Iterator fmIt = workingVersion.getFileMetadatas().iterator(); - msgt("Clear file to replace"); - int cnt = 0; - while (fmIt.hasNext()) { - cnt++; - - FileMetadata fm = fmIt.next(); - msg(cnt + ") next file: " + fm); - msg(" getDataFile().getId(): " + fm.getDataFile().getId()); - if (fm.getDataFile().getId() != null) { - if (Objects.equals(fm.getDataFile().getId(), fileToReplace.getId())) { - msg("Let's remove it!"); - - // If this is a tabular data file with a UNF, we'll need - // to recalculate the version UNF, once the file is removed: - - boolean recalculateUNF = !StringUtils.isEmpty(fm.getDataFile().getUnf()); - - if (workingVersion.getId() != null) { - // If this is an existing draft (i.e., this draft version - // is already saved in the dataset, we'll also need to remove this filemetadata - // explicitly: - msg(" this is an existing draft version..."); - fileService.removeFileMetadata(fm); - - // remove the filemetadata from the list of filemetadatas - // attached to the datafile object as well, for a good - // measure: - fileToReplace.getFileMetadatas().remove(fm); - // (and yes, we can do .remove(fm) safely - if this released - // file is part of an existing draft, we know that the - // filemetadata object also exists in the database, and thus - // has the id, and can be identified unambiguously. 
- } - - // and remove it from the list of filemetadatas attached - // to the version object, via the iterator: - fmIt.remove(); - - if (recalculateUNF) { - msg("recalculating the UNF"); - ingestService.recalculateDatasetVersionUNF(workingVersion); - msg("UNF recalculated: "+workingVersion.getUNF()); - } - - return true; + //Sanity check + if(isFileReplaceOperation()) { + if (deleteStorageLocation != null) { + // Finalize the delete of the physical file + // (File service will double-check that the datafile no + // longer exists in the database, before proceeding to + // delete the physical file) + try { + fileService.finalizeFileDelete(deleteFileId, deleteStorageLocation); + } catch (IOException ioex) { + logger.warning("Failed to delete the physical file associated with the deleted datafile id=" + + deleteFileId + ", storage location: " + deleteStorageLocation); } } } - - msg("No matches found!"); - addErrorSevere(getBundleErr("failed_to_remove_old_file_from_dataset")); - runMajorCleanup(); - return false; + return true; } @@ -1711,13 +1682,6 @@ private boolean step_080_run_update_dataset_command_for_replace(){ return false; } - // ----------------------------------------------------------- - // Remove the "fileToReplace" from the current working version - // ----------------------------------------------------------- - if (!step_085_auto_remove_filemetadata_to_replace_from_working_version()){ - return false; - } - // ----------------------------------------------------------- // Set the "root file ids" and "previous file ids" // THIS IS A KEY STEP - SPLIT IT OUT @@ -1727,26 +1691,27 @@ private boolean step_080_run_update_dataset_command_for_replace(){ // ----------------------------------------------------------- - /* - Check the root file id on fileToReplace, updating it if necessary - */ - if (fileToReplace.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT)){ + if (fileToReplace.isReleased()) { + /* + * Check the root file id on fileToReplace, 
updating it if necessary + */ + if (fileToReplace.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT)) { - fileToReplace.setRootDataFileId(fileToReplace.getId()); - fileToReplace = fileService.save(fileToReplace); - } - - /* - Go through the final file list, settting the rootFileId and previousFileId - */ - for (DataFile df : finalFileList){ - df.setPreviousDataFileId(fileToReplace.getId()); - - df.setRootDataFileId(fileToReplace.getRootDataFileId()); - - } + fileToReplace.setRootDataFileId(fileToReplace.getId()); + fileToReplace = fileService.save(fileToReplace); + } + + /* + * Go through the final file list, settting the rootFileId and previousFileId + */ + for (DataFile df : finalFileList) { + df.setPreviousDataFileId(fileToReplace.getId()); + + df.setRootDataFileId(fileToReplace.getRootDataFileId()); - // Call the update dataset command + } + } + // Call the update dataset command which will delete the replaced filemetadata and file in needed (if file is not released) // return step_070_run_update_dataset_command(); diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java index 3832e15b85e..6e0c16a93e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java @@ -19,7 +19,7 @@ * Phase 1: File successfully uploaded and unpersisted DataFile is in memory * Phase 2: Save the files * - http://localhost:8080/editdatafiles.xhtml?mode=SINGLE_REPLACE&datasetId=26&fid=726 + http://localhost:8080/editdatafiles.xhtml?mode=REPLACE&datasetId=26&fid=726 * This is messy, trying to contain some of it--give me APIs or more time, more time:) * * @author rmp553 @@ -111,13 +111,8 @@ public boolean handleNativeFileUpload(InputStream inputStream, String fullStorag } OptionalFileParams ofp = null; + ofp = new OptionalFileParams(); 
if(checkSumValue != null) { - try { - ofp = new OptionalFileParams(null); - } catch (DataFileTagException e) { - // Shouldn't happen with null input - e.printStackTrace(); - } ofp.setCheckSum(checkSumValue, checkSumType); } // Run 1st phase of replace diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 302bdf915c8..cc75375f979 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -77,7 +77,9 @@ public class OptionalFileParams { public static final String CHECKSUM_OBJECT_TYPE = "@type"; public static final String CHECKSUM_OBJECT_VALUE = "@value"; - + public OptionalFileParams() { + } + public OptionalFileParams(String jsonData) throws DataFileTagException{ if (jsonData != null){ @@ -108,6 +110,21 @@ public OptionalFileParams(String description, this.restrict = restrict; } + //For use in replace operations - load the file metadata from the file being replaced so it can be applied to the new file + //checksum and mimetype aren't needed + public OptionalFileParams(DataFile df) throws DataFileTagException { + FileMetadata fm = df.getFileMetadata(); + + this.description = fm.getDescription(); + setCategories(fm.getCategoriesByName()); + this.addFileDataTags(df.getTagLabels()); + this.restrict = fm.isRestricted(); + //Explicitly do not replace the file name - replaces with -force may change the mimetype and extension + //this.label = fm.getLabel(); + this.directoryLabel = fm.getDirectoryLabel(); + this.provFreeForm = fm.getProvFreeForm(); + } + /** * Set description @@ -289,12 +306,7 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{ // logger.log(Level.SEVERE, "jsonData is null"); } JsonObject jsonObj; - try { - jsonObj = new Gson().fromJson(jsonData, JsonObject.class); - } catch 
(ClassCastException ex) { - logger.info("Exception parsing string '" + jsonData + "': " + ex); - return; - } + jsonObj = new Gson().fromJson(jsonData, JsonObject.class); // ------------------------------- // get description as string diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java index f6bd1316e44..8ba1d181609 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.groups.GroupException; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -49,6 +50,12 @@ public ExplicitGroup execute(CommandContext ctxt) throws CommandException { if ( ra == null ) { nonexistentRAs.add( rai ); } else { + if (ra instanceof AuthenticatedUser) { + AuthenticatedUser user = (AuthenticatedUser) ra; + if (user.isDeactivated()) { + throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be added to a group.", this); + } + } try { explicitGroup.add(ra); } catch (GroupException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java index 34263599ff0..276f52a5802 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java @@ -10,10 +10,12 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.Collections; import java.util.Map; import java.util.Set; @@ -49,6 +51,12 @@ public AssignRoleCommand(RoleAssignee anAssignee, DataverseRole aRole, DvObject @Override public RoleAssignment execute(CommandContext ctxt) throws CommandException { + if (grantee instanceof AuthenticatedUser) { + AuthenticatedUser user = (AuthenticatedUser) grantee; + if (user.isDeactivated()) { + throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be given a role.", this); + } + } // TODO make sure the role is defined on the dataverse. 
RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken); return ctxt.roles().save(roleAssignment); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java index ff28021146d..cb9b0a3c774 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -11,6 +12,8 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.util.BundleUtil; +import javax.persistence.NoResultException; /** * Create a new role in a dataverse. 
@@ -34,9 +37,19 @@ public DataverseRole execute(CommandContext ctxt) throws CommandException { User user = getUser(); //todo: temporary for 4.0 - only superusers can create and edit roles if ((!(user instanceof AuthenticatedUser) || !user.isSuperuser())) { - throw new IllegalCommandException("Roles can only be created or edited by superusers.",this); + throw new IllegalCommandException(BundleUtil.getStringFromBundle("permission.role.must.be.created.by.superuser"),this); + } + //Test to see if the role already exists in DB + try { + DataverseRole testRole = ctxt.em().createNamedQuery("DataverseRole.findDataverseRoleByAlias", DataverseRole.class) + .setParameter("alias", created.getAlias()) + .getSingleResult(); + if (!(testRole == null)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("permission.role.not.created.alias.already.exists"), this); + } + } catch (NoResultException nre) { + // we want no results because that means we can create a role } - dv.addRole(created); return ctxt.roles().save(created); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 373454bf694..a86f90d03eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -61,7 +61,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // final DatasetVersion editVersion = getDataset().getEditVersion(); tidyUpFields(updateVersion); - // Merge the new version into out JPA context + // Merge the new version into our JPA context ctxt.em().merge(updateVersion); @@ -71,7 +71,8 @@ public Dataset execute(CommandContext ctxt) throws CommandException { updateVersion.setTermsOfUseAndAccess(newTerms); //Put old terms on version that will
be deleted.... getDataset().getEditVersion().setTermsOfUseAndAccess(oldTerms); - + //Also set the fileaccessrequest boolean on the dataset to match the new terms + getDataset().setFileAccessRequest(updateVersion.getTermsOfUseAndAccess().isFileAccessRequest()); List newComments = getDataset().getEditVersion().getWorkflowComments(); if (newComments!=null && newComments.size() >0) { for(WorkflowComment wfc: newComments) { @@ -162,7 +163,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // Update modification time on the published version and the dataset updateVersion.setLastUpdateTime(getTimestamp()); tempDataset.setModificationTime(getTimestamp()); - + ctxt.em().merge(updateVersion); Dataset savedDataset = ctxt.em().merge(tempDataset); // Flush before calling DeleteDatasetVersion which calls @@ -184,7 +185,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { //And the exported metadata files try { - ExportService instance = ExportService.getInstance(ctxt.settings()); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(getDataset()); } catch (ExportException ex) { // Just like with indexing, a failure to export is not a fatal condition. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java index 6aa5e0e250a..391e798e285 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java @@ -75,7 +75,7 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { boolean doNormalSolrDocCleanUp = true; - ExportService instance = ExportService.getInstance(ctxt.settings()); + ExportService instance = ExportService.getInstance(); if (managed.getDataset().getReleasedVersion() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java new file mode 100644 index 00000000000..1dab8120767 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java @@ -0,0 +1,44 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import java.sql.Timestamp; +import java.util.Date; + +// Superuser-only enforced below. 
+@RequiredPermissions({}) +public class DeactivateUserCommand extends AbstractCommand { + + private DataverseRequest request; + private AuthenticatedUser userToDeactivate; + + public DeactivateUserCommand(DataverseRequest request, AuthenticatedUser userToDeactivate) { + super(request, (DvObject) null); + this.request = request; + this.userToDeactivate = userToDeactivate; + } + + @Override + public AuthenticatedUser execute(CommandContext ctxt) throws CommandException { + if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) { + throw new PermissionException("Deactivate user command can only be called by superusers.", this, null, null); + } + if (userToDeactivate == null) { + throw new CommandException("Cannot deactivate user. User not found.", this); + } + ctxt.engine().submit(new RevokeAllRolesCommand(userToDeactivate, request)); + ctxt.authentication().removeAuthentictedUserItems(userToDeactivate); + ctxt.notifications().findByUser(userToDeactivate.getId()).forEach(ctxt.notifications()::delete); + userToDeactivate.setDeactivated(true); + userToDeactivate.setDeactivatedTime(new Timestamp(new Date().getTime())); + AuthenticatedUser deactivatedUser = ctxt.authentication().save(userToDeactivate); + return deactivatedUser; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java index 841c84793ec..274aeb3c3fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java @@ -13,12 +13,12 @@ import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import org.apache.commons.httpclient.HttpException; + import java.util.Arrays; import java.util.Collections; import 
java.util.logging.Logger; -import javax.xml.ws.http.HTTPException; - /** * No required permissions because we check for superuser status. */ @@ -51,8 +51,8 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { dataset.setGlobalIdCreateTime(null); dataset.setIdentifierRegistered(false); ctxt.datasets().merge(dataset); - } catch (HTTPException hex) { - String message = BundleUtil.getStringFromBundle("pids.deletePid.failureExpected", Arrays.asList(dataset.getGlobalId().asString(), Integer.toString(hex.getStatusCode()))); + } catch (HttpException hex) { + String message = BundleUtil.getStringFromBundle("pids.deletePid.failureExpected", Arrays.asList(dataset.getGlobalId().asString(), Integer.toString(hex.getReasonCode()))); logger.info(message); throw new IllegalCommandException(message, this); } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index c2f186f1e8c..4fa07dedede 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -186,7 +186,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { //Should this be in onSuccess()? 
ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset).ifPresent(wf -> { try { - ctxt.workflows().start(wf, buildContext(ds, TriggerType.PostPublishDataset, datasetExternallyReleased)); + ctxt.workflows().start(wf, buildContext(ds, TriggerType.PostPublishDataset, datasetExternallyReleased), false); } catch (CommandException ex) { ctxt.datasets().removeDatasetLocks(ds, DatasetLock.Reason.Workflow); logger.log(Level.SEVERE, "Error invoking post-publish workflow: " + ex.getMessage(), ex); @@ -246,7 +246,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) { } } - exportMetadata(dataset, ctxt.settings()); + exportMetadata(dataset); ctxt.datasets().updateLastExportTimeStamp(dataset.getId()); @@ -257,10 +257,10 @@ public boolean onSuccess(CommandContext ctxt, Object r) { * Attempting to run metadata export, for all the formats for which we have * metadata Exporters. */ - private void exportMetadata(Dataset dataset, SettingsServiceBean settingsServiceBean) { + private void exportMetadata(Dataset dataset) { try { - ExportService instance = ExportService.getInstance(settingsServiceBean); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(dataset); } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java new file mode 100644 index 00000000000..41a1708e4c5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java @@ -0,0 +1,228 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersionUser; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.GuestbookResponse; +import edu.harvard.iq.dataverse.RoleAssignment; +import 
edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import java.math.BigDecimal; +import java.util.List; +import java.util.Set; +import javax.json.Json; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObjectBuilder; + +// Superuser-only enforced below. +@RequiredPermissions({}) +public class GetUserTracesCommand extends AbstractCommand { + + private DataverseRequest request; + private AuthenticatedUser user; + + public GetUserTracesCommand(DataverseRequest request, AuthenticatedUser user) { + super(request, (DvObject) null); + this.request = request; + this.user = user; + } + + @Override + public JsonObjectBuilder execute(CommandContext ctxt) throws CommandException { + if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) { + throw new PermissionException("Get user traces command can only be called by superusers.", this, null, null); + } + if (user == null) { + throw new CommandException("Cannot get traces. 
User not found.", this); + } + Long userId = user.getId(); + JsonObjectBuilder traces = Json.createObjectBuilder(); +// List roleAssignments = ctxt.permissions().getDvObjectsUserHasRoleOn(user); + List roleAssignments = ctxt.roleAssignees().getAssignmentsFor(user.getIdentifier()); + if (roleAssignments != null && !roleAssignments.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (RoleAssignment roleAssignment : roleAssignments) { + jab.add(NullSafeJsonBuilder.jsonObjectBuilder() + .add("id", roleAssignment.getId()) + .add("definitionPointName", roleAssignment.getDefinitionPoint().getCurrentName()) + .add("definitionPointIdentifier", roleAssignment.getDefinitionPoint().getIdentifier()) + .add("definitionPointId", roleAssignment.getDefinitionPoint().getId()) + .add("roleAlias", roleAssignment.getRole().getAlias()) + .add("roleName", roleAssignment.getRole().getName()) + ); + } + job.add("count", roleAssignments.size()); + job.add("items", jab); + traces.add("roleAssignments", job); + } + List dataversesCreated = ctxt.dataverses().findByCreatorId(userId); + if (dataversesCreated != null && !dataversesCreated.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (Dataverse dataverse : dataversesCreated) { + jab.add(Json.createObjectBuilder() + .add("id", dataverse.getId()) + .add("alias", dataverse.getAlias()) + ); + } + job.add("count", dataversesCreated.size()); + job.add("items", jab); + traces.add("dataverseCreator", job); + } + List dataversesPublished = ctxt.dataverses().findByReleaseUserId(userId); + if (dataversesPublished != null && !dataversesPublished.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (Dataverse dataverse : dataversesPublished) { + jab.add(Json.createObjectBuilder() + .add("id", dataverse.getId()) + .add("alias", 
dataverse.getAlias()) + ); + } + job.add("count", dataversesPublished.size()); + job.add("items", jab); + traces.add("dataversePublisher", job); + } + List datasetsCreated = ctxt.datasets().findByCreatorId(userId); + if (datasetsCreated != null && !datasetsCreated.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (Dataset dataset : datasetsCreated) { + jab.add(Json.createObjectBuilder() + .add("id", dataset.getId()) + .add("pid", dataset.getGlobalId().asString()) + ); + } + job.add("count", datasetsCreated.size()); + job.add("items", jab); + traces.add("datasetCreator", job); + } + List datasetsPublished = ctxt.datasets().findByReleaseUserId(userId); + if (datasetsPublished != null && !datasetsPublished.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (Dataset dataset : datasetsPublished) { + jab.add(Json.createObjectBuilder() + .add("id", dataset.getId()) + .add("pid", dataset.getGlobalId().asString()) + ); + } + job.add("count", datasetsPublished.size()); + job.add("items", jab); + traces.add("datasetPublisher", job); + } + List dataFilesCreated = ctxt.files().findByCreatorId(userId); + if (dataFilesCreated != null && !dataFilesCreated.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (DataFile dataFile : dataFilesCreated) { + jab.add(Json.createObjectBuilder() + .add("id", dataFile.getId()) + .add("filename", dataFile.getCurrentName()) + .add("datasetPid", dataFile.getOwner().getGlobalId().asString()) + ); + } + job.add("count", dataFilesCreated.size()); + job.add("items", jab); + traces.add("dataFileCreator", job); + } + // TODO: Consider removing this because we don't seem to populate releaseuser_id for files. 
+ List dataFilesPublished = ctxt.files().findByReleaseUserId(userId); + if (dataFilesPublished != null && !dataFilesPublished.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (DataFile dataFile : dataFilesPublished) { + jab.add(Json.createObjectBuilder() + .add("id", dataFile.getId()) + .add("filename", dataFile.getCurrentName()) + .add("datasetPid", dataFile.getOwner().getGlobalId().asString()) + ); + } + job.add("count", dataFilesPublished.size()); + job.add("items", jab); + traces.add("dataFileCreator", job); + } + // These are the users who have published a version (or created a draft). + List datasetVersionUsers = ctxt.datasetVersion().getDatasetVersionUsersByAuthenticatedUser(user); + if (datasetVersionUsers != null && !datasetVersionUsers.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (DatasetVersionUser datasetVersionUser : datasetVersionUsers) { + jab.add(Json.createObjectBuilder() + .add("id", datasetVersionUser.getId()) + .add("dataset", datasetVersionUser.getDatasetVersion().getDataset().getGlobalId().asString()) + .add("version", datasetVersionUser.getDatasetVersion().getSemanticVersion()) + ); + } + job.add("count", datasetVersionUsers.size()); + job.add("items", jab); + traces.add("datasetVersionUsers", job); + } + Set explicitGroups = ctxt.explicitGroups().findDirectlyContainingGroups(user); + if (explicitGroups != null && !explicitGroups.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (ExplicitGroup explicitGroup : explicitGroups) { + jab.add(Json.createObjectBuilder() + .add("id", explicitGroup.getId()) + .add("name", explicitGroup.getDisplayName()) + ); + } + job.add("count", explicitGroups.size()); + job.add("items", jab); + traces.add("explicitGroups", job); + } + List guestbookResponses = 
ctxt.responses().findByAuthenticatedUserId(user); + if (guestbookResponses != null && !guestbookResponses.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + // The feeling is that this is too much detail for now so we only show a count. +// JsonArrayBuilder jab = Json.createArrayBuilder(); +// for (GuestbookResponse guestbookResponse : guestbookResponses) { +// jab.add(Json.createObjectBuilder() +// .add("id", guestbookResponse.getId()) +// .add("downloadType", guestbookResponse.getDownloadtype()) +// .add("filename", guestbookResponse.getDataFile().getCurrentName()) +// .add("date", guestbookResponse.getResponseDate()) +// .add("guestbookName", guestbookResponse.getGuestbook().getName()) +// .add("dataset", guestbookResponse.getDatasetVersion().getDataset().getGlobalId().asString()) +// .add("version", guestbookResponse.getDatasetVersion().getSemanticVersion()) +// ); +// } + job.add("count", guestbookResponses.size()); +// job.add("items", jab); + traces.add("guestbookEntries", job); + } + List savedSearchs = ctxt.savedSearches().findByAuthenticatedUser(user); + if (savedSearchs != null && !savedSearchs.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (SavedSearch savedSearch : savedSearchs) { + jab.add(Json.createObjectBuilder() + .add("id", savedSearch.getId()) + ); + } + job.add("count", savedSearchs.size()); + job.add("items", jab); + traces.add("savedSearches", job); + } + JsonObjectBuilder result = Json.createObjectBuilder(); + result.add("user", Json.createObjectBuilder() + .add("identifier", user.getIdentifier()) + .add("name", user.getName()) + ); + result.add("traces", traces); + return result; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java index 29f1b891c91..42af43b7247 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java @@ -40,6 +40,10 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { this, null, null); } + if (targetUser.isDeactivated()) { + throw new CommandException("User " + targetUser.getIdentifier() + " has been deactivated and cannot become a superuser.", this); + } + try { targetUser.setSuperuser(true); ctxt.em().merge(targetUser); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java index 28db9b890e9..1ec51764d73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java @@ -25,6 +25,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import edu.harvard.iq.dataverse.search.IndexResponse; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; import edu.harvard.iq.dataverse.workflows.WorkflowComment; @@ -57,7 +58,15 @@ public MergeInAccountCommand(DataverseRequest createDataverseRequest, Authentica @Override protected void executeImpl(CommandContext ctxt) throws CommandException { - + + if (consumedAU.getId() == ongoingAU.getId()) { + throw new IllegalCommandException("You cannot merge an account into itself.", this); + } + + if (consumedAU.isDeactivated() && !ongoingAU.isDeactivated() || !consumedAU.isDeactivated() && ongoingAU.isDeactivated()) { + throw new IllegalCommandException("User accounts can only be merged if they are either both active or 
both deactivated.", this); + } + List baseRAList = ctxt.roleAssignees().getAssignmentsFor(ongoingAU.getIdentifier()); List consumedRAList = ctxt.roleAssignees().getAssignmentsFor(consumedAU.getIdentifier()); @@ -185,8 +194,8 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { ctxt.em().remove(consumedAUL); ctxt.em().remove(consumedAU); BuiltinUser consumedBuiltinUser = ctxt.builtinUsers().findByUserName(consumedAU.getUserIdentifier()); - if (consumedBuiltinUser != null){ - ctxt.em().remove(consumedBuiltinUser); + if (consumedBuiltinUser != null) { + ctxt.builtinUsers().removeUser(consumedBuiltinUser.getUserName()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 41645078a25..f1bab1901d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -96,7 +96,7 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException // We start a workflow theDataset = ctxt.em().merge(theDataset); ctxt.em().flush(); - ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased)); + ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); return new PublishDatasetResult(theDataset, Status.Workflow); } else{ diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java index ef20ec76e12..8eeca0cb4cd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java @@ -89,7 
+89,7 @@ public DataFile execute(CommandContext ctxt) throws CommandException { logger.info("Exception while reindexing files during file type redetection: " + ex.getLocalizedMessage()); } try { - ExportService instance = ExportService.getInstance(ctxt.settings()); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(dataset); } catch (ExportException ex) { // Just like with indexing, a failure to export is not a fatal condition. diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index fefa8707c8b..4da9e2fef2f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.util.FileMetadataUtil; + import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -165,38 +167,77 @@ public Dataset execute(CommandContext ctxt) throws CommandException { } // we have to merge to update the database but not flush because // we don't want to create two draft versions! - // Dataset tempDataset = ctxt.em().merge(theDataset); - //SEK 5/30/2019 - // This interim merge is causing: - // java.lang.IllegalArgumentException: Cannot merge an entity that has been removed: edu.harvard.iq.dvn.core.study.FileMetadata - // at the merge at line 177 - //Is this merge needed to add the lock? - seems to be 'no' so what is it needed for? 
- - // theDataset = ctxt.em().merge(theDataset); + // Although not completely tested, it looks like this merge handles the + // thumbnail case - if the filemetadata is removed from the context below and + // the dataset still references it, that could cause an issue. Merging here + // avoids any reference from it being the dataset thumbnail + theDataset = ctxt.em().merge(theDataset); + /* + * This code has to handle many cases, and anyone making changes should + * carefully check tests and basic methods that update the dataset version. The + * differences between the cases stem primarily from differences in whether the + * files to add, and their filemetadata, and files to delete, and their + * filemetadata have been persisted at this point, which manifests itself as to + * whether they have id numbers or not, and apparently, whether or not they + * exist in lists, e.g. the getFileMetadatas() list of a datafile. + * + * To handle this, the code is carefully checking to make sure that deletions + * are deleting the right things and not, for example, doing a remove(fmd) when + * the fmd.getId() is null, which just removes the first element found. + */ for (FileMetadata fmd : filesToDelete) { + logger.fine("Deleting fmd: " + fmd.getId() + " for file: " + fmd.getDataFile().getId()); + // if file is draft (ie. new to this version), delete it. Otherwise just remove + // filemetadata object) + // There are a few cases to handle: + // * the fmd has an id (has been persisted) and is the one in the current + // (draft) version + // * the fmd has an id (has been persisted) but it is from a published version + // so we need the corresponding one from the draft version (i.e.
created during + // a getEditVersion call) + // * the fmd has no id (hasn't been persisted) so we have to use non-id based + // means to identify it and remove it from lists + + if (fmd.getId() != null) { + // If the datasetversion doesn't match, we have the fmd from a published version + // and we need to remove the one for the newly created draft instead, so we find + // it here + logger.fine("Edit ver: " + theDataset.getEditVersion().getId()); + logger.fine("fmd ver: " + fmd.getDatasetVersion().getId()); + if (!theDataset.getEditVersion().equals(fmd.getDatasetVersion())) { + fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, theDataset.getEditVersion()); + } + } + fmd = ctxt.em().merge(fmd); + + // There are two datafile cases as well - the file has been released, so we're + // just removing it from the current draft version or it is only in the draft + // version and we completely remove the file. if (!fmd.getDataFile().isReleased()) { - // if file is draft (ie. new to this version, delete; otherwise just remove - // filemetadata object) + // remove the file ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest())); + // and remove the file from the dataset's list theDataset.getFiles().remove(fmd.getDataFile()); - theDataset.getEditVersion().getFileMetadatas().remove(fmd); - // added this check to handle issue where you could not deleter a file that - // shared a category with a new file - // the relation ship does not seem to cascade, yet somehow it was trying to - // merge the filemetadata - // todo: clean this up some when we clean the create / update dataset methods - for (DataFileCategory cat : theDataset.getCategories()) { - cat.getFileMetadatas().remove(fmd); - } } else { - FileMetadata mergedFmd = ctxt.em().merge(fmd); - ctxt.em().remove(mergedFmd); - fmd.getDataFile().getFileMetadatas().remove(mergedFmd); - theDataset.getEditVersion().getFileMetadatas().remove(mergedFmd); + // if we aren't removing the file, we need to 
explicitly remove the fmd from the + // context and then remove it from the datafile's list + ctxt.em().remove(fmd); + FileMetadataUtil.removeFileMetadataFromList(fmd.getDataFile().getFileMetadatas(), fmd); + } + // In either case, to fully remove the fmd, we have to remove any other possible + // references + // From the datasetversion + FileMetadataUtil.removeFileMetadataFromList(theDataset.getEditVersion().getFileMetadatas(), fmd); + // and from the list associated with each category + for (DataFileCategory cat : theDataset.getCategories()) { + FileMetadataUtil.removeFileMetadataFromList(cat.getFileMetadatas(), fmd); } } - + for(FileMetadata fmd: theDataset.getEditVersion().getFileMetadatas()) { + logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + "is in final draft version"); + } + if (recalculateUNF) { ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getEditVersion()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java index 9eaf0dbced0..52de3cf1d78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java @@ -45,28 +45,12 @@ public class ExportService { private static ExportService service; private ServiceLoader loader; - static SettingsServiceBean settingsService; private ExportService() { loader = ServiceLoader.load(Exporter.class); } - /** - * @deprecated Use `getInstance(SettingsServiceBean settingsService)` - * instead. For privacy reasons, we need to pass in settingsService so that - * we can make a decision whether not not to exclude email addresses. No new - * code should call this method and it would be nice to remove calls from - * existing code. 
- */ - @Deprecated public static synchronized ExportService getInstance() { - return getInstance(null); - } - - public static synchronized ExportService getInstance(SettingsServiceBean settingsService) { - ExportService.settingsService = settingsService; - // We pass settingsService into the JsonPrinter so it can check the :ExcludeEmailFromExport setting in calls to JsonPrinter.jsonAsDatasetDto(). - JsonPrinter.setSettingsService(settingsService); if (service == null) { service = new ExportService(); } @@ -75,7 +59,7 @@ public static synchronized ExportService getInstance(SettingsServiceBean setting public List< String[]> getExportersLabels() { List retList = new ArrayList<>(); - Iterator exporters = ExportService.getInstance(null).loader.iterator(); + Iterator exporters = ExportService.getInstance().loader.iterator(); while (exporters.hasNext()) { Exporter e = exporters.next(); String[] temp = new String[2]; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java index f77821a59e2..367ac4bbc5b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java @@ -35,7 +35,7 @@ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream try { InputStream ddiInputStream; try { - ddiInputStream = ExportService.getInstance(ExportService.settingsService).getExport(version.getDataset(), "ddi"); + ddiInputStream = ExportService.getInstance().getExport(version.getDataset(), "ddi"); } catch(ExportException | IOException e) { throw new ExportException ("Cannot open export_ddi cached file"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java index 6cfcb590681..87adc1b4c5b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java @@ -25,7 +25,7 @@ public class OAI_OREExporter implements Exporter { public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException { try { - new OREMap(version, ExportService.settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false)).writeOREMap(outputStream); + new OREMap(version).writeOREMap(outputStream); } catch (Exception e) { logger.severe(e.getMessage()); e.printStackTrace(); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 1ce1a2119a7..4fed5913263 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -27,6 +27,8 @@ import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF; import edu.harvard.iq.dataverse.export.DDIExporter; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + import static edu.harvard.iq.dataverse.util.SystemConfig.FQDN; import static edu.harvard.iq.dataverse.util.SystemConfig.SITE_URL; import edu.harvard.iq.dataverse.util.json.JsonUtil; @@ -55,6 +57,7 @@ import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.w3c.dom.Document; +import org.apache.commons.lang3.StringUtils; import org.w3c.dom.DOMException; // For write operation @@ -79,8 +82,8 @@ public class DdiExportUtil { public static final String LEVEL_DV = "dv"; - @EJB - VariableServiceBean variableService; + + static SettingsServiceBean settingsService; public static final String NOTE_TYPE_CONTENTTYPE = "DATAVERSE:CONTENTTYPE"; public static final String NOTE_SUBJECT_CONTENTTYPE = "Content/MIME Type"; @@ -195,7 +198,19 @@ private static void createStdyDscr(XMLStreamWriter xmlw, 
DatasetDTO datasetDto) writeProducersElement(xmlw, version); xmlw.writeStartElement("distStmt"); - if (datasetDto.getPublisher() != null && !datasetDto.getPublisher().equals("")) { + //The default is to add Dataverse Repository as a distributor. The excludeinstallationifset setting turns that off if there is a distributor defined in the metadata + boolean distributorSet=false; + MetadataBlockDTO citationDTO= version.getMetadataBlocks().get("citation"); + if(citationDTO!=null) { + if(citationDTO.getField(DatasetFieldConstant.distributor)!=null) { + distributorSet=true; + } + } + logger.info("Dsitr set?: " + distributorSet); + logger.info("Pub?: " + datasetDto.getPublisher()); + boolean excludeRepository = settingsService.isTrueForKey(SettingsServiceBean.Key.ExportInstallationAsDistributorOnlyWhenNotSet, false); + logger.info("Exclude: " + excludeRepository); + if (!StringUtils.isEmpty(datasetDto.getPublisher()) && !(excludeRepository && distributorSet)) { xmlw.writeStartElement("distrbtr"); writeAttribute(xmlw, "source", "archive"); xmlw.writeCharacters(datasetDto.getPublisher()); @@ -308,7 +323,8 @@ private static void writeDocDescElement (XMLStreamWriter xmlw, DatasetDTO datase xmlw.writeEndElement(); // IDNo xmlw.writeEndElement(); // titlStmt xmlw.writeStartElement("distStmt"); - if (datasetDto.getPublisher() != null && !datasetDto.getPublisher().equals("")) { + //The doc is always published by the Dataverse Repository + if (!StringUtils.isEmpty(datasetDto.getPublisher())) { xmlw.writeStartElement("distrbtr"); writeAttribute(xmlw, "source", "archive"); xmlw.writeCharacters(datasetDto.getPublisher()); @@ -1795,4 +1811,8 @@ public static void datasetHtmlDDI(InputStream datafile, OutputStream outputStrea } + public static void injectSettingsService(SettingsServiceBean settingsSvc) { + settingsService=settingsSvc; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java 
b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java new file mode 100644 index 00000000000..149c6791a7e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.export.ddi; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + + /** + * This is a small helper bean + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * settings service into the DdiExportUtil once it's ready. + */ + @Singleton + @Startup + public class DdiExportUtilHelper { + + @EJB SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + DdiExportUtil.injectSettingsService(settingsSvc); + } + } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java index 973c712b5c8..02e7675a776 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java @@ -229,7 +229,7 @@ public void markOaiRecordsAsRemoved(Collection records, Date updateTi public void exportAllFormats(Dataset dataset) { try { - ExportService exportServiceInstance = ExportService.getInstance(settingsService); + ExportService exportServiceInstance = ExportService.getInstance(); logger.log(Level.FINE, "Attempting to run export on dataset {0}", dataset.getGlobalId()); exportServiceInstance.exportAllFormats(dataset); datasetService.updateLastExportTimeStamp(dataset.getId()); @@ -240,7 +240,7 @@ public void exportAllFormats(Dataset dataset) { @TransactionAttribute(REQUIRES_NEW) public void exportAllFormatsInNewTransaction(Dataset dataset) throws ExportException { try
{ - ExportService exportServiceInstance = ExportService.getInstance(settingsService); + ExportService exportServiceInstance = ExportService.getInstance(); exportServiceInstance.exportAllFormats(dataset); datasetService.updateLastExportTimeStamp(dataset.getId()); } catch (Exception e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index c68cd98de4f..5d4c02a87e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -132,11 +132,11 @@ private Context createContext() { } private void addSupportedMetadataFormats(Context context) { - for (String[] provider : ExportService.getInstance(settingsService).getExportersLabels()) { + for (String[] provider : ExportService.getInstance().getExportersLabels()) { String formatName = provider[1]; Exporter exporter; try { - exporter = ExportService.getInstance(settingsService).getExporter(formatName); + exporter = ExportService.getInstance().getExporter(formatName); } catch (ExportException ex) { exporter = null; } @@ -177,7 +177,7 @@ private RepositoryConfiguration createRepositoryConfiguration() { // some of the settings below - such as the max list numbers - // need to be configurable! - String dataverseName = dataverseService.findRootDataverse().getName(); + String dataverseName = dataverseService.getRootDataverseName(); String repositoryName = StringUtils.isEmpty(dataverseName) || "Root".equals(dataverseName) ? 
"Test Dataverse OAI Archive" : dataverseName + " Dataverse OAI Archive"; InternetAddress internetAddress = MailUtil.parseSystemAddress(settingsService.getValueForKey(SettingsServiceBean.Key.SystemEmail)); diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java index 78af2a31dc2..a3150161c52 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java @@ -28,7 +28,9 @@ @NamedQuery(name="PasswordResetData.findByUser", query="SELECT prd FROM PasswordResetData prd WHERE prd.builtinUser = :user"), @NamedQuery(name="PasswordResetData.findByToken", - query="SELECT prd FROM PasswordResetData prd WHERE prd.token = :token") + query="SELECT prd FROM PasswordResetData prd WHERE prd.token = :token"), + @NamedQuery(name="PasswordResetData.deleteByUser", + query="DELETE FROM PasswordResetData prd WHERE prd.builtinUser = :user"), }) @Entity public class PasswordResetData implements Serializable { diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java index 532c0216038..aea910c496e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java @@ -121,13 +121,15 @@ public String sendPasswordResetLink() { actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.BuiltinUser, "passwordResetSent") .setInfo("Email Address: " + emailAddress) ); } else { - /** - * @todo remove "single" when it's no longer necessary. 
See - * https://github.com/IQSS/dataverse/issues/844 and - * https://github.com/IQSS/dataverse/issues/1141 - */ - logger.log(Level.INFO, "Couldn''t find single account using {0}", emailAddress); + logger.log(Level.INFO, "Cannot find account (or it's deactivated) given {0}", emailAddress); } + /** + * We show this "an email will be sent" message no matter what (if + * the account can be found or not, if the account has been + * deactivated or not) to prevent hackers from figuring out if you + * have an account based on your email address. Yes, this is a white + * lie sometimes, in the name of security. + */ FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("passwdVal.passwdReset.resetInitiated"), BundleUtil.getStringFromBundle("passwdReset.successSubmit.tip", Arrays.asList(emailAddress)))); } catch (PasswordResetException ex) { @@ -146,7 +148,6 @@ public String resetPassword() { String builtinAuthProviderId = BuiltinAuthenticationProvider.PROVIDER_ID; AuthenticatedUser au = authSvc.lookupUser(builtinAuthProviderId, user.getUserName()); session.setUser(au); - session.configureSessionTimeout(); return "/dataverse.xhtml?alias=" + dataverseService.findRootDataverse().getAlias() + "faces-redirect=true"; } else { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, response.getMessageSummary(), response.getMessageDetail())); diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java index 507c31f5595..c8db23985d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java @@ -52,13 +52,23 @@ public class PasswordResetServiceBean { * Initiate the password reset process. 
* * @param emailAddress - * @return {@link PasswordResetInitResponse} + * @return {@link PasswordResetInitResponse} with empty PasswordResetData if + * the reset won't continue (no user, deactivated user). * @throws edu.harvard.iq.dataverse.passwordreset.PasswordResetException */ // inspired by Troy Hunt: Everything you ever wanted to know about building a secure password reset feature - http://www.troyhunt.com/2012/05/everything-you-ever-wanted-to-know.html public PasswordResetInitResponse requestReset(String emailAddress) throws PasswordResetException { deleteAllExpiredTokens(); AuthenticatedUser authUser = authService.getAuthenticatedUserByEmail(emailAddress); + // This null check is for the NPE reported in https://github.com/IQSS/dataverse/issues/5462 + if (authUser == null) { + logger.info("Cannot find a user based on " + emailAddress + ". Cannot reset password."); + return new PasswordResetInitResponse(false); + } + if (authUser.isDeactivated()) { + logger.info("Cannot reset password for " + emailAddress + " because account is deactivated."); + return new PasswordResetInitResponse(false); + } BuiltinUser user = dataverseUserService.findByUserName(authUser.getUserIdentifier()); if (user != null) { @@ -186,6 +196,12 @@ private long deleteAllExpiredTokens() { return numDeleted; } + public void deleteResetDataByDataverseUser(BuiltinUser user) { + TypedQuery typedQuery = em.createNamedQuery("PasswordResetData.deleteByUser", PasswordResetData.class); + typedQuery.setParameter("user", user); + int numRowsAffected = typedQuery.executeUpdate(); + } + public PasswordChangeAttemptResponse attemptPasswordReset(BuiltinUser user, String newPassword, String token) { final String messageSummarySuccess = "Password Reset Successfully"; diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index e8bc9fc3da7..b0658f10b34 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -35,7 +35,6 @@ public String init() { String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true"; PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser(); session.setUser(privateUrlUser); - session.configureSessionTimeout(); logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo); return draftDatasetPageToBeRedirectedTo; } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 5b2d63c43eb..f61f879eee7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -23,6 +23,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccessRequest; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.datavariable.VariableMetadata; import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; @@ -45,7 +46,9 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.Future; +import java.util.function.Function; import java.util.logging.Logger; +import java.util.stream.Collectors; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.ejb.AsyncResult; @@ -698,7 +701,7 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset) throws Sol return addOrUpdateDataset(indexableDataset, null); } - private String addOrUpdateDataset(IndexableDataset indexableDataset, Set 
datafilesInDraftVersion) throws SolrServerException, IOException { + private String addOrUpdateDataset(IndexableDataset indexableDataset, Set datafilesInDraftVersion) throws SolrServerException, IOException { IndexableDataset.DatasetState state = indexableDataset.getDatasetState(); Dataset dataset = indexableDataset.getDatasetVersion().getDataset(); logger.fine("adding or updating Solr document for dataset id " + dataset.getId()); @@ -934,8 +937,7 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d */ if ((fileMetadata.getDataFile().isRestricted() == releasedFileMetadata.getDataFile().isRestricted())) { if (fileMetadata.contentEquals(releasedFileMetadata) - /* SEK 3/12/2020 remove variable metadata indexing*/ - // && variableMetadataUtil.compareVariableMetadata(releasedFileMetadata,fileMetadata) + && variableMetadataUtil.compareVariableMetadata(releasedFileMetadata,fileMetadata) ) { indexThisMetadata = false; logger.fine("This file metadata hasn't changed since the released version; skipping indexing."); @@ -1155,6 +1157,14 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d // names and labels: if (fileMetadata.getDataFile().isTabularData()) { List variables = fileMetadata.getDataFile().getDataTable().getDataVariables(); + + Map variableMap = null; + List variablesByMetadata = variableService.findVarMetByFileMetaId(fileMetadata.getId()); + + variableMap = + variablesByMetadata.stream().collect(Collectors.toMap(VariableMetadata::getId, Function.identity())); + + for (DataVariable var : variables) { // Hard-coded search fields, for now: // TODO: eventually: review, decide how datavariables should @@ -1169,21 +1179,14 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d if (var.getName() != null && !var.getName().equals("")) { datafileSolrInputDocument.addField(SearchFields.VARIABLE_NAME, var.getName()); } - -/* SEK 3/12/2020 remove variable metadata indexing - List vmList = 
variableService.findByDataVarIdAndFileMetaId(var.getId(), fileMetadata.getId()); - VariableMetadata vm = null; - if (vmList != null && vmList.size() >0) { - vm = vmList.get(0); - } - - if (vmList.size() == 0 ) { + + VariableMetadata vm = variableMap.get(var.getId()); + if (vm == null) { //Variable Label if (var.getLabel() != null && !var.getLabel().equals("")) { datafileSolrInputDocument.addField(SearchFields.VARIABLE_LABEL, var.getLabel()); } - - } else if (vm != null) { + } else { if (vm.getLabel() != null && !vm.getLabel().equals("") ) { datafileSolrInputDocument.addField(SearchFields.VARIABLE_LABEL, vm.getLabel()); } @@ -1204,7 +1207,6 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d } } -*/ } // TABULAR DATA TAGS: diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 1f53281b2fa..02637bfa8df 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -422,7 +422,18 @@ Whether Harvesting (OAI) service is enabled /** * Sort Date Facets Chronologically instead or presenting them in order of # of hits as other facets are. 
Default is true */ - ChronologicalDateFacets + ChronologicalDateFacets, + + /** + * Used where BrandingUtil.getInstallationBrandName is called, overrides the default use of the root Dataverse collection name + */ + InstallationName, + /** + * In metadata exports that set a 'distributor' this flag determines whether the + * Installation Brand Name is always included (default/false) or is not included + * when the Distributor field (citation metadatablock) is set (true) + */ + ExportInstallationAsDistributorOnlyWhenNotSet ; @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java b/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java index 597fdb314d2..838cd415819 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java @@ -23,7 +23,7 @@ public class DbSettingConfigSource implements ConfigSource { private static final ConcurrentHashMap properties = new ConcurrentHashMap<>(); private static Instant lastUpdate; private static SettingsServiceBean settingsSvc; - static final String PREFIX = "dataverse.settings.fromdb"; + public static final String PREFIX = "dataverse.settings.fromdb"; /** * Let the SettingsServiceBean be injected by DbSettingConfigHelper with PostConstruct */ public static void injectSettingsService(SettingsServiceBean injected) { */ public static void updateProperties() { // skip if the service has not been injected yet - if (settingsSvc == null) + if (settingsSvc == null) { return; - + } + + properties.clear(); Set dbSettings = settingsSvc.listAll(); - dbSettings.forEach(s -> properties.put(PREFIX+"."+s.getName()+ (s.getLang() == null ? "" : "."+s.getLang()), s.getContent())); + dbSettings.forEach(s -> properties.put(PREFIX+"."+s.getName().substring(1) + (s.getLang() == null ?
"" : "."+s.getLang()), s.getContent())); lastUpdate = Instant.now(); } @Override public Map getProperties() { // if the cache is at least XX number of seconds old, update before serving data. - if (lastUpdate == null || Instant.now().minus(Duration.ofSeconds(60)).isBefore(lastUpdate)) { + if (lastUpdate == null || Instant.now().minus(Duration.ofSeconds(60)).isAfter(lastUpdate)) { updateProperties(); } return properties; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java new file mode 100644 index 00000000000..cd5db359344 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java @@ -0,0 +1,95 @@ +/* + Copyright (C) 2005-2012, by the President and Fellows of Harvard College. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Dataverse Network - A web application to share, preserve and analyze research data. + Developed at the Institute for Quantitative Social Science, Harvard University. + Version 3.0. 
+*/ + +package edu.harvard.iq.dataverse.util; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.FileMetadata; + +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Logger; + +public class FileMetadataUtil implements java.io.Serializable { + private static final Logger logger = Logger.getLogger(FileMetadataUtil.class.getCanonicalName()); + + //Delete the filemetadata from the list if and only if it is in the list + public static void removeFileMetadataFromList(Collection collection, FileMetadata fmToDelete) { + // With an id, the standard remove will work + if (fmToDelete.getId() != null) { + collection.remove(fmToDelete); + } else { + Iterator fmit = collection.iterator(); + while (fmit.hasNext()) { + FileMetadata fmd = fmit.next(); + // If not, we can remove based on a match based on the id of the related + // datafile + if (fmToDelete.getDataFile().getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) { + // and a match on the datasetversion + if(fmToDelete.getDatasetVersion() == null) { + //not yet associated with a version (i.e. deleting from the upload screen), so the match on datafile is good enough + fmit.remove(); + } else if (fmToDelete.getDatasetVersion().getId() == null) { + // If the fmd to delete is for a datasetversion with no id, we assume match to + // any other fmd with a datasetversion with no id (since there should be only + // one) + // Otherwise, we don't delete anything (this fmd hasn't been persisted and isn't + // in the list. 
+ if (fmd.getDatasetVersion().getId() == null) { + fmit.remove(); + break; + } + } else if (fmToDelete.getDatasetVersion().getId().equals(fmd.getDatasetVersion().getId())) { + fmit.remove(); + break; + } + } + } + } + } + + //Delete datafile from the list even if it's id is null (hasn't yet been persisted) + public static void removeDataFileFromList(List dfList, DataFile dfToDelete) { + // With an id, the standard remove will work + if (dfToDelete.getId() != null) { + dfList.remove(dfToDelete); + } else { + Iterator dfit = dfList.iterator(); + while (dfit.hasNext()) { + DataFile df = dfit.next(); + if (dfToDelete.getStorageIdentifier().equals(df.getStorageIdentifier())) { + dfit.remove(); + break; + } + } + } + } + + public static FileMetadata getFmdForFileInEditVersion(FileMetadata fmd, DatasetVersion editVersion) { + for(FileMetadata editFmd: editVersion.getFileMetadatas()) { + if(editFmd.getDataFile().getId().equals(fmd.getDataFile().getId())) { + return editFmd; + } + } + return null; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index 37667d16b55..7ca702cabbe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -29,8 +29,8 @@ public static InternetAddress parseSystemAddress(String systemEmail) { return null; } - public static String getSubjectTextBasedOnNotification(UserNotification userNotification, String rootDataverseName, Object objectOfNotification) { - List rootDvNameAsList = Arrays.asList(BrandingUtil.getInstallationBrandName(rootDataverseName)); + public static String getSubjectTextBasedOnNotification(UserNotification userNotification, Object objectOfNotification) { + List rootDvNameAsList = Arrays.asList(BrandingUtil.getInstallationBrandName()); switch (userNotification.getType()) { case ASSIGNROLE: return BundleUtil.getStringFromBundle("notification.email.assign.role.subject", 
rootDvNameAsList); @@ -54,6 +54,10 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti return BundleUtil.getStringFromBundle("notification.email.publishFailure.dataset.subject", rootDvNameAsList); case RETURNEDDS: return BundleUtil.getStringFromBundle("notification.email.returned.dataset.subject", rootDvNameAsList); + case WORKFLOW_SUCCESS: + return BundleUtil.getStringFromBundle("notification.email.workflow.success.subject", rootDvNameAsList); + case WORKFLOW_FAILURE: + return BundleUtil.getStringFromBundle("notification.email.workflow.failure.subject", rootDvNameAsList); case CREATEACC: return BundleUtil.getStringFromBundle("notification.email.create.account.subject", rootDvNameAsList); case CHECKSUMFAIL: diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index a1ed4877f39..f5d129405b7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -650,7 +650,7 @@ public String getDefaultAuthProvider() { } public String getNameOfInstallation() { - return dataverseService.findRootDataverse().getName(); + return dataverseService.getRootDataverseName(); } public AbstractOAuth2AuthenticationProvider.DevOAuthAccountType getDevOAuthAccountType() { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java index 225144a0bd9..7c3db485e47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java @@ -93,7 +93,6 @@ public class BagGenerator { private int timeout = 60; private RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000) .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000).build(); - private static HttpClientContext localContext = 
HttpClientContext.create(); protected CloseableHttpClient client; private PoolingHttpClientConnectionManager cm = null; @@ -986,7 +985,8 @@ public InputStream get() { HttpGet getMap = createNewGetRequest(new URI(uri), null); logger.finest("Retrieving " + tries + ": " + uri); CloseableHttpResponse response; - response = client.execute(getMap, localContext); + //Note - if we ever need to pass an HttpClientContext, we need a new one per thread. + response = client.execute(getMap); if (response.getStatusLine().getStatusCode() == 200) { logger.finest("Retrieved: " + uri); return response.getEntity().getContent(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 5520de3954e..38303eb1f41 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -9,8 +9,9 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; +import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.export.OAI_OREExporter; -import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonLDNamespace; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; @@ -20,7 +21,6 @@ import java.time.LocalDate; import java.util.List; import java.util.Map; -import java.util.ResourceBundle; import java.util.TreeMap; import java.util.Map.Entry; @@ -33,14 +33,21 @@ public class OREMap { + static SettingsServiceBean settingsService; + public static final String NAME = "OREMap"; private Map localContext = new TreeMap(); private DatasetVersion version; - private boolean excludeEmail = false; + private Boolean excludeEmail = null; - public OREMap(DatasetVersion version, boolean 
excludeEmail) { + public OREMap(DatasetVersion version) { this.version = version; - this.excludeEmail = excludeEmail; + } + + //Used when the ExcludeEmailFromExport needs to be overriden, i.e. for archiving + public OREMap(DatasetVersion dv, boolean exclude) { + this.version = dv; + this.excludeEmail = exclude; } public void writeOREMap(OutputStream outputStream) throws Exception { @@ -50,6 +57,11 @@ public void writeOREMap(OutputStream outputStream) throws Exception { public JsonObject getOREMap() throws Exception { + //Set this flag if it wasn't provided + if(excludeEmail==null) { + excludeEmail = settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false); + } + // Add namespaces we'll definitely use to Context // Additional namespaces are added as needed below localContext.putIfAbsent(JsonLDNamespace.ore.getPrefix(), JsonLDNamespace.ore.getUrl()); @@ -166,7 +178,7 @@ public JsonObject getOREMap() throws Exception { } aggBuilder.add(JsonLDTerm.schemaOrg("includedInDataCatalog").getLabel(), - dataset.getDataverseContext().getDisplayName()); + BrandingUtil.getRootDataverseCollectionName()); // The aggregation aggregates aggregatedresources (Datafiles) which each have // their own entry and metadata @@ -247,7 +259,7 @@ public JsonObject getOREMap() throws Exception { JsonObject oremap = Json.createObjectBuilder() .add(JsonLDTerm.dcTerms("modified").getLabel(), LocalDate.now().toString()) .add(JsonLDTerm.dcTerms("creator").getLabel(), - BundleUtil.getStringFromBundle("institution.name")) + BrandingUtil.getInstallationBrandName()) .add("@type", JsonLDTerm.ore("ResourceMap").getLabel()) // Define an id for the map itself (separate from the @id of the dataset being // described @@ -379,4 +391,7 @@ private JsonLDTerm getTermFor(String type, String subType) { return null; } + public static void injectSettingsService(SettingsServiceBean settingsSvc) { + settingsService = settingsSvc; + } } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java new file mode 100644 index 00000000000..bced52a7752 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.util.bagit; + +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +/** + * This is a small helper bean + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * settings service into the OREMap once it's ready. + */ +@Singleton +@Startup +public class OREMapHelper { + @EJB + SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + OREMap.injectSettingsService(settingsSvc); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 7a5334114e7..7e69d9325c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -36,6 +36,7 @@ import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -58,6 +59,9 @@ import java.util.stream.Collector; import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; + +import javax.ejb.EJB; +import javax.ejb.Singleton; import javax.json.JsonArray; import javax.json.JsonObject; @@ -66,14 +70,15 @@ * * @author 
michael */ +@Singleton public class JsonPrinter { private static final Logger logger = Logger.getLogger(JsonPrinter.class.getCanonicalName()); - static SettingsServiceBean settingsService = null; + @EJB + static SettingsServiceBean settingsService; - // Passed to DatasetFieldWalker so it can check the :ExcludeEmailFromExport setting - public static void setSettingsService(SettingsServiceBean ssb) { + public static void injectSettingsService(SettingsServiceBean ssb) { settingsService = ssb; } @@ -109,6 +114,8 @@ public static JsonObjectBuilder json(AuthenticatedUser authenticatedUser) { .add("lastName", authenticatedUser.getLastName()) .add("email", authenticatedUser.getEmail()) .add("superuser", authenticatedUser.isSuperuser()) + .add("deactivated", authenticatedUser.isDeactivated()) + .add("deactivatedTime", authenticatedUser.getDeactivatedTime()) .add("affiliation", authenticatedUser.getAffiliation()) .add("position", authenticatedUser.getPosition()) .add("persistentUserId", authenticatedUser.getAuthenticatedUserLookup().getPersistentUserId()) @@ -321,7 +328,7 @@ public static JsonObjectBuilder json(Dataset ds) { .add("persistentUrl", ds.getPersistentURL()) .add("protocol", ds.getProtocol()) .add("authority", ds.getAuthority()) - .add("publisher", getRootDataverseNameforCitation(ds)) + .add("publisher", BrandingUtil.getInstallationBrandName()) .add("publicationDate", ds.getPublicationDateFormattedYYYYMMDD()) .add("storageIdentifier", ds.getStorageIdentifier()); } @@ -391,19 +398,6 @@ public static JsonObjectBuilder jsonDataFileList(List dataFiles){ return bld; } - private static String getRootDataverseNameforCitation(Dataset dataset) { - Dataverse root = dataset.getOwner(); - while (root.getOwner() != null) { - root = root.getOwner(); - } - String rootDataverseName = root.getName(); - if (!StringUtil.isEmpty(rootDataverseName)) { - return rootDataverseName; - } else { - return ""; - } - } - private static String getLicenseInfo(DatasetVersion dsv) { if 
(dsv.getTermsOfUseAndAccess().getLicense() != null && dsv.getTermsOfUseAndAccess().getLicense().equals(TermsOfUseAndAccess.License.CC0)) { return "CC0 Waiver"; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java new file mode 100644 index 00000000000..62f3569bb8d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.util.json; + +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +/** + * This is a small helper bean + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * settings service into the JsonPrinter once it's ready. + */ +@Singleton +@Startup +public class JsonPrinterHelper { + @EJB + SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + JsonPrinter.injectSettingsService(settingsSvc); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java index b6aa35ede7b..11b2ede9d76 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java @@ -5,6 +5,9 @@ import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; +import edu.harvard.iq.dataverse.UserNotification; +import edu.harvard.iq.dataverse.UserNotification.Type; +import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import
edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -16,11 +19,14 @@ import edu.harvard.iq.dataverse.workflow.internalspi.InternalWorkflowStepSP; import edu.harvard.iq.dataverse.workflow.step.Failure; import edu.harvard.iq.dataverse.workflow.step.Pending; +import edu.harvard.iq.dataverse.workflow.step.Success; import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; +import edu.harvard.iq.dataverse.workflows.WorkflowComment; -import java.util.Date; +import java.sql.Timestamp; +import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -63,6 +69,9 @@ public class WorkflowServiceBean { @EJB SystemConfig systemConfig; + @EJB + UserNotificationServiceBean userNotificationService; + @EJB EjbDataverseEngine engine; @@ -99,18 +108,43 @@ public WorkflowServiceBean() { */ //ToDo - should this be @Async? or just the forward() method? @Asynchronous - public void start(Workflow wf, WorkflowContext ctxt) throws CommandException { - - // Since we are calling this asynchronously anyway - sleep here - // for a few seconds, just in case, to make sure the database update of - // the dataset initiated by the PublishDatasetCommand has finished, - // to avoid any concurrency/optimistic lock issues. - try { - Thread.sleep(1000); - } catch (Exception ex) { - logger.warning("Failed to sleep for a second."); + public void start(Workflow wf, WorkflowContext ctxt, boolean findDataset) throws CommandException { + /* + * Workflows appear to start running prior to the caller's transaction + * completing which can result in exceptions in setting the lock below. To avoid + * this, there are two work-arounds - wait briefly for that transaction to end, + * or refresh the dataset from the db - so the lock is written based on the + * current db state. 
The latter works for pre-publication workflows (since the + * only changes to the Dataset in the Publish command are edits to the version + * number in the draft version (which aren't valid for the draft anyway)), while + * the former is required for post-publication workflows which may need to see + * the final version number, update times and other changes made in the Finalize + * Publication command. Not waiting saves significant time when many datasets + * are processed, so is preferable when it makes sense. + * + * This code should be reconsidered if/when the launching of pre/post + * publication workflows is moved to command onSuccess methods (and when + * onSuccess methods are guaranteed to be after the transaction completes (see + * #7568) or other changes are made that can guarantee the dataset in the + * WorkflowContext is up-to-date/usable in further transactions in the workflow. + * (e.g. if this method is not asynchronous) + * + */ + + if (!findDataset) { + /* + * Sleep here briefly to make sure the database update from the caller's + * transaction completes which avoids any concurrency/optimistic lock issues. + * Note: 1 second appears long enough, but shorter delays may work + */ + try { + Thread.sleep(1000); + } catch (Exception ex) { + logger.warning("Failed to sleep for a second."); + } } - ctxt = refresh(ctxt, retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(ctxt.getRequest().getAuthenticatedUser())); + //Refresh will only em.find the dataset if findDataset is true.
(otherwise the dataset is em.merged) + ctxt = refresh(ctxt, retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(ctxt.getRequest().getAuthenticatedUser()), findDataset); lockDataset(ctxt, new DatasetLock(DatasetLock.Reason.Workflow, ctxt.getRequest().getAuthenticatedUser())); forward(wf, ctxt); } @@ -184,10 +218,22 @@ private void doResume(PendingWorkflowInvocation pending, String body) { final WorkflowContext ctxt = refresh(newCtxt,retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(newCtxt.getRequest().getAuthenticatedUser())); WorkflowStepResult res = pendingStep.resume(ctxt, pending.getLocalData(), body); if (res instanceof Failure) { + logger.warning(((Failure) res).getReason()); + userNotificationService.sendNotification(ctxt.getRequest().getAuthenticatedUser(), Timestamp.from(Instant.now()), UserNotification.Type.WORKFLOW_FAILURE, ctxt.getDataset().getLatestVersion().getId(), ((Failure) res).getMessage()); + //UserNotification isn't meant to be a long-term record and doesn't store the comment, so we'll also keep it as a workflow comment + WorkflowComment wfc = new WorkflowComment(ctxt.getDataset().getLatestVersion(), WorkflowComment.Type.WORKFLOW_FAILURE, ((Failure) res).getMessage(), ctxt.getRequest().getAuthenticatedUser()); + datasets.addWorkflowComment(wfc); rollback(wf, ctxt, (Failure) res, pending.getPendingStepIdx() - 1); } else if (res instanceof Pending) { pauseAndAwait(wf, ctxt, (Pending) res, pending.getPendingStepIdx()); } else { + if (res instanceof Success) { + logger.info(((Success) res).getReason()); + userNotificationService.sendNotification(ctxt.getRequest().getAuthenticatedUser(), Timestamp.from(Instant.now()), UserNotification.Type.WORKFLOW_SUCCESS, ctxt.getDataset().getLatestVersion().getId(), ((Success) res).getMessage()); + //UserNotification isn't meant to be a long-term record and doesn't store the comment, so we'll also keep it as a workflow comment + WorkflowComment wfc = new 
WorkflowComment(ctxt.getDataset().getLatestVersion(), WorkflowComment.Type.WORKFLOW_SUCCESS, ((Success) res).getMessage(), ctxt.getRequest().getAuthenticatedUser()); + datasets.addWorkflowComment(wfc); + } executeSteps(wf, ctxt, pending.getPendingStepIdx() + 1); } } @@ -466,18 +512,39 @@ private WorkflowStep createStep(WorkflowStepData wsd) { private WorkflowContext refresh( WorkflowContext ctxt ) { return refresh(ctxt, ctxt.getSettings(), ctxt.getApiToken()); } - - private WorkflowContext refresh( WorkflowContext ctxt, Map settings, ApiToken apiToken ) { - /* An earlier version of this class used em.find() to 'refresh' the Dataset in the context. - * For a PostPublication workflow, this had the consequence of hiding/removing changes to the Dataset - * made in the FinalizeDatasetPublicationCommand (i.e. the fact that the draft version is now released and - * has a version number). It is not clear to me if the em.merge below is needed or if it handles the case of - * resumed workflows. (The overall method is needed to allow the context to be updated in the start() method with the - * settings and APItoken retrieved by the WorkflowServiceBean) - JM - 9/18. - */ - WorkflowContext newCtxt =new WorkflowContext( ctxt.getRequest(), - em.merge(ctxt.getDataset()), ctxt.getNextVersionNumber(), - ctxt.getNextMinorVersionNumber(), ctxt.getType(), settings, apiToken, ctxt.getDatasetExternallyReleased(), ctxt.getInvocationId(), ctxt.getLockId()); + + private WorkflowContext refresh(WorkflowContext ctxt, Map settings, ApiToken apiToken) { + return refresh(ctxt, settings, apiToken, false); + } + + private WorkflowContext refresh(WorkflowContext ctxt, Map settings, ApiToken apiToken, + boolean findDataset) { + /* + * An earlier version of this class used em.find() to 'refresh' the Dataset in + * the context. For a PostPublication workflow, this had the consequence of + * hiding/removing changes to the Dataset made in the + * FinalizeDatasetPublicationCommand (i.e. 
the fact that the draft version is + * now released and has a version number). It is not clear to me if the em.merge + * below is needed or if it handles the case of resumed workflows. (The overall + * method is needed to allow the context to be updated in the start() method + * with the settings and APItoken retrieved by the WorkflowServiceBean) - JM - + * 9/18. + */ + /* + * Introduced the findDataset boolean to optionally revert above change. + * Refreshing the Dataset just before trying to set the workflow lock greatly + * reduces the number of OptimisticLockExceptions. JvM 2/21 + */ + WorkflowContext newCtxt; + if (findDataset) { + newCtxt = new WorkflowContext(ctxt.getRequest(), datasets.find(ctxt.getDataset().getId()), + ctxt.getNextVersionNumber(), ctxt.getNextMinorVersionNumber(), ctxt.getType(), settings, apiToken, + ctxt.getDatasetExternallyReleased(), ctxt.getInvocationId(), ctxt.getLockId()); + } else { + newCtxt = new WorkflowContext(ctxt.getRequest(), em.merge(ctxt.getDataset()), ctxt.getNextVersionNumber(), + ctxt.getNextMinorVersionNumber(), ctxt.getType(), settings, apiToken, + ctxt.getDatasetExternallyReleased(), ctxt.getInvocationId(), ctxt.getLockId()); + } return newCtxt; } diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java index 244abdb017c..bbe200aaeb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.workflow.step.Success; import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; +import edu.harvard.iq.dataverse.workflows.WorkflowUtil; + import static edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult.OK; import java.io.StringReader; @@ 
-77,29 +79,7 @@ public WorkflowStepResult run(WorkflowContext context) { @Override public WorkflowStepResult resume(WorkflowContext context, Map internalData, String externalData) { - try (StringReader reader = new StringReader(externalData)) { - JsonObject response = Json.createReader(reader).readObject(); - String status = response.getString("Status"); - String reason = null; - String message = null; - if (response.containsKey("Reason")) { - reason = response.getString("Reason"); - } - if (response.containsKey("Message")) { - message = response.getString("Message"); - } - switch (status) { - case "Success": - logger.log(Level.FINE, "AuthExt Worfklow Step Succeeded: " + reason); - return new Success(reason, message); - case "Failure": - logger.log(Level.WARNING, "Remote system indicates workflow failed: {0}", reason); - return new Failure(reason, message); - } - } catch (Exception e) { - logger.log(Level.WARNING, "Remote system returned a bad reposonse: {0}", externalData); - } - return new Failure("Workflow failure: Response from remote server could not be parsed:" + externalData, null); + return WorkflowUtil.parseResponse(externalData); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java index df5f2de9058..ef11d306cd3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java @@ -17,6 +17,8 @@ public WorkflowStep getStep(String stepType, Map stepParameters) return new LoggingWorkflowStep(stepParameters); case "pause": return new PauseStep(stepParameters); + case "pause/message": + return new PauseWithMessageStep(stepParameters); case "http/sr": return new HttpSendReceiveClientStep(stepParameters); case "http/authExt": diff --git 
a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java new file mode 100644 index 00000000000..f7332611697 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java @@ -0,0 +1,48 @@ +package edu.harvard.iq.dataverse.workflow.internalspi; + +import edu.harvard.iq.dataverse.workflow.WorkflowContext; +import edu.harvard.iq.dataverse.workflow.step.Failure; +import edu.harvard.iq.dataverse.workflow.step.Pending; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; +import edu.harvard.iq.dataverse.workflows.WorkflowUtil; + +import java.util.HashMap; +import java.util.Map; +import java.util.logging.Logger; + + +/** + * A sample step that pauses the workflow. + * + * @author michael + */ +public class PauseWithMessageStep implements WorkflowStep { + + /** Constant used by testing to simulate a failed step. 
*/ + public static final String FAILURE_RESPONSE="fail"; + + private final Map params = new HashMap<>(); + + public PauseWithMessageStep( Map paramSet ) { + params.putAll(paramSet); + } + + @Override + public WorkflowStepResult run(WorkflowContext context) { + final Pending result = new Pending(); + result.getData().putAll(params); + return result; + } + + @Override + public WorkflowStepResult resume(WorkflowContext context, Map internalData, String externalData) { + return WorkflowUtil.parseResponse(externalData); + } + + @Override + public void rollback(WorkflowContext context, Failure reason) { + // nothing to roll back + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java index 0487313fd0d..950eed52245 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java @@ -11,7 +11,7 @@ public class Failure implements WorkflowStepResult { private final String message; public Failure( String reason ) { - this(reason, reason); + this(reason, null); } /** diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java index 621afdc61f6..0a00c4f279e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java @@ -4,34 +4,37 @@ * Result returned when step execution succeeds. */ public class Success implements WorkflowStepResult { - + private final String reason; private final String message; - - public Success( String reason ) { + + public Success(String reason) { this(reason, null); } - + /** * Constructs a new success message. - * @param reason Technical reason (for logs etc.). - * @param message Human readable reason. + * + * @param reason Technical comment (for logs etc.). 
+ * @param message Human readable comment. */ public Success(String reason, String message) { this.reason = reason; this.message = message; } - + /** - * Holds the technical reason for the success, useful for debugging the problem. - * @return the technical reason for the problem. + * Holds a technical comment about the success. + * + * @return the technical comment about the processing. */ public String getReason() { return reason; } /** - * Holds the user-friendly message explaining the failure. + * Holds the user-friendly message describing what was successfully done. + * * @return user-friendly message for the success. */ public String getMessage() { @@ -42,6 +45,5 @@ public String getMessage() { public String toString() { return "WorkflowStepResult.Success{" + "reason=" + reason + ", message=" + message + '}'; } - - -} + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java index 26ddb9b1573..d03afcaa91a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java @@ -22,7 +22,9 @@ public class WorkflowComment implements Serializable { More may be added in future releases, */ public enum Type { - RETURN_TO_AUTHOR //, SUBMIT_FOR_REVIEW not available in this release but may be added in the future + RETURN_TO_AUTHOR, //, SUBMIT_FOR_REVIEW not available in this release but may be added in the future + WORKFLOW_SUCCESS, + WORKFLOW_FAILURE }; @Id @@ -61,20 +63,22 @@ public enum Type { @Column(nullable = false) private Timestamp created; + private boolean toBeShown; + // TODO: Consider support editing in the GUI some day, like GitHub issue comments (show "Edited" in the UI). We won't send a second email, however. You only get one shot to prevent spam. 
// @Transient // private Timestamp modified; // TODO: How should we best associate these entries to notifications, which can go to multiple authors and curators? + //FWIW: Workflow success/failure messages get shown to the user running the workflow if/when on the relevant dataset version page // @Transient // private List notifications; public WorkflowComment(DatasetVersion version, WorkflowComment.Type type, String message, AuthenticatedUser authenticatedUser) { this.type = type; - if (this.type.equals(WorkflowComment.Type.RETURN_TO_AUTHOR)) { - this.datasetVersion = version; - } + this.datasetVersion = version; this.message = message; this.authenticatedUser = authenticatedUser; this.created = new Timestamp(new Date().getTime()); + this.setToBeShown(true); } /** @@ -123,4 +127,12 @@ public void setDatasetVersion(DatasetVersion dv) { datasetVersion=dv; } + public boolean isToBeShown() { + return toBeShown; + } + + public void setToBeShown(boolean toBeShown) { + this.toBeShown = toBeShown; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java index e28f8525c9a..aeb8bcf6c87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java @@ -2,14 +2,29 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.api.Util; + +import java.io.StringReader; import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + import javax.json.Json; import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; + import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import edu.harvard.iq.dataverse.workflow.internalspi.PauseWithMessageStep; +import edu.harvard.iq.dataverse.workflow.step.Failure; +import edu.harvard.iq.dataverse.workflow.step.Success; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; + import 
static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; +import static org.apache.commons.lang3.StringEscapeUtils.escapeHtml4; public class WorkflowUtil { + private static final Logger logger = Logger.getLogger(WorkflowUtil.class.getName()); + public static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVersion) { JsonArrayBuilder workflowCommentsAsJson = Json.createArrayBuilder(); List workflowComments = datasetVersion.getWorkflowComments(); @@ -26,4 +41,49 @@ public static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVers } return workflowCommentsAsJson; } + + public static WorkflowStepResult parseResponse(String externalData) { + try (StringReader reader = new StringReader(externalData)) { + JsonObject response = Json.createReader(reader).readObject(); + String status = null; + //Lower case is documented, upper case is deprecated + if(response.containsKey("status")) { + status= response.getString("status"); + }else if(response.containsKey("Status")) { + status= response.getString("Status"); + } + String reason = null; + String message = null; + if (response.containsKey("reason")) { + reason = response.getString("reason"); + }else if (response.containsKey("Reason")) { + reason = response.getString("Reason"); + } + if (response.containsKey("message")) { + message = response.getString("message"); + }else if (response.containsKey("Message")) { + message = response.getString("Message"); + } + switch (status) { + case "success": + case "Success": + logger.log(Level.FINE, "AuthExt Worfklow Step Succeeded: " + reason); + return new Success(reason, message); + case "failure": + case "Failure": + logger.log(Level.WARNING, "Remote system indicates workflow failed: {0}", reason); + return new Failure(reason, message); + default: + logger.log(Level.WARNING, "Remote system returned a response with no \"status\" key or bad status value: {0}", escapeHtml4(externalData)); + return new Failure("Workflow failure: Response 
from remote server doesn't have valid \"status\":" + escapeHtml4(externalData), null); + } + } catch (Exception e) { + logger.log(Level.WARNING, "Remote system returned a bad response: {0}", externalData); + } + //In general, the remote workflow service creating the response is trusted, but, if it's causing an error, escape the result to avoid issues in the UI + return new Failure("Workflow failure: Response from remote server could not be parsed:" + escapeHtml4(externalData), null); + + } + + } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index abcd502d638..ab5352c8efd 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -168,7 +168,6 @@ contact.context.support.intro={0},\n\nThe following message was sent from {1}.\n contact.context.support.ending=\n\n---\n\nMessage sent from Support contact form. # dataverseuser.xhtml -institution.name=A Dataverse Instance account.info=Account Information account.edit=Edit Account account.apiToken=API Token @@ -202,6 +201,9 @@ notification.wasSubmittedForReview={0} was submitted for review to be published notification.wasReturnedByReviewer={0} was returned by the curator of {1}. notification.wasPublished={0} was published in {1}. notification.publishFailedPidReg={0} in {1} could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen. +notification.workflowFailed=An external workflow run on {0} in {1} has failed. Check your email and/or view the Dataset page which may have additional details. Contact support if this continues to happen. +notification.workflowSucceeded=An external workflow run on {0} in {1} has succeeded. Check your email and/or view the Dataset page which may have additional details. + notification.ingestCompleted=Dataset {1} ingest has successfully finished. 
notification.ingestCompletedWithErrors=Dataset {1} ingest has finished with errors. notification.generic.objectDeleted=The dataverse, dataset, or file for this notification has been deleted. @@ -249,7 +251,7 @@ user.acccountterms.tip=The terms and conditions for using the application and se user.acccountterms.required=Please check the box to indicate your acceptance of the General Terms of Use. user.acccountterms.iagree=I have read and accept the Dataverse General Terms of Use as outlined above. user.createBtn=Create Account -user.updatePassword.welcome=Welcome to Dataverse {0}, {1} +user.updatePassword.welcome=Welcome to Dataverse {0} user.updatePassword.warning=With the release of our new Dataverse 4.0 upgrade, the password requirements and General Terms of Use have updated. As this is the first time you are using Dataverse since the update, you need to create a new password and agree to the new General Terms of Use. user.updatePassword.password={0} user.password=Password @@ -368,7 +370,7 @@ shib.dataverseUsername=Dataverse Username shib.currentDataversePassword=Current Dataverse Password shib.accountInformation=Account Information shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account. -shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. +shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account. # oauth2/firstLogin.xhtml oauth2.btn.convertAccount=Convert Existing Account @@ -403,14 +405,18 @@ oauth2.newAccount.emailInvalid=Invalid email address. oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. 
Learn more about converting your account. oauth2.convertAccount.username=Existing username oauth2.convertAccount.password=Password -oauth2.convertAccount.authenticationFailed=Authentication failed - bad username or password. +oauth2.convertAccount.authenticationFailed=Your account can only be converted if you provide the correct username and password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account. oauth2.convertAccount.buttonTitle=Convert Account oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account. +oauth2.convertAccount.failedDeactivated=Your existing account cannot be converted because it has been deactivated. # oauth2/callback.xhtml oauth2.callback.page.title=OAuth Callback oauth2.callback.message=Authentication Error - Dataverse could not authenticate your login with the provider that you selected. Please make sure you authorize your account to connect with Dataverse. For more details about the information being requested, see the User Guide. +# deactivated user accounts +deactivated.error=Sorry, your account has been deactivated. + # tab on dataverseuser.xhtml apitoken.title=API Token apitoken.message=Your API Token is valid for a year. Check out our {0}API Guide{1} for more information on using your API Token with the Dataverse APIs. @@ -608,6 +614,7 @@ dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication dashboard.list_users.tbl_header.createdTime=Created Time dashboard.list_users.tbl_header.lastLoginTime=Last Login Time dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time +dashboard.list_users.tbl_header.deactivated=deactivated dashboard.list_users.tbl_header.roles.removeAll=Remove All dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? 
@@ -656,6 +663,11 @@ notification.email.submit.dataset.subject={0}: Your dataset has been submitted f notification.email.publish.dataset.subject={0}: Your dataset has been published notification.email.publishFailure.dataset.subject={0}: Failed to publish your dataset notification.email.returned.dataset.subject={0}: Your dataset has been returned +notification.email.workflow.success.subject={0}: Your dataset has been processed +notification.email.workflow.success=A workflow running on {0} (view at {1}) succeeded: {2} +notification.email.workflow.failure.subject={0}: Failed to process your dataset +notification.email.workflow.failure=A workflow running on {0} (view at {1}) failed: {2} +notification.email.workflow.nullMessage=No additional message sent from the workflow. notification.email.create.account.subject={0}: Your account has been created notification.email.assign.role.subject={0}: You have been assigned a role notification.email.revoke.role.subject={0}: Your role has been revoked @@ -967,6 +979,8 @@ dataverse.permissions.roles.add=Add New Role dataverse.permissions.roles.description=All the roles set up in your dataverse, that you can assign to users and groups. dataverse.permissions.roles.edit=Edit Role dataverse.permissions.roles.copy=Copy Role +dataverse.permissions.roles.alias.required=Please enter a unique identifier for this role. +dataverse.permissions.roles.name.required=Please enter a name for this role. # permissions-manage-files.xhtml dataverse.permissionsFiles.title=Restricted File Permissions @@ -1398,15 +1412,19 @@ dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be dow dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. dataset.message.locked.publishNotAllowed=Dataset cannot be published due to dataset lock. 
-dataset.message.createSuccess=This dataset has been created +dataset.message.createSuccess=This dataset has been created. dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again. dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. Please try uploading the missing file(s) again. dataset.message.linkSuccess= {0} has been successfully linked to {1}. dataset.message.metadataSuccess=The metadata for this dataset has been updated. -dataset.message.termsSuccess=The terms for this dataset has been updated. +dataset.message.termsSuccess=The terms for this dataset have been updated. dataset.message.filesSuccess=The files for this dataset have been updated. dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try uploading the file(s) again. dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again. +dataset.message.publish.remind.draft=If it's ready for sharing, please publish it. +dataset.message.submit.remind.draft=If it's ready for sharing, please submit it for review. +dataset.message.publish.remind.version=If it's ready for sharing, please publish it so that others can see these changes. +dataset.message.submit.remind.version=If it's ready for sharing, please submit it for review so that others can see these changes. dataset.message.publishSuccess=This dataset has been published. dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets. dataset.message.deleteSuccess=This dataset has been deleted. @@ -1520,10 +1538,8 @@ file.notFound.search=There are no files that match your search. Please change th file.noSelectedFiles.tip=There are no selected files to display. file.noUploadedFiles.tip=Files you upload will appear here. 
file.replace=Replace -file.replaced.warning.header=Edit File -file.replaced.warning.draft.warningMessage=You can not replace a file that has been replaced in a dataset draft. In order to replace it with a different file you must delete the dataset draft. Note that doing so will discard any other changes within this draft. -file.replaced.warning.previous.warningMessage=You can not edit a file that has been replaced in a previous dataset version. In order to edit it you must go to the most recently published version of the file. -file.alreadyDeleted.previous.warningMessage=This file has already been deleted in current version. It may not be edited. +file.alreadyDeleted.warning.header=Edit File +file.alreadyDeleted.previous.warningMessage=This file has already been deleted (or replaced) in the current version. It may not be edited. file.delete=Delete file.delete.duplicate.multiple=Delete Duplicate Files file.delete.duplicate.single=Delete Duplicate File @@ -1903,9 +1919,9 @@ file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non- file.metadata.filedirectory.invalidCharacters=Directory Name cannot contain invalid characters. Valid characters are a-Z, 0-9, '_', '-', '.', '\\', '/' and ' ' (white space). # File Edit Success -file.message.editSuccess=This file has been updated. +file.message.editSuccess=The file has been updated. file.message.deleteSuccess=The file has been deleted. -file.message.replaceSuccess=This file has been replaced. +file.message.replaceSuccess=The file has been replaced. # File Add/Replace operation messages file.addreplace.file_size_ok=File size is in range. @@ -1913,6 +1929,7 @@ file.addreplace.error.byte_abrev=B file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. file.addreplace.error.dataset_is_null=The dataset cannot be null. file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. 
+file.addreplace.error.parsing=Error in parsing provided json file.addreplace.warning.unzip.failed=Failed to unzip the file. Saving the file as is. file.addreplace.warning.unzip.failed.size=A file contained in this zip file exceeds the size limit of {0}. This Dataverse installation will save and display the zipped file, rather than unpacking and displaying files. find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present. @@ -1926,6 +1943,10 @@ find.dataverselinking.error.not.found.bad.ids=Bad dataverse ID number: {0} or da find.datafile.error.datafile.not.found.id=File with ID {0} not found. find.datafile.error.datafile.not.found.bad.id=Bad file ID number: {0}. find.datafile.error.dataset.not.found.persistentId=Datafile with Persistent ID {0} not found. +find.dataverse.role.error.role.not.found.id=Dataverse Role with ID {0} not found. +find.dataverse.role.error.role.not.found.bad.id=Bad Dataverse Role ID number: {0} +find.dataverse.role.error.role.not.found.alias=Dataverse Role with alias {0} not found. +find.dataverse.role.error.role.builtin.not.allowed=May not delete Built In Role {0}. file.addreplace.error.dataset_id_not_found=There was no dataset found for ID: file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset. file.addreplace.error.filename_undetermined=The file name cannot be determined. @@ -2216,6 +2237,7 @@ shib.invalidEmailAddress=The SAML assertion contained an invalid email address: shib.emailAddress.error=A single valid address could not be found. shib.nullerror=The SAML assertion for "{0}" was null. Please contact support. dataverse.shib.success=Your Dataverse account is now associated with your institutional account. +shib.convert.fail.deactivated=Your existing account cannot be converted because it has been deactivated. shib.createUser.fail=Couldn't create user. 
shib.duplicate.email.error=Cannot login, because the e-mail address associated with it has changed since previous login and is already in use by another account. @@ -2235,6 +2257,8 @@ permission.permissionsMissing=Permissions {0} missing. permission.CannotAssigntDefaultPermissions=Cannot assign default permissions. permission.default.contributor.role.none.decription=A person who has no permissions on a newly created dataset. Not recommended for dataverses with human contributors. permission.default.contributor.role.none.name=None +permission.role.must.be.created.by.superuser=Roles can only be created or edited by superusers. +permission.role.not.created.alias.already.exists=Role with this alias already exists. #ManageFilePermissionsPage.java permission.roleNotAbleToBeRemoved=The role assignment was not able to be removed. @@ -2277,6 +2301,8 @@ pid.allowedCharacters=^[A-Za-z0-9._/:\\-]* #General Command Exception command.exception.only.superusers={1} can only be called by superusers. +command.exception.user.deactivated={0} failed: User account has been deactivated. +command.exception.user.deleted={0} failed: User account has been deleted. #Admin-API admin.api.auth.mustBeSuperUser=Forbidden. You must be a superuser. @@ -2290,8 +2316,7 @@ admin.api.deleteUser.failure.dvobjects= the user has created Dataverse object(s) admin.api.deleteUser.failure.gbResps= the user is associated with file download (Guestbook Response) record(s) admin.api.deleteUser.failure.roleAssignments=the user is associated with role assignment record(s) admin.api.deleteUser.failure.versionUser=the user has contributed to dataset version(s) -admin.api.deleteUser.failure.groupMember=the user is a member of Explicit Group(s) -admin.api.deleteUser.failure.pendingRequests=the user has pending File Access Request(s) +admin.api.deleteUser.failure.savedSearches=the user has created saved searches admin.api.deleteUser.success=Authenticated User {0} deleted. 
#Files.java diff --git a/src/main/resources/META-INF/microprofile-aliases.properties b/src/main/resources/META-INF/microprofile-aliases.properties index 4efaf02b083..ebde2910d52 100644 --- a/src/main/resources/META-INF/microprofile-aliases.properties +++ b/src/main/resources/META-INF/microprofile-aliases.properties @@ -1,4 +1,5 @@ # NOTE # This file is a placeholder for future aliases of deprecated config settings. # Format: -# dataverse.new.config.option=dataverse.old.deprecated.option \ No newline at end of file +# dataverse.new.config.option=dataverse.old.deprecated.option +dataverse.export.distributor.excludeinstallationifset=dataverse.settings.fromdb.ExportInstallationAsDistributorOnlyWhenNotSet \ No newline at end of file diff --git a/src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql b/src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql new file mode 100644 index 00000000000..d2a8d48726e --- /dev/null +++ b/src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql @@ -0,0 +1,2 @@ +ALTER TABLE workflowcomment +ADD COLUMN IF NOT EXISTS tobeshown boolean; diff --git a/src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql b/src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql new file mode 100644 index 00000000000..a5e4b69e00b --- /dev/null +++ b/src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql @@ -0,0 +1,6 @@ +-- Users can be deactivated. +ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS deactivated BOOLEAN; +-- Prevent old users from having null for deactivated. +UPDATE authenticateduser SET deactivated = false WHERE deactivated IS NULL; +-- A timestamp of when the user was deactivated. 
+ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS deactivatedtime timestamp without time zone; diff --git a/src/main/webapp/dashboard-users.xhtml b/src/main/webapp/dashboard-users.xhtml index a9e7461f1fb..3f6087cf01c 100644 --- a/src/main/webapp/dashboard-users.xhtml +++ b/src/main/webapp/dashboard-users.xhtml @@ -65,6 +65,7 @@ + @@ -84,7 +85,8 @@ - + + diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index 5de0154f49c..3e48e16404c 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -312,6 +312,28 @@ + + + + + #{item.theObject.getDataset().getDisplayName()} + + + #{item.theObject.getDataset().getOwner().getDisplayName()} + + + + + + + + #{item.theObject.getDataset().getDisplayName()} + + + #{item.theObject.getDataset().getOwner().getDisplayName()} + + + diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml index 01c5ccc9123..aa4eb5f228c 100644 --- a/src/main/webapp/editdatafiles.xhtml +++ b/src/main/webapp/editdatafiles.xhtml @@ -15,7 +15,7 @@ - + @@ -25,14 +25,15 @@ + - - + + @@ -53,9 +54,8 @@ - +
diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index 6b47cac4057..ef5df7acb3d 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -22,7 +22,6 @@ dataserVersion - the datasetversion object to associate with this view fileMetadata - for single file, the fileMetadata object of that file fileMetadataForAction - used by DatasetPage popups to identify single file (also clears for selected file) - isDraftReplacementFile - for single file, if the file is a draft and already replacing a past file configureTools - for single file, list of configureTools for the file bean - the named value of the backing bean for the below method(s), also used by isFilePg param unrestrictFileAction - name of the method on the above bean to call for unrestrict (method must take a boolean) @@ -38,7 +37,7 @@
  • @@ -71,22 +70,13 @@ - - - -
  • - - - -
  • - - -
  • - - - -
  • -
    + + +
  • + + + +
  • diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index 7fe8fb4c083..707a5f26721 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -13,7 +13,17 @@ restrictFileAction - name of the method on the above bean to call for restrict (method must take a boolean) deleteFileAction - name of method on the above bean to delete files --> - + + + +

    #{bundle['file.alreadyDeleted.previous.warningMessage']}

    +
    + +
    +
    @@ -71,8 +81,8 @@ - - + +
    - - + +
    \ No newline at end of file diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 34717bfae3b..06ff420a9d1 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -210,15 +210,15 @@
    - + #{bundle['file.editBtn']} - +
    + and !FilePage.deletedFile}"> @@ -642,31 +641,7 @@ #{bundle.close}
    - - -

    #{bundle['file.replaced.warning.draft.warningMessage']}

    -
    - -
    -
    - - - -

    #{bundle['file.replaced.warning.previous.warningMessage']}

    -
    - -

    #{bundle['file.alreadyDeleted.previous.warningMessage']}

    -
    -
    - -
    -
    +

    #{bundle['file.compute.fileAccessDenied']}

    diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index f3523b63853..c18dcf0d41a 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -530,7 +530,18 @@
    + and DatasetPage.canUpdateDataset() + and DatasetPage.isFileDeleted(fileMetadata.dataFile)}"> + + #{bundle['file.optionsBtn']} + +
    + diff --git a/src/main/webapp/roles-edit.xhtml b/src/main/webapp/roles-edit.xhtml index a59d53bcdae..e236ef180b3 100644 --- a/src/main/webapp/roles-edit.xhtml +++ b/src/main/webapp/roles-edit.xhtml @@ -8,28 +8,37 @@
    +
    - - + +
    + - - + + +
    +
    @@ -65,9 +74,12 @@
    + oncomplete="if (args && !args.validationFailed) addRoleCommand();" + update="roleInputTextFragment @([id$=Messages])" + > + + + diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index a1bcc0b08fd..c2049705cb1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -358,6 +358,63 @@ public void testConvertShibUserToBuiltin() throws Exception { } + /** + * Here we are asserting that deactivated users cannot be converted into + * shib users. + */ + @Test + public void testConvertDeactivateUserToShib() { + + Response createUserToConvert = UtilIT.createRandomUser(); + createUserToConvert.then().assertThat().statusCode(OK.getStatusCode()); + createUserToConvert.prettyPrint(); + + long idOfUserToConvert = createUserToConvert.body().jsonPath().getLong("data.authenticatedUser.id"); + String emailOfUserToConvert = createUserToConvert.body().jsonPath().getString("data.authenticatedUser.email"); + String usernameOfUserToConvert = UtilIT.getUsernameFromResponse(createUserToConvert); + + Response deactivateUser = UtilIT.deactivateUser(usernameOfUserToConvert); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + String password = usernameOfUserToConvert; + String newEmailAddressToUse = "builtin2shib." 
+ UUID.randomUUID().toString().substring(0, 8) + "@mailinator.com"; + String data = emailOfUserToConvert + ":" + password + ":" + newEmailAddressToUse; + + Response builtinToShibAnon = UtilIT.migrateBuiltinToShib(data, ""); + builtinToShibAnon.prettyPrint(); + builtinToShibAnon.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getAuthProviders = UtilIT.getAuthProviders(superuserApiToken); + getAuthProviders.prettyPrint(); + if (!getAuthProviders.body().asString().contains(BuiltinAuthenticationProvider.PROVIDER_ID)) { + System.out.println("Can't proceed with test without builtin provider."); + return; + } + + Response makeShibUser = UtilIT.migrateBuiltinToShib(data, superuserApiToken); + makeShibUser.prettyPrint(); + makeShibUser.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("[\"builtin account has been deactivated\"]")); + + Response userIsStillBuiltin = UtilIT.getAuthenticatedUser(usernameOfUserToConvert, superuserApiToken); + userIsStillBuiltin.prettyPrint(); + userIsStillBuiltin.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.id", equalTo(Long.valueOf(idOfUserToConvert).intValue())) + .body("data.identifier", equalTo("@" + usernameOfUserToConvert)) + .body("data.authenticationProviderId", equalTo("builtin")); + + } + @Test public void testConvertOAuthUserToBuiltin() throws Exception { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java new file mode 100644 index 00000000000..de2a1d422c0 --- /dev/null +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java @@ -0,0 +1,282 @@ +package edu.harvard.iq.dataverse.api; + +import com.jayway.restassured.RestAssured; +import com.jayway.restassured.path.json.JsonPath; +import com.jayway.restassured.response.Response; +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import static javax.ws.rs.core.Response.Status.CREATED; +import static javax.ws.rs.core.Response.Status.FORBIDDEN; +import static javax.ws.rs.core.Response.Status.OK; +import static javax.ws.rs.core.Response.Status.UNAUTHORIZED; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.startsWith; +import org.junit.BeforeClass; +import org.junit.Test; + +public class DeactivateUsersIT { + + @BeforeClass + public static void setUp() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @Test + public void testDeactivateUser() { + + Response createSuperuser = UtilIT.createRandomUser(); + createSuperuser.then().assertThat().statusCode(OK.getStatusCode()); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createDataverse = UtilIT.createRandomDataverse(superuserApiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, superuserApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); 
+ + String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDataset); + + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response grantRoleBeforeDeactivate = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.ADMIN.toString(), "@" + username, superuserApiToken); + grantRoleBeforeDeactivate.prettyPrint(); + grantRoleBeforeDeactivate.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.assignee", equalTo("@" + username)) + .body("data._roleAlias", equalTo("admin")); + + String aliasInOwner = "groupFor" + dataverseAlias; + String displayName = "Group for " + dataverseAlias; + String user2identifier = "@" + username; + Response createGroup = UtilIT.createGroup(dataverseAlias, aliasInOwner, displayName, superuserApiToken); + createGroup.prettyPrint(); + createGroup.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String groupIdentifier = JsonPath.from(createGroup.asString()).getString("data.identifier"); + + List roleAssigneesToAdd = new ArrayList<>(); + roleAssigneesToAdd.add(user2identifier); + Response addToGroup = UtilIT.addToGroup(dataverseAlias, aliasInOwner, roleAssigneesToAdd, superuserApiToken); + addToGroup.prettyPrint(); + addToGroup.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response userTracesBeforeDeactivate = UtilIT.getUserTraces(username, superuserApiToken); + userTracesBeforeDeactivate.prettyPrint(); + userTracesBeforeDeactivate.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.traces.roleAssignments.items[0].definitionPointName", equalTo(dataverseAlias)) + .body("data.traces.roleAssignments.items[0].definitionPointId", equalTo(dataverseId)) + .body("data.traces.explicitGroups.items[0].name", equalTo("Group for " + dataverseAlias)); + + Response deactivateUser = 
UtilIT.deactivateUser(username); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + Response getUser = UtilIT.getAuthenticatedUser(username, superuserApiToken); + getUser.prettyPrint(); + getUser.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.deactivated", equalTo(true)); + + Response findUser = UtilIT.filterAuthenticatedUsers(superuserApiToken, username, null, 100, null); + findUser.prettyPrint(); + findUser.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.users[0].userIdentifier", equalTo(username)) + .body("data.users[0].deactivated", equalTo(true)) + .body("data.users[0].deactivatedTime", startsWith("2")); + + Response getUserDeactivated = UtilIT.getAuthenticatedUserByToken(apiToken); + getUserDeactivated.prettyPrint(); + getUserDeactivated.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + Response userTracesAfterDeactivate = UtilIT.getUserTraces(username, superuserApiToken); + userTracesAfterDeactivate.prettyPrint(); + userTracesAfterDeactivate.then().assertThat() + .statusCode(OK.getStatusCode()) + /** + * Here we are showing the the following were deleted: + * + * - role assignments + * + * - membership in explict groups. 
+ */ + .body("data.traces", equalTo(Collections.EMPTY_MAP)); + + Response grantRoleAfterDeactivate = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.ADMIN.toString(), "@" + username, superuserApiToken); + grantRoleAfterDeactivate.prettyPrint(); + grantRoleAfterDeactivate.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo("User " + username + " is deactivated and cannot be given a role.")); + + Response addToGroupAfter = UtilIT.addToGroup(dataverseAlias, aliasInOwner, roleAssigneesToAdd, superuserApiToken); + addToGroupAfter.prettyPrint(); + addToGroupAfter.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo("User " + username + " is deactivated and cannot be added to a group.")); + + Response grantRoleOnDataset = UtilIT.grantRoleOnDataset(datasetPersistentId, DataverseRole.ADMIN.toString(), "@" + username, superuserApiToken); + grantRoleOnDataset.prettyPrint(); + grantRoleOnDataset.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", equalTo("User " + username + " is deactivated and cannot be given a role.")); + + } + + @Test + public void testDeactivateUserById() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + + Long userId = JsonPath.from(createUser.body().asString()).getLong("data.authenticatedUser.id"); + Response deactivateUser = UtilIT.deactivateUser(userId); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + } + + @Test + public void testMergeDeactivatedIntoNonDeactivatedUser() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + 
.statusCode(OK.getStatusCode()); + + Response createUserMergeTarget = UtilIT.createRandomUser(); + createUserMergeTarget.prettyPrint(); + String usernameMergeTarget = UtilIT.getUsernameFromResponse(createUserMergeTarget); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + Response deactivateUser = UtilIT.deactivateUser(usernameToMerge); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + // User accounts can only be merged if they are either both active or both deactivated. + Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testMergeNonDeactivatedIntoDeactivatedUser() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUserMergeTarget = UtilIT.createRandomUser(); + createUserMergeTarget.prettyPrint(); + String usernameMergeTarget = UtilIT.getUsernameFromResponse(createUserMergeTarget); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + Response deactivateUser = UtilIT.deactivateUser(usernameMergeTarget); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + // User accounts can only be merged if they are either both active or both deactivated. 
+ Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testMergeDeactivatedIntoDeactivatedUser() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUserMergeTarget = UtilIT.createRandomUser(); + createUserMergeTarget.prettyPrint(); + String usernameMergeTarget = UtilIT.getUsernameFromResponse(createUserMergeTarget); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + Response deactivatedUserMergeTarget = UtilIT.deactivateUser(usernameMergeTarget); + deactivatedUserMergeTarget.prettyPrint(); + deactivatedUserMergeTarget.then().assertThat().statusCode(OK.getStatusCode()); + + Response deactivatedUserToMerge = UtilIT.deactivateUser(usernameToMerge); + deactivatedUserToMerge.prettyPrint(); + deactivatedUserToMerge.then().assertThat().statusCode(OK.getStatusCode()); + + // User accounts can only be merged if they are either both active or both deactivated. 
+ Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(OK.getStatusCode()); + } + + @Test + public void testMergeUserIntoSelf() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + String usernameMergeTarget = usernameToMerge; + + Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testTurnDeactivatedUserIntoSuperuser() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + + Response deactivateUser = UtilIT.deactivateUser(username); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + Response toggleSuperuser = UtilIT.makeSuperUser(username); + toggleSuperuser.prettyPrint(); + toggleSuperuser.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); + + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java new file mode 100644 index 00000000000..cae1d0e210a --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java @@ -0,0 +1,701 @@ +package edu.harvard.iq.dataverse.api; + +import com.jayway.restassured.RestAssured; 
+import com.jayway.restassured.path.json.JsonPath; +import com.jayway.restassured.response.Response; +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import javax.json.Json; +import javax.json.JsonObjectBuilder; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import static javax.ws.rs.core.Response.Status.CREATED; +import static javax.ws.rs.core.Response.Status.OK; +import static javax.ws.rs.core.Response.Status.UNAUTHORIZED; +import static junit.framework.Assert.assertEquals; +import static org.hamcrest.CoreMatchers.equalTo; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * The following query has been helpful in discovering places where user ids + * appear throughout the database. Here's a summary of how user deletion affects + * these tables. + * + * - apitoken: Not a concern. Tokens are deleted. + * + * - authenticateduserlookup: Not a concern. Rows are deleted. + * + * - confirmemaildata: Not a concern. Rows are deleted. + * + * - datasetlock: Not a concern, locks are deleted. + * + * - datasetversionuser: Definitely a concern. This table is what feeds the + * "Contributors" list under the "Version" tab on the dataset page. You can't + * delete the user. You can merge the user but the name under "Contributors" + * will change to the user you merged into. There is talk of implementing the + * concept of disabling users to handle this. + * + * - dvobject (creator_id): Definitely a concern. You can't delete a user. You + * have to merge instead. + * + * - dvobject (releaseuser_id): Definitely a concern. You can't delete a user. + * You have to merge instead. It seems that for files, releaseuser_id is not + * populated. + * + * - explicitgroup: Not a concern. Group membership is deleted. + * + * - fileaccessrequests: Not a concern. 
File requests are deleted.
+ *
+ * - guestbookresponse: Definitely a concern but it's possible to null out the
+ * user id. You can't delete a user but you can merge instead. There is talk of
+ * deactivate which would probably null out the id. In all cases the name and
+ * email address in the rows are left alone.
+ *
+ * - oauth2tokendata: Not a concern. Rows are deleted.
+ *
+ * - savedsearch: Definitely a concern. You can't delete a user. You have to
+ * merge.
+ *
+ * - userbannermessage: Not a concern. Rows are deleted.
+ *
+ * - usernotification (user_id): Not a concern. Deleted by a cascade.
+ *
+ * - usernotification (requestor_id): Not a big concern because of other
+ * constraints. This is only populated by "submit for review" (so that the
+ * curator has the name and email address of the author). All these
+ * notifications would be deleted by a cascade but deleting the user itself is
+ * prevented because the user is recorded in the datasetversionuser table. (Both
+ * "submit for review" and "return to author" add you to that table.) So the
+ * bottom line is that the user can't be deleted. It has to be merged.
+ *
+ * - workflowcomment: Not a big concern because of other constraints. A workflow
+ * comment is optionally added as part of "return to author" but this also
+ * creates a row in the datasetversionuser table which means the user can't be
+ * deleted. It has to be merged instead.
+ *
+ *
+ * The tables that aren't captured above are actionlogrecord and roleassignment
+ * because the relationship is to the identifier (username) rather than the id.
+ * So we'll list them separately:
+ *
+ * - actionlogrecord: Not a concern. Delete can go through. On merge, they are
+ * changed from one user identifier to another.
+ *
+ * - roleassignment: Not a concern. Delete can go through. On merge, they are
+ * changed from one user identifier to another. 
+ */ +/* + table_name | constraint_name +---------------------------------+---------------------------------------------------------------- + apitoken | fk_apitoken_authenticateduser_id + authenticateduserlookup | fk_authenticateduserlookup_authenticateduser_id + confirmemaildata | fk_confirmemaildata_authenticateduser_id + datasetlock | fk_datasetlock_user_id + datasetversionuser | fk_datasetversionuser_authenticateduser_id + dvobject | fk_dvobject_creator_id + dvobject | fk_dvobject_releaseuser_id + explicitgroup_authenticateduser | explicitgroup_authenticateduser_containedauthenticatedusers_id + fileaccessrequests | fk_fileaccessrequests_authenticated_user_id + guestbookresponse | fk_guestbookresponse_authenticateduser_id + oauth2tokendata | fk_oauth2tokendata_user_id + savedsearch | fk_savedsearch_creator_id + userbannermessage | fk_userbannermessage_user_id + usernotification | fk_usernotification_user_id + usernotification | fk_usernotification_requestor_id + workflowcomment | fk_workflowcomment_authenticateduser_id +(16 rows) + +-- https://stackoverflow.com/questions/5347050/postgresql-sql-script-to-get-a-list-of-all-tables-that-has-a-particular-column +select R.TABLE_NAME, R.CONSTRAINT_NAME +from INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE u +inner join INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS FK + on U.CONSTRAINT_CATALOG = FK.UNIQUE_CONSTRAINT_CATALOG + and U.CONSTRAINT_SCHEMA = FK.UNIQUE_CONSTRAINT_SCHEMA + and U.CONSTRAINT_NAME = FK.UNIQUE_CONSTRAINT_NAME +inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE R + ON R.CONSTRAINT_CATALOG = FK.CONSTRAINT_CATALOG + AND R.CONSTRAINT_SCHEMA = FK.CONSTRAINT_SCHEMA + AND R.CONSTRAINT_NAME = FK.CONSTRAINT_NAME +WHERE U.COLUMN_NAME = 'id' +-- AND U.TABLE_CATALOG = 'b' +-- AND U.TABLE_SCHEMA = 'c' + AND U.TABLE_NAME = 'authenticateduser' +ORDER BY R.TABLE_NAME; + */ +public class DeleteUsersIT { + + @BeforeClass + public static void setUp() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @Test + public 
void testDeleteRolesAndUnpublishedDataverse() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String usernameForCreateDV = UtilIT.getUsernameFromResponse(createUser); + String normalApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response getTraces1 = UtilIT.getUserTraces(usernameForCreateDV, superuserApiToken); + getTraces1.prettyPrint(); + getTraces1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.user.identifier", equalTo("@" + usernameForCreateDV)) + // traces is {} when user hasn't left a trace + .body("data.traces", equalTo(Collections.emptyMap())); + + Response createDataverse = UtilIT.createRandomDataverse(normalApiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response getTraces2 = UtilIT.getUserTraces(usernameForCreateDV, superuserApiToken); + getTraces2.prettyPrint(); + getTraces2.then().assertThat().statusCode(OK.getStatusCode()); + + if (true) { + return; + } + + createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String usernameForAssignedRole = UtilIT.getUsernameFromResponse(createUser); + String roleApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response assignRole = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.EDITOR.toString(), + "@" + usernameForAssignedRole, superuserApiToken); + + // Shouldn't be able to delete user with a role + Response deleteUserRole = UtilIT.deleteUser(usernameForAssignedRole); + + deleteUserRole.prettyPrint(); + 
deleteUserRole.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + usernameForAssignedRole + " because the user is associated with role assignment record(s).")); + + // Now remove that role + Response removeRoles1 = UtilIT.deleteUserRoles(usernameForAssignedRole, superuserApiToken); + removeRoles1.prettyPrint(); + removeRoles1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Roles removed for user " + usernameForAssignedRole + ".")); + + // Now the delete should work + Response deleteUserRole2 = UtilIT.deleteUser(usernameForAssignedRole); + deleteUserRole2.prettyPrint(); + deleteUserRole2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("AuthenticatedUser @" + usernameForAssignedRole + " deleted. ")); + + // The owner of the dataverse that was just created is dataverseAdmin because it created the parent dataverse (root). + Response getTraces3 = UtilIT.getUserTraces(usernameForCreateDV, superuserApiToken); + getTraces3.prettyPrint(); + getTraces3.then().assertThat().statusCode(OK.getStatusCode()); + + // Removing roles here but could equally just delete the dataverse. 
+ Response removeRoles2 = UtilIT.deleteUserRoles(usernameForCreateDV, superuserApiToken); + removeRoles2.prettyPrint(); + removeRoles2.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Shouldn't be able to delete a user who has created a DV + Response deleteUserCreateDV = UtilIT.deleteUser(usernameForCreateDV); + deleteUserCreateDV.prettyPrint(); + deleteUserCreateDV.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + usernameForCreateDV + " because the user has created Dataverse object(s).")); + + Response deleteDataverse = UtilIT.deleteDataverse(dataverseAlias, superuserApiToken); + deleteDataverse.prettyPrint(); + deleteDataverse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Should be able to delete user after dv is deleted + Response deleteUserAfterDeleteDV = UtilIT.deleteUser(usernameForCreateDV); + deleteUserAfterDeleteDV.prettyPrint(); + deleteUserAfterDeleteDV.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response deleteSuperuser = UtilIT.deleteUser(superuserUsername); + deleteSuperuser.prettyPrint(); + assertEquals(200, deleteSuperuser.getStatusCode()); + + } + + @Test + public void testDeleteUserWithUnPublishedDataverse() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + 
.statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response removeRoles1 = UtilIT.deleteUserRoles(username, superuserApiToken); + removeRoles1.prettyPrint(); + removeRoles1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Roles removed for user " + username + ".")); + + Response deleteUser1 = UtilIT.deleteUser(username); + deleteUser1.prettyPrint(); + deleteUser1.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + username + " because the user has created Dataverse object(s).")); + + Response traces = UtilIT.getUserTraces(username, superuserApiToken); + traces.prettyPrint(); + traces.then().assertThat().statusCode(OK.getStatusCode()); + + // You can't delete. You have to merge. + Response mergeAccounts = UtilIT.mergeAccounts(superuserUsername, username, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(OK.getStatusCode()); + } + + /** + * You can't delete an account with guestbook entries so you have to merge + * it instead. 
+ */ + @Test + public void testDeleteUserWithGuestbookEntries() throws IOException { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response downloader = UtilIT.createRandomUser(); + downloader.prettyPrint(); + String downloaderUsername = UtilIT.getUsernameFromResponse(downloader); + String downloaderApiToken = UtilIT.getApiTokenFromResponse(downloader); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + Path pathtoReadme = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "README.md"); + java.nio.file.Files.write(pathtoReadme, "In the beginning...".getBytes()); + + Response uploadReadme = UtilIT.uploadFileViaNative(datasetId.toString(), pathtoReadme.toString(), authorApiToken); + uploadReadme.prettyPrint(); + uploadReadme.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("README.md")); + + int 
fileId = JsonPath.from(uploadReadme.body().asString()).getInt("data.files[0].dataFile.id"); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, authorApiToken); + publishDataverse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPid, "major", authorApiToken); + publishDataset.then().assertThat().statusCode(OK.getStatusCode()); + // This download creates a guestbook entry. + Response downloadFile = UtilIT.downloadFile(fileId, downloaderApiToken); + downloadFile.then().assertThat().statusCode(OK.getStatusCode()); + + // We can't delete the downloader because a guestbook record (a download) has been created. + Response deleteDownloaderFail = UtilIT.deleteUser(downloaderUsername); + deleteDownloaderFail.prettyPrint(); + deleteDownloaderFail.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); + + // Let's see why we can't download. + Response getTraces = UtilIT.getUserTraces(downloaderUsername, superuserApiToken); + getTraces.prettyPrint(); + getTraces.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.traces.guestbookEntries.count", equalTo(1)); + + // We can't delete so we do a merge instead. 
+ Response mergeAccounts = UtilIT.mergeAccounts(superuserUsername, downloaderUsername, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(OK.getStatusCode()); + + } + + @Test + public void testDatasetLocks() throws IOException { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response downloader = UtilIT.createRandomUser(); + downloader.prettyPrint(); + String downloaderUsername = UtilIT.getUsernameFromResponse(downloader); + String downloaderApiToken = UtilIT.getApiTokenFromResponse(downloader); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + Response lockDatasetResponse = UtilIT.lockDataset(datasetId.longValue(), "Ingest", superuserApiToken); + lockDatasetResponse.prettyPrint(); + lockDatasetResponse.then().assertThat() + .body("data.message", equalTo("dataset locked with lock type Ingest")) + .statusCode(200); + + 
Response checkDatasetLocks = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", superuserApiToken); + checkDatasetLocks.prettyPrint(); + checkDatasetLocks.then().assertThat() + .body("data[0].lockType", equalTo("Ingest")) + .statusCode(200); + Response deleteUserWhoCreatedLock = UtilIT.deleteUser(superuserUsername); + deleteUserWhoCreatedLock.prettyPrint(); + deleteUserWhoCreatedLock.then().assertThat() + .statusCode(OK.getStatusCode()); + } + + @Test + public void testDeleteUserWhoIsMemberOfGroup() throws IOException { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response downloader = UtilIT.createRandomUser(); + downloader.prettyPrint(); + String downloaderUsername = UtilIT.getUsernameFromResponse(downloader); + String downloaderApiToken = UtilIT.getApiTokenFromResponse(downloader); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createGroupMember = UtilIT.createRandomUser(); + createGroupMember.prettyPrint(); + String groupMemberUsername = UtilIT.getUsernameFromResponse(createGroupMember); + String groupMemberApiToken = UtilIT.getApiTokenFromResponse(createGroupMember); + + String aliasInOwner = "groupFor" + dataverseAlias; + String displayName = "Group for " + dataverseAlias; + String user2identifier = "@" 
+ groupMemberUsername; + Response createGroup = UtilIT.createGroup(dataverseAlias, aliasInOwner, displayName, superuserApiToken); + createGroup.prettyPrint(); + createGroup.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String groupIdentifier = JsonPath.from(createGroup.asString()).getString("data.identifier"); + + List roleAssigneesToAdd = new ArrayList<>(); + roleAssigneesToAdd.add(user2identifier); + Response addToGroup = UtilIT.addToGroup(dataverseAlias, aliasInOwner, roleAssigneesToAdd, superuserApiToken); + addToGroup.prettyPrint(); + addToGroup.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getTraces = UtilIT.getUserTraces(groupMemberUsername, superuserApiToken); + getTraces.prettyPrint(); + getTraces.then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteUserInGroup = UtilIT.deleteUser(groupMemberUsername); + deleteUserInGroup.prettyPrint(); + deleteUserInGroup.then().assertThat() + .statusCode(OK.getStatusCode()); + + } + + @Test + public void testDeleteUserWithFileAccessRequests() throws IOException { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response fileRequester = UtilIT.createRandomUser(); + fileRequester.prettyPrint(); + String fileRequesterUsername = UtilIT.getUsernameFromResponse(fileRequester); + String fileRequesterApiToken = UtilIT.getApiTokenFromResponse(fileRequester); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + 
createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + Path pathtoReadme = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "README.md"); + java.nio.file.Files.write(pathtoReadme, "In the beginning...".getBytes()); + + Response uploadReadme = UtilIT.uploadFileViaNative(datasetId.toString(), pathtoReadme.toString(), authorApiToken); + uploadReadme.prettyPrint(); + uploadReadme.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("README.md")); + + Integer fileId = JsonPath.from(uploadReadme.body().asString()).getInt("data.files[0].dataFile.id"); + + Response restrictResponse = UtilIT.restrictFile(fileId.toString(), true, authorApiToken); + restrictResponse.prettyPrint(); + restrictResponse.then().assertThat().statusCode(OK.getStatusCode()); + + //Update Dataset to allow requests + Response allowAccessRequestsResponse = UtilIT.allowAccessRequests(datasetPid, true, authorApiToken); + allowAccessRequestsResponse.prettyPrint(); + allowAccessRequestsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, authorApiToken); + publishDataverse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPid, "major", authorApiToken); + publishDataset.then().assertThat().statusCode(OK.getStatusCode()); + + Response requestFileAccessResponse = 
UtilIT.requestFileAccess(fileId.toString(), fileRequesterApiToken); + requestFileAccessResponse.prettyPrint(); + requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Let's see why we can't download. + Response getTraces = UtilIT.getUserTraces(fileRequesterUsername, superuserApiToken); + getTraces.prettyPrint(); + getTraces.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Even if users have outstanding file requests, they can be deleted. + Response deleteDownloaderSuccess = UtilIT.deleteUser(fileRequesterUsername); + deleteDownloaderSuccess.prettyPrint(); + deleteDownloaderSuccess.then().assertThat() + .statusCode(OK.getStatusCode()); + } + + @Test + public void testCuratorSendsCommentsToAuthor() throws InterruptedException { + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createCurator1 = UtilIT.createRandomUser(); + createCurator1.prettyPrint(); + createCurator1.then().assertThat() + .statusCode(OK.getStatusCode()); + String curator1Username = UtilIT.getUsernameFromResponse(createCurator1); + String curator1ApiToken = UtilIT.getApiTokenFromResponse(createCurator1); + + Response createCurator2 = UtilIT.createRandomUser(); + createCurator2.prettyPrint(); + createCurator2.then().assertThat() + .statusCode(OK.getStatusCode()); + String curator2Username = UtilIT.getUsernameFromResponse(createCurator2); + String curator2ApiToken = UtilIT.getApiTokenFromResponse(createCurator2); + + Response createDataverseResponse = UtilIT.createRandomDataverse(curator1ApiToken); + createDataverseResponse.prettyPrint(); + createDataverseResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String 
dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response makeCurator2Admin = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.ADMIN.toString(), "@" + curator2Username, curator1ApiToken); + makeCurator2Admin.prettyPrint(); + makeCurator2Admin.then().assertThat() + .body("data.assignee", equalTo("@" + curator2Username)) + .body("data._roleAlias", equalTo("admin")) + .statusCode(OK.getStatusCode()); + + Response createAuthor1 = UtilIT.createRandomUser(); + createAuthor1.prettyPrint(); + createAuthor1.then().assertThat() + .statusCode(OK.getStatusCode()); + String author1Username = UtilIT.getUsernameFromResponse(createAuthor1); + String author1ApiToken = UtilIT.getApiTokenFromResponse(createAuthor1); + + Response createAuthor2 = UtilIT.createRandomUser(); + createAuthor2.prettyPrint(); + createAuthor2.then().assertThat() + .statusCode(OK.getStatusCode()); + String author2Username = UtilIT.getUsernameFromResponse(createAuthor2); + String author2ApiToken = UtilIT.getApiTokenFromResponse(createAuthor2); + + Response grantAuthor1AddDataset = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR.toString(), "@" + author1Username, curator1ApiToken); + grantAuthor1AddDataset.prettyPrint(); + grantAuthor1AddDataset.then().assertThat() + .body("data.assignee", equalTo("@" + author1Username)) + .body("data._roleAlias", equalTo("dsContributor")) + .statusCode(OK.getStatusCode()); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, author1ApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + + // FIXME: have the initial create return the DOI or Handle to obviate the need for this call. 
+ Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, author1ApiToken); + getDatasetJsonBeforePublishing.prettyPrint(); + String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); + String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority"); + String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier"); + + String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + System.out.println("datasetPersistentId: " + datasetPersistentId); + +// Response grantAuthor2ContributorOnDataset = UtilIT.grantRoleOnDataset(datasetPersistentId, DataverseRole.DS_CONTRIBUTOR.toString(), "@" + author2Username, curatorApiToken); + // TODO: Tighten this down to something more realistic than ADMIN. + Response grantAuthor2ContributorOnDataset = UtilIT.grantRoleOnDataset(datasetPersistentId, DataverseRole.ADMIN.toString(), "@" + author2Username, curator1ApiToken); + grantAuthor2ContributorOnDataset.prettyPrint(); + grantAuthor2ContributorOnDataset.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.assignee", equalTo("@" + author2Username)) + .body("data._roleAlias", equalTo("admin")); + +// // Whoops, the author tries to publish but isn't allowed. The curator will take a look. +// Response noPermToPublish = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", author1ApiToken); +// noPermToPublish.prettyPrint(); +// noPermToPublish.then().assertThat() +// .body("message", equalTo("User @" + author1Username + " is not permitted to perform requested action.")) +// .statusCode(UNAUTHORIZED.getStatusCode()); + Response submitForReview = UtilIT.submitDatasetForReview(datasetPersistentId, author2ApiToken); + submitForReview.prettyPrint(); + submitForReview.then().assertThat() + .statusCode(OK.getStatusCode()); + + // curator2 returns dataset to author. 
This makes curator2 a contributor. + String comments = "You forgot to upload any files."; + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("reasonForReturn", comments); + Response returnToAuthor = UtilIT.returnDatasetToAuthor(datasetPersistentId, jsonObjectBuilder.build(), curator2ApiToken); + returnToAuthor.prettyPrint(); + returnToAuthor.then().assertThat() + .body("data.inReview", equalTo(false)) + .statusCode(OK.getStatusCode()); + + Response getTracesForCurator2 = UtilIT.getUserTraces(curator2Username, superuserApiToken); + getTracesForCurator2.prettyPrint(); + getTracesForCurator2.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response removeRolesFromCurator2 = UtilIT.deleteUserRoles(curator2Username, superuserApiToken); + removeRolesFromCurator2.prettyPrint(); + removeRolesFromCurator2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Roles removed for user " + curator2Username + ".")); + + // Because curator2 returned the dataset to the authors, curator2 is now a contributor + // and cannot be deleted. + Response deleteCurator2Fail = UtilIT.deleteUser(curator2Username); + deleteCurator2Fail.prettyPrint(); + deleteCurator2Fail.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + curator2Username + + " because the user has contributed to dataset version(s).")); + + // What should we do with curator2 instead of deleting? The only option is to merge + // curator2 into some other account. Once implemented, we'll deactivate curator2's account + // so that curator2 continues to be displayed as a contributor. + // + // TODO: deactivate curator2 here + // + // Show the error if you don't have permission. 
+ Response failToRemoveRole = UtilIT.deleteUserRoles(author2Username, curator2ApiToken); + failToRemoveRole.prettyPrint(); + failToRemoveRole.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()) + .body("message", equalTo("User @" + curator2Username + " is not permitted to perform requested action.")); + + Response removeRolesFromAuthor2 = UtilIT.deleteUserRoles(author2Username, superuserApiToken); + removeRolesFromAuthor2.prettyPrint(); + removeRolesFromAuthor2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Roles removed for user " + author2Username + ".")); + + // Similarly, we can't delete author2 because author2 submitted + // the dataset for review, which makes one a contributor. + Response deleteAuthor2Fail = UtilIT.deleteUser(author2Username); + deleteAuthor2Fail.prettyPrint(); + deleteAuthor2Fail.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + author2Username + + " because the user has contributed to dataset version(s).")); + + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 8d4369fa85b..9fa06e28a0d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -241,16 +241,12 @@ public void testAddFileBadJson() { Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, junkJson, apiToken); + String parseError = BundleUtil.getStringFromBundle("file.addreplace.error.parsing"); + addResponse.then().assertThat() - .body("status", equalTo(AbstractApiBean.STATUS_OK)) - .body("data.files[0].categories", nullValue()) - .body("data.files[0].dataFile.contentType", equalTo("image/png")) - .body("data.files[0].dataFile.description", equalTo("")) - .body("data.files[0].dataFile.tabularTags", nullValue()) - .body("data.files[0].label", 
equalTo("dataverseproject.png")) - // not sure why description appears in two places - .body("data.files[0].description", equalTo("")) - .statusCode(OK.getStatusCode()); + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", equalTo(parseError)); } @Test @@ -372,12 +368,25 @@ public void test_006_ReplaceFileGood() throws InterruptedException { .add("categories", Json.createArrayBuilder() .add("Data") ); + + /* + * ToDo: When the dataset is still locked, the replaceFile call below returns an + * 'OK' status with an empty 'data' array The sleepForLock avoids that so this + * test tests the normal replace functionality directly, but a new test to check + * that, when the dataset is locked, the call fails instead of returning OK + * would be useful (along with making the replace call do that) + */ + /* + * ToDo: make sleep time shorter for this? Add sleepForLock before subsequent + * calls as well? (Or is it only needed here because it is still locked from the + * publish call above?) 
+ */ + + UtilIT.sleepForLock(datasetId, null, apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION); Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, json.build(), apiToken); msgt(replaceResp.prettyPrint()); - String successMsg2 = BundleUtil.getStringFromBundle("file.addreplace.success.replace"); - replaceResp.then().assertThat() /** * @todo We have a need to show human readable success messages @@ -621,6 +630,9 @@ public void testForceReplaceAndUpdate() { .add("categories", Json.createArrayBuilder() .add("Data") ); + + UtilIT.sleepForLock(datasetId, null, apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION); + Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, json.build(), apiToken); replaceResp.prettyPrint(); @@ -733,14 +745,11 @@ public void test_007_ReplaceFileUnpublishedAndBadIds() { String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, apiToken); - String errMsgUnpublished = BundleUtil.getStringFromBundle("file.addreplace.error.unpublished_file_cannot_be_replaced"); - replaceResp.then().assertThat() - .statusCode(BAD_REQUEST.getStatusCode()) - .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) - .body("message", Matchers.startsWith(errMsgUnpublished)) - ; - + .body("data.files[0].dataFile.contentType", equalTo("image/png")) + .body("data.files[0].label", equalTo("cc0.png")) + .statusCode(OK.getStatusCode()); + // ------------------------- // Publish dataset // ------------------------- @@ -903,10 +912,11 @@ public void testReplaceFileBadJson() { Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, jsonAsString, apiToken); msgt("replace resp: " + replaceResp.prettyPrint()); - + String parseError = BundleUtil.getStringFromBundle("file.addreplace.error.parsing"); replaceResp.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("status", equalTo(AbstractApiBean.STATUS_OK)); + 
.statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", equalTo(parseError)); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java new file mode 100644 index 00000000000..d1e2ffb2426 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java @@ -0,0 +1,101 @@ + +package edu.harvard.iq.dataverse.api; + +import com.jayway.restassured.RestAssured; +import com.jayway.restassured.path.json.JsonPath; +import com.jayway.restassured.response.Response; +import java.util.logging.Logger; +import static junit.framework.Assert.assertEquals; +import static org.hamcrest.CoreMatchers.equalTo; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * + * @author skraffmi + */ +public class RolesIT { + + private static final Logger logger = Logger.getLogger(AdminIT.class.getCanonicalName()); + + @BeforeClass + public static void setUp() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @Test + public void testCreateDeleteRoles() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + UtilIT.makeSuperUser(username); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + String pathToJsonFile = "scripts/api/data/role-test-addRole.json"; + Response addBuiltinRoleResponse = UtilIT.addBuiltInRole(pathToJsonFile); + addBuiltinRoleResponse.prettyPrint(); + String body = addBuiltinRoleResponse.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + //Try to delete from non-admin api - should fail. 
+ + Response deleteBuiltinRoleResponseError = UtilIT.deleteDataverseRole("testRole", apiToken); + deleteBuiltinRoleResponseError.prettyPrint(); + body = deleteBuiltinRoleResponseError.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + deleteBuiltinRoleResponseError.then().assertThat().body("message", equalTo("May not delete Built In Role Test Role.")); + + + Response deleteBuiltinRoleResponseSucceed = UtilIT.deleteBuiltInRole("testRole"); + deleteBuiltinRoleResponseSucceed.prettyPrint(); + body = deleteBuiltinRoleResponseSucceed.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + //add as dataverse role + Response addDataverseRoleResponse = UtilIT.addDataverseRole(pathToJsonFile, dataverseAlias, apiToken); + addDataverseRoleResponse.prettyPrint(); + body = addBuiltinRoleResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response viewDataverseRoleResponse = UtilIT.viewDataverseRole("testRole", apiToken); + viewDataverseRoleResponse.prettyPrint(); + body = viewDataverseRoleResponse.getBody().asString(); + String idString = JsonPath.from(body).getString("data.id"); + + System.out.print("idString: " + idString); + + Response deleteDataverseRoleResponseBadAlias = UtilIT.deleteDataverseRole("badAlias", apiToken); + deleteDataverseRoleResponseBadAlias.prettyPrint(); + body = deleteDataverseRoleResponseBadAlias.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + deleteDataverseRoleResponseBadAlias.then().assertThat().body("message", equalTo("Dataverse Role with alias badAlias not found.")); + + Long idBad = Long.parseLong(idString) + 10; + Response deleteDataverseRoleResponseBadId = UtilIT.deleteDataverseRoleById(idBad.toString(), apiToken); + deleteDataverseRoleResponseBadId.prettyPrint(); + body = 
deleteDataverseRoleResponseBadId.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + deleteDataverseRoleResponseBadId.then().assertThat().body("message", equalTo("Dataverse Role with ID " + idBad.toString() + " not found.")); + + Response deleteDataverseRoleResponseSucceed = UtilIT.deleteDataverseRoleById(idString, apiToken); + deleteDataverseRoleResponseSucceed.prettyPrint(); + body = deleteDataverseRoleResponseSucceed.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index c5f4da033d1..c7f8986f73a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -977,12 +977,38 @@ static public Response grantRoleOnDataverse(String definitionPoint, String role, .post("api/dataverses/" + definitionPoint + "/assignments?key=" + apiToken); } + public static Response deactivateUser(String username) { + Response deactivateUserResponse = given() + .post("/api/admin/authenticatedUsers/" + username + "/deactivate"); + return deactivateUserResponse; + } + + public static Response deactivateUser(Long userId) { + Response deactivateUserResponse = given() + .post("/api/admin/authenticatedUsers/id/" + userId + "/deactivate"); + return deactivateUserResponse; + } + public static Response deleteUser(String username) { Response deleteUserResponse = given() .delete("/api/admin/authenticatedUsers/" + username + "/"); return deleteUserResponse; } + public static Response deleteUserRoles(String username, String apiToken) { + Response deleteUserResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .post("/api/users/" + username + "/removeRoles"); + return deleteUserResponse; + } + + public static Response getUserTraces(String username, String apiToken) { + Response 
response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/users/" + username + "/traces"); + return response; + } + public static Response reingestFile(Long fileId, String apiToken) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -1201,6 +1227,7 @@ static Response listAuthenticatedUsers(String apiToken) { return response; } + // TODO: Consider removing apiToken since it isn't used by the API itself. static Response getAuthenticatedUser(String userIdentifier, String apiToken) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -2232,7 +2259,7 @@ static Boolean sleepForLock(String idOrPersistentId, String lockType, String api } catch (InterruptedException ex) { Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex); } - } while (lockedForIngest.body().prettyPrint().contains(lockType)); + } while (lockedForIngest.body().jsonPath().getList("data").size() >0 && (lockType==null || lockedForIngest.body().prettyPrint().contains(lockType))); return i <= duration; @@ -2495,6 +2522,66 @@ static Response addBannerMessage(String pathToJsonFile) { return addBannerMessageResponse; } + static Response addBuiltInRole(String pathToJsonFile) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addBannerMessageResponse = given() + .body(jsonIn) + .contentType("application/json") + .post("/api/admin/roles"); + return addBannerMessageResponse; + } + + static Response deleteBuiltInRole(String roleAlias) { + + Response addBannerMessageResponse = given() + .delete("/api/admin/roles/:alias?alias=" +roleAlias); + return addBannerMessageResponse; + } + + static Response addDataverseRole(String pathToJsonFile, String dvAlias, String apiToken) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonIn) + .contentType("application/json") + .post("/api/roles?dvo="+dvAlias); + return addBannerMessageResponse; + 
} + + static Response deleteDataverseRole( String roleAlias, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/roles/:alias?alias="+roleAlias); + return addBannerMessageResponse; + } + + static Response deleteDataverseRoleById( String id, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/roles/"+id); + return addBannerMessageResponse; + } + + static Response viewDataverseRole( String roleAlias, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/roles/:alias?alias="+roleAlias); + return addBannerMessageResponse; + } + + static Response viewDataverseRoleById( String id, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/roles/"+id); + return addBannerMessageResponse; + } + static Response getBannerMessages() { Response getBannerMessagesResponse = given() diff --git a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java index a57f73bd7a7..32f6a487994 100644 --- a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java @@ -1,32 +1,65 @@ package edu.harvard.iq.dataverse.branding; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.UnsupportedEncodingException; import java.util.Arrays; import javax.mail.internet.AddressException; import javax.mail.internet.InternetAddress; import static org.junit.Assert.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; +import 
org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +@TestMethodOrder(OrderAnnotation.class) public class BrandingUtilTest { + @Mock + DataverseServiceBean dataverseSvc; + @Mock + SettingsServiceBean settingsSvc; + @Test + @Order(1) public void testGetInstallationBrandName() { System.out.println("testGetInstallationBrandName"); - assertEquals("LibraScholar", BrandingUtil.getInstallationBrandName("LibraScholar")); - assertEquals(null, BrandingUtil.getInstallationBrandName(null));// misconfiguration to set to null - assertEquals("", BrandingUtil.getInstallationBrandName(""));// misconfiguration to set to empty string + + Mockito.when(settingsSvc.getValueForKey(SettingsServiceBean.Key.InstallationName)).thenReturn(null); + + //And configure the mock DataverseService to pretend the root collection name is as shown + Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn("LibraScholar"); + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + + assertEquals("LibraScholar", BrandingUtil.getInstallationBrandName()); //Defaults to root collection name + + Mockito.when(settingsSvc.getValueForKey(SettingsServiceBean.Key.InstallationName)).thenReturn("NotLibraScholar"); + + assertEquals("NotLibraScholar", BrandingUtil.getInstallationBrandName()); //uses setting } @Test public void testGetSupportTeamName() throws AddressException, UnsupportedEncodingException { System.out.println("testGetSupportTeamName"); - assertEquals("Support", BrandingUtil.getSupportTeamName(null, null)); - assertEquals("Support", BrandingUtil.getSupportTeamName(null, "")); - assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(null, "LibraScholar")); - assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu"), 
"LibraScholar")); - assertEquals("LibraScholar Support Team", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", "LibraScholar Support Team"), "LibraScholar")); - assertEquals("", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", ""), "LibraScholar")); // misconfiguration to set to empty string + Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn(null); + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + assertEquals("Support", BrandingUtil.getSupportTeamName(null)); + Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn(""); + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + assertEquals("Support", BrandingUtil.getSupportTeamName(null)); + Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn("LibraScholar"); + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(null)); + assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu"))); + assertEquals("LibraScholar Support Team", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", "LibraScholar Support Team"))); + assertEquals("", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", ""))); // misconfiguration to set to empty string } @Test @@ -103,7 +136,9 @@ public void testEmailSubject() { @Test public void testGetContactHeader() { System.out.println("testGetContactHeader"); - assertEquals("Contact Support", BrandingUtil.getContactHeader(null, null)); + Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn(null); + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + assertEquals("Contact Support", BrandingUtil.getContactHeader(null)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java 
b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java index ea39bb1bc77..243285e69ab 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java @@ -9,13 +9,19 @@ import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.engine.TestCommandContext; import edu.harvard.iq.dataverse.engine.TestDataverseEngine; +import edu.harvard.iq.dataverse.engine.TestEntityManager; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.mocks.MocksFactory; +import javax.persistence.EntityManager; +import javax.persistence.TypedQuery; import static org.junit.Assert.assertTrue; import org.junit.Before; import org.junit.Test; +import org.mockito.Matchers; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; /** * @@ -36,6 +42,12 @@ public DataverseRole save(DataverseRole aRole) { } }; } + + @Override + public EntityManager em() { + return new LocalTestEntityManager(); + + } }); @Before @@ -94,4 +106,33 @@ public void testGuestUsersCantAddRoles() throws CommandException { engine.submit(sut); } + private class LocalTestEntityManager extends TestEntityManager { + + @Override + public T merge(T entity) { + return entity; + } + + @Override + public void persist(Object entity) { + // + } + + @Override + public void flush() { + //nothing to do here + } + + @Override + public TypedQuery createNamedQuery(String name, Class resultClass) { + //Mocking a query to return no results when + //checking for existing role in DB + TypedQuery mockedQuery = mock(TypedQuery.class); + when(mockedQuery.setParameter(Matchers.anyString(), Matchers.anyObject())).thenReturn(mockedQuery); + 
when(mockedQuery.getSingleResult()).thenReturn(null); + return mockedQuery; + } + + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java index e70e375ace9..3a5ef60a0e4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc; @@ -17,9 +16,7 @@ import java.nio.file.Paths; import java.time.Year; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.Set; import javax.json.Json; import javax.json.JsonObject; @@ -145,12 +142,11 @@ public void testCitation() throws Exception { version.setVersionState(DatasetVersion.VersionState.DRAFT); Dataset dataset = new Dataset(); version.setDataset(dataset); - Dataverse dataverse = new Dataverse(); - dataset.setOwner(dataverse); + dataset.setOwner(new Dataverse()); String citation = version.getCitation(); System.out.println("citation: " + citation); int currentYear = Year.now().getValue(); - assertEquals("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", DRAFT VERSION", citation); + assertEquals("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", LibraScholar, DRAFT VERSION", citation); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java index a76ce8475f2..dce34385274 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java @@ -1,30 +1,42 @@ package edu.harvard.iq.dataverse.export.ddi; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.xml.XmlPrinter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; -import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.logging.Logger; import edu.harvard.iq.dataverse.util.xml.html.HtmlPrinter; -import org.junit.Test; -import javax.json.JsonObject; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; import static org.junit.Assert.*; +@ExtendWith(MockitoExtension.class) +@TestMethodOrder(OrderAnnotation.class) public class DdiExportUtilTest { private static final Logger logger = Logger.getLogger(DdiExportUtilTest.class.getCanonicalName()); + @Mock + SettingsServiceBean settingsSvc; + @Test + @Order(1) public void testJson2DdiNoFiles() throws Exception { + Mockito.when(settingsSvc.isTrueForKey(SettingsServiceBean.Key.ExportInstallationAsDistributorOnlyWhenNotSet, false)).thenReturn(false); + DdiExportUtil.injectSettingsService(settingsSvc); File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json"); String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath()))); File ddiFile = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml"); diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java 
b/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java index c886ef0ba69..9ceca24aadf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java @@ -36,7 +36,7 @@ void testEmptyIfNoSettingsService() { @Test @Order(2) void testDataRetrieval() { - Set settings = new HashSet<>(Arrays.asList(new Setting("FooBar", "hello"), new Setting("FooBarI18N", "de", "hallo"))); + Set settings = new HashSet<>(Arrays.asList(new Setting(":FooBar", "hello"), new Setting(":FooBarI18N", "de", "hallo"))); Mockito.when(settingsSvc.listAll()).thenReturn(settings); DbSettingConfigSource.injectSettingsService(settingsSvc); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java index af0c414d356..4363d1dd3c9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java @@ -1,19 +1,36 @@ package edu.harvard.iq.dataverse.util; +import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.UserNotification; +import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + import static org.junit.Assert.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; import org.junit.Before; +@ExtendWith(MockitoExtension.class) +@TestMethodOrder(OrderAnnotation.class) public class MailUtilTest { - private String rootDataverseName; UserNotification userNotification = new 
UserNotification(); + @Mock + DataverseServiceBean dataverseSvc; + @Mock + SettingsServiceBean settingsSvc; + @Before public void setUp() { - rootDataverseName = "LibraScholar"; userNotification = new UserNotification(); + } @Test @@ -31,93 +48,99 @@ public void testParseSystemAddress() { } @Test + @Order(1) public void testSubjectCreateAccount() { + Mockito.when(settingsSvc.getValueForKey(SettingsServiceBean.Key.InstallationName)).thenReturn(null); + //And configure the mock DataverseService to pretend the root collection name is as shown + Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn("LibraScholar"); + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + userNotification.setType(UserNotification.Type.CREATEACC); - assertEquals("LibraScholar: Your account has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your account has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectAssignRole() { userNotification.setType(UserNotification.Type.ASSIGNROLE); - assertEquals("LibraScholar: You have been assigned a role", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: You have been assigned a role", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectCreateDataverse() { userNotification.setType(UserNotification.Type.CREATEDV); - assertEquals("LibraScholar: Your dataverse has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataverse has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectRevokeRole() { userNotification.setType(UserNotification.Type.REVOKEROLE); - assertEquals("LibraScholar: Your role has been revoked", 
MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your role has been revoked", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectRequestFileAccess() { userNotification.setType(UserNotification.Type.REQUESTFILEACCESS); - assertEquals("LibraScholar: Access has been requested for a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Access has been requested for a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectGrantFileAccess() { userNotification.setType(UserNotification.Type.GRANTFILEACCESS); - assertEquals("LibraScholar: You have been granted access to a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: You have been granted access to a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectRejectFileAccess() { userNotification.setType(UserNotification.Type.REJECTFILEACCESS); - assertEquals("LibraScholar: Your request for access to a restricted file has been rejected", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your request for access to a restricted file has been rejected", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectCreateDataset() { userNotification.setType(UserNotification.Type.CREATEDS); - assertEquals("LibraScholar: Your dataset has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectSubmittedDS() { 
userNotification.setType(UserNotification.Type.SUBMITTEDDS); - assertEquals("LibraScholar: Your dataset has been submitted for review", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been submitted for review", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectPublishedDS() { userNotification.setType(UserNotification.Type.PUBLISHEDDS); - assertEquals("LibraScholar: Your dataset has been published", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been published", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectReturnedDS() { userNotification.setType(UserNotification.Type.RETURNEDDS); - assertEquals("LibraScholar: Your dataset has been returned", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been returned", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectChecksumFail() { userNotification.setType(UserNotification.Type.CHECKSUMFAIL); - assertEquals("LibraScholar: Your upload failed checksum validation", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your upload failed checksum validation", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectFileSystemImport() { userNotification.setType(UserNotification.Type.FILESYSTEMIMPORT); //TODO SEK add a dataset version to get the Dataset Title which is actually used in the subject now - assertEquals("Dataset LibraScholar has been successfully uploaded and verified", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName , null)); + assertEquals("Dataset LibraScholar has been 
successfully uploaded and verified", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectChecksumImport() { userNotification.setType(UserNotification.Type.CHECKSUMIMPORT); - assertEquals("LibraScholar: Your file checksum job has completed", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your file checksum job has completed", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectConfirmEmail() { userNotification.setType(UserNotification.Type.CONFIRMEMAIL); - assertEquals("LibraScholar: Verify your email address", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Verify your email address", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index f8f0fdc7554..32f7b86a9c9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -201,7 +201,7 @@ public void testDatasetContactOutOfBoxNoPrivacy() { fields.add(datasetContactField); SettingsServiceBean nullServiceBean = null; - JsonPrinter.setSettingsService(nullServiceBean); + JsonPrinter.injectSettingsService(nullServiceBean); JsonObject jsonObject = JsonPrinter.json(block, fields).build(); assertNotNull(jsonObject); @@ -242,7 +242,7 @@ public void testDatasetContactWithPrivacy() { datasetContactField.setDatasetFieldCompoundValues(vals); fields.add(datasetContactField); - JsonPrinter.setSettingsService(new MockSettingsSvc()); + JsonPrinter.injectSettingsService(new MockSettingsSvc()); JsonObject jsonObject = JsonPrinter.json(block, fields).build(); assertNotNull(jsonObject); diff --git 
a/tests/jenkins/ec2/Jenkinsfile b/tests/jenkins/ec2/Jenkinsfile index 4a16f865886..7c35da867c7 100644 --- a/tests/jenkins/ec2/Jenkinsfile +++ b/tests/jenkins/ec2/Jenkinsfile @@ -24,9 +24,9 @@ pipeline { env.EC2_REPO = env.GIT_URL } } - sh '/usr/bin/curl -O https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/master/ec2/ec2-create-instance.sh' + sh '/usr/bin/curl -O https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/develop/ec2/ec2-create-instance.sh' sh '/bin/rm -f groupvars.yml' - sh '/usr/bin/curl -o groupvars.yml https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/master/tests/group_vars/jenkins.yml' + sh '/usr/bin/curl -o groupvars.yml https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/develop/tests/group_vars/jenkins.yml' sh '/usr/bin/bash ec2-create-instance.sh -b ${CHANGE_BRANCH} -r ${EC2_REPO} -t jenkins_delete_me -l target -g groupvars.yml -s t3a.large -d' } } From 0cf73590bd74a3f771e3104fa2d49abab9350bd9 Mon Sep 17 00:00:00 2001 From: mderuijter Date: Wed, 28 Apr 2021 10:01:43 +0200 Subject: [PATCH 017/183] added license info to publish dataset popup dialog --- src/main/java/propertyFiles/Bundle.properties | 4 +- src/main/resources/META-INF/persistence.xml | 2 +- src/main/webapp/dataset.xhtml | 3453 ++++++++++------- src/main/webapp/resources/css/structure.css | 1 + 4 files changed, 2001 insertions(+), 1459 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index ab5352c8efd..e7f12971a6a 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1635,7 +1635,7 @@ file.dataFilesTab.metadata.addBtn=Add + Edit Metadata file.dataFilesTab.terms.header=Terms file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use 
-file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver +file.dataFilesTab.terms.list.termsOfUse.waiver=License/DUA file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset. file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication" file.cc0.icon.alttxt=Creative Commons CC0 1.0 Public Domain Dedication icon @@ -1667,7 +1667,7 @@ file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Ac file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. -file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access for Restricted Files file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can gain access to the restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. 
diff --git a/src/main/resources/META-INF/persistence.xml b/src/main/resources/META-INF/persistence.xml index 45552f36939..fe80cbfefee 100644 --- a/src/main/resources/META-INF/persistence.xml +++ b/src/main/resources/META-INF/persistence.xml @@ -11,7 +11,7 @@ - + diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 0198e303b06..fdcd08ab24f 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -11,684 +11,868 @@ xmlns:cc="http://java.sun.com/jsf/composite" xmlns:o="http://omnifaces.org/ui" xmlns:of="http://omnifaces.org/functions"> - - - - - - - - - - - + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    -
    -
    -
    - #{DatasetPage.datasetVersionUI.title.value} -
    - - - - - - - - -
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    +
    +
    + #{DatasetPage.datasetVersionUI.title.value} +
    + + + + + + + +
    +
    - -
    -
    - - - - - - - -
    + +
    +
    + + + + + + + +
    -
    - -
    - - + - - + +
    -
    - -
    -
    - #{bundle['metrics.dataset.title']} - - - - - - -
    -
    - -
    - - - - -
    - -
    - - - +
    + +
    +
    + #{bundle['metrics.dataset.title']} + -
    - -
    - - + data-toggle="tooltip" data-placement="auto top" + data-trigger="hover" + data-original-title="#{bundle['metrics.dataset.tip.default']}"> + + + + +
    +
    + +
    + + + + +
    + +
    + + + + +
    + +
    + + + + +
    + +
    + + + - -
    - -
    - - - - - - -
    + +
    -
    + +
    - -
    - + +
    + - -
    -
    -
    #{bundle['dataset.deaccession.reason']}
    -

    #{DatasetPage.workingVersion.versionNote}

    - -

    #{bundle['dataset.beAccessedAt']} #{DatasetPage.workingVersion.archiveNote}

    -
    -
    + +
    +
    +
    #{bundle['dataset.deaccession.reason']}
    +

    #{DatasetPage.workingVersion.versionNote}

    + +

    #{bundle['dataset.beAccessedAt']} #{DatasetPage.workingVersion.archiveNote} +

    +
    - +
    + - - - +
    +
    - - - +
    + + + - - - -
    - -
    - -

    #{DatasetPage.dataset.owner.name}

    - -
    - -

    - #{bundle['dataset.host.tip']} -

    -
    - - - - - - - - - - - - - - -
    -
    -
    -
    -
    - -
    + + + +
    + +
    + +

    + #{DatasetPage.dataset.owner.name}

    + +
    +

    - #{bundle['dataset.template.tip']} + #{bundle['dataset.host.tip']}

    - - - - - -
    +
    + + + + + + + + + + + + + + +
    +
    -
    - +
    +
    + +
    +

    + #{bundle['dataset.template.tip']} +

    + + + + +
    - - - - - - - - - - - -
    - - - -
    - - - -
    - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + - - - - - + + + + -
    - + - - #{bundle['file.dataFilesTab.metadata.addBtn']} - -
    + + #{bundle['file.dataFilesTab.metadata.addBtn']} + +
    - - -
    + +
    - - - - - - - - - - - - - - - - - - - - - - +
    + + + + + + + + +
    + + + + + + + + + + + + +
    +
    + + +
    + #{bundle['file.metadataTip']} +
    + +
    + + + + + + +
    + + + + + + + + + + + + + +

    #{bundle['dataset.share.datasetShare.tip']}

    +
    +
    +
    - - -
    - #{bundle['file.metadataTip']} + + +

    + + + + +

    +
    +
    + #{bundle['metrics.citations.dialog.empty']} +
    + + +
    - -
    - - - - - - +
    +
    - - - - - - - - - - - + + +

    + #{bundle['dataset.noValidSelectedFilesForDownload']}

    + +

    #{bundle['dataset.requestAccessToRestrictedFiles']}

    - -

    #{bundle['dataset.share.datasetShare.tip']}

    -
    -
    - -
    -
    - +
    + +
    +
    + +

    + #{bundle['dataset.mixedSelectedFilesForDownload']}

    + + + + + + +
    #{resFile.label}
    +
    +

    #{bundle['dataset.downloadUnrestricted']}

    + + + +
    +
    + +

    + #{bundle['dataset.mixedSelectedFilesForDownload']}

    + + + + + + +
    #{resFile.label}
    +
    +

    #{bundle['dataset.downloadUnrestricted']}

    + + + +
    +
    + +

    + #{bundle['file.deleteDialog.tip']}

    +
    + + +
    +
    + +

    + #{bundle['file.deleteDraftDialog.tip']}

    +
    + + +
    +
    + +

    - +

    -
    -
    - #{bundle['metrics.citations.dialog.empty']} +
    +
    +

    #{bundle['dataset.privateurl.absent']}

    - - - -
    -
    - -
    - - -

    #{bundle['dataset.noValidSelectedFilesForDownload']}

    - -

    #{bundle['dataset.requestAccessToRestrictedFiles']}

    -
    -
    - -
    -
    - -

    #{bundle['dataset.mixedSelectedFilesForDownload']}

    - - - - - - -
    #{resFile.label}
    -
    -

    #{bundle['dataset.downloadUnrestricted']}

    - - - -
    -
    - -

    #{bundle['dataset.mixedSelectedFilesForDownload']}

    - - - - - - -
    #{resFile.label}
    -
    -

    #{bundle['dataset.downloadUnrestricted']}

    - - - -
    -
    - -

    #{bundle['file.deleteDialog.tip']}

    -
    - - -
    -
    - -

    #{bundle['file.deleteDraftDialog.tip']}

    -
    - - -
    -
    - - -

    - - - - -

    -
    -
    -

    #{bundle['dataset.privateurl.absent']}

    -
    -
    -

    #{bundle['dataset.privateurl.createdSuccess']}

    - -

    - #{privateUrlLink} -

    -
    -
    -
    - - - - -
    -
    - -

    #{bundle['dataset.privateurl.cannotCreate']}

    -
    - +

    + #{bundle['dataset.privateurl.createdSuccess']}

    + +

    + #{privateUrlLink} +

    -
    -
    - -

    #{bundle['dataset.privateurl.disableConfirmationText']}

    -
    - -
    -
    - -

    #{bundle['file.deleteFileDialog.multiple.immediate']}

    - -

    #{bundle['file.deleteFileDialog.failed.tip']}

    -
    - - -
    -
    - -

    #{bundle['dataset.compute.computeBatchRestricted']}

    -
    -
    -
    - - - - - -

    #{bundle['file.deaccessionDialog.tip']}

    -
    - - - - - -
    -
    - - - - - - - - - - - - -
    -
    - - - - - -
    -
    - - - - -
    -
    - - - - -
    -
    -
    - -

    #{bundle['file.deaccessionDialog.deaccession.tip']}

    + +

    + #{bundle['dataset.privateurl.cannotCreate']}

    - - -
    -
    - -

    #{bundle['file.deaccessionDialog.deaccessionDataset.tip']}

    -
    - - -
    -
    -