From d1ad4f0e2bf2911ea4d94e39c604fdf8215791c4 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 9 Jan 2024 13:23:52 -0500 Subject: [PATCH 01/49] #10218 OdumInstitute is now uncch-rdmc --- .github/workflows/guides_build_sphinx.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/guides_build_sphinx.yml b/.github/workflows/guides_build_sphinx.yml index 992f30f2872..86b59b11d35 100644 --- a/.github/workflows/guides_build_sphinx.yml +++ b/.github/workflows/guides_build_sphinx.yml @@ -11,6 +11,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - uses: OdumInstitute/sphinx-action@master + - uses: uncch-rdmc/sphinx-action@master with: docs-folder: "doc/sphinx-guides/" From fd9f051540f0aab5db340a01d74bd38d9a0dee27 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 9 Jan 2024 13:25:10 -0500 Subject: [PATCH 02/49] #10218 OdumInstitute is now uncch-rdmc --- doc/sphinx-guides/source/developers/version-control.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index 12f3d5b81fd..ce10a159b42 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -274,16 +274,16 @@ By default, when a pull request is made from a fork, "Allow edits from maintaine This is a nice feature of GitHub because it means that the core dev team for the Dataverse Project can make small (or even large) changes to a pull request from a contributor to help the pull request along on its way to QA and being merged. -GitHub documents how to make changes to a fork at https://help.github.com/articles/committing-changes-to-a-pull-request-branch-created-from-a-fork/ but as of this writing the steps involve making a new clone of the repo. This works but you might find it more convenient to add a "remote" to your existing clone. 
The example below uses the fork at https://github.com/OdumInstitute/dataverse and the branch ``4709-postgresql_96`` but the technique can be applied to any fork and branch: +GitHub documents how to make changes to a fork at https://help.github.com/articles/committing-changes-to-a-pull-request-branch-created-from-a-fork/ but as of this writing the steps involve making a new clone of the repo. This works but you might find it more convenient to add a "remote" to your existing clone. The example below uses the fork at https://github.com/uncch-rdmc/dataverse and the branch ``4709-postgresql_96`` but the technique can be applied to any fork and branch: .. code-block:: bash - git remote add OdumInstitute git@github.com:OdumInstitute/dataverse.git - git fetch OdumInstitute + git remote add uncch-rdmc git@github.com:uncch-rdmc/dataverse.git + git fetch uncch-rdmc git checkout 4709-postgresql_96 vim path/to/file.txt git commit - git push OdumInstitute 4709-postgresql_96 + git push uncch-rdmc 4709-postgresql_96 ---- From 8a6744fc808eaa7abaf5ac28d6405333f4d7dc2e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 22:27:42 +0000 Subject: [PATCH 03/49] Bump actions/download-artifact from 3 to 4.1.7 in /.github/workflows Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 3 to 4.1.7. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v3...v4.1.7) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy_beta_testing.yml | 2 +- .github/workflows/maven_unit_test.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 028f0140cc9..efe3e0d8621 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -45,7 +45,7 @@ jobs: - uses: actions/checkout@v3 - name: Download war artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4.1.7 with: name: built-app path: ./ diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 4ad4798bc64..fe335f5f45a 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -105,7 +105,7 @@ jobs: cache: maven # Get the build output from the unit test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4.1.7 with: name: java-artifacts - run: | @@ -137,7 +137,7 @@ jobs: cache: maven # Get the build output from the integration test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4.1.7 with: name: java-reportdir - run: tar -xvf java-reportdir.tar From a242d14c75db789044792b5f5649de6aeed541af Mon Sep 17 00:00:00 2001 From: jo-pol Date: Mon, 14 Oct 2024 10:56:19 +0200 Subject: [PATCH 04/49] mime type of m4a uploaded in zip --- .../propertyFiles/MimeTypeDetectionByFileExtension.properties | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties index 630539d912e..05e61a40c17 100644 --- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties +++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties @@ -15,6 +15,7 @@ m=text/x-matlab mat=application/matlab-mat md=text/markdown mp3=audio/mp3 
+m4a=audio/x-m4a nii=image/nii nc=application/netcdf ods=application/vnd.oasis.opendocument.spreadsheet From cce22a281465959d776cd64c759b728a19cb3721 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 14 Oct 2024 11:54:53 +0100 Subject: [PATCH 05/49] Changed: users/token GET endpoint to support all available auth mechanisms --- .../edu/harvard/iq/dataverse/api/Users.java | 21 ++++++++++++------- .../edu/harvard/iq/dataverse/api/UsersIT.java | 3 +-- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index c1a7c95dbff..ecf7839e616 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -137,15 +137,20 @@ public Response deleteToken(@Context ContainerRequestContext crc) { @Path("token") @AuthRequired @GET - public Response getTokenExpirationDate() { - ApiToken token = authSvc.findApiToken(getRequestApiKey()); - - if (token == null) { - return notFound("Token " + getRequestApiKey() + " not found."); + public Response getTokenExpirationDate(@Context ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + ApiToken token = authSvc.findApiTokenByUser(user); + + if (token == null) { + return notFound("Token not found."); + } + + return ok(String.format("Token %s expires on %s", token.getTokenString(), token.getExpireTime())); + + } catch (WrappedResponse wr) { + return wr.getResponse(); } - - return ok("Token " + getRequestApiKey() + " expires on " + token.getExpireTime()); - } @Path("token/recreate") diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java index 1003c1a990c..ce3b8bf75ff 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java @@ -405,7 +405,6 @@ public void 
testAPITokenEndpoints() { */ createUser = UtilIT.createRandomUser(); - String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.prettyPrint(); @@ -428,7 +427,7 @@ public void testAPITokenEndpoints() { getExpiration = UtilIT.getTokenExpiration(tokenForPrivateUrlUser); getExpiration.prettyPrint(); getExpiration.then().assertThat() - .statusCode(NOT_FOUND.getStatusCode()); + .statusCode(UNAUTHORIZED.getStatusCode()); createUser = UtilIT.createRandomUser(); assertEquals(OK.getStatusCode(), createUser.getStatusCode()); From 129c80c768a7c1e4fa2ec55dcacade723de24f94 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 14 Oct 2024 12:09:40 +0100 Subject: [PATCH 06/49] Added: release notes for #10914 --- doc/release-notes/10914-users-token-api-credentials.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10914-users-token-api-credentials.md diff --git a/doc/release-notes/10914-users-token-api-credentials.md b/doc/release-notes/10914-users-token-api-credentials.md new file mode 100644 index 00000000000..888214481f6 --- /dev/null +++ b/doc/release-notes/10914-users-token-api-credentials.md @@ -0,0 +1,3 @@ +Extended the users/token GET endpoint to support any auth mechanism for retrieving the token information. + +Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. 
From 7f5b0bea1670b5c2ec84651b45820211b9df2988 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 15 Oct 2024 13:34:34 +0100 Subject: [PATCH 07/49] Added: updateDataverse endpoint with addDataverse refactoring --- .../harvard/iq/dataverse/api/Dataverses.java | 160 ++++++++++---- .../command/impl/CreateDataverseCommand.java | 6 +- .../command/impl/UpdateDataverseCommand.java | 204 ++++++++++-------- 3 files changed, 231 insertions(+), 139 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0ee146ed99b..b85ee0afc8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -127,73 +127,145 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { @Path("{identifier}") public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; - JsonObject newDataverseJson; try { - newDataverseJson = JsonUtil.getJsonObject(body); - newDataverse = jsonParser().parseDataverse(newDataverseJson); + newDataverse = parseAndValidateDataverse(body); } catch (JsonParsingException jpe) { - logger.log(Level.SEVERE, "Json: {0}", body); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { - logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); } try { - JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks"); - List inputLevels = null; - List metadataBlocks = null; - List facetList = null; - if (metadataBlocksJson != null) { - JsonArray inputLevelsArray = 
metadataBlocksJson.getJsonArray("inputLevels"); - inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null; - - JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - metadataBlocks = metadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null; - - JsonArray facetIdsArray = metadataBlocksJson.getJsonArray("facetIds"); - facetList = facetIdsArray != null ? parseFacets(facetIdsArray) : null; - } + List inputLevels = parseInputLevels(body, newDataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); if (!parentIdtf.isEmpty()) { Dataverse owner = findDataverseOrDie(parentIdtf); newDataverse.setOwner(owner); } - // set the dataverse - contact relationship in the contacts - for (DataverseContact dc : newDataverse.getDataverseContacts()) { - dc.setDataverse(newDataverse); - } - AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facets, inputLevels, metadataBlocks)); return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); - } catch (WrappedResponse ww) { - - String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); - if (!error.isEmpty()) { - logger.log(Level.INFO, error); - return ww.refineResponse(error); - } - return ww.getResponse(); + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); } catch (EJBException ex) { - Throwable cause = ex; - StringBuilder sb = new StringBuilder(); - sb.append("Error creating dataverse."); - while (cause.getCause() != null) { - cause = cause.getCause(); - if (cause instanceof ConstraintViolationException) { - 
sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); - } - } - logger.log(Level.SEVERE, sb.toString()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + sb.toString()); + return handleEJBException(ex, "Error creating dataverse."); } catch (Exception ex) { logger.log(Level.SEVERE, "Error creating dataverse", ex); return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + ex.getMessage()); + } + } + + @PUT + @AuthRequired + @Path("{identifier}") + public Response updateDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String identifier) { + Dataverse originalDataverse; + try { + originalDataverse = findDataverseOrDie(identifier); + } catch (WrappedResponse e) { + return e.getResponse(); + } + + Dataverse updatedDataverse; + try { + updatedDataverse = parseAndValidateDataverse(body); + } catch (JsonParsingException jpe) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); + } catch (JsonParseException ex) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); + } + + try { + List inputLevels = parseInputLevels(body, originalDataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); + + updatedDataverse.setId(originalDataverse.getId()); + AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); + updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); + return created("/dataverses/" + updatedDataverse.getAlias(), json(updatedDataverse)); + + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); + } catch (EJBException ex) { + return handleEJBException(ex, "Error updating dataverse."); + } catch (Exception ex) { 
+ logger.log(Level.SEVERE, "Error updating dataverse", ex); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Error updating dataverse: " + ex.getMessage()); + } + } + + private Dataverse parseAndValidateDataverse(String body) throws JsonParsingException, JsonParseException { + try { + JsonObject dataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverse(dataverseJson); + } catch (JsonParsingException jpe) { + logger.log(Level.SEVERE, "Json: {0}", body); + throw jpe; + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + throw ex; + } + } + + private List parseInputLevels(String body, Dataverse dataverse) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); + return inputLevelsArray != null ? parseInputLevels(inputLevelsArray, dataverse) : null; + } + + private List parseMetadataBlocks(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray metadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); + return metadataBlocksArray != null ? parseNewDataverseMetadataBlocks(metadataBlocksArray) : null; + } + + private List parseFacets(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray facetsArray = metadataBlocksJson.getJsonArray("facetIds"); + return facetsArray != null ? 
parseFacets(facetsArray) : null; + } + + private JsonObject getMetadataBlocksJson(String body) { + JsonObject dataverseJson = JsonUtil.getJsonObject(body); + return dataverseJson.getJsonObject("metadataBlocks"); + } + + private Response handleWrappedResponse(WrappedResponse ww) { + String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); + if (!error.isEmpty()) { + logger.log(Level.INFO, error); + return ww.refineResponse(error); + } + return ww.getResponse(); + } + + private Response handleEJBException(EJBException ex, String action) { + Throwable cause = ex; + StringBuilder sb = new StringBuilder(); + sb.append(action); + while (cause.getCause() != null) { + cause = cause.getCause(); + if (cause instanceof ConstraintViolationException) { + sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); + } } + logger.log(Level.SEVERE, sb.toString()); + return error(Response.Status.INTERNAL_SERVER_ERROR, sb.toString()); } private List parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 489b36e7cef..2ce16a86297 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -67,7 +67,6 @@ public CreateDataverseCommand(Dataverse created, @Override public Dataverse execute(CommandContext ctxt) throws CommandException { - Dataverse owner = created.getOwner(); if (owner == null) { if (ctxt.dataverses().isRootDataverseExists()) { @@ -75,6 +74,10 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } + for (DataverseContact dc : created.getDataverseContacts()) { + dc.setDataverse(created); + } + if (metadataBlocks != null && 
!metadataBlocks.isEmpty()) { created.setMetadataBlockRoot(true); created.setMetadataBlocks(metadataBlocks); @@ -194,5 +197,4 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { public boolean onSuccess(CommandContext ctxt, Object r) { return ctxt.dataverses().index((Dataverse) r); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index bdb69dc918f..b1670a264bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -1,13 +1,11 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.Dataverse.DataverseType; -import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; import edu.harvard.iq.dataverse.authorization.Permission; import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; + import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -21,121 +19,141 @@ /** * Update an existing dataverse. 
+ * * @author michael */ -@RequiredPermissions( Permission.EditDataverse ) +@RequiredPermissions(Permission.EditDataverse) public class UpdateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); - - private final Dataverse editedDv; - private final List facetList; + private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); + + private final Dataverse editedDv; + private final List facetList; private final List featuredDataverseList; private final List inputLevelList; + private final List metadataBlocks; private boolean datasetsReindexRequired = false; - public UpdateDataverseCommand(Dataverse editedDv, List facetList, List featuredDataverseList, - DataverseRequest aRequest, List inputLevelList ) { - super(aRequest, editedDv); - this.editedDv = editedDv; - // add update template uses this command but does not - // update facet list or featured dataverses - if (facetList != null){ - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (featuredDataverseList != null){ - this.featuredDataverseList = new ArrayList<>(featuredDataverseList); - } else { - this.featuredDataverseList = null; - } - if (inputLevelList != null){ - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - } - - @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - logger.fine("Entering update dataverse command"); - - // Perform any optional validation steps, if defined: - if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { - // For admins, an override of the external validation step may be enabled: - if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { - String executable = ctxt.systemConfig().getDataverseValidationExecutable(); - boolean result = validateDataverseMetadataExternally(editedDv, 
executable, getRequest()); - - if (!result) { - String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); - throw new IllegalCommandException(rejectionMessage, this); - } + public UpdateDataverseCommand(Dataverse editedDv, + List facetList, + List featuredDataverseList, + DataverseRequest aRequest, + List inputLevelList) { + this(editedDv, facetList, featuredDataverseList, aRequest, inputLevelList, null); + } + + public UpdateDataverseCommand(Dataverse editedDv, + List facetList, + List featuredDataverseList, + DataverseRequest aRequest, + List inputLevelList, + List metadataBlocks) { + super(aRequest, editedDv); + this.editedDv = editedDv; + // add update template uses this command but does not + // update facet list or featured dataverses + if (facetList != null) { + this.facetList = new ArrayList<>(facetList); + } else { + this.facetList = null; + } + if (featuredDataverseList != null) { + this.featuredDataverseList = new ArrayList<>(featuredDataverseList); + } else { + this.featuredDataverseList = null; + } + if (inputLevelList != null) { + this.inputLevelList = new ArrayList<>(inputLevelList); + } else { + this.inputLevelList = null; + } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + logger.fine("Entering update dataverse command"); + + // Perform any optional validation steps, if defined: + if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { + // For admins, an override of the external validation step may be enabled: + if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { + String executable = ctxt.systemConfig().getDataverseValidationExecutable(); + boolean result = validateDataverseMetadataExternally(editedDv, executable, getRequest()); + + if (!result) { + String rejectionMessage = 
ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); + throw new IllegalCommandException(rejectionMessage, this); } } - - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); - - DataverseType oldDvType = oldDv.getDataverseType(); - String oldDvAlias = oldDv.getAlias(); - String oldDvName = oldDv.getName(); - oldDv = null; - - Dataverse result = ctxt.dataverses().save(editedDv); - - if ( facetList != null ) { - ctxt.facets().deleteFacetsFor(result); - int i=0; - for ( DatasetFieldType df : facetList ) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } + } + + for (DataverseContact dc : editedDv.getDataverseContacts()) { + dc.setDataverse(editedDv); + } + + Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); + + DataverseType oldDvType = oldDv.getDataverseType(); + String oldDvAlias = oldDv.getAlias(); + String oldDvName = oldDv.getName(); + + Dataverse result = ctxt.dataverses().save(editedDv); + + if (facetList != null) { + ctxt.facets().deleteFacetsFor(result); + int i = 0; + for (DatasetFieldType df : facetList) { + ctxt.facets().create(i++, df.getId(), result.getId()); } - if ( featuredDataverseList != null ) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); - int i=0; - for ( Object obj : featuredDataverseList ) { - Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); - } + } + if (featuredDataverseList != null) { + ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); + int i = 0; + for (Object obj : featuredDataverseList) { + Dataverse dv = (Dataverse) obj; + ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); } - if ( inputLevelList != null ) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(result); - for ( DataverseFieldTypeInputLevel obj : inputLevelList ) { - ctxt.fieldTypeInputLevels().create(obj); - } + } + if (inputLevelList != null) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(result); + for (DataverseFieldTypeInputLevel obj 
: inputLevelList) { + ctxt.fieldTypeInputLevels().create(obj); } - - // We don't want to reindex the children datasets unnecessarily: - // When these values are changed we need to reindex all children datasets - // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) + } + + // We don't want to reindex the children datasets unnecessarily: + // When these values are changed we need to reindex all children datasets + // This check is not recursive as all the values just report the immediate parent + if (!oldDvType.equals(editedDv.getDataverseType()) || !oldDvName.equals(editedDv.getName()) || !oldDvAlias.equals(editedDv.getAlias())) { - datasetsReindexRequired = true; - } - - return result; - } - + datasetsReindexRequired = true; + } + + return result; + } + @Override public boolean onSuccess(CommandContext ctxt, Object r) { - + // first kick of async index of datasets // TODO: is this actually needed? Is there a better way to handle // It appears that we at some point lost some extra logic here, where // we only reindex the underlying datasets if one or more of the specific set - // of fields have been changed (since these values are included in the + // of fields have been changed (since these values are included in the // indexed solr documents for dataasets). So I'm putting that back. -L.A. 
Dataverse result = (Dataverse) r; - + if (datasetsReindexRequired) { List datasets = ctxt.datasets().findByOwnerId(result.getId()); ctxt.index().asyncIndexDatasetList(datasets, true); } - - return ctxt.dataverses().index((Dataverse) r); - } + return ctxt.dataverses().index((Dataverse) r); + } } - From 19c8a12b32a502ee43f46916248a7d4691928aa6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 16 Oct 2024 16:30:00 +0100 Subject: [PATCH 08/49] Changed: limiting the information to update in a dataverse through the new update endpoint --- .../harvard/iq/dataverse/api/Dataverses.java | 8 +- .../iq/dataverse/util/json/JsonParser.java | 79 ++++++++++++++----- 2 files changed, 62 insertions(+), 25 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index b85ee0afc8f..0bc389041c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -128,7 +128,7 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; try { - newDataverse = parseAndValidateDataverse(body); + newDataverse = parseAndValidateDataverseRequestBody(body, null); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -172,7 +172,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod Dataverse updatedDataverse; try { - updatedDataverse = parseAndValidateDataverse(body); + updatedDataverse = parseAndValidateDataverseRequestBody(body, originalDataverse); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, 
MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -200,10 +200,10 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod } } - private Dataverse parseAndValidateDataverse(String body) throws JsonParsingException, JsonParseException { + private Dataverse parseAndValidateDataverseRequestBody(String body, Dataverse dataverseToUpdate) throws JsonParsingException, JsonParseException { try { JsonObject dataverseJson = JsonUtil.getJsonObject(body); - return jsonParser().parseDataverse(dataverseJson); + return dataverseToUpdate != null ? jsonParser().parseDataverseUpdates(dataverseJson, dataverseToUpdate) : jsonParser().parseDataverse(dataverseJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); throw jpe; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 2f01c9bc2f2..f63e4c4fd9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -50,6 +50,7 @@ import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; + import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; @@ -128,19 +129,8 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setPermissionRoot(jobj.getBoolean("permissionRoot", false)); dv.setFacetRoot(jobj.getBoolean("facetRoot", false)); dv.setAffiliation(jobj.getString("affiliation", null)); - - if (jobj.containsKey("dataverseContacts")) { - JsonArray dvContacts = jobj.getJsonArray("dataverseContacts"); - int i = 0; - List dvContactList = new LinkedList<>(); - for (JsonValue jsv : dvContacts) { - DataverseContact dvc = new DataverseContact(dv); - dvc.setContactEmail(getMandatoryString((JsonObject) jsv, 
"contactEmail")); - dvc.setDisplayOrder(i++); - dvContactList.add(dvc); - } - dv.setDataverseContacts(dvContactList); - } + + updateDataverseContacts(dv, jobj); if (jobj.containsKey("theme")) { DataverseTheme theme = parseDataverseTheme(jobj.getJsonObject("theme")); @@ -149,14 +139,8 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { } dv.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); // default - if (jobj.containsKey("dataverseType")) { - for (Dataverse.DataverseType dvtype : Dataverse.DataverseType.values()) { - if (dvtype.name().equals(jobj.getString("dataverseType"))) { - dv.setDataverseType(dvtype); - } - } - } - + updateDataverseType(dv, jobj); + if (jobj.containsKey("filePIDsEnabled")) { dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled")); } @@ -189,6 +173,59 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { return dv; } + + public Dataverse parseDataverseUpdates(JsonObject jsonObject, Dataverse dataverseToUpdate) throws JsonParseException { + String alias = jsonObject.getString("alias", null); + if (alias != null) { + dataverseToUpdate.setAlias(alias); + } + + String name = jsonObject.getString("name", null); + if (name != null) { + dataverseToUpdate.setName(name); + } + + String description = jsonObject.getString("description", null); + if (description != null) { + dataverseToUpdate.setDescription(description); + } + + String affiliation = jsonObject.getString("affiliation", null); + if (affiliation != null) { + dataverseToUpdate.setAffiliation(affiliation); + } + + updateDataverseType(dataverseToUpdate, jsonObject); + + updateDataverseContacts(dataverseToUpdate, jsonObject); + + return dataverseToUpdate; + } + + private void updateDataverseType(Dataverse dataverse, JsonObject jsonObject) { + String receivedDataverseType = jsonObject.getString("dataverseType", null); + if (receivedDataverseType != null) { + Arrays.stream(Dataverse.DataverseType.values()) + .filter(type -> 
type.name().equals(receivedDataverseType)) + .findFirst() + .ifPresent(dataverse::setDataverseType); + } + } + + private void updateDataverseContacts(Dataverse dataverse, JsonObject jsonObject) throws JsonParseException { + if (jsonObject.containsKey("dataverseContacts")) { + JsonArray dvContacts = jsonObject.getJsonArray("dataverseContacts"); + int i = 0; + List dvContactList = new LinkedList<>(); + for (JsonValue jsv : dvContacts) { + DataverseContact dvc = new DataverseContact(dataverse); + dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); + dvc.setDisplayOrder(i++); + dvContactList.add(dvc); + } + dataverse.setDataverseContacts(dvContactList); + } + } public DataverseTheme parseDataverseTheme(JsonObject obj) { From f4c3d2c9d9991edd2d02bd760d1fec86547a519c Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 16 Oct 2024 16:43:12 +0100 Subject: [PATCH 09/49] Removed: DataverseContact host dataverse re-set --- .../dataverse/engine/command/impl/CreateDataverseCommand.java | 4 ---- .../dataverse/engine/command/impl/UpdateDataverseCommand.java | 4 ---- 2 files changed, 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 2ce16a86297..6957dac416d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -74,10 +74,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - for (DataverseContact dc : created.getDataverseContacts()) { - dc.setDataverse(created); - } - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { created.setMetadataBlockRoot(true); created.setMetadataBlocks(metadataBlocks); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index b1670a264bf..551f0ffdff7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -92,10 +92,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - for (DataverseContact dc : editedDv.getDataverseContacts()) { - dc.setDataverse(editedDv); - } - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); DataverseType oldDvType = oldDv.getDataverseType(); From 8ef8cfd2c70d34f458d5bad33d7b790c3150b409 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 13:35:44 +0100 Subject: [PATCH 10/49] Added: parseDataverseUpdates unit test --- .../dataverse/util/json/JsonParserTest.java | 54 ++++++++++++------- 1 file changed, 35 insertions(+), 19 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 59e175f30c1..1a1d836f6a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -4,17 +4,9 @@ package edu.harvard.iq.dataverse.util.json; -import edu.harvard.iq.dataverse.ControlledVocabularyValue; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; -import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; -import edu.harvard.iq.dataverse.DatasetFieldValue; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseTheme.Alignment; -import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.UserNotification.Type; import 
edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider; @@ -50,16 +42,7 @@ import java.io.StringReader; import java.math.BigDecimal; import java.text.ParseException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import java.util.TimeZone; +import java.util.*; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; @@ -281,6 +264,39 @@ public void testParseCompleteDataverse() throws JsonParseException { throw new JsonParseException("Couldn't read test file", ioe); } } + + /** + * TODO + * @throws JsonParseException when this test is broken. + */ + @Test + public void parseDataverseUpdates() throws JsonParseException { + Dataverse dataverse = new Dataverse(); + dataverse.setName("Name to update"); + dataverse.setAlias("aliasToUpdate"); + dataverse.setAffiliation("Affiliation to update"); + dataverse.setDescription("Description to update"); + dataverse.setDataverseType(Dataverse.DataverseType.DEPARTMENT); + List originalContacts = new ArrayList<>(); + originalContacts.add(new DataverseContact(dataverse, "updatethis@example.edu")); + dataverse.setDataverseContacts(originalContacts); + JsonObject dvJson; + try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { + dvJson = Json.createReader(reader).readObject(); + Dataverse actual = sut.parseDataverseUpdates(dvJson, dataverse); + assertEquals("Scientific Research", actual.getName()); + assertEquals("science", actual.getAlias()); + assertEquals("Scientific Research University", actual.getAffiliation()); + assertEquals("We do all the science.", actual.getDescription()); + assertEquals("LABORATORY", 
actual.getDataverseType().toString()); + assertEquals(2, actual.getDataverseContacts().size()); + assertEquals("pi@example.edu,student@example.edu", actual.getContactEmails()); + assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); + assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); + } catch (IOException ioe) { + throw new JsonParseException("Couldn't read test file", ioe); + } + } @Test public void testParseThemeDataverse() throws JsonParseException { From 62df2a7d534b87cd8975fd01317d3d1a05576e4a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:36:27 +0100 Subject: [PATCH 11/49] Changed: reordered logic in UpdateDataverseCommand for further refactoring --- .../command/impl/UpdateDataverseCommand.java | 56 +++++++++++-------- 1 file changed, 34 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 551f0ffdff7..16b93debb6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -78,6 +78,11 @@ public UpdateDataverseCommand(Dataverse editedDv, public Dataverse execute(CommandContext ctxt) throws CommandException { logger.fine("Entering update dataverse command"); + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + editedDv.setMetadataBlockRoot(true); + editedDv.setMetadataBlocks(metadataBlocks); + } + // Perform any optional validation steps, if defined: if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { // For admins, an override of the external validation step may be enabled: @@ -98,39 +103,46 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { String oldDvAlias = oldDv.getAlias(); String oldDvName = oldDv.getName(); - Dataverse result = 
ctxt.dataverses().save(editedDv); - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(result); - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } + // We don't want to reindex the children datasets unnecessarily: + // When these values are changed we need to reindex all children datasets + // This check is not recursive as all the values just report the immediate parent + if (!oldDvType.equals(editedDv.getDataverseType()) + || !oldDvName.equals(editedDv.getName()) + || !oldDvAlias.equals(editedDv.getAlias())) { + datasetsReindexRequired = true; } + if (featuredDataverseList != null) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); + ctxt.featuredDataverses().deleteFeaturedDataversesFor(editedDv); int i = 0; for (Object obj : featuredDataverseList) { Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); + ctxt.featuredDataverses().create(i++, dv.getId(), editedDv.getId()); } } - if (inputLevelList != null) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(result); - for (DataverseFieldTypeInputLevel obj : inputLevelList) { - ctxt.fieldTypeInputLevels().create(obj); + + if (facetList != null) { + ctxt.facets().deleteFacetsFor(editedDv); + if (!facetList.isEmpty()) { + editedDv.setFacetRoot(true); + } + int i = 0; + for (DatasetFieldType df : facetList) { + ctxt.facets().create(i++, df, editedDv); } } - - // We don't want to reindex the children datasets unnecessarily: - // When these values are changed we need to reindex all children datasets - // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { - datasetsReindexRequired = true; + if (inputLevelList != null) { + if (!inputLevelList.isEmpty()) { + editedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); 
+ } + ctxt.fieldTypeInputLevels().deleteFacetsFor(editedDv); + for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { + inputLevel.setDataverse(editedDv); + ctxt.fieldTypeInputLevels().create(inputLevel); + } } + Dataverse result = ctxt.dataverses().save(editedDv); return result; } From 6ccbb4ae53ee6bdd573b686e98c964ecf4e8d2db Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:37:05 +0100 Subject: [PATCH 12/49] Changed: updateDataverse return code --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0bc389041c2..d8bd2b8cb4b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -188,7 +188,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); - return created("/dataverses/" + updatedDataverse.getAlias(), json(updatedDataverse)); + return ok(json(updatedDataverse)); } catch (WrappedResponse ww) { return handleWrappedResponse(ww); From 5c1703906dfeb205604dd8608b286ee706295e2d Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:37:47 +0100 Subject: [PATCH 13/49] Added: IT for updateDataverse endpoint --- .../iq/dataverse/api/DataversesIT.java | 43 +++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 62 +++++++++++++++++++ 2 files changed, 105 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 8c6a8244af1..7abc35d536a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1253,6 +1253,49 @@ public void testAddDataverse() { .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\"")); } + @Test + public void testUpdateDataverse() { + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String testAliasSuffix = "-update-dataverse"; + + String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + String newAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + String newName = "New Test Dataverse Name"; + String newAffiliation = "New Test Dataverse Affiliation"; + String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); + String[] newContactEmails = new String[] {"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[] {"geographicCoverage"}; + String[] newFacetIds = new String[] {"contributorName"}; + String[] newMetadataBlockNames = new String[] {"citation", "geospatial", "biomedical"}; + + Response updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + + updateDataverseResponse.prettyPrint(); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // TODO add more assertions and cases + + // The alias has been changed, so we should not be able to do any operation using the old one + String oldDataverseAlias = testDataverseAlias; + Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); + getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } + @Test public void 
testListFacets() { Response createUserResponse = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 70f49d81b35..eb40a85f10c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import io.restassured.http.ContentType; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; @@ -12,6 +13,7 @@ import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.CREATED; import java.nio.charset.StandardCharsets; @@ -428,6 +430,66 @@ static Response createSubDataverse(String alias, String category, String apiToke return createDataverseResponse; } + static Response updateDataverse(String alias, + String newAlias, + String newName, + String newAffiliation, + String newDataverseType, + String[] newContactEmails, + String[] newInputLevelNames, + String[] newFacetIds, + String[] newMetadataBlockNames, + String apiToken) { + JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); + for(String contactEmail : newContactEmails) { + contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", contactEmail)); + } + NullSafeJsonBuilder jsonBuilder = jsonObjectBuilder() + .add("alias", newAlias) + .add("name", newName) + .add("affiliation", newAffiliation) + .add("dataverseContacts", contactArrayBuilder) + .add("dataverseType", newDataverseType) + .add("affiliation", newAffiliation); + + JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); + + if (newInputLevelNames != null) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for(String inputLevelName : 
newInputLevelNames) { + inputLevelsArrayBuilder.add(Json.createObjectBuilder() + .add("datasetFieldTypeName", inputLevelName) + .add("required", true) + .add("include", true) + ); + } + metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); + } + + if (newMetadataBlockNames != null) { + JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); + for(String metadataBlockName : newMetadataBlockNames) { + metadataBlockNamesArrayBuilder.add(metadataBlockName); + } + metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); + } + + if (newFacetIds != null) { + JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); + for(String facetId : newFacetIds) { + facetIdsArrayBuilder.add(facetId); + } + metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); + } + + jsonBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + + JsonObject dvData = jsonBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + } + static Response createDataverse(JsonObject dvData, String apiToken) { Response createDataverseResponse = given() .body(dvData.toString()).contentType(ContentType.JSON) From e5cdb106e22064fe4fc84fa834dae2bf984525ff Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:52:12 +0100 Subject: [PATCH 14/49] Refactor: UtilIT duplication on dataverse write operations --- .../edu/harvard/iq/dataverse/api/UtilIT.java | 66 ++++++------------- 1 file changed, 21 insertions(+), 45 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index eb40a85f10c..502f1ecb0a8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -391,43 +391,12 @@ static Response createSubDataverse(String alias, String category, String apiToke 
objectBuilder.add("affiliation", affiliation); } - JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); - - if (inputLevelNames != null) { - JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); - for(String inputLevelName : inputLevelNames) { - inputLevelsArrayBuilder.add(Json.createObjectBuilder() - .add("datasetFieldTypeName", inputLevelName) - .add("required", true) - .add("include", true) - ); - } - metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); - } - - if (metadataBlockNames != null) { - JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); - for(String metadataBlockName : metadataBlockNames) { - metadataBlockNamesArrayBuilder.add(metadataBlockName); - } - metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); - } - - if (facetIds != null) { - JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); - for(String facetId : facetIds) { - facetIdsArrayBuilder.add(facetId); - } - metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); - } - - objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + updateDataverseRequestJsonWithMetadataBlocksConfiguration(inputLevelNames, facetIds, metadataBlockNames, objectBuilder); JsonObject dvData = objectBuilder.build(); - Response createDataverseResponse = given() + return given() .body(dvData.toString()).contentType(ContentType.JSON) .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken); - return createDataverseResponse; } static Response updateDataverse(String alias, @@ -452,11 +421,23 @@ static Response updateDataverse(String alias, .add("dataverseType", newDataverseType) .add("affiliation", newAffiliation); + updateDataverseRequestJsonWithMetadataBlocksConfiguration(newInputLevelNames, newFacetIds, newMetadataBlockNames, jsonBuilder); + + JsonObject dvData = jsonBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + 
.when().put("/api/dataverses/" + alias + "?key=" + apiToken); + } + + private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(String[] inputLevelNames, + String[] facetIds, + String[] metadataBlockNames, + JsonObjectBuilder objectBuilder) { JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); - if (newInputLevelNames != null) { + if (inputLevelNames != null) { JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); - for(String inputLevelName : newInputLevelNames) { + for(String inputLevelName : inputLevelNames) { inputLevelsArrayBuilder.add(Json.createObjectBuilder() .add("datasetFieldTypeName", inputLevelName) .add("required", true) @@ -466,28 +447,23 @@ static Response updateDataverse(String alias, metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); } - if (newMetadataBlockNames != null) { + if (metadataBlockNames != null) { JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); - for(String metadataBlockName : newMetadataBlockNames) { + for(String metadataBlockName : metadataBlockNames) { metadataBlockNamesArrayBuilder.add(metadataBlockName); } metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); } - if (newFacetIds != null) { + if (facetIds != null) { JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); - for(String facetId : newFacetIds) { + for(String facetId : facetIds) { facetIdsArrayBuilder.add(facetId); } metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); } - jsonBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); - - JsonObject dvData = jsonBuilder.build(); - return given() - .body(dvData.toString()).contentType(ContentType.JSON) - .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); } static Response createDataverse(JsonObject dvData, String apiToken) { From 8020d50c26a3d41bef41403984495bad535dbad2 Mon Sep 
17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:56:24 +0100 Subject: [PATCH 15/49] Added: pending doc comment to JsonParserTest method --- .../edu/harvard/iq/dataverse/util/json/JsonParserTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 1a1d836f6a0..2cffa7d921c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -266,7 +266,8 @@ public void testParseCompleteDataverse() throws JsonParseException { } /** - * TODO + * Test that a JSON object passed for a complete Dataverse update is correctly parsed. + * This checks that all properties are parsed into the correct dataverse properties. * @throws JsonParseException when this test is broken. */ @Test From 2d10f22de0a20ab73ef146d6b90f1fb587672f2a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 15:49:36 +0100 Subject: [PATCH 16/49] Added: missing IT for updateDataverse endpoint --- .../iq/dataverse/api/DataversesIT.java | 63 ++++++++++++++++--- 1 file changed, 55 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 7abc35d536a..c311fa1016e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1,12 +1,15 @@ package edu.harvard.iq.dataverse.api; import io.restassured.RestAssured; + import static io.restassured.RestAssured.given; import static io.restassured.path.json.JsonPath.with; + import io.restassured.response.Response; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; + import java.io.FileNotFoundException; 
import java.io.FileReader; import java.io.IOException; @@ -14,6 +17,7 @@ import java.util.Arrays; import java.util.List; import java.util.logging.Logger; + import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; @@ -31,6 +35,7 @@ import static org.junit.jupiter.api.Assertions.*; import java.nio.file.Files; + import io.restassured.path.json.JsonPath; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; @@ -43,7 +48,7 @@ public class DataversesIT { public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } - + @AfterAll public static void afterClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); @@ -1267,10 +1272,10 @@ public void testUpdateDataverse() { String newName = "New Test Dataverse Name"; String newAffiliation = "New Test Dataverse Affiliation"; String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); - String[] newContactEmails = new String[] {"new_email@dataverse.com"}; - String[] newInputLevelNames = new String[] {"geographicCoverage"}; - String[] newFacetIds = new String[] {"contributorName"}; - String[] newMetadataBlockNames = new String[] {"citation", "geospatial", "biomedical"}; + String[] newContactEmails = new String[]{"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[]{"geographicCoverage"}; + String[] newFacetIds = new String[]{"contributorName"}; + String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"}; Response updateDataverseResponse = UtilIT.updateDataverse( testDataverseAlias, @@ -1285,15 +1290,57 @@ public void testUpdateDataverse() { apiToken ); - updateDataverseResponse.prettyPrint(); + // Assert dataverse properties are updated updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // TODO add more assertions and cases + String actualDataverseAlias = 
updateDataverseResponse.then().extract().path("data.alias"); + assertEquals(newAlias, actualDataverseAlias); + String actualDataverseName = updateDataverseResponse.then().extract().path("data.name"); + assertEquals(newName, actualDataverseName); + String actualDataverseAffiliation = updateDataverseResponse.then().extract().path("data.affiliation"); + assertEquals(newAffiliation, actualDataverseAffiliation); + String actualDataverseType = updateDataverseResponse.then().extract().path("data.dataverseType"); + assertEquals(newDataverseType, actualDataverseType); + String actualContactEmail = updateDataverseResponse.then().extract().path("data.dataverseContacts[0].contactEmail"); + assertEquals("new_email@dataverse.com", actualContactEmail); + + // Assert metadata blocks are updated + Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + String actualDataverseMetadataBlock1 = listMetadataBlocksResponse.then().extract().path("data[0].name"); + String actualDataverseMetadataBlock2 = listMetadataBlocksResponse.then().extract().path("data[1].name"); + String actualDataverseMetadataBlock3 = listMetadataBlocksResponse.then().extract().path("data[2].name"); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock1)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock2)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock3)); + + // Assert custom facets are updated + Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName = listDataverseFacetsResponse.then().extract().path("data[0]"); + assertThat(newFacetIds, hasItemInArray(actualFacetName)); + + // Assert input levels are updated + Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + String actualInputLevelName = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); 
+ assertThat(newInputLevelNames, hasItemInArray(actualInputLevelName)); // The alias has been changed, so we should not be able to do any operation using the old one String oldDataverseAlias = testDataverseAlias; Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Should return error when the dataverse to edit does not exist + updateDataverseResponse = UtilIT.updateDataverse( + "unexistingDataverseAlias", + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test From d334b689437e06cf674a6725600c705628845c47 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 09:41:55 +0200 Subject: [PATCH 17/49] Refactor: CreateDataverseCommand inheriting AbstractWriteDataverseCommand --- .../impl/AbstractWriteDataverseCommand.java | 84 +++++++++++++++ .../command/impl/CreateDataverseCommand.java | 102 +++++------------- 2 files changed, 110 insertions(+), 76 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java new file mode 100644 index 00000000000..577f877db41 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -0,0 +1,84 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import 
edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; + +import java.util.ArrayList; +import java.util.List; + +/** + * TODO + */ +abstract class AbstractWriteDataverseCommand extends AbstractCommand { + + protected Dataverse dataverse; + private final List inputLevels; + private final List facets; + protected final List metadataBlocks; + + public AbstractWriteDataverseCommand(Dataverse dataverse, + DataverseRequest request, + List facets, + List inputLevels, + List metadataBlocks) { + super(request, dataverse.getOwner()); + this.dataverse = dataverse; + if (facets != null) { + this.facets = new ArrayList<>(facets); + } else { + this.facets = null; + } + if (inputLevels != null) { + this.inputLevels = new ArrayList<>(inputLevels); + } else { + this.inputLevels = null; + } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + dataverse = innerExecute(ctxt); + + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); + } + + if (facets != null) { + ctxt.facets().deleteFacetsFor(dataverse); + + if (!facets.isEmpty()) { + dataverse.setFacetRoot(true); + } + + int i = 0; + for (DatasetFieldType df : facets) { + ctxt.facets().create(i++, df, dataverse); + } + } + + if (inputLevels != null) { + if (!inputLevels.isEmpty()) { + dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); + } + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + inputLevel.setDataverse(dataverse); + ctxt.fieldTypeInputLevels().create(inputLevel); + } + } + + return ctxt.dataverses().save(dataverse); + } + + abstract protected Dataverse innerExecute(CommandContext 
ctxt) throws IllegalCommandException; +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 6957dac416d..ce922dc565d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -6,11 +6,9 @@ import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -27,47 +25,26 @@ * @author michael */ @RequiredPermissions(Permission.AddDataverse) -public class CreateDataverseCommand extends AbstractCommand { - - private final Dataverse created; - private final List inputLevelList; - private final List facetList; - private final List metadataBlocks; +public class CreateDataverseCommand extends AbstractWriteDataverseCommand { public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList) { - this(created, aRequest, facetList, inputLevelList, null); + DataverseRequest request, + List facets, + List inputLevels) { + this(created, request, facets, inputLevels, null); } public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList, + DataverseRequest request, + List facets, + List inputLevels, List 
metadataBlocks) { - super(aRequest, created.getOwner()); - this.created = created; - if (facetList != null) { - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (inputLevelList != null) { - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - if (metadataBlocks != null) { - this.metadataBlocks = new ArrayList<>(metadataBlocks); - } else { - this.metadataBlocks = null; - } + super(created, request, facets, inputLevels, metadataBlocks); } @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - Dataverse owner = created.getOwner(); + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { + Dataverse owner = dataverse.getOwner(); if (owner == null) { if (ctxt.dataverses().isRootDataverseExists()) { throw new IllegalCommandException("Root Dataverse already exists. Cannot create another one", this); @@ -75,44 +52,44 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - created.setMetadataBlockRoot(true); - created.setMetadataBlocks(metadataBlocks); + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); } - if (created.getCreateDate() == null) { - created.setCreateDate(new Timestamp(new Date().getTime())); + if (dataverse.getCreateDate() == null) { + dataverse.setCreateDate(new Timestamp(new Date().getTime())); } - if (created.getCreator() == null) { + if (dataverse.getCreator() == null) { final User user = getRequest().getUser(); if (user.isAuthenticated()) { - created.setCreator((AuthenticatedUser) user); + dataverse.setCreator((AuthenticatedUser) user); } else { throw new IllegalCommandException("Guest users cannot create a Dataverse.", this); } } - if (created.getDataverseType() == null) { - created.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); + if (dataverse.getDataverseType() == null) { + 
dataverse.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); } - if (created.getDefaultContributorRole() == null) { - created.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); + if (dataverse.getDefaultContributorRole() == null) { + dataverse.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); } // @todo for now we are saying all dataverses are permission root - created.setPermissionRoot(true); + dataverse.setPermissionRoot(true); - if (ctxt.dataverses().findByAlias(created.getAlias()) != null) { - throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this); + if (ctxt.dataverses().findByAlias(dataverse.getAlias()) != null) { + throw new IllegalCommandException("A dataverse with alias " + dataverse.getAlias() + " already exists", this); } - if (created.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { + if (dataverse.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { throw new IllegalCommandException("File PIDs cannot be enabled per collection", this); } // Save the dataverse - Dataverse managedDv = ctxt.dataverses().save(created); + Dataverse managedDv = ctxt.dataverses().save(dataverse); // Find the built in admin role (currently by alias) DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN); @@ -159,33 +136,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime())); - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(managedDv); - - if (!facetList.isEmpty()) { - managedDv.setFacetRoot(true); - } - - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df, managedDv); - } - } - - if (inputLevelList != null) { - if 
(!inputLevelList.isEmpty()) { - managedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); - } - ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - inputLevel.setDataverse(managedDv); - ctxt.fieldTypeInputLevels().create(inputLevel); - } - } - - // TODO: save is called here and above; we likely don't need both - managedDv = ctxt.dataverses().save(managedDv); return managedDv; } From e7782394b037fb6890f785cebd6f12869630c6c6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 10:57:54 +0200 Subject: [PATCH 18/49] Refactor: UpdateDataverseCommand inheriting AbstractWriteDataverseCommand --- .../impl/AbstractWriteDataverseCommand.java | 5 +- .../command/impl/CreateDataverseCommand.java | 2 +- .../command/impl/UpdateDataverseCommand.java | 102 ++++-------------- 3 files changed, 27 insertions(+), 82 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 577f877db41..40c2abf5d21 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -11,7 +11,7 @@ import java.util.List; /** - * TODO + * An abstract base class for commands that perform write operations on {@link Dataverse}s. 
*/ abstract class AbstractWriteDataverseCommand extends AbstractCommand { @@ -21,11 +21,12 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand protected final List metadataBlocks; public AbstractWriteDataverseCommand(Dataverse dataverse, + Dataverse affectedDataverse, DataverseRequest request, List facets, List inputLevels, List metadataBlocks) { - super(request, dataverse.getOwner()); + super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { this.facets = new ArrayList<>(facets); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index ce922dc565d..145cfb6199c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created, List facets, List inputLevels, List metadataBlocks) { - super(created, request, facets, inputLevels, metadataBlocks); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 16b93debb6d..14d9e408be8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -6,16 +6,13 @@ import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import 
edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.ArrayList; import java.util.List; -import java.util.logging.Logger; /** * Update an existing dataverse. @@ -23,72 +20,41 @@ * @author michael */ @RequiredPermissions(Permission.EditDataverse) -public class UpdateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); - - private final Dataverse editedDv; - private final List facetList; +public class UpdateDataverseCommand extends AbstractWriteDataverseCommand { private final List featuredDataverseList; - private final List inputLevelList; - private final List metadataBlocks; private boolean datasetsReindexRequired = false; public UpdateDataverseCommand(Dataverse editedDv, - List facetList, - List featuredDataverseList, - DataverseRequest aRequest, - List inputLevelList) { - this(editedDv, facetList, featuredDataverseList, aRequest, inputLevelList, null); + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels) { + this(editedDv, facets, featuredDataverses, request, inputLevels, null); } public UpdateDataverseCommand(Dataverse editedDv, - List facetList, - List featuredDataverseList, - DataverseRequest aRequest, - List inputLevelList, + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels, List metadataBlocks) { - super(aRequest, editedDv); - this.editedDv = editedDv; - // add update template uses this command but does not - // update facet list or featured dataverses - if (facetList != null) { - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (featuredDataverseList != null) { - this.featuredDataverseList = new ArrayList<>(featuredDataverseList); + super(editedDv, editedDv, request, facets, 
inputLevels, metadataBlocks); + if (featuredDataverses != null) { + this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { this.featuredDataverseList = null; } - if (inputLevelList != null) { - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - if (metadataBlocks != null) { - this.metadataBlocks = new ArrayList<>(metadataBlocks); - } else { - this.metadataBlocks = null; - } } @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - logger.fine("Entering update dataverse command"); - - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - editedDv.setMetadataBlockRoot(true); - editedDv.setMetadataBlocks(metadataBlocks); - } - + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { // Perform any optional validation steps, if defined: if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { // For admins, an override of the external validation step may be enabled: if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { String executable = ctxt.systemConfig().getDataverseValidationExecutable(); - boolean result = validateDataverseMetadataExternally(editedDv, executable, getRequest()); + boolean result = validateDataverseMetadataExternally(dataverse, executable, getRequest()); if (!result) { String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); @@ -97,7 +63,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); + Dataverse oldDv = ctxt.dataverses().find(dataverse.getId()); DataverseType oldDvType = oldDv.getDataverseType(); String oldDvAlias = oldDv.getAlias(); @@ -106,44 +72,22 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { // We don't want to reindex the children datasets unnecessarily: // When these values are changed we need to 
reindex all children datasets // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { + if (!oldDvType.equals(dataverse.getDataverseType()) + || !oldDvName.equals(dataverse.getName()) + || !oldDvAlias.equals(dataverse.getAlias())) { datasetsReindexRequired = true; } if (featuredDataverseList != null) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(editedDv); + ctxt.featuredDataverses().deleteFeaturedDataversesFor(dataverse); int i = 0; for (Object obj : featuredDataverseList) { Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), editedDv.getId()); - } - } - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(editedDv); - if (!facetList.isEmpty()) { - editedDv.setFacetRoot(true); - } - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df, editedDv); - } - } - if (inputLevelList != null) { - if (!inputLevelList.isEmpty()) { - editedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); - } - ctxt.fieldTypeInputLevels().deleteFacetsFor(editedDv); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - inputLevel.setDataverse(editedDv); - ctxt.fieldTypeInputLevels().create(inputLevel); + ctxt.featuredDataverses().create(i++, dv.getId(), dataverse.getId()); } } - Dataverse result = ctxt.dataverses().save(editedDv); - return result; + return dataverse; } @Override @@ -154,7 +98,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) { // It appears that we at some point lost some extra logic here, where // we only reindex the underlying datasets if one or more of the specific set // of fields have been changed (since these values are included in the - // indexed solr documents for dataasets). So I'm putting that back. -L.A. + // indexed solr documents for datasets). So I'm putting that back. -L.A. 
Dataverse result = (Dataverse) r; if (datasetsReindexRequired) { From 4e90d0c3fe8d501f5810a162c304ce4e3b43a891 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 16:40:43 +0100 Subject: [PATCH 19/49] Added: docs for #10904 --- doc/sphinx-guides/source/api/native-api.rst | 52 +++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f8b8620f121..6254742eebb 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -74,6 +74,58 @@ The request JSON supports an optional ``metadataBlocks`` object, with the follow To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. +.. _update-dataverse-api: + +Update a Dataverse Collection +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates an existing Dataverse collection using a JSON file following the same structure as the one used in the API for the creation. (see :ref:`create-dataverse-api`). + +The steps for updating a Dataverse collection are: + +- Prepare a JSON file containing the fields for the properties you want to update. You do not need to include all the properties, only the ones you want to update. +- Execute a curl command or equivalent. + +As an example, you can download :download:`dataverse-complete.json <../_static/api/dataverse-complete.json>` file and modify it to suit your needs. The controlled vocabulary for ``dataverseType`` is the following: + +- ``DEPARTMENT`` +- ``JOURNALS`` +- ``LABORATORY`` +- ``ORGANIZATIONS_INSTITUTIONS`` +- ``RESEARCHERS`` +- ``RESEARCH_GROUP`` +- ``RESEARCH_PROJECTS`` +- ``TEACHING_COURSES`` +- ``UNCATEGORIZED`` + +The curl command below assumes you are using the name "dataverse-complete.json" and that this file is in your current working directory. 
+ +Next you need to figure out the alias or database id of the Dataverse collection you want to update. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export DV_ALIAS=dvAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$DV_ALIAS" --upload-file dataverse-complete.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/dvAlias" --upload-file dataverse-complete.json + +You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the updated Dataverse collection. + +Same as in :ref:`create-dataverse-api`, the request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects: + +- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection. +- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the Dataverse collection. Note that if the corresponding metadata block names are not specified in the ``metadataBlockNames`` field, they will be added automatically to the Dataverse collection. +- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array. + +To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. + +.. 
_view-dataverse: View a Dataverse Collection From 6aac751d55375e7433d01d500f38b8be83a7b5bc Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 16:44:09 +0100 Subject: [PATCH 20/49] Added: release notes for #10904 --- doc/release-notes/10904-edit-dataverse-collection-endpoint.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10904-edit-dataverse-collection-endpoint.md diff --git a/doc/release-notes/10904-edit-dataverse-collection-endpoint.md b/doc/release-notes/10904-edit-dataverse-collection-endpoint.md new file mode 100644 index 00000000000..b9256941eea --- /dev/null +++ b/doc/release-notes/10904-edit-dataverse-collection-endpoint.md @@ -0,0 +1 @@ +Adds a new endpoint (`PUT /api/dataverses/`) for updating an existing Dataverse collection using a JSON file following the same structure as the one used in the API for the creation. From 4f98be6a1bcec06ffcada8098e57baf4ea0dd9d2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 17:37:26 +0100 Subject: [PATCH 21/49] Removed: unnecessary line in updateDataverse endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index d8bd2b8cb4b..895d073bb47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -184,8 +184,6 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List metadataBlocks = parseMetadataBlocks(body); List facets = parseFacets(body); - updatedDataverse.setId(originalDataverse.getId()); - AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); return ok(json(updatedDataverse)); From 
0b5f9a834b01dff526dbe76c250a5707a04a4656 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 24 Oct 2024 15:46:59 +0100 Subject: [PATCH 22/49] Changed: handling properties update through a DTO object for updateDataverse endpoint --- .../harvard/iq/dataverse/api/Dataverses.java | 42 +++++--- .../iq/dataverse/api/dto/DataverseDTO.java | 63 ++++++++++++ .../command/impl/UpdateDataverseCommand.java | 42 +++++++- .../iq/dataverse/util/json/JsonParser.java | 99 ++++++++++--------- .../iq/dataverse/api/DataversesIT.java | 20 ++++ .../dataverse/util/json/JsonParserTest.java | 20 ++-- 6 files changed, 204 insertions(+), 82 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 895d073bb47..25176b85689 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -3,12 +3,9 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean; -import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO; +import edu.harvard.iq.dataverse.api.dto.*; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO; -import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; -import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.api.imports.ImportException; import edu.harvard.iq.dataverse.api.imports.ImportServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; @@ -128,7 +125,7 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; try { - newDataverse = 
parseAndValidateDataverseRequestBody(body, null); + newDataverse = parseAndValidateAddDataverseRequestBody(body); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -159,20 +156,33 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } } + private Dataverse parseAndValidateAddDataverseRequestBody(String body) throws JsonParsingException, JsonParseException { + try { + JsonObject addDataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverse(addDataverseJson); + } catch (JsonParsingException jpe) { + logger.log(Level.SEVERE, "Json: {0}", body); + throw jpe; + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + throw ex; + } + } + @PUT @AuthRequired @Path("{identifier}") public Response updateDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String identifier) { - Dataverse originalDataverse; + Dataverse dataverse; try { - originalDataverse = findDataverseOrDie(identifier); + dataverse = findDataverseOrDie(identifier); } catch (WrappedResponse e) { return e.getResponse(); } - Dataverse updatedDataverse; + DataverseDTO updatedDataverseDTO; try { - updatedDataverse = parseAndValidateDataverseRequestBody(body, originalDataverse); + updatedDataverseDTO = parseAndValidateUpdateDataverseRequestBody(body); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -180,13 +190,13 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod } try { - List inputLevels = parseInputLevels(body, originalDataverse); + List inputLevels = parseInputLevels(body, dataverse); List 
metadataBlocks = parseMetadataBlocks(body); List facets = parseFacets(body); AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); - return ok(json(updatedDataverse)); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO)); + return ok(json(dataverse)); } catch (WrappedResponse ww) { return handleWrappedResponse(ww); @@ -198,15 +208,15 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod } } - private Dataverse parseAndValidateDataverseRequestBody(String body, Dataverse dataverseToUpdate) throws JsonParsingException, JsonParseException { + private DataverseDTO parseAndValidateUpdateDataverseRequestBody(String body) throws JsonParsingException, JsonParseException { try { - JsonObject dataverseJson = JsonUtil.getJsonObject(body); - return dataverseToUpdate != null ? 
jsonParser().parseDataverseUpdates(dataverseJson, dataverseToUpdate) : jsonParser().parseDataverse(dataverseJson); + JsonObject updateDataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverseDTO(updateDataverseJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); throw jpe; } catch (JsonParseException ex) { - logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + logger.log(Level.SEVERE, "Error parsing DataverseDTO from json: " + ex.getMessage(), ex); throw ex; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java new file mode 100644 index 00000000000..4f2f1032c07 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java @@ -0,0 +1,63 @@ +package edu.harvard.iq.dataverse.api.dto; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseContact; + +import java.util.List; + +public class DataverseDTO { + private String alias; + private String name; + private String description; + private String affiliation; + private List dataverseContacts; + private Dataverse.DataverseType dataverseType; + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getAffiliation() { + return affiliation; + } + + public void setAffiliation(String affiliation) { + this.affiliation = affiliation; + } + + public List getDataverseContacts() { + return dataverseContacts; + } + + public void setDataverseContacts(List dataverseContacts) { + this.dataverseContacts = dataverseContacts; + } + + public 
Dataverse.DataverseType getDataverseType() { + return dataverseType; + } + + public void setDataverseType(Dataverse.DataverseType dataverseType) { + this.dataverseType = dataverseType; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 14d9e408be8..55cc3708097 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.Dataverse.DataverseType; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.authorization.Permission; import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; @@ -22,29 +23,32 @@ @RequiredPermissions(Permission.EditDataverse) public class UpdateDataverseCommand extends AbstractWriteDataverseCommand { private final List featuredDataverseList; + private final DataverseDTO updatedDataverseDTO; private boolean datasetsReindexRequired = false; - public UpdateDataverseCommand(Dataverse editedDv, + public UpdateDataverseCommand(Dataverse dataverse, List facets, List featuredDataverses, DataverseRequest request, List inputLevels) { - this(editedDv, facets, featuredDataverses, request, inputLevels, null); + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null); } - public UpdateDataverseCommand(Dataverse editedDv, + public UpdateDataverseCommand(Dataverse dataverse, List facets, List featuredDataverses, DataverseRequest request, List inputLevels, - List metadataBlocks) { - super(editedDv, editedDv, request, facets, inputLevels, metadataBlocks); + List metadataBlocks, + DataverseDTO updatedDataverseDTO) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks); if 
(featuredDataverses != null) { this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { this.featuredDataverseList = null; } + this.updatedDataverseDTO = updatedDataverseDTO; } @Override @@ -87,9 +91,37 @@ protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandExcep } } + if (updatedDataverseDTO != null) { + updateDataverseFromDTO(dataverse, updatedDataverseDTO); + } + return dataverse; } + private void updateDataverseFromDTO(Dataverse dataverse, DataverseDTO dto) { + if (dto.getAlias() != null) { + dataverse.setAlias(dto.getAlias()); + } + if (dto.getName() != null) { + dataverse.setName(dto.getName()); + } + if (dto.getDescription() != null) { + dataverse.setDescription(dto.getDescription()); + } + if (dto.getAffiliation() != null) { + dataverse.setAffiliation(dto.getAffiliation()); + } + if (dto.getDataverseContacts() != null) { + dataverse.setDataverseContacts(dto.getDataverseContacts()); + for (DataverseContact dc : dataverse.getDataverseContacts()) { + dc.setDataverse(dataverse); + } + } + if (dto.getDataverseType() != null) { + dataverse.setDataverseType(dto.getDataverseType()); + } + } + @Override public boolean onSuccess(CommandContext ctxt, Object r) { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index f63e4c4fd9c..8552389525d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.api.Util; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; @@ -48,6 +49,7 @@ 
import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Consumer; import java.util.logging.Logger; import java.util.stream.Collectors; @@ -130,8 +132,19 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setFacetRoot(jobj.getBoolean("facetRoot", false)); dv.setAffiliation(jobj.getString("affiliation", null)); - updateDataverseContacts(dv, jobj); - + if (jobj.containsKey("dataverseContacts")) { + JsonArray dvContacts = jobj.getJsonArray("dataverseContacts"); + int i = 0; + List dvContactList = new LinkedList<>(); + for (JsonValue jsv : dvContacts) { + DataverseContact dvc = new DataverseContact(dv); + dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); + dvc.setDisplayOrder(i++); + dvContactList.add(dvc); + } + dv.setDataverseContacts(dvContactList); + } + if (jobj.containsKey("theme")) { DataverseTheme theme = parseDataverseTheme(jobj.getJsonObject("theme")); dv.setDataverseTheme(theme); @@ -139,7 +152,13 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { } dv.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); // default - updateDataverseType(dv, jobj); + String receivedDataverseType = jobj.getString("dataverseType", null); + if (receivedDataverseType != null) { + Arrays.stream(Dataverse.DataverseType.values()) + .filter(type -> type.name().equals(receivedDataverseType)) + .findFirst() + .ifPresent(dv::setDataverseType); + } if (jobj.containsKey("filePIDsEnabled")) { dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled")); @@ -147,7 +166,7 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { /* We decided that subject is not user set, but gotten from the subject of the dataverse's datasets - leavig this code in for now, in case we need to go back to it at some point - + if (jobj.containsKey("dataverseSubjects")) { List dvSubjectList = new LinkedList<>(); DatasetFieldType subjectType = 
datasetFieldSvc.findByName(DatasetFieldConstant.subject); @@ -170,63 +189,49 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setDataverseSubjects(dvSubjectList); } */ - + return dv; } - public Dataverse parseDataverseUpdates(JsonObject jsonObject, Dataverse dataverseToUpdate) throws JsonParseException { - String alias = jsonObject.getString("alias", null); - if (alias != null) { - dataverseToUpdate.setAlias(alias); - } - - String name = jsonObject.getString("name", null); - if (name != null) { - dataverseToUpdate.setName(name); - } - - String description = jsonObject.getString("description", null); - if (description != null) { - dataverseToUpdate.setDescription(description); - } - - String affiliation = jsonObject.getString("affiliation", null); - if (affiliation != null) { - dataverseToUpdate.setAffiliation(affiliation); - } - - updateDataverseType(dataverseToUpdate, jsonObject); + public DataverseDTO parseDataverseDTO(JsonObject jsonObject) throws JsonParseException { + DataverseDTO dataverseDTO = new DataverseDTO(); - updateDataverseContacts(dataverseToUpdate, jsonObject); + setDataverseDTOPropertyIfPresent(jsonObject, "alias", dataverseDTO::setAlias); + setDataverseDTOPropertyIfPresent(jsonObject, "name", dataverseDTO::setName); + setDataverseDTOPropertyIfPresent(jsonObject, "description", dataverseDTO::setDescription); + setDataverseDTOPropertyIfPresent(jsonObject, "affiliation", dataverseDTO::setAffiliation); - return dataverseToUpdate; - } - - private void updateDataverseType(Dataverse dataverse, JsonObject jsonObject) { - String receivedDataverseType = jsonObject.getString("dataverseType", null); - if (receivedDataverseType != null) { + String dataverseType = jsonObject.getString("dataverseType", null); + if (dataverseType != null) { Arrays.stream(Dataverse.DataverseType.values()) - .filter(type -> type.name().equals(receivedDataverseType)) + .filter(type -> type.name().equals(dataverseType)) .findFirst() - 
.ifPresent(dataverse::setDataverseType); + .ifPresent(dataverseDTO::setDataverseType); } - } - private void updateDataverseContacts(Dataverse dataverse, JsonObject jsonObject) throws JsonParseException { if (jsonObject.containsKey("dataverseContacts")) { JsonArray dvContacts = jsonObject.getJsonArray("dataverseContacts"); - int i = 0; - List dvContactList = new LinkedList<>(); - for (JsonValue jsv : dvContacts) { - DataverseContact dvc = new DataverseContact(dataverse); - dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); - dvc.setDisplayOrder(i++); - dvContactList.add(dvc); + List contacts = new ArrayList<>(); + for (int i = 0; i < dvContacts.size(); i++) { + JsonObject contactObj = dvContacts.getJsonObject(i); + DataverseContact contact = new DataverseContact(); + contact.setContactEmail(getMandatoryString(contactObj, "contactEmail")); + contact.setDisplayOrder(i); + contacts.add(contact); } - dataverse.setDataverseContacts(dvContactList); + dataverseDTO.setDataverseContacts(contacts); } + + return dataverseDTO; } - + + private void setDataverseDTOPropertyIfPresent(JsonObject jsonObject, String key, Consumer setter) { + String value = jsonObject.getString(key, null); + if (value != null) { + setter.accept(value); + } + } + public DataverseTheme parseDataverseTheme(JsonObject obj) { DataverseTheme theme = new DataverseTheme(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index c311fa1016e..e6ec3cf5400 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1341,6 +1341,26 @@ public void testUpdateDataverse() { apiToken ); updateDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // User with unprivileged API token cannot update Root dataverse + updateDataverseResponse = UtilIT.updateDataverse( + "root", + newAlias, + newName, + 
newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + Response rootCollectionInfoResponse = UtilIT.exportDataverse("root", apiToken); + rootCollectionInfoResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.name", equalTo("Root")); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 2cffa7d921c..f241a5d1dda 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.DataverseTheme.Alignment; import edu.harvard.iq.dataverse.UserNotification.Type; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; @@ -266,32 +267,23 @@ public void testParseCompleteDataverse() throws JsonParseException { } /** - * Test that a JSON object passed for a complete Dataverse update is correctly parsed. - * This checks that all properties are parsed into the correct dataverse properties. + * Test that a JSON object passed for a DataverseDTO is correctly parsed. + * This checks that all properties are parsed into the correct DataverseDTO properties. * @throws JsonParseException when this test is broken. 
*/ @Test - public void parseDataverseUpdates() throws JsonParseException { - Dataverse dataverse = new Dataverse(); - dataverse.setName("Name to update"); - dataverse.setAlias("aliasToUpdate"); - dataverse.setAffiliation("Affiliation to update"); - dataverse.setDescription("Description to update"); - dataverse.setDataverseType(Dataverse.DataverseType.DEPARTMENT); - List originalContacts = new ArrayList<>(); - originalContacts.add(new DataverseContact(dataverse, "updatethis@example.edu")); - dataverse.setDataverseContacts(originalContacts); + public void parseDataverseDTO() throws JsonParseException { JsonObject dvJson; try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { dvJson = Json.createReader(reader).readObject(); - Dataverse actual = sut.parseDataverseUpdates(dvJson, dataverse); + DataverseDTO actual = sut.parseDataverseDTO(dvJson); assertEquals("Scientific Research", actual.getName()); assertEquals("science", actual.getAlias()); assertEquals("Scientific Research University", actual.getAffiliation()); assertEquals("We do all the science.", actual.getDescription()); assertEquals("LABORATORY", actual.getDataverseType().toString()); assertEquals(2, actual.getDataverseContacts().size()); - assertEquals("pi@example.edu,student@example.edu", actual.getContactEmails()); + assertEquals("pi@example.edu,student@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); } catch (IOException ioe) { From 04f7f768fa5df05fa61e4387f08f56b88316dbc4 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:00:30 -0400 Subject: [PATCH 23/49] fix get major version when deaccessioned --- .../edu/harvard/iq/dataverse/Dataset.java | 11 +++++- .../edu/harvard/iq/dataverse/DatasetTest.java | 39 
++++++++++++++++++- 2 files changed, 48 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 40ed491a302..78579b1de21 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -483,8 +483,17 @@ public Date getMostRecentMajorVersionReleaseDate() { if (this.isHarvested()) { return getVersions().get(0).getReleaseTime(); } else { + Long majorVersion = null; for (DatasetVersion version : this.getVersions()) { - if (version.isReleased() && version.getMinorVersionNumber().equals((long) 0)) { + if (version.isReleased()) { + if (version.getMinorVersionNumber().equals((long) 0)) { + return version.getReleaseTime(); + } else if (majorVersion == null) { + majorVersion = version.getVersionNumber(); + } + } else if (version.isDeaccessioned() && majorVersion != null + && majorVersion.longValue() == version.getVersionNumber().longValue() + && version.getMinorVersionNumber().equals((long) 0)) { return version.getReleaseTime(); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java index 2153a336303..687e0af5b81 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java @@ -10,6 +10,7 @@ import static org.junit.jupiter.api.Assertions.*; import java.util.ArrayList; +import java.util.Date; import java.util.List; /** @@ -240,5 +241,41 @@ public void datasetShouldBeDeaccessionedWithDeaccessionedAndDeaccessionedVersion assertTrue(dataset.isDeaccessioned()); } - + + @Test + public void testGetMostRecentMajorVersionReleaseDateWithDeaccessionedVersions() { + List versionList = new ArrayList(); + + long ver = 5; + // 5.2 + DatasetVersion relVersion = new DatasetVersion(); + relVersion.setVersionState(VersionState.RELEASED); + relVersion.setMinorVersionNumber(2L); + 
relVersion.setVersionNumber(ver); + versionList.add(relVersion); + + // 5.1 + relVersion = new DatasetVersion(); + relVersion.setVersionState(VersionState.DEACCESSIONED); + relVersion.setMinorVersionNumber(1L); + relVersion.setVersionNumber(ver); + versionList.add(relVersion); + + // 5.0, 4.0, 3.0, 2.0, 1.0 + while (ver > 0) { + DatasetVersion deaccessionedVersion = new DatasetVersion(); + deaccessionedVersion.setVersionState(VersionState.DEACCESSIONED); + // only add an actual date to v5.0 so the assertNotNull will only pass if this version's date is returned + deaccessionedVersion.setReleaseTime((ver == 5) ? new Date() : null); + deaccessionedVersion.setMinorVersionNumber(0L); + deaccessionedVersion.setVersionNumber(ver--); + versionList.add(deaccessionedVersion); + } + + Dataset dataset = new Dataset(); + dataset.setVersions(versionList); + + Date releaseDate = dataset.getMostRecentMajorVersionReleaseDate(); + assertNotNull(releaseDate); + } } From 7010eada5a48a858ee0517a30f64d22ca236ec49 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:17:18 -0400 Subject: [PATCH 24/49] adding release note --- ...shed-files-appearing-in-search-results-for-anon-user.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md diff --git a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md new file mode 100644 index 00000000000..6e6d575ddcf --- /dev/null +++ b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md @@ -0,0 +1,7 @@ +A bug fix was made that gets the major version of a Dataset when all major version were deaccessioned. This fixes the incorrect showing of the files as unpublished in the search list even when they are published. 
+This fix affects the indexing meaning these datasets must be re-indexed once Dataverse is updated. This can be manually done by calling the index API for each affected Dataset. + +Example: +```shell +curl http://localhost:8080/api/admin/index/dataset?persistentId=doi:10.7910/DVN/6X4ZZL +``` From 81672be40faa97a02498880d1866d30d5f17cf9e Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:18:42 -0400 Subject: [PATCH 25/49] adding release note --- ...published-files-appearing-in-search-results-for-anon-user.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md index 6e6d575ddcf..a43e4ce0d36 100644 --- a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md +++ b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md @@ -1,4 +1,4 @@ -A bug fix was made that gets the major version of a Dataset when all major version were deaccessioned. This fixes the incorrect showing of the files as unpublished in the search list even when they are published. +A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. This fix affects the indexing meaning these datasets must be re-indexed once Dataverse is updated. This can be manually done by calling the index API for each affected Dataset. 
Example: From f2a9b0aee0094c7460c219941fc40c0065cf3e95 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 28 Oct 2024 13:46:49 -0400 Subject: [PATCH 26/49] Update doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md Co-authored-by: Philip Durbin --- ...published-files-appearing-in-search-results-for-anon-user.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md index a43e4ce0d36..126ef957447 100644 --- a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md +++ b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md @@ -1,3 +1,5 @@ +## Unpublished file bug fix + A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. This fix affects the indexing meaning these datasets must be re-indexed once Dataverse is updated. This can be manually done by calling the index API for each affected Dataset. 
From c4c3e2849745a949675ea73d1b1a4ff8a1caadfb Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 28 Oct 2024 13:47:36 -0400 Subject: [PATCH 27/49] Update doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md Co-authored-by: Philip Durbin --- ...published-files-appearing-in-search-results-for-anon-user.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md index 126ef957447..394f44419e4 100644 --- a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md +++ b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md @@ -1,7 +1,7 @@ ## Unpublished file bug fix A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. -This fix affects the indexing meaning these datasets must be re-indexed once Dataverse is updated. This can be manually done by calling the index API for each affected Dataset. +This fix affects the indexing, meaning these datasets must be re-indexed once Dataverse is updated. This can be manually done by calling the index API for each affected Dataset. 
Example: ```shell From 2dfdaa27e3e2f7b2dcb21b2b5ca4f4a47cd245d5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 28 Oct 2024 13:51:14 -0400 Subject: [PATCH 28/49] add #10947 and #10974 to release note --- ...published-files-appearing-in-search-results-for-anon-user.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md index 394f44419e4..66ea04b124f 100644 --- a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md +++ b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md @@ -7,3 +7,5 @@ Example: ```shell curl http://localhost:8080/api/admin/index/dataset?persistentId=doi:10.7910/DVN/6X4ZZL ``` + +See also #10947 and #10974. From 93dd4231fa2d7d180412c93435b6f8e682816bc0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 30 Oct 2024 13:19:30 -0400 Subject: [PATCH 29/49] add test for returnDatasetFieldTypes #10984 --- .../java/edu/harvard/iq/dataverse/api/DataversesIT.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 6a040f27786..0eb2670b272 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,6 +911,15 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + 
.body("data[0].name", is("citation")) + // failing? "fields" is empty, showing {} + .body("data[0].fields.title.displayOnCreate", equalTo(true)); + Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); From 51794df26c00ce5ab9eb1fdbeb7d5aa91e8576fc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 30 Oct 2024 16:45:57 -0400 Subject: [PATCH 30/49] Add FIXME about inheritence. Add "if #10984 fixed" to tests. --- .../iq/dataverse/DatasetFieldServiceBean.java | 2 +- .../iq/dataverse/api/DataversesIT.java | 29 ++++++++++++++----- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 91150b79505..c977ae784bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -947,7 +947,7 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m criteriaQuery.where( criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()), // Match the Dataverse ID. criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID. - metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. + metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. FIXME: inherit blocks from parent datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock. criteriaBuilder.or(includedAsInputLevelPredicate, hasNoInputLevelPredicate), // Include DatasetFieldTypes based on the input level predicates. 
displayedOnCreatePredicate // Apply the display-on-create filter if necessary. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 0eb2670b272..9e3555555e8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,14 +911,27 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); - listMetadataBlocks.prettyPrint(); - listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); - listMetadataBlocks.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].name", is("citation")) - // failing? "fields" is empty, showing {} - .body("data[0].fields.title.displayOnCreate", equalTo(true)); + boolean issue10984fixed = false; + // See https://github.com/IQSS/dataverse/issues/10984 + if (issue10984fixed) { + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[0].fields.title.displayOnCreate", equalTo(true)); + + } else { + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + // "fields" should be more like 28, not 0 + .body("data[0].fields.size()", is(0)); + } Response setMetadataBlocksResponse = 
UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); From db181848cd9c94044222598a97a7cd891a16e8de Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 30 Oct 2024 17:46:56 -0400 Subject: [PATCH 31/49] fixes the validation method in harvesting import that got broken in 10836. #10989 --- .../api/imports/ImportServiceBean.java | 49 +++++++++++++------ 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 66f48bfb872..b203738a9fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -359,12 +359,7 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve if (harvestedVersion.getReleaseTime() == null) { harvestedVersion.setReleaseTime(oaiDateStamp); } - - // is this the right place to call tidyUpFields()? - // usually it is called within the body of the create/update commands - // later on. - DatasetFieldUtil.tidyUpFields(harvestedVersion.getDatasetFields(), true); - + // Check data against validation constraints. 
// Make an attempt to sanitize any invalid fields encountered - // missing required fields or invalid values, by filling the values @@ -382,7 +377,9 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve if (sanitized) { validateVersionMetadata(harvestedVersion, cleanupLog); } - + + DatasetFieldUtil.tidyUpFields(harvestedVersion.getDatasetFields(), true); + if (existingDataset != null) { importedDataset = engineSvc.submit(new UpdateHarvestedDatasetCommand(existingDataset, harvestedVersion, dataverseRequest)); } else { @@ -742,15 +739,35 @@ private boolean validateVersionMetadata(DatasetVersion version, boolean sanitize boolean fixed = false; Set invalidViolations = version.validate(); if (!invalidViolations.isEmpty()) { - for (ConstraintViolation v : invalidViolations) { - DatasetFieldValue f = v.getRootBean(); - - String msg = "Invalid metadata field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " - + "Invalid value: '" + f.getValue() + "'"; - if (sanitize) { - msg += ", replaced with '" + DatasetField.NA_VALUE + "'"; - f.setValue(DatasetField.NA_VALUE); - fixed = true; + for (ConstraintViolation v : invalidViolations) { + Object invalid = v.getRootBean(); + String msg = ""; + if (invalid instanceof DatasetField) { + DatasetField f = (DatasetField) invalid; + + msg += "Missing required field: " + f.getDatasetFieldType().getDisplayName() + ";"; + if (sanitize) { + msg += " populated with '" + DatasetField.NA_VALUE + "'"; + f.setSingleValue(DatasetField.NA_VALUE); + fixed = true; + } + } else if (invalid instanceof DatasetFieldValue) { + DatasetFieldValue fv = (DatasetFieldValue) invalid; + + msg += "Invalid metadata field: " + fv.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + + "Invalid value: '" + fv.getValue() + "'"; + if (sanitize) { + msg += ", replaced with '" + DatasetField.NA_VALUE + "'"; + fv.setValue(DatasetField.NA_VALUE); + fixed = true; + } + } else { + // DatasetVersion.validate() 
can also produce constraint violations + // in TermsOfUse and FileMetadata classes. + // We do not make any attempt to sanitize those. + if (invalid != null) { + msg += "Invalid " + invalid.getClass().getName() + ": " + v.getMessage(); + } } cleanupLog.println(msg); From ed8a889bdc0df615d2f49a66c67ecfa41559099b Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 31 Oct 2024 12:49:26 +0000 Subject: [PATCH 32/49] Fixed: always querying the owner if the dataverse is not MetadataBlock root in findAllInMetadataBlockAndDataverse --- .../iq/dataverse/DatasetFieldServiceBean.java | 4 +++ .../iq/dataverse/api/DataversesIT.java | 29 +++++-------------- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index c977ae784bd..e87d11dd7eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -891,6 +891,10 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl } public List findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate) { + if (!dataverse.isMetadataBlockRoot() && dataverse.getOwner() != null) { + return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate); + } + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(DatasetFieldType.class); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 9e3555555e8..f59d152f6be 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,27 +911,14 @@ public void testListMetadataBlocks() { 
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - boolean issue10984fixed = false; - // See https://github.com/IQSS/dataverse/issues/10984 - if (issue10984fixed) { - Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); - listMetadataBlocks.prettyPrint(); - listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); - listMetadataBlocks.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].name", is("citation")) - .body("data[0].fields.title.displayOnCreate", equalTo(true)); - - } else { - Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); - listMetadataBlocks.prettyPrint(); - listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); - listMetadataBlocks.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].name", is("citation")) - // "fields" should be more like 28, not 0 - .body("data[0].fields.size()", is(0)); - } + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[0].fields.title.displayOnCreate", equalTo(true)) + .body("data[0].fields.size()", is(28)); Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); From cfe9dbddb720a34e3360a18ace8808942bf93d22 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 31 Oct 2024 12:55:52 +0000 Subject: [PATCH 33/49] Removed: FIXME comment --- .../java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index e87d11dd7eb..ded7c83de62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -951,7 +951,7 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m criteriaQuery.where( criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()), // Match the Dataverse ID. criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID. - metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. FIXME: inherit blocks from parent + metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock. criteriaBuilder.or(includedAsInputLevelPredicate, hasNoInputLevelPredicate), // Include DatasetFieldTypes based on the input level predicates. displayedOnCreatePredicate // Apply the display-on-create filter if necessary. 
From f317ab005f241777fc4c8b4be531c7e17f9fdf54 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 31 Oct 2024 14:26:40 +0000 Subject: [PATCH 34/49] Added: tweaks to DataversesIT listMetadataBlocks --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index f59d152f6be..31a6c60bef9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,11 +911,13 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + // New Dataverse should return just the citation block and its displayOnCreate fields when onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); listMetadataBlocks.prettyPrint(); listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocks.then().assertThat() .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) .body("data[0].name", is("citation")) .body("data[0].fields.title.displayOnCreate", equalTo(true)) .body("data[0].fields.size()", is(28)); From e7eace38ad31b2ab9995ac7f1485d477acd035c8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 31 Oct 2024 10:52:12 -0400 Subject: [PATCH 35/49] Update FeatureFlags apiNotes While adding a flag I noticed a typo and missing apiNote --- .../edu/harvard/iq/dataverse/settings/FeatureFlags.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 33e828e619d..20632c170e4 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -97,12 +97,16 @@ public enum FeatureFlags { * for the dataset. * * @apiNote Raise flag by setting - * "dataverse.feature.enable-dataset-thumbnail-autoselect" + * "dataverse.feature.disable-dataset-thumbnail-autoselect" * @since Dataverse 6.4 */ DISABLE_DATASET_THUMBNAIL_AUTOSELECT("disable-dataset-thumbnail-autoselect"), /** * Feature flag for the new Globus upload framework. + * + * @apiNote Raise flag by setting + * "dataverse.feature.globus-use-experimental-async-framework" + * @since Dataverse 6.4 */ GLOBUS_USE_EXPERIMENTAL_ASYNC_FRAMEWORK("globus-use-experimental-async-framework"), ; From 5b2b35d16cc4e76038343401af848c87809403d0 Mon Sep 17 00:00:00 2001 From: Omer Fahim Date: Fri, 1 Nov 2024 12:59:59 -0400 Subject: [PATCH 36/49] Update dev-usage.rst - Add Details for Accessing and Saving Harvesting Logs to Local --- .../source/container/dev-usage.rst | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 6a1edcf7ebd..e481bcfd5ae 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -140,6 +140,48 @@ Alternatives: - If you used Docker Compose for running, you may use ``docker compose -f docker-compose-dev.yml logs ``. Options are the same. +Accessing Harvesting Log Files +------------------------------ + +1. Open a Terminal and Access Dataverse Container +Run the following command to access the Dataverse container (assuming your container is named dataverse-1): + +.. code-block:: +docker exec -it dataverse-1 bash +Code updated + +This command opens an interactive shell within the dataverse-1 container. + +2. Navigate to the Log Files Directory +Once inside the container, navigate to the directory where Dataverse logs are stored: + +.. 
code-block:: +cd /opt/payara/appserver/glassfish/domains/domain1/logs +Code updated + +This directory contains various log files, including those relevant to harvesting. + +3. Create a Directory for Copying Files +Create a directory where you’ll copy the files you want to access on your local machine: + +mkdir /dv/filesToCopy + +This will create a new folder named filesToCopy inside /dv. + +4. Copy the Files to the New Directory +Copy all files from the current directory to the newly created filesToCopy directory: + +cp * /dv/filesToCopy + +This command copies all files in the logs directory to /dv/filesToCopy. + +5. Access the Files on Your Mac +On your Mac, the copied files should appear in the following directory: + +docker-dev-volumes/app/data/filesToCopy + + + Redeploying ----------- From 90665f9ab705634d775656ab53f6b8c17823661e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 1 Nov 2024 13:02:21 -0400 Subject: [PATCH 37/49] Revert "Bump actions/download-artifact from 3 to 4.1.7 in /.github/workflows" (#10987) will check after the reverted code is merged --- .github/workflows/deploy_beta_testing.yml | 2 +- .github/workflows/maven_unit_test.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index efe3e0d8621..028f0140cc9 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -45,7 +45,7 @@ jobs: - uses: actions/checkout@v3 - name: Download war artifact - uses: actions/download-artifact@v4.1.7 + uses: actions/download-artifact@v3 with: name: built-app path: ./ diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 102fb1d5882..a94b17a67ba 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -107,7 +107,7 @@ jobs: cache: maven # Get the build output from the unit test job - - uses: 
actions/download-artifact@v4.1.7 + - uses: actions/download-artifact@v3 with: name: java-artifacts - run: | @@ -140,7 +140,7 @@ jobs: cache: maven # Get the build output from the integration test job - - uses: actions/download-artifact@v4.1.7 + - uses: actions/download-artifact@v3 with: name: java-reportdir - run: tar -xvf java-reportdir.tar From 899cc25996e25a0ddd1cbb514ae912cf927eea9a Mon Sep 17 00:00:00 2001 From: Omer Fahim Date: Fri, 1 Nov 2024 14:17:10 -0400 Subject: [PATCH 38/49] Update doc/sphinx-guides/source/container/dev-usage.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/container/dev-usage.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index e481bcfd5ae..7e4a640a45d 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -147,6 +147,7 @@ Accessing Harvesting Log Files Run the following command to access the Dataverse container (assuming your container is named dataverse-1): .. code-block:: + docker exec -it dataverse-1 bash Code updated From 356da8fc09c429d8776698b4bc8aa81333cb56c4 Mon Sep 17 00:00:00 2001 From: Omer Fahim Date: Fri, 1 Nov 2024 14:21:04 -0400 Subject: [PATCH 39/49] Update doc/sphinx-guides/source/container/dev-usage.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/container/dev-usage.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 7e4a640a45d..bd2ff6f0382 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -156,6 +156,7 @@ This command opens an interactive shell within the dataverse-1 container. 2. Navigate to the Log Files Directory Once inside the container, navigate to the directory where Dataverse logs are stored: + .. 
code-block:: cd /opt/payara/appserver/glassfish/domains/domain1/logs Code updated From 9119c4b4799fd4a14223ac9c07471ef8db34f095 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:30:37 -0400 Subject: [PATCH 40/49] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index bd2ff6f0382..7b5274844f6 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -149,7 +149,6 @@ Run the following command to access the Dataverse container (assuming your conta .. code-block:: docker exec -it dataverse-1 bash -Code updated This command opens an interactive shell within the dataverse-1 container. @@ -159,28 +158,33 @@ Once inside the container, navigate to the directory where Dataverse logs are st .. code-block:: cd /opt/payara/appserver/glassfish/domains/domain1/logs -Code updated This directory contains various log files, including those relevant to harvesting. 3. Create a Directory for Copying Files Create a directory where you’ll copy the files you want to access on your local machine: -mkdir /dv/filesToCopy +.. code-block:: + + mkdir /dv/filesToCopy This will create a new folder named filesToCopy inside /dv. 4. Copy the Files to the New Directory Copy all files from the current directory to the newly created filesToCopy directory: -cp * /dv/filesToCopy +.. code-block:: + + cp * /dv/filesToCopy This command copies all files in the logs directory to /dv/filesToCopy. 5. Access the Files on Your Mac On your Mac, the copied files should appear in the following directory: -docker-dev-volumes/app/data/filesToCopy +.. 
code-block:: + + docker-dev-volumes/app/data/filesToCopy From 41a1d13288c31c938395bdab80601bc916391a15 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:39:21 -0400 Subject: [PATCH 41/49] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 7b5274844f6..da7d6845bb5 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -148,16 +148,16 @@ Run the following command to access the Dataverse container (assuming your conta .. code-block:: -docker exec -it dataverse-1 bash + docker exec -it dataverse-1 bash This command opens an interactive shell within the dataverse-1 container. 2. Navigate to the Log Files Directory Once inside the container, navigate to the directory where Dataverse logs are stored: - .. code-block:: -cd /opt/payara/appserver/glassfish/domains/domain1/logs + + cd /opt/payara/appserver/glassfish/domains/domain1/logs This directory contains various log files, including those relevant to harvesting. 
@@ -186,9 +186,6 @@ On your Mac, the copied files should appear in the following directory: docker-dev-volumes/app/data/filesToCopy - - - Redeploying ----------- From 432feb0cba801a982b7d156b8d1d19a6625817a4 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:48:05 -0400 Subject: [PATCH 42/49] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index da7d6845bb5..bfad3d34cf2 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -143,7 +143,7 @@ Alternatives: Accessing Harvesting Log Files ------------------------------ -1. Open a Terminal and Access Dataverse Container +1. Open a terminal and access Dataverse container. Run the following command to access the Dataverse container (assuming your container is named dataverse-1): .. code-block:: @@ -152,7 +152,7 @@ Run the following command to access the Dataverse container (assuming your conta This command opens an interactive shell within the dataverse-1 container. -2. Navigate to the Log Files Directory +2. Navigate to the Log Files directory Once inside the container, navigate to the directory where Dataverse logs are stored: .. 
code-block:: From 0d59b16f8c05ddc190f38efaf1891cf6750f8107 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:57:15 -0400 Subject: [PATCH 43/49] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index bfad3d34cf2..27fbb28dd26 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -143,7 +143,7 @@ Alternatives: Accessing Harvesting Log Files ------------------------------ -1. Open a terminal and access Dataverse container. +1. Open a terminal and access the Dataverse container. Run the following command to access the Dataverse container (assuming your container is named dataverse-1): .. code-block:: @@ -152,7 +152,7 @@ Run the following command to access the Dataverse container (assuming your conta This command opens an interactive shell within the dataverse-1 container. -2. Navigate to the Log Files directory +2. Navigate to the Log files directory. Once inside the container, navigate to the directory where Dataverse logs are stored: .. code-block:: @@ -161,7 +161,7 @@ Once inside the container, navigate to the directory where Dataverse logs are st This directory contains various log files, including those relevant to harvesting. -3. Create a Directory for Copying Files +3. Create a directory for copying files. Create a directory where you’ll copy the files you want to access on your local machine: .. code-block:: @@ -170,7 +170,7 @@ Create a directory where you’ll copy the files you want to access on your loca This will create a new folder named filesToCopy inside /dv. -4. Copy the Files to the New Directory +4. Copy the files to the new directory. Copy all files from the current directory to the newly created filesToCopy directory: .. 
code-block:: @@ -179,7 +179,7 @@ Copy all files from the current directory to the newly created filesToCopy direc This command copies all files in the logs directory to /dv/filesToCopy. -5. Access the Files on Your Mac +5. Access the files on Your Mac On your Mac, the copied files should appear in the following directory: .. code-block:: From c76d62a6200382e6b584e220acde64bfd33716d2 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 15:03:46 -0400 Subject: [PATCH 44/49] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 27fbb28dd26..80e3cac989c 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -141,7 +141,7 @@ Alternatives: Options are the same. Accessing Harvesting Log Files ------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 1. Open a terminal and access the Dataverse container. Run the following command to access the Dataverse container (assuming your container is named dataverse-1): From e28b6d342eddc97e2632a53cdca17cf789d58022 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 1 Nov 2024 15:27:17 -0400 Subject: [PATCH 45/49] tweaks #10996 --- .../source/container/dev-usage.rst | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 80e3cac989c..c02c1d4010f 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -143,7 +143,8 @@ Alternatives: Accessing Harvesting Log Files ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -1. Open a terminal and access the Dataverse container. +\1. Open a terminal and access the Dataverse container. 
+ Run the following command to access the Dataverse container (assuming your container is named dataverse-1): .. code-block:: @@ -152,7 +153,8 @@ Run the following command to access the Dataverse container (assuming your conta This command opens an interactive shell within the dataverse-1 container. -2. Navigate to the Log files directory. +\2. Navigate to the log files directory. + Once inside the container, navigate to the directory where Dataverse logs are stored: .. code-block:: @@ -161,8 +163,9 @@ Once inside the container, navigate to the directory where Dataverse logs are st This directory contains various log files, including those relevant to harvesting. -3. Create a directory for copying files. -Create a directory where you’ll copy the files you want to access on your local machine: +\3. Create a directory for copying files. + +Create a directory where you'll copy the files you want to access on your local machine: .. code-block:: @@ -170,7 +173,8 @@ Create a directory where you’ll copy the files you want to access on your loca This will create a new folder named filesToCopy inside /dv. -4. Copy the files to the new directory. +\4. Copy the files to the new directory. + Copy all files from the current directory to the newly created filesToCopy directory: .. code-block:: @@ -179,8 +183,9 @@ Copy all files from the current directory to the newly created filesToCopy direc This command copies all files in the logs directory to /dv/filesToCopy. -5. Access the files on Your Mac -On your Mac, the copied files should appear in the following directory: +\5. Access the files on your local machine. + +On your local machine, the copied files should appear in the following directory: .. 
code-block:: From 052262fe5badf98395704773f6ddfc4a179d9897 Mon Sep 17 00:00:00 2001 From: jo-pol Date: Mon, 4 Nov 2024 10:25:02 +0100 Subject: [PATCH 46/49] replaced deprecated mime type with mp4 --- .../propertyFiles/MimeTypeDetectionByFileExtension.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties index 05e61a40c17..4507c22fdf8 100644 --- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties +++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties @@ -15,7 +15,7 @@ m=text/x-matlab mat=application/matlab-mat md=text/markdown mp3=audio/mp3 -m4a=audio/x-m4a +m4a=audio/mp4 nii=image/nii nc=application/netcdf ods=application/vnd.oasis.opendocument.spreadsheet From a55d31f19f9d422e2160d15b68d9519c9e29d394 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 4 Nov 2024 16:19:17 +0000 Subject: [PATCH 47/49] Fixed: unit test assertion in JsonParserTest --- .../java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index f241a5d1dda..236344a9200 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -283,7 +283,7 @@ public void parseDataverseDTO() throws JsonParseException { assertEquals("We do all the science.", actual.getDescription()); assertEquals("LABORATORY", actual.getDataverseType().toString()); assertEquals(2, actual.getDataverseContacts().size()); - assertEquals("pi@example.edu,student@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); + assertEquals("pi@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); 
assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); } catch (IOException ioe) { From 1ed0d304307e8839493b00aa48cf1002ec8e5afa Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 4 Nov 2024 16:24:14 +0000 Subject: [PATCH 48/49] Added: assertion to JsonParserTest --- .../java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 236344a9200..52e9c6de678 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -284,6 +284,7 @@ public void parseDataverseDTO() throws JsonParseException { assertEquals("LABORATORY", actual.getDataverseType().toString()); assertEquals(2, actual.getDataverseContacts().size()); assertEquals("pi@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); + assertEquals("student@example.edu", actual.getDataverseContacts().get(1).getContactEmail()); assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); } catch (IOException ioe) { From b1dcb00b8ad46549e7f74304b11e2dcc9d3a1e64 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 4 Nov 2024 16:25:50 +0000 Subject: [PATCH 49/49] Refactor: JsonParserTest.parseDataverseDTO --- .../iq/dataverse/util/json/JsonParserTest.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 52e9c6de678..d1cb30e2bc3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -277,16 +277,17 @@ public void parseDataverseDTO() throws JsonParseException { try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { dvJson = Json.createReader(reader).readObject(); DataverseDTO actual = sut.parseDataverseDTO(dvJson); + List actualDataverseContacts = actual.getDataverseContacts(); assertEquals("Scientific Research", actual.getName()); assertEquals("science", actual.getAlias()); assertEquals("Scientific Research University", actual.getAffiliation()); assertEquals("We do all the science.", actual.getDescription()); assertEquals("LABORATORY", actual.getDataverseType().toString()); - assertEquals(2, actual.getDataverseContacts().size()); - assertEquals("pi@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); - assertEquals("student@example.edu", actual.getDataverseContacts().get(1).getContactEmail()); - assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); - assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); + assertEquals(2, actualDataverseContacts.size()); + assertEquals("pi@example.edu", actualDataverseContacts.get(0).getContactEmail()); + assertEquals("student@example.edu", actualDataverseContacts.get(1).getContactEmail()); + assertEquals(0, actualDataverseContacts.get(0).getDisplayOrder()); + assertEquals(1, actualDataverseContacts.get(1).getDisplayOrder()); } catch (IOException ioe) { throw new JsonParseException("Couldn't read test file", ioe); }