Skip to content

Commit

Permalink
Merge pull request #10644 from IQSS/10633-create-collection-api-ext
Browse files Browse the repository at this point in the history
addDataverse API endpoint extension for input levels and facet list setup
  • Loading branch information
landreev authored Jul 25, 2024
2 parents 1e7e9f1 + b65b5a3 commit 650003e
Show file tree
Hide file tree
Showing 14 changed files with 468 additions and 90 deletions.
1 change: 1 addition & 0 deletions doc/release-notes/10633-add-dataverse-api-extension.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
The addDataverse (/api/dataverses/{identifier}) API endpoint has been extended to allow adding metadata blocks, input levels and facet ids at creation time, as the Dataverse page in create mode does in JSF.
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
{
"name": "Scientific Research",
"alias": "science",
"dataverseContacts": [
{
"contactEmail": "[email protected]"
},
{
"contactEmail": "[email protected]"
}
],
"affiliation": "Scientific Research University",
"description": "We do all the science.",
"dataverseType": "LABORATORY",
"metadataBlocks": {
"metadataBlockNames": [
"citation", "geospatial"
],
"inputLevels": [
{
"datasetFieldTypeName": "geographicCoverage",
"include": true,
"required": true
},
{
"datasetFieldTypeName": "country",
"include": true,
"required": true
},
{
"datasetFieldTypeName": "geographicUnit",
"include": false,
"required": false
},
{
"datasetFieldTypeName": "geographicBoundingBox",
"include": false,
"required": false
},
{
"datasetFieldTypeName": "westLongitude",
"include": false,
"required": false
},
{
"datasetFieldTypeName": "eastLongitude",
"include": false,
"required": false
},
{
"datasetFieldTypeName": "northLatitude",
"include": false,
"required": false
},
{
"datasetFieldTypeName": "southLatitude",
"include": false,
"required": false
}
],
"facetIds": [
"authorName", "authorAffiliation"
]
}
}
27 changes: 27 additions & 0 deletions doc/sphinx-guides/source/api/native-api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,14 @@ The fully expanded example above (without environment variables) looks like this
You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the newly-created Dataverse collection.

The request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects:

- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection.
- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata block names are not specified in the ``metadataBlockNames`` field, they will be added automatically to the Dataverse collection.
- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array.

To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs.

.. _view-dataverse:

View a Dataverse Collection
Expand Down Expand Up @@ -258,6 +266,25 @@ The fully expanded example above (without environment variables) looks like this
curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets"
List Field Type Input Levels Configured for a Dataverse Collection
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

|CORS| List the dataverse field type input levels configured for a given Dataverse collection ``id``:

.. code-block:: bash
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
export ID=root
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/inputLevels"
The fully expanded example above (without environment variables) looks like this:

.. code-block:: bash
curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/inputLevels"
Set Metadata Block Facets for a Dataverse Collection
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Expand Down
16 changes: 14 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/Dataverse.java
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import jakarta.persistence.OneToOne;
import jakarta.persistence.OrderBy;
import jakarta.persistence.Table;
import jakarta.persistence.Transient;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import jakarta.validation.constraints.Size;
Expand Down Expand Up @@ -593,7 +592,7 @@ public void setDataverseTheme(DataverseTheme dataverseTheme) {
}

public void setMetadataBlocks(List<MetadataBlock> metadataBlocks) {
this.metadataBlocks = metadataBlocks;
this.metadataBlocks = new ArrayList<>(metadataBlocks);
}

public List<DatasetFieldType> getCitationDatasetFieldTypes() {
Expand Down Expand Up @@ -834,4 +833,17 @@ public boolean isAncestorOf( DvObject other ) {
public String getLocalURL() {
return SystemConfig.getDataverseSiteUrlStatic() + "/dataverse/" + this.getAlias();
}

/**
 * Ensures that the metadata block backing each of the given input levels is
 * attached to this dataverse, adding any block that is not yet present.
 *
 * @param inputLevels input level configurations whose field types' metadata
 *                    blocks must be available on this dataverse
 */
public void addInputLevelsMetadataBlocksIfNotPresent(List<DataverseFieldTypeInputLevel> inputLevels) {
    for (DataverseFieldTypeInputLevel level : inputLevels) {
        MetadataBlock requiredBlock = level.getDatasetFieldType().getMetadataBlock();
        // Add each block at most once, even if several input levels reference it.
        if (!hasMetadataBlock(requiredBlock)) {
            metadataBlocks.add(requiredBlock);
        }
    }
}

/** Returns true if a metadata block with the same id is already attached to this dataverse. */
private boolean hasMetadataBlock(MetadataBlock metadataBlock) {
    for (MetadataBlock existing : metadataBlocks) {
        if (existing.getId().equals(metadataBlock.getId())) {
            return true;
        }
    }
    return false;
}
}
114 changes: 83 additions & 31 deletions src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
import edu.harvard.iq.dataverse.util.StringUtil;
import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;

import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
import edu.harvard.iq.dataverse.util.json.JsonParseException;
Expand Down Expand Up @@ -60,8 +61,7 @@
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.Response.Status;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;

import java.io.IOException;
import java.io.OutputStream;
import java.text.MessageFormat;
Expand Down Expand Up @@ -123,35 +123,47 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) {
@AuthRequired
@Path("{identifier}")
public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) {

Dataverse d;
JsonObject dvJson;
Dataverse newDataverse;
JsonObject newDataverseJson;
try {
dvJson = JsonUtil.getJsonObject(body);
d = jsonParser().parseDataverse(dvJson);
newDataverseJson = JsonUtil.getJsonObject(body);
newDataverse = jsonParser().parseDataverse(newDataverseJson);
} catch (JsonParsingException jpe) {
logger.log(Level.SEVERE, "Json: {0}", body);
return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage()));
} catch (JsonParseException ex) {
logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex);
return error(Response.Status.BAD_REQUEST,
"Error parsing the POSTed json into a dataverse: " + ex.getMessage());
return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage()));
}

try {
JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks");
List<DataverseFieldTypeInputLevel> inputLevels = null;
List<MetadataBlock> metadataBlocks = null;
if (metadataBlocksJson != null) {
JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels");
inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null;

JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames");
metadataBlocks = metadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null;
}

JsonArray facetIdsArray = newDataverseJson.getJsonArray("facetIds");
List<DatasetFieldType> facetList = facetIdsArray != null ? parseFacets(facetIdsArray) : null;

if (!parentIdtf.isEmpty()) {
Dataverse owner = findDataverseOrDie(parentIdtf);
d.setOwner(owner);
newDataverse.setOwner(owner);
}

// set the dataverse - contact relationship in the contacts
for (DataverseContact dc : d.getDataverseContacts()) {
dc.setDataverse(d);
for (DataverseContact dc : newDataverse.getDataverseContacts()) {
dc.setDataverse(newDataverse);
}

AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc);
d = execCommand(new CreateDataverseCommand(d, createDataverseRequest(u), null, null));
return created("/dataverses/" + d.getAlias(), json(d));
newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks));
return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse));
} catch (WrappedResponse ww) {

String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause());
Expand Down Expand Up @@ -179,7 +191,21 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body,

}
}


/**
 * Resolves the metadata block names supplied at dataverse creation time into
 * {@link MetadataBlock} entities, preserving the order they were supplied in.
 *
 * @param metadataBlockNamesArray JSON array of metadata block name strings
 * @return the resolved metadata blocks
 * @throws WrappedResponse bad request when a name does not match any known block
 */
private List<MetadataBlock> parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse {
    List<MetadataBlock> resolvedBlocks = new ArrayList<>();
    for (JsonString blockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) {
        MetadataBlock block = metadataBlockSvc.findByName(blockName.getString());
        if (block == null) {
            // Reject the whole request on the first unknown block name.
            throw new WrappedResponse(badRequest(MessageFormat.format(
                    BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.invalidmetadatablockname"),
                    blockName)));
        }
        resolvedBlocks.add(block);
    }
    return resolvedBlocks;
}

@POST
@AuthRequired
@Path("{identifier}/validateDatasetJson")
Expand Down Expand Up @@ -637,43 +663,73 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam
}
}

/**
 * API endpoint: lists the dataset field type input levels configured for the
 * dataverse collection identified by {@code identifier}.
 */
@GET
@AuthRequired
@Path("{identifier}/inputLevels")
public Response getInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier) {
    try {
        Dataverse dataverse = findDataverseOrDie(identifier);
        List<DataverseFieldTypeInputLevel> levels = execCommand(
                new ListDataverseInputLevelsCommand(createDataverseRequest(getRequestUser(crc)), dataverse));
        return ok(jsonDataverseInputLevels(levels));
    } catch (WrappedResponse wr) {
        return wr.getResponse();
    }
}

/**
 * API endpoint: replaces the dataset field type input levels configured for
 * the dataverse collection identified by {@code identifier} with the ones
 * supplied in the request body.
 *
 * @param jsonBody a bare JSON array of input level objects (see parseInputLevels)
 */
@PUT
@AuthRequired
@Path("{identifier}/inputLevels")
public Response updateInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, String jsonBody) {
    try {
        Dataverse dataverse = findDataverseOrDie(identifier);
        // The body is a raw JSON array; read it here so the shared parseInputLevels
        // helper can accept an already-parsed JsonArray from other call sites too.
        List<DataverseFieldTypeInputLevel> newInputLevels =
                parseInputLevels(Json.createReader(new StringReader(jsonBody)).readArray(), dataverse);
        execCommand(new UpdateDataverseInputLevelsCommand(dataverse, createDataverseRequest(getRequestUser(crc)), newInputLevels));
        return ok(BundleUtil.getStringFromBundle("dataverse.update.success"), JsonPrinter.json(dataverse));
    } catch (WrappedResponse e) {
        return e.getResponse();
    }
}

private List<DataverseFieldTypeInputLevel> parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse {
JsonArray inputLevelsArray = Json.createReader(new StringReader(jsonBody)).readArray();

private List<DataverseFieldTypeInputLevel> parseInputLevels(JsonArray inputLevelsArray, Dataverse dataverse) throws WrappedResponse {
List<DataverseFieldTypeInputLevel> newInputLevels = new ArrayList<>();
for (JsonValue value : inputLevelsArray) {
JsonObject inputLevel = (JsonObject) value;
String datasetFieldTypeName = inputLevel.getString("datasetFieldTypeName");
DatasetFieldType datasetFieldType = datasetFieldSvc.findByName(datasetFieldTypeName);

if (datasetFieldType == null) {
String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.updateinputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
throw new WrappedResponse(badRequest(errorMessage));
}

boolean required = inputLevel.getBoolean("required");
boolean include = inputLevel.getBoolean("include");

if (required && !include) {
String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.cannotberequiredifnotincluded"), datasetFieldTypeName);
throw new WrappedResponse(badRequest(errorMessage));
}

newInputLevels.add(new DataverseFieldTypeInputLevel(datasetFieldType, dataverse, required, include));
}

return newInputLevels;
}

/**
 * Resolves a JSON array of facet field ids into facetable
 * {@link DatasetFieldType} entities, preserving the supplied order (which
 * defines the facet display order).
 *
 * @param facetsArray JSON array of dataset field type name strings
 * @return the resolved, facetable field types
 * @throws WrappedResponse bad request when a name is unknown or the field is
 *                         not facetable
 */
private List<DatasetFieldType> parseFacets(JsonArray facetsArray) throws WrappedResponse {
    // ArrayList: we only append and iterate, so it is the better default than LinkedList.
    List<DatasetFieldType> facets = new ArrayList<>();
    for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) {
        DatasetFieldType dsfType = findDatasetFieldType(facetId.getString());
        if (dsfType == null) {
            throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfound"), facetId)));
        } else if (!dsfType.isFacetable()) {
            throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfacetable"), facetId)));
        }
        facets.add(dsfType);
    }
    return facets;
}

@DELETE
@AuthRequired
@Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}")
Expand Down Expand Up @@ -922,16 +978,12 @@ public Response deleteFeaturedCollections(@Context ContainerRequestContext crc,
* (judging by the UI). This triggers a 500 when '-d @foo.json' is used.
*/
public Response setFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String facetIds) {

List<DatasetFieldType> facets = new LinkedList<>();
for (JsonString facetId : Util.asJsonArray(facetIds).getValuesAs(JsonString.class)) {
DatasetFieldType dsfType = findDatasetFieldType(facetId.getString());
if (dsfType == null) {
return error(Response.Status.BAD_REQUEST, "Can't find dataset field type '" + facetId + "'");
} else if (!dsfType.isFacetable()) {
return error(Response.Status.BAD_REQUEST, "Dataset field type '" + facetId + "' is not facetable");
}
facets.add(dsfType);
JsonArray jsonArray = Util.asJsonArray(facetIds);
List<DatasetFieldType> facets;
try {
facets = parseFacets(jsonArray);
} catch (WrappedResponse e) {
return e.getResponse();
}

try {
Expand Down
Loading

0 comments on commit 650003e

Please sign in to comment.