- final List<String> errors = passwordValidatorService.validate(password, new Date(), false);
- final JsonArrayBuilder errorArray = Json.createArrayBuilder();
- errors.forEach(errorArray::add);
- return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray));
- }
-
- @GET
- @Path("/isOrcid")
- public Response isOrcidEnabled() {
- return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you.");
- }
+ /**
+ * This method is used in API tests, called from UtilIT.java.
+ */
+ @GET
+ @Path("datasets/thumbnailMetadata/{id}")
+ public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) {
+ Dataset dataset = datasetSvc.find(idSupplied);
+ if (dataset == null) {
+ return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
+ }
+ JsonObjectBuilder data = Json.createObjectBuilder();
+ DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
+ data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail());
+ data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE));
+ if (datasetThumbnail != null) {
+ data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image());
+ DataFile dataFile = datasetThumbnail.getDataFile();
+ if (dataFile != null) {
+ /**
+ * @todo Change this from a String to a long.
+ */
+ data.add("dataFileId", dataFile.getId().toString());
+ }
+ }
+ return ok(data);
+ }
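+ // Illustrative request/response (hypothetical id and values, for orientation only):
+ //   GET /api/admin/datasets/thumbnailMetadata/42
+ //   -> {"status":"OK","data":{"isUseGenericThumbnail":false,"datasetLogoPresent":false,...}}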
+
+ /**
+ * validatePassword
+ *
+ * Validate a password with an API call
+ *
+ * @param password
+ * The password
+ * @return A response with the validation result.
+ */
+ @Path("validatePassword")
+ @POST
+ public Response validatePassword(String password) {
+
+ final List<String> errors = passwordValidatorService.validate(password, new Date(), false);
+ final JsonArrayBuilder errorArray = Json.createArrayBuilder();
+ errors.forEach(errorArray::add);
+ return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray));
+ }
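+ // Illustrative call (hypothetical password value; response shape sketched):
+ //   curl -X POST -d "pass123" $SERVER/api/admin/validatePassword
+ //   -> {"status":"OK","data":{"password":"pass123","errors":[...]}}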
+
+ @GET
+ @Path("/isOrcid")
+ public Response isOrcidEnabled() {
+ return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you.");
+ }
@POST
- @AuthRequired
+ @AuthRequired
@Path("{id}/reregisterHDLToPID")
public Response reregisterHdlToPID(@Context ContainerRequestContext crc, @PathParam("id") String id) {
logger.info("Starting to reregister " + id + " Dataset Id. (from hdl to doi)" + new Date());
@@ -1825,7 +1805,7 @@ public Response updateHashValues(@Context ContainerRequestContext crc, @PathPara
}
@POST
- @AuthRequired
+ @AuthRequired
@Path("/computeDataFileHashValue/{fileId}/algorithm/{alg}")
public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @PathParam("alg") String alg) {
@@ -1887,7 +1867,7 @@ public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @
}
@POST
- @AuthRequired
+ @AuthRequired
@Path("/validateDataFileHashValue/{fileId}")
public Response validateDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId) {
@@ -1954,7 +1934,7 @@ public Response validateDataFileHashValue(@Context ContainerRequestContext crc,
}
@POST
- @AuthRequired
+ @AuthRequired
@Path("/submitDatasetVersionToArchive/{id}/{version}")
public Response submitDatasetVersionToArchive(@Context ContainerRequestContext crc, @PathParam("id") String dsid,
@PathParam("version") String versionNumber) {
@@ -2027,7 +2007,7 @@ public void run() {
* @return
*/
@POST
- @AuthRequired
+ @AuthRequired
@Path("/archiveAllUnarchivedDatasetVersions")
public Response archiveAllUnarchivedDatasetVersions(@Context ContainerRequestContext crc, @QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) {
@@ -2126,7 +2106,7 @@ public Response clearMetricsCacheByName(@PathParam("name") String name) {
}
@GET
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/{alias}/addRoleAssignmentsToChildren")
public Response addRoleAssignementsToChildren(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
Dataverse owner = dataverseSvc.findByAlias(alias);
@@ -2157,90 +2137,90 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c
}
@GET
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/{alias}/storageDriver")
public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
- Dataverse dataverse = dataverseSvc.findByAlias(alias);
- if (dataverse == null) {
- return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
- }
- try {
- AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
- if (!user.isSuperuser()) {
- return error(Response.Status.FORBIDDEN, "Superusers only.");
- }
- } catch (WrappedResponse wr) {
- return wr.getResponse();
- }
- //Note that this returns what's set directly on this dataverse. If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver
- return ok(dataverse.getStorageDriverId());
+ Dataverse dataverse = dataverseSvc.findByAlias(alias);
+ if (dataverse == null) {
+ return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
+ }
+ try {
+ AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
+ if (!user.isSuperuser()) {
+ return error(Response.Status.FORBIDDEN, "Superusers only.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ //Note that this returns what's set directly on this dataverse. If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver
+ return ok(dataverse.getStorageDriverId());
}
@PUT
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/{alias}/storageDriver")
public Response setStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias, String label) throws WrappedResponse {
- Dataverse dataverse = dataverseSvc.findByAlias(alias);
- if (dataverse == null) {
- return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
- }
- try {
- AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
- if (!user.isSuperuser()) {
- return error(Response.Status.FORBIDDEN, "Superusers only.");
- }
- } catch (WrappedResponse wr) {
- return wr.getResponse();
- }
- for (Entry<String, String> store: DataAccess.getStorageDriverLabels().entrySet()) {
- if(store.getKey().equals(label)) {
- dataverse.setStorageDriverId(store.getValue());
- return ok("Storage set to: " + store.getKey() + "/" + store.getValue());
- }
- }
- return error(Response.Status.BAD_REQUEST,
- "No Storage Driver found for : " + label);
+ Dataverse dataverse = dataverseSvc.findByAlias(alias);
+ if (dataverse == null) {
+ return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
+ }
+ try {
+ AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
+ if (!user.isSuperuser()) {
+ return error(Response.Status.FORBIDDEN, "Superusers only.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ for (Entry<String, String> store: DataAccess.getStorageDriverLabels().entrySet()) {
+ if(store.getKey().equals(label)) {
+ dataverse.setStorageDriverId(store.getValue());
+ return ok("Storage set to: " + store.getKey() + "/" + store.getValue());
+ }
+ }
+ return error(Response.Status.BAD_REQUEST,
+ "No Storage Driver found for : " + label);
}
@DELETE
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/{alias}/storageDriver")
public Response resetStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
- Dataverse dataverse = dataverseSvc.findByAlias(alias);
- if (dataverse == null) {
- return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
- }
- try {
- AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
- if (!user.isSuperuser()) {
- return error(Response.Status.FORBIDDEN, "Superusers only.");
- }
- } catch (WrappedResponse wr) {
- return wr.getResponse();
- }
- dataverse.setStorageDriverId("");
- return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
+ Dataverse dataverse = dataverseSvc.findByAlias(alias);
+ if (dataverse == null) {
+ return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
+ }
+ try {
+ AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
+ if (!user.isSuperuser()) {
+ return error(Response.Status.FORBIDDEN, "Superusers only.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ dataverse.setStorageDriverId("");
+ return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
}
@GET
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/storageDrivers")
public Response listStorageDrivers(@Context ContainerRequestContext crc) throws WrappedResponse {
- try {
- AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
- if (!user.isSuperuser()) {
- return error(Response.Status.FORBIDDEN, "Superusers only.");
- }
- } catch (WrappedResponse wr) {
- return wr.getResponse();
- }
- JsonObjectBuilder bld = jsonObjectBuilder();
- DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue()));
- return ok(bld);
+ try {
+ AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
+ if (!user.isSuperuser()) {
+ return error(Response.Status.FORBIDDEN, "Superusers only.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ JsonObjectBuilder bld = jsonObjectBuilder();
+ DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue()));
+ return ok(bld);
}
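// Illustrative superuser calls for the driver endpoints above (hypothetical alias "root"):
//   GET    /api/admin/dataverse/root/storageDriver   -> driver id set directly on this collection
//   PUT    /api/admin/dataverse/root/storageDriver   (body: a driver label) -> assigns that driver
//   DELETE /api/admin/dataverse/root/storageDriver   -> resets to the default driver
//   GET    /api/admin/dataverse/storageDrivers       -> map of all configured driver labels/ids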
@GET
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/{alias}/curationLabelSet")
public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
Dataverse dataverse = dataverseSvc.findByAlias(alias);
@@ -2262,7 +2242,7 @@ public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathP
}
@PUT
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/{alias}/curationLabelSet")
public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias, @QueryParam("name") String name) throws WrappedResponse {
Dataverse dataverse = dataverseSvc.findByAlias(alias);
@@ -2293,7 +2273,7 @@ public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathP
}
@DELETE
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/{alias}/curationLabelSet")
public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
Dataverse dataverse = dataverseSvc.findByAlias(alias);
@@ -2313,7 +2293,7 @@ public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @Pat
}
@GET
- @AuthRequired
+ @AuthRequired
@Path("/dataverse/curationLabelSets")
public Response listCurationLabelSets(@Context ContainerRequestContext crc) throws WrappedResponse {
try {
@@ -2423,7 +2403,7 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon
}
@POST
- @AuthRequired
+ @AuthRequired
@Consumes("application/json")
@Path("/requestSignedUrl")
public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject urlInfo) {
@@ -2541,4 +2521,160 @@ public Response getFeatureFlag(@PathParam("flag") String flagIn) {
}
}
+ @GET
+ @AuthRequired
+ @Path("/datafiles/auditFiles")
+ public Response getAuditFiles(@Context ContainerRequestContext crc,
+ @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId,
+ @QueryParam("datasetIdentifierList") String datasetIdentifierList) throws WrappedResponse {
+ try {
+ AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
+ if (!user.isSuperuser()) {
+ return error(Response.Status.FORBIDDEN, "Superusers only.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+
+ int datasetsChecked = 0;
+ long startId = (firstId == null ? 0 : firstId);
+ long endId = (lastId == null ? Long.MAX_VALUE : lastId);
+
+ List<String> datasetIdentifiers;
+ if (datasetIdentifierList == null || datasetIdentifierList.isEmpty()) {
+ datasetIdentifiers = Collections.emptyList();
+ } else {
+ startId = 0;
+ endId = Long.MAX_VALUE;
+ datasetIdentifiers = List.of(datasetIdentifierList.split(","));
+ }
+ if (endId < startId) {
+ return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId");
+ }
+
+ NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder();
+ JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder();
+ JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder();
+
+ if (startId > 0) {
+ jsonObjectBuilder.add("firstId", startId);
+ }
+ if (endId < Long.MAX_VALUE) {
+ jsonObjectBuilder.add("lastId", endId);
+ }
+
+ // compile the list of ids to process
+ List<Long> datasetIds;
+ if (datasetIdentifiers.isEmpty()) {
+ datasetIds = datasetService.findAllLocalDatasetIds();
+ } else {
+ datasetIds = new ArrayList<>(datasetIdentifiers.size());
+ JsonArrayBuilder jab = Json.createArrayBuilder();
+ datasetIdentifiers.forEach(id -> {
+ String dId = id.trim();
+ jab.add(dId);
+ Dataset d = datasetService.findByGlobalId(dId);
+ if (d != null) {
+ datasetIds.add(d.getId());
+ } else {
+ NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder();
+ job.add("datasetIdentifier",dId);
+ job.add("reason","Not Found");
+ jsonFailuresArrayBuilder.add(job);
+ }
+ });
+ jsonObjectBuilder.add("datasetIdentifierList", jab);
+ }
+
+ for (Long datasetId : datasetIds) {
+ if (datasetId < startId) {
+ continue;
+ } else if (datasetId > endId) {
+ break;
+ }
+ Dataset dataset;
+ try {
+ dataset = findDatasetOrDie(String.valueOf(datasetId));
+ datasetsChecked++;
+ } catch (WrappedResponse e) {
+ NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder();
+ job.add("datasetId", datasetId);
+ job.add("reason", e.getMessage());
+ jsonFailuresArrayBuilder.add(job);
+ continue;
+ }
+
+ List<String> missingFiles = new ArrayList<>();
+ List<String> missingFileMetadata = new ArrayList<>();
+ try {
+ Predicate<String> filter = s -> true;
+ StorageIO<Dataset> datasetIO = DataAccess.getStorageIO(dataset);
+ final List<String> result = datasetIO.cleanUp(filter, true);
+ // report DataFiles whose physical files are missing from storage, and DataFiles with missing FileMetadata
+ dataset.getFiles().forEach(df -> {
+ try {
+ StorageIO<DataFile> datafileIO = df.getStorageIO();
+ String storageId = df.getStorageIdentifier();
+ FileMetadata fm = df.getFileMetadata();
+ if (!datafileIO.exists()) {
+ missingFiles.add(storageId + "," + (fm != null ?
+ (fm.getDirectoryLabel() != null && !fm.getDirectoryLabel().isEmpty() ? "directoryLabel,"+fm.getDirectoryLabel()+"," : "")
+ +"label,"+fm.getLabel() : "type,"+df.getContentType()));
+ }
+ if (fm == null) {
+ missingFileMetadata.add(storageId + ",dataFileId," + df.getId());
+ }
+ } catch (IOException e) {
+ NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder();
+ job.add("dataFileId", df.getId());
+ job.add("reason", e.getMessage());
+ jsonFailuresArrayBuilder.add(job);
+ }
+ });
+ } catch (IOException e) {
+ NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder();
+ job.add("datasetId", datasetId);
+ job.add("reason", e.getMessage());
+ jsonFailuresArrayBuilder.add(job);
+ }
+
+ JsonObjectBuilder job = Json.createObjectBuilder();
+ if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) {
+ job.add("id", dataset.getId());
+ job.add("pid", dataset.getProtocol() + ":" + dataset.getAuthority() + "/" + dataset.getIdentifier());
+ job.add("persistentURL", dataset.getPersistentURL());
+ if (!missingFileMetadata.isEmpty()) {
+ JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder();
+ missingFileMetadata.forEach(mm -> {
+ String[] missingMetadata = mm.split(",");
+ NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder()
+ .add("storageIdentifier", missingMetadata[0])
+ .add(missingMetadata[1], missingMetadata[2]);
+ jabMissingFileMetadata.add(jobj);
+ });
+ job.add("missingFileMetadata", jabMissingFileMetadata);
+ }
+ if (!missingFiles.isEmpty()) {
+ JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder();
+ missingFiles.forEach(mf -> {
+ String[] missingFile = mf.split(",");
+ NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder()
+ .add("storageIdentifier", missingFile[0]);
+ for (int i = 2; i < missingFile.length; i+=2) {
+ jobj.add(missingFile[i-1], missingFile[i]);
+ }
+ jabMissingFiles.add(jobj);
+ });
+ job.add("missingFiles", jabMissingFiles);
+ }
+ jsonDatasetsArrayBuilder.add(job);
+ }
+ }
+
+ jsonObjectBuilder.add("datasetsChecked", datasetsChecked);
+ jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder);
+ jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder);
+
+ return ok(jsonObjectBuilder);
+ }
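+
+ // Illustrative audit run (hypothetical ids; response shape sketched, not exhaustive):
+ //   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER/api/admin/datafiles/auditFiles?firstId=0&lastId=100"
+ //   -> {"status":"OK","data":{"firstId":0,"lastId":100,"datasetsChecked":25,
+ //        "datasets":[{"id":..., "pid":"doi:...", "missingFiles":[...], "missingFileMetadata":[...]}],
+ //        "failures":[...]}}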
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
index 01c51dc2b4c..907295ad848 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
@@ -126,7 +126,7 @@ public Response getByName(@PathParam("name") String name) {
String solrFieldSearchable = dsf.getSolrField().getNameSearchable();
String solrFieldFacetable = dsf.getSolrField().getNameFacetable();
String metadataBlock = dsf.getMetadataBlock().getName();
- String uri=dsf.getUri();
+ String uri = dsf.getUri();
boolean hasParent = dsf.isHasParent();
boolean allowsMultiples = dsf.isAllowMultiples();
boolean isRequired = dsf.isRequired();
@@ -243,7 +243,9 @@ public Response loadDatasetFields(File file) {
br = new BufferedReader(new FileReader("/" + file));
while ((line = br.readLine()) != null) {
lineNumber++;
- values = line.split(splitBy);
+ values = Arrays.stream(line.split(splitBy))
+ .map(String::trim)
+ .toArray(String[]::new);
if (values[0].startsWith("#")) { // Header row
switch (values[0]) {
case "#metadataBlock":
@@ -326,7 +328,7 @@ public Response loadDatasetFields(File file) {
*/
public String getGeneralErrorMessage(HeaderType header, int lineNumber, String message) {
List<String> arguments = new ArrayList<>();
- arguments.add(header.name());
+ arguments.add(header != null ? header.name() : "unknown");
arguments.add(String.valueOf(lineNumber));
arguments.add(message);
return BundleUtil.getStringFromBundle("api.admin.datasetfield.load.GeneralErrorMessage", arguments);
@@ -334,9 +336,9 @@ public String getGeneralErrorMessage(HeaderType header, int lineNumber, String m
/**
* Turn ArrayIndexOutOfBoundsException into an informative error message
- * @param lineNumber
* @param header
- * @param e
+ * @param lineNumber
+ * @param wrongIndex
* @return
*/
public String getArrayIndexOutOfBoundMessage(HeaderType header,
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 369a22fe8d7..2ec10816acc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -2171,8 +2171,32 @@ public Response getAssignments(@Context ContainerRequestContext crc, @PathParam(
@GET
@AuthRequired
+ @Deprecated(forRemoval = true, since = "2024-10-17")
@Path("{id}/privateUrl")
public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
+ return getPreviewUrlData(crc, idSupplied);
+ }
+
+ @POST
+ @AuthRequired
+ @Deprecated(forRemoval = true, since = "2024-10-17")
+ @Path("{id}/privateUrl")
+ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @DefaultValue("false") @QueryParam("anonymizedAccess") boolean anonymizedAccess) {
+ return createPreviewUrl(crc, idSupplied, anonymizedAccess);
+ }
+
+ @DELETE
+ @AuthRequired
+ @Deprecated(forRemoval = true, since = "2024-10-17")
+ @Path("{id}/privateUrl")
+ public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
+ return deletePreviewUrl(crc, idSupplied);
+ }
+
+ @GET
+ @AuthRequired
+ @Path("{id}/previewUrl")
+ public Response getPreviewUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
return response( req -> {
PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
return (privateUrl != null) ? ok(json(privateUrl))
@@ -2182,8 +2206,8 @@ public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathPar
@POST
@AuthRequired
- @Path("{id}/privateUrl")
- public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
+ @Path("{id}/previewUrl")
+ public Response createPreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
throw new NotAcceptableException("Anonymized Access not enabled");
}
@@ -2194,8 +2218,8 @@ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathPara
@DELETE
@AuthRequired
- @Path("{id}/privateUrl")
- public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
+ @Path("{id}/previewUrl")
+ public Response deletePreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
return response( req -> {
Dataset dataset = findDatasetOrDie(idSupplied);
PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
@@ -2208,6 +2232,7 @@ public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathPara
}, getRequestUser(crc));
}
+
@GET
@AuthRequired
@Path("{id}/thumbnail/candidates")
@@ -2992,6 +3017,26 @@ public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("i
}
+ @GET
+ @AuthRequired
+ @Path("{id}/versions/{versionId1}/compare/{versionId2}")
+ public Response getCompareVersions(@Context ContainerRequestContext crc, @PathParam("id") String id,
+ @PathParam("versionId1") String versionId1,
+ @PathParam("versionId2") String versionId2,
+ @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+ try {
+ DataverseRequest req = createDataverseRequest(getRequestUser(crc));
+ DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers);
+ DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers);
+ if (dsv1.getCreateTime().getTime() > dsv2.getCreateTime().getTime()) {
+ return error(BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order"));
+ }
+ return ok(DatasetVersion.compareVersions(dsv1, dsv2));
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ }
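+ // Illustrative comparison (hypothetical id/versions): versionId1 must be the older
+ // version, otherwise BAD_REQUEST is returned:
+ //   GET /api/datasets/24/versions/1.0/compare/2.0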
+
private static Set<String> getDatasetFilenames(Dataset dataset) {
Set<String> files = new HashSet<>();
for (DataFile dataFile: dataset.getFiles()) {
@@ -4833,6 +4878,33 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String
}
return ok(responseJson);
}
+
+ @GET
+ @Path("previewUrlDatasetVersion/{previewUrlToken}")
+ public Response getPreviewUrlDatasetVersion(@PathParam("previewUrlToken") String previewUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
+ PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken);
+ if (privateUrlUser == null) {
+ return notFound("Private URL user not found");
+ }
+ boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
+ String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
+ if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
+ throw new NotAcceptableException("Anonymized Access not enabled");
+ }
+ DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken);
+ if (dsv == null || dsv.getId() == null) {
+ return notFound("Dataset version not found");
+ }
+ JsonObjectBuilder responseJson;
+ if (isAnonymizedAccess) {
+ List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
+ responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
+ } else {
+ responseJson = json(dsv, null, true, returnOwners);
+ }
+ return ok(responseJson);
+ }
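+ // Illustrative anonymous access via a preview URL token (hypothetical token value):
+ //   GET /api/datasets/previewUrlDatasetVersion/a56444bc-7697-4711-8964-e0577f055fd2
+ //   -> draft version JSON, with anonymized fields handled per the AnonymizedFieldTypeNames setting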
+
@GET
@Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
@@ -4845,6 +4917,18 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken"
return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
: ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
}
+
+ @GET
+ @Path("previewUrlDatasetVersion/{previewUrlToken}/citation")
+ public Response getPreviewUrlDatasetVersionCitation(@PathParam("previewUrlToken") String previewUrlToken) {
+ PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken);
+ if (privateUrlUser == null) {
+ return notFound("Private URL user not found");
+ }
+ DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken);
+ return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
+ : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
+ }
@GET
@AuthRequired
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index f05bba8830e..f864a5a9d1c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -195,7 +195,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod
List<DatasetFieldType> facets = parseFacets(body);
AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc);
- dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO));
+ dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true));
return ok(json(dataverse));
} catch (WrappedResponse ww) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
index 257519677d3..2439c996816 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
@@ -12,12 +12,17 @@
import jakarta.ws.rs.Produces;
import org.apache.commons.io.IOUtils;
+import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.XMLExporter;
import jakarta.ejb.EJB;
import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
import jakarta.json.JsonValue;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
@@ -92,6 +97,32 @@ public Response getZipDownloadLimit() {
return ok(zipDownloadLimit);
}
+ @GET
+ @Path("exportFormats")
+ public Response getExportFormats() {
+ JsonObjectBuilder responseModel = Json.createObjectBuilder();
+ ExportService instance = ExportService.getInstance();
+ for (String[] labels : instance.getExportersLabels()) {
+ try {
+ Exporter exporter = instance.getExporter(labels[1]);
+ JsonObjectBuilder exporterObject = Json.createObjectBuilder().add("displayName", labels[0])
+ .add("mediaType", exporter.getMediaType()).add("isHarvestable", exporter.isHarvestable())
+ .add("isVisibleInUserInterface", exporter.isAvailableToUsers());
+ if (exporter instanceof XMLExporter xmlExporter) {
+ exporterObject.add("XMLNameSpace", xmlExporter.getXMLNameSpace())
+ .add("XMLSchemaLocation", xmlExporter.getXMLSchemaLocation())
+ .add("XMLSchemaVersion", xmlExporter.getXMLSchemaVersion());
+ }
+ responseModel.add(labels[1], exporterObject);
+ }
+ catch (ExportException ex){
+ logger.warning("Failed to get: " + labels[1]);
+ logger.warning(ex.getLocalizedMessage());
+ }
+ }
+ return ok(responseModel);
+ }
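+ // Illustrative response sketch (exporter names and field values vary by installation):
+ //   GET /api/info/exportFormats
+ //   -> {"status":"OK","data":{"ddi":{"displayName":"DDI","mediaType":"application/xml",
+ //        "isHarvestable":false,"isVisibleInUserInterface":true,"XMLNameSpace":"...",...},...}}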
+
private Response getSettingResponseByKey(SettingsServiceBean.Key key) {
String setting = settingsService.getValueForKey(key);
if (setting != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
index 33a11a2df23..e6519c9ff36 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
@@ -181,7 +181,7 @@ public Response delete(@PathParam("id") long doomedId, @QueryParam("unlink") boo
try {
wasDeleted = savedSearchSvc.delete(doomedId, unlink);
} catch (Exception e) {
- return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId);
+ return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId + ". Exception: " + e.getLocalizedMessage());
}
if (wasDeleted) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
index 6b9fcb38305..ba82f8f758b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
@@ -73,6 +73,7 @@ public Response search(
@QueryParam("metadata_fields") List metadataFields,
@QueryParam("geo_point") String geoPointRequested,
@QueryParam("geo_radius") String geoRadiusRequested,
+ @QueryParam("show_type_counts") boolean showTypeCounts,
@Context HttpServletResponse response
) {
@@ -175,7 +176,7 @@ public Response search(
JsonArrayBuilder itemsArrayBuilder = Json.createArrayBuilder();
List<SolrSearchResult> solrSearchResults = solrQueryResponse.getSolrSearchResults();
for (SolrSearchResult solrSearchResult : solrSearchResults) {
- itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields, getDatasetFileCount(solrSearchResult)));
+ itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields));
}
JsonObjectBuilder spelling_alternatives = Json.createObjectBuilder();
@@ -210,6 +211,15 @@ public Response search(
}
value.add("count_in_response", solrSearchResults.size());
+ if (showTypeCounts && !solrQueryResponse.getTypeFacetCategories().isEmpty()) {
+ JsonObjectBuilder objectTypeCounts = Json.createObjectBuilder();
+ for (FacetCategory facetCategory : solrQueryResponse.getTypeFacetCategories()) {
+ for (FacetLabel facetLabel : facetCategory.getFacetLabel()) {
+ objectTypeCounts.add(facetLabel.getName(), facetLabel.getCount());
+ }
+ }
+ value.add("total_count_per_object_type", objectTypeCounts);
+ }
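+ // The block above adds, illustratively (labels and counts vary per installation/query):
+ //   "total_count_per_object_type": {"Dataverses": 5, "Datasets": 12, "Files": 30}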
/**
* @todo Returning the fq might be useful as a troubleshooting aid
* but we don't want to expose the raw dataverse database ids in
@@ -229,15 +239,6 @@ public Response search(
}
}
- private Long getDatasetFileCount(SolrSearchResult solrSearchResult) {
- DvObject dvObject = solrSearchResult.getEntity();
- if (dvObject.isInstanceofDataset()) {
- DatasetVersion datasetVersion = ((Dataset) dvObject).getVersionFromId(solrSearchResult.getDatasetVersionId());
- return datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion);
- }
- return null;
- }
-
private User getUser(ContainerRequestContext crc) throws WrappedResponse {
User userToExecuteSearchAs = GuestUser.get();
try {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
index ecf7839e616..166465115c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
@@ -8,29 +8,33 @@
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.GuestUser;
import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.GetUserTracesCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.*;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
+import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.FileUtil;
+import static edu.harvard.iq.dataverse.api.auth.AuthUtil.extractBearerTokenFromHeaderParam;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
+import java.text.MessageFormat;
import java.util.Arrays;
import java.util.List;
+import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;
+
+import edu.harvard.iq.dataverse.util.json.JsonParseException;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
import jakarta.ejb.Stateless;
import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
import jakarta.json.JsonObjectBuilder;
+import jakarta.json.stream.JsonParsingException;
import jakarta.ws.rs.*;
import jakarta.ws.rs.container.ContainerRequestContext;
-import jakarta.ws.rs.core.Context;
-import jakarta.ws.rs.core.MediaType;
-import jakarta.ws.rs.core.Request;
-import jakarta.ws.rs.core.Response;
-import jakarta.ws.rs.core.Variant;
+import jakarta.ws.rs.core.*;
/**
*
@@ -266,4 +270,24 @@ public Response getTracesElement(@Context ContainerRequestContext crc, @Context
}
}
+ @POST
+ @Path("register")
+ public Response registerOIDCUser(String body) {
+ if (!FeatureFlags.API_BEARER_AUTH.enabled()) {
+ return error(Response.Status.INTERNAL_SERVER_ERROR, BundleUtil.getStringFromBundle("users.api.errors.bearerAuthFeatureFlagDisabled"));
+ }
+ Optional<String> bearerToken = extractBearerTokenFromHeaderParam(httpRequest.getHeader(HttpHeaders.AUTHORIZATION));
+ if (bearerToken.isEmpty()) {
+ return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("users.api.errors.bearerTokenRequired"));
+ }
+ try {
+ JsonObject userJson = JsonUtil.getJsonObject(body);
+ execCommand(new RegisterOIDCUserCommand(createDataverseRequest(GuestUser.get()), bearerToken.get(), jsonParser().parseUserDTO(userJson)));
+ } catch (JsonParseException | JsonParsingException e) {
+ return error(Response.Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("users.api.errors.jsonParseToUserDTO"), e.getMessage()));
+ } catch (WrappedResponse e) {
+ return e.getResponse();
+ }
+ return ok(BundleUtil.getStringFromBundle("users.api.userRegistered"));
+ }
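+ // Illustrative registration call (hypothetical token and field values; fields mirror UserDTO):
+ //   curl -X POST -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" \
+ //     -d '{"termsAccepted":true,"username":"jdoe","firstName":"Jane","lastName":"Doe","emailAddress":"jdoe@example.org"}' \
+ //     $SERVER/api/users/register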
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
index 0dd8a28baca..fbb0b484b58 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
@@ -9,6 +9,7 @@
import jakarta.inject.Inject;
import jakarta.ws.rs.container.ContainerRequestContext;
+
import java.util.logging.Logger;
/**
@@ -49,7 +50,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext)
authUser = userSvc.updateLastApiUseTime(authUser);
return authUser;
}
- throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY);
+ throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY);
}
private String getRequestApiKey(ContainerRequestContext containerRequestContext) {
@@ -59,7 +60,7 @@ private String getRequestApiKey(ContainerRequestContext containerRequestContext)
return headerParamApiKey != null ? headerParamApiKey : queryParamApiKey;
}
- private void checkAnonymizedAccessToRequestPath(String requestPath, PrivateUrlUser privateUrlUser) throws WrappedAuthErrorResponse {
+ private void checkAnonymizedAccessToRequestPath(String requestPath, PrivateUrlUser privateUrlUser) throws WrappedUnauthorizedAuthErrorResponse {
if (!privateUrlUser.hasAnonymizedAccess()) {
return;
}
@@ -67,7 +68,7 @@ private void checkAnonymizedAccessToRequestPath(String requestPath, PrivateUrlUs
// to download the file or image thumbs
if (!(requestPath.startsWith(ACCESS_DATAFILE_PATH_PREFIX) && !requestPath.substring(ACCESS_DATAFILE_PATH_PREFIX.length()).contains("/"))) {
logger.info("Anonymized access request for " + requestPath);
- throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY);
+ throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY);
}
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthUtil.java
new file mode 100644
index 00000000000..36cd7c7f1df
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthUtil.java
@@ -0,0 +1,24 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import java.util.Optional;
+
+public class AuthUtil {
+
+ private static final String BEARER_AUTH_SCHEME = "Bearer";
+
+ /**
+ * Extracts the Bearer token from the provided HTTP Authorization header value.
+ *
+ * Validates that the header value starts with the "Bearer" scheme as defined in RFC 6750.
+ * If the header is null, empty, or does not start with "Bearer ", an empty {@link Optional} is returned.
+ *
+ * @param headerParamBearerToken the raw HTTP Authorization header value containing the Bearer token
+ * @return An {@link Optional} containing the raw Bearer token if present and valid; otherwise, an empty {@link Optional}
+ */
+ public static Optional<String> extractBearerTokenFromHeaderParam(String headerParamBearerToken) {
+ if (headerParamBearerToken != null && headerParamBearerToken.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) {
+ return Optional.of(headerParamBearerToken);
+ }
+ return Optional.empty();
+ }
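+
+ // Note: the full header value is returned, scheme included, e.g. (illustrative values):
+ //   extractBearerTokenFromHeaderParam("Bearer eyJhbGciOi...") -> Optional.of("Bearer eyJhbGciOi...")
+ //   extractBearerTokenFromHeaderParam("Basic dXNlcjpwdw==")   -> Optional.empty()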
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
index 31f524af3f0..3ee9bb909f2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
@@ -1,124 +1,65 @@
package edu.harvard.iq.dataverse.api.auth;
-import com.nimbusds.oauth2.sdk.ParseException;
-import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
import edu.harvard.iq.dataverse.UserServiceBean;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
-import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider;
+import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.settings.FeatureFlags;
+import edu.harvard.iq.dataverse.util.BundleUtil;
import jakarta.inject.Inject;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.core.HttpHeaders;
-import java.io.IOException;
-import java.util.List;
+
import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;
-import java.util.stream.Collectors;
+
+import static edu.harvard.iq.dataverse.api.auth.AuthUtil.extractBearerTokenFromHeaderParam;
public class BearerTokenAuthMechanism implements AuthMechanism {
- private static final String BEARER_AUTH_SCHEME = "Bearer";
private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName());
-
- public static final String UNAUTHORIZED_BEARER_TOKEN = "Unauthorized bearer token";
- public static final String INVALID_BEARER_TOKEN = "Could not parse bearer token";
- public static final String BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED = "Bearer token detected, no OIDC provider configured";
@Inject
protected AuthenticationServiceBean authSvc;
@Inject
protected UserServiceBean userSvc;
-
+
@Override
public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse {
- if (FeatureFlags.API_BEARER_AUTH.enabled()) {
- Optional<String> bearerToken = getRequestApiKey(containerRequestContext);
- // No Bearer Token present, hence no user can be authenticated
- if (bearerToken.isEmpty()) {
- return null;
- }
-
- // Validate and verify provided Bearer Token, and retrieve UserRecordIdentifier
- // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token. Tokens in the cache should be removed after some (configurable) time.
- UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIdentifier(bearerToken.get());
+ if (!FeatureFlags.API_BEARER_AUTH.enabled()) {
+ return null;
+ }
- // retrieve Authenticated User from AuthService
- AuthenticatedUser authUser = authSvc.lookupUser(userInfo);
- if (authUser != null) {
- // track the API usage
- authUser = userSvc.updateLastApiUseTime(authUser);
- return authUser;
- } else {
- // a valid Token was presented, but we have no associated user account.
- logger.log(Level.WARNING, "Bearer token detected, OIDC provider {0} validated Token but no linked UserAccount", userInfo.getUserRepoId());
- // TODO: Instead of returning null, we should throw a meaningful error to the client.
- // Probably this will be a wrapped auth error response with an error code and a string describing the problem.
- return null;
- }
+ Optional<String> bearerToken = getRequestBearerToken(containerRequestContext);
+ if (bearerToken.isEmpty()) {
+ return null;
}
- return null;
- }
- /**
- * Verifies the given Bearer token and obtain information about the corresponding user within respective AuthProvider.
- *
- * @param token The string containing the encoded JWT
- * @return
- */
- private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String token) throws WrappedAuthErrorResponse {
+ AuthenticatedUser authUser;
try {
- BearerAccessToken accessToken = BearerAccessToken.parse(token);
- // Get list of all authentication providers using Open ID Connect
- // @TASK: Limited to OIDCAuthProviders, could be widened to OAuth2Providers.
- List<OIDCAuthProvider> providers = authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class).stream()
- .map(providerId -> (OIDCAuthProvider) authSvc.getAuthenticationProvider(providerId))
- .collect(Collectors.toUnmodifiableList());
- // If not OIDC Provider are configured we cannot validate a Token
- if(providers.isEmpty()){
- logger.log(Level.WARNING, "Bearer token detected, no OIDC provider configured");
- throw new WrappedAuthErrorResponse(BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED);
- }
+ authUser = authSvc.lookupUserByOIDCBearerToken(bearerToken.get());
+ } catch (AuthorizationException e) {
+ logger.log(Level.WARNING, "Authorization failed: {0}", e.getMessage());
+ throw new WrappedUnauthorizedAuthErrorResponse(e.getMessage());
+ }
- // Iterate over all OIDC providers if multiple. Sadly needed as do not know which provided the Token.
- for (OIDCAuthProvider provider : providers) {
- try {
- // The OIDCAuthProvider need to verify a Bearer Token and equip the client means to identify the corresponding AuthenticatedUser.
- Optional<UserRecordIdentifier> userInfo = provider.getUserIdentifier(accessToken);
- if(userInfo.isPresent()) {
- logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided identifier", provider.getId());
- return userInfo.get();
- }
- } catch (IOException e) {
- // TODO: Just logging this is not sufficient - if there is an IO error with the one provider
- // which would have validated successfully, this is not the users fault. We need to
- // take note and refer to that later when occurred.
- logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e);
- }
- }
- } catch (ParseException e) {
- logger.log(Level.FINE, "Bearer token detected, unable to parse bearer token (invalid Token)", e);
- throw new WrappedAuthErrorResponse(INVALID_BEARER_TOKEN);
+ if (authUser == null) {
+ logger.log(Level.WARNING, "Bearer token detected, OIDC provider validated the token but no linked UserAccount");
+ throw new WrappedForbiddenAuthErrorResponse(BundleUtil.getStringFromBundle("bearerTokenAuthMechanism.errors.tokenValidatedButNoRegisteredUser"));
}
- // No UserInfo returned means we have an invalid access token.
- logger.log(Level.FINE, "Bearer token detected, yet no configured OIDC provider validated it.");
- throw new WrappedAuthErrorResponse(UNAUTHORIZED_BEARER_TOKEN);
+ return userSvc.updateLastApiUseTime(authUser);
}
/**
* Retrieve the raw, encoded token value from the Authorization Bearer HTTP header as defined in RFC 6750
+ *
* @return An {@link Optional} either empty if not present or the raw token from the header
*/
- private Optional<String> getRequestApiKey(ContainerRequestContext containerRequestContext) {
- String headerParamApiKey = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION);
- if (headerParamApiKey != null && headerParamApiKey.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) {
- return Optional.of(headerParamApiKey);
- } else {
- return Optional.empty();
- }
+ public static Optional<String> getRequestBearerToken(ContainerRequestContext containerRequestContext) {
+ String headerParamBearerToken = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION);
+ return extractBearerTokenFromHeaderParam(headerParamBearerToken);
}
-}
\ No newline at end of file
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
index 801e2752b9e..e5be5144897 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
@@ -5,6 +5,7 @@
import jakarta.inject.Inject;
import jakarta.ws.rs.container.ContainerRequestContext;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -19,9 +20,9 @@ public class CompoundAuthMechanism implements AuthMechanism {
private final List<AuthMechanism> authMechanisms = new ArrayList<>();
@Inject
- public CompoundAuthMechanism(ApiKeyAuthMechanism apiKeyAuthMechanism, WorkflowKeyAuthMechanism workflowKeyAuthMechanism, SignedUrlAuthMechanism signedUrlAuthMechanism, SessionCookieAuthMechanism sessionCookieAuthMechanism, BearerTokenAuthMechanism bearerTokenAuthMechanism) {
+ public CompoundAuthMechanism(ApiKeyAuthMechanism apiKeyAuthMechanism, WorkflowKeyAuthMechanism workflowKeyAuthMechanism, SignedUrlAuthMechanism signedUrlAuthMechanism, BearerTokenAuthMechanism bearerTokenAuthMechanism, SessionCookieAuthMechanism sessionCookieAuthMechanism) {
// Auth mechanisms should be ordered by priority here
- add(apiKeyAuthMechanism, workflowKeyAuthMechanism, signedUrlAuthMechanism, sessionCookieAuthMechanism,bearerTokenAuthMechanism);
+ add(apiKeyAuthMechanism, workflowKeyAuthMechanism, signedUrlAuthMechanism, bearerTokenAuthMechanism, sessionCookieAuthMechanism);
}
public CompoundAuthMechanism(AuthMechanism... authMechanisms) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
index 258661f6495..30e8a3b9ca4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
@@ -43,7 +43,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext)
if (user != null) {
return user;
}
- throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_SIGNED_URL);
+ throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_SIGNED_URL);
}
private String getSignedUrlRequestParameter(ContainerRequestContext containerRequestContext) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
index bbd67713e85..df54b69af96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
@@ -30,7 +30,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext)
if (authUser != null) {
return authUser;
}
- throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY);
+ throw new WrappedUnauthorizedAuthErrorResponse(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY);
}
private String getRequestWorkflowKey(ContainerRequestContext containerRequestContext) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
index 40431557261..da92d882197 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
@@ -6,18 +6,24 @@
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
-public class WrappedAuthErrorResponse extends Exception {
+public abstract class WrappedAuthErrorResponse extends Exception {
private final String message;
private final Response response;
- public WrappedAuthErrorResponse(String message) {
+ public WrappedAuthErrorResponse(Response.Status status, String message) {
this.message = message;
- this.response = Response.status(Response.Status.UNAUTHORIZED)
+ this.response = createErrorResponse(status, message);
+ }
+
+ protected Response createErrorResponse(Response.Status status, String message) {
+ return Response.status(status)
.entity(NullSafeJsonBuilder.jsonObjectBuilder()
.add("status", ApiConstants.STATUS_ERROR)
.add("message", message).build()
- ).type(MediaType.APPLICATION_JSON_TYPE).build();
+ )
+ .type(MediaType.APPLICATION_JSON_TYPE)
+ .build();
}
public String getMessage() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedForbiddenAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedForbiddenAuthErrorResponse.java
new file mode 100644
index 00000000000..082ed3ca8d8
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedForbiddenAuthErrorResponse.java
@@ -0,0 +1,10 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import jakarta.ws.rs.core.Response;
+
+public class WrappedForbiddenAuthErrorResponse extends WrappedAuthErrorResponse {
+
+ public WrappedForbiddenAuthErrorResponse(String message) {
+ super(Response.Status.FORBIDDEN, message);
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedUnauthorizedAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedUnauthorizedAuthErrorResponse.java
new file mode 100644
index 00000000000..1d2eb8f8bd8
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedUnauthorizedAuthErrorResponse.java
@@ -0,0 +1,10 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import jakarta.ws.rs.core.Response;
+
+public class WrappedUnauthorizedAuthErrorResponse extends WrappedAuthErrorResponse {
+
+ public WrappedUnauthorizedAuthErrorResponse(String message) {
+ super(Response.Status.UNAUTHORIZED, message);
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/UserDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/UserDTO.java
new file mode 100644
index 00000000000..df1920c4d25
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/UserDTO.java
@@ -0,0 +1,67 @@
+package edu.harvard.iq.dataverse.api.dto;
+
+public class UserDTO {
+ private String username;
+ private String firstName;
+ private String lastName;
+ private String emailAddress;
+ private String affiliation;
+ private String position;
+ private boolean termsAccepted;
+
+ public String getUsername() {
+ return username;
+ }
+
+ public void setUsername(String username) {
+ this.username = username;
+ }
+
+ public String getFirstName() {
+ return firstName;
+ }
+
+ public void setFirstName(String firstName) {
+ this.firstName = firstName;
+ }
+
+ public String getLastName() {
+ return lastName;
+ }
+
+ public void setLastName(String lastName) {
+ this.lastName = lastName;
+ }
+
+ public String getEmailAddress() {
+ return emailAddress;
+ }
+
+ public void setEmailAddress(String emailAddress) {
+ this.emailAddress = emailAddress;
+ }
+
+ public String getAffiliation() {
+ return affiliation;
+ }
+
+ public void setAffiliation(String affiliation) {
+ this.affiliation = affiliation;
+ }
+
+ public String getPosition() {
+ return position;
+ }
+
+ public void setPosition(String position) {
+ this.position = position;
+ }
+
+ public boolean isTermsAccepted() {
+ return termsAccepted;
+ }
+
+ public void setTermsAccepted(boolean termsAccepted) {
+ this.termsAccepted = termsAccepted;
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
index 35d35316f73..31941d3c8c0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
@@ -5,13 +5,21 @@
import edu.harvard.iq.dataverse.DatasetFieldType;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
-import edu.harvard.iq.dataverse.api.dto.*;
+import edu.harvard.iq.dataverse.api.dto.LicenseDTO;
import edu.harvard.iq.dataverse.api.dto.FieldDTO;
import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
+import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
+import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
+import edu.harvard.iq.dataverse.api.dto.FileMetadataDTO;
+import edu.harvard.iq.dataverse.api.dto.DataFileDTO;
+import edu.harvard.iq.dataverse.api.dto.DataTableDTO;
+
import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType;
import static edu.harvard.iq.dataverse.export.ddi.DdiExportUtil.NOTE_TYPE_CONTENTTYPE;
import static edu.harvard.iq.dataverse.export.ddi.DdiExportUtil.NOTE_TYPE_TERMS_OF_ACCESS;
+import edu.harvard.iq.dataverse.license.License;
+import edu.harvard.iq.dataverse.license.LicenseServiceBean;
import edu.harvard.iq.dataverse.util.StringUtil;
import java.io.File;
import java.io.FileInputStream;
@@ -32,6 +40,9 @@
import org.apache.commons.lang3.StringUtils;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
*
* @author ellenk
@@ -103,6 +114,8 @@ public class ImportDDIServiceBean {
@EJB DatasetFieldServiceBean datasetFieldService;
@EJB ImportGenericServiceBean importGenericService;
+
+ @EJB LicenseServiceBean licenseService;
// TODO: stop passing the xml source as a string; (it could be huge!) -- L.A. 4.5
@@ -1180,7 +1193,24 @@ private void processDataAccs(XMLStreamReader xmlr, DatasetVersionDTO dvDTO) thro
String noteType = xmlr.getAttributeValue(null, "type");
if (NOTE_TYPE_TERMS_OF_USE.equalsIgnoreCase(noteType) ) {
if ( LEVEL_DV.equalsIgnoreCase(xmlr.getAttributeValue(null, "level"))) {
- dvDTO.setTermsOfUse(parseText(xmlr, "notes"));
+ String termsOfUseStr = parseText(xmlr, "notes").trim();
+ Pattern pattern = Pattern.compile("<a\\s+href=\"(.*?)\">(.*?)</a>", Pattern.CASE_INSENSITIVE);
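+ // e.g. a note like: <a href="https://creativecommons.org/licenses/by/4.0">CC BY 4.0</a>
+ //      -> group(1) = license URI, group(2) = license name (illustrative sample)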
+ Matcher matcher = pattern.matcher(termsOfUseStr);
+ boolean matchFound = matcher.find();
+ if (matchFound) {
+ String uri = matcher.group(1);
+ String license = matcher.group(2);
+ License lic = licenseService.getByNameOrUri(license);
+ if (lic != null) {
+ LicenseDTO licenseDTO = new LicenseDTO();
+ licenseDTO.setName(license);
+ licenseDTO.setUri(uri);
+ dvDTO.setLicense(licenseDTO);
+ }
+
+ } else {
+ dvDTO.setTermsOfUse(termsOfUseStr);
+ }
}
} else if (NOTE_TYPE_TERMS_OF_ACCESS.equalsIgnoreCase(noteType) ) {
if (LEVEL_DV.equalsIgnoreCase(xmlr.getAttributeValue(null, "level"))) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
index 0f7587671e7..aa5b25e3967 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
@@ -156,10 +156,10 @@ public DatasetDTO processXML( XMLStreamReader xmlr, ForeignMetadataFormatMapping
// Note that arbitrary formatting tags are supported for the outer xml
// wrapper. -- L.A. 4.5
public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException {
- return processOAIDCxml(DcXmlToParse, null);
+ return processOAIDCxml(DcXmlToParse, null, false);
}
- public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) throws XMLStreamException {
+ public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier, boolean preferSuppliedIdentifier) throws XMLStreamException {
// look up DC metadata mapping:
ForeignMetadataFormatMapping dublinCoreMapping = findFormatMappingByName(DCTERMS);
@@ -208,7 +208,7 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) throws XMLStreamException {
// can parse and recognize as the global id for the imported dataset
// (note that this is the default behavior during harvesting),
// so we need to reassign it accordingly:
- String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion());
+ String identifier = selectIdentifier(datasetDTO.getDatasetVersion(), oaiIdentifier, preferSuppliedIdentifier);
logger.fine("Imported identifier: " + identifier);
globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO);
@@ -228,8 +228,17 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) thr
private void processXMLElement(XMLStreamReader xmlr, String currentPath, String openingTag, ForeignMetadataFormatMapping foreignFormatMapping, DatasetDTO datasetDTO) throws XMLStreamException {
logger.fine("entering processXMLElement; ("+currentPath+")");
-
- for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
+
+ while (xmlr.hasNext()) {
+
+ int event;
+ try {
+ event = xmlr.next();
+ } catch (XMLStreamException ex) {
+ logger.warning("Error occurred in the XML parsing : " + ex.getMessage());
+ continue; // Skip Undeclared namespace prefix and Unexpected close tag related to com.ctc.wstx.exc.WstxParsingException
+ }
+
if (event == XMLStreamConstants.START_ELEMENT) {
String currentElement = xmlr.getLocalName();
@@ -358,8 +367,20 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, String fieldName) {
return value;
}
- private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) {
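+ /**
+ * Convenience overload: the supplied identifier, if any, is only used as
+ * the last-resort pid candidate.
+ */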
+ public String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier) {
+ return selectIdentifier(datasetVersionDTO, suppliedIdentifier, false);
+ }
+
+ private String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier, boolean preferSuppliedIdentifier) {
List<String> otherIds = new ArrayList<>();
+
+ if (suppliedIdentifier != null && preferSuppliedIdentifier) {
+ // This supplied identifier (in practice, this is likely the OAI-PMH
+ // identifier from the record <header> section) will be our first
+ // choice candidate for the pid of the imported dataset:
+ otherIds.add(suppliedIdentifier);
+ }
+
for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
String key = entry.getKey();
MetadataBlockDTO value = entry.getValue();
@@ -377,6 +398,16 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) {
}
}
}
+
+ if (suppliedIdentifier != null && !preferSuppliedIdentifier) {
+ // Unless specifically instructed to prefer this extra identifier
+ // (in practice, this is likely the OAI-PMH identifier from the
+ // record <header> section), we will try to use it as the *last*
+ // possible candidate for the pid, so, adding it to the end of the
+ // list:
+ otherIds.add(suppliedIdentifier);
+ }
+
if (!otherIds.isEmpty()) {
// We prefer doi or hdl identifiers like "doi:10.7910/DVN/1HE30F"
for (String otherId : otherIds) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
index d0a0629e1ae..7dc2aed799e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
@@ -250,11 +250,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest,
} else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
logger.fine("importing DC "+metadataFile.getAbsolutePath());
try {
- String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
- String suggestedIdentifier = harvestingClient.isUseOaiIdentifiersAsPids()
- ? harvestIdentifier
- : null;
- dsDTO = importGenericService.processOAIDCxml(xmlToParse, suggestedIdentifier);
+ String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
+ dsDTO = importGenericService.processOAIDCxml(xmlToParse, harvestIdentifier, harvestingClient.isUseOaiIdentifiersAsPids());
} catch (IOException | XMLStreamException e) {
throw new ImportException("Failed to process Dublin Core XML record: "+ e.getClass() + " (" + e.getMessage() + ")");
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
index 4a8fb123fd4..032c1dd5164 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
@@ -1,11 +1,18 @@
package edu.harvard.iq.dataverse.authorization;
+import com.nimbusds.oauth2.sdk.ParseException;
+import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
+import com.nimbusds.openid.connect.sdk.claims.UserInfo;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
import edu.harvard.iq.dataverse.DvObjectServiceBean;
import edu.harvard.iq.dataverse.GuestbookResponseServiceBean;
import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
import edu.harvard.iq.dataverse.UserServiceBean;
+import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
@@ -34,21 +41,14 @@
import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
import edu.harvard.iq.dataverse.workflow.PendingWorkflowInvocation;
import edu.harvard.iq.dataverse.workflows.WorkflowComment;
+
+import java.io.IOException;
import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
+import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
-import jakarta.annotation.PostConstruct;
+
import jakarta.ejb.EJB;
import jakarta.ejb.EJBException;
import jakarta.ejb.Stateless;
@@ -126,9 +126,8 @@ public class AuthenticationServiceBean {
PrivateUrlServiceBean privateUrlService;
@PersistenceContext(unitName = "VDCNet-ejbPU")
- private EntityManager em;
-
-
+ EntityManager em;
+
public AbstractOAuth2AuthenticationProvider getOAuth2Provider( String id ) {
return authProvidersRegistrationService.getOAuth2AuthProvidersMap().get(id);
}
@@ -978,4 +977,70 @@ public ApiToken getValidApiTokenForUser(User user) {
}
return apiToken;
}
+
+ /**
+ * Looks up an authenticated user based on the provided OIDC bearer token.
+ *
+ * @param bearerToken The OIDC bearer token.
+ * @return An instance of {@link AuthenticatedUser} representing the authenticated user.
+ * @throws AuthorizationException If the token is invalid or no OIDC provider is configured.
+ */
+ public AuthenticatedUser lookupUserByOIDCBearerToken(String bearerToken) throws AuthorizationException {
+ // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token.
+ // Tokens in the cache should be removed after some (configurable) time.
+ OAuth2UserRecord oAuth2UserRecord = verifyOIDCBearerTokenAndGetOAuth2UserRecord(bearerToken);
+ return lookupUser(oAuth2UserRecord.getUserRecordIdentifier());
+ }
+
+ /**
+ * Verifies the given OIDC bearer token and retrieves the corresponding OAuth2UserRecord.
+ *
+ * @param bearerToken The OIDC bearer token.
+ * @return An {@link OAuth2UserRecord} containing the user's info.
+ * @throws AuthorizationException If the token is invalid or if no OIDC providers are available.
+ */
+ public OAuth2UserRecord verifyOIDCBearerTokenAndGetOAuth2UserRecord(String bearerToken) throws AuthorizationException {
+ try {
+ BearerAccessToken accessToken = BearerAccessToken.parse(bearerToken);
+ List<OIDCAuthProvider> providers = getAvailableOidcProviders();
+
+ // Ensure at least one OIDC provider is configured to validate the token.
+ if (providers.isEmpty()) {
+ logger.log(Level.WARNING, "Bearer token detected, no OIDC provider configured");
+ throw new AuthorizationException(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.bearerTokenDetectedNoOIDCProviderConfigured"));
+ }
+
+ // Attempt to validate the token with each configured OIDC provider.
+ for (OIDCAuthProvider provider : providers) {
+ try {
+ // Retrieve OAuth2UserRecord if UserInfo is present
+ Optional<UserInfo> userInfo = provider.getUserInfo(accessToken);
+ if (userInfo.isPresent()) {
+ logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided user info", provider.getId());
+ return provider.getUserRecord(userInfo.get());
+ }
+ } catch (IOException | OAuth2Exception e) {
+ logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e);
+ }
+ }
+ } catch (ParseException e) {
+ logger.log(Level.FINE, "Bearer token detected, unable to parse bearer token (invalid Token)", e);
+ throw new AuthorizationException(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.invalidBearerToken"));
+ }
+
+ // If no provider validated the token, throw an authorization exception.
+ logger.log(Level.FINE, "Bearer token detected, yet no configured OIDC provider validated it.");
+ throw new AuthorizationException(BundleUtil.getStringFromBundle("authenticationServiceBean.errors.unauthorizedBearerToken"));
+ }
+
+ /**
+ * Retrieves a list of configured OIDC authentication providers.
+ *
+ * @return A list of available OIDCAuthProviders.
+ */
+ private List<OIDCAuthProvider> getAvailableOidcProviders() {
+ return getAuthenticationProviderIdsOfType(OIDCAuthProvider.class).stream()
+ .map(providerId -> (OIDCAuthProvider) getAuthenticationProvider(providerId))
+ .toList();
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
index 5eb2b391eb7..f396ebf6487 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
@@ -242,7 +242,7 @@ public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl)
* @param userInfo
* @return the usable user record for processing in {@link edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2LoginBackingBean}
*/
- OAuth2UserRecord getUserRecord(UserInfo userInfo) {
+ public OAuth2UserRecord getUserRecord(UserInfo userInfo) {
return new OAuth2UserRecord(
this.getId(),
userInfo.getSubject().getValue(),
@@ -291,7 +291,7 @@ Optional<BearerAccessToken> getAccessToken(AuthorizationGrant grant) throws IOException
* Retrieve User Info from provider. Encapsulate for testing.
* @param accessToken The access token to enable reading data from userinfo endpoint
*/
- Optional<UserInfo> getUserInfo(BearerAccessToken accessToken) throws IOException, OAuth2Exception {
+ public Optional<UserInfo> getUserInfo(BearerAccessToken accessToken) throws IOException, OAuth2Exception {
// Retrieve data
HTTPResponse response = new UserInfoRequest(this.idpMetadata.getUserInfoEndpointURI(), accessToken)
.toHTTPRequest()
@@ -316,44 +316,4 @@ Optional<UserInfo> getUserInfo(BearerAccessToken accessToken) throws IOException
throw new OAuth2Exception(-1, ex.getMessage(), BundleUtil.getStringFromBundle("auth.providers.exception.userinfo", Arrays.asList(this.getTitle())));
}
}
-
- /**
- * Trades an access token for an {@link UserRecordIdentifier} (if valid).
- *
- * @apiNote The resulting {@link UserRecordIdentifier} may be used with
- * {@link edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean#lookupUser(UserRecordIdentifier)}
- * to look up an {@link edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser} from the database.
- * @see edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism
- *
- * @param accessToken The token to use when requesting user information from the provider
- * @return Returns an {@link UserRecordIdentifier} for a valid access token or an empty {@link Optional}.
- * @throws IOException In case communication with the endpoint fails to succeed for an I/O reason
- */
- public Optional<UserRecordIdentifier> getUserIdentifier(BearerAccessToken accessToken) throws IOException {
- OAuth2UserRecord userRecord;
- try {
- // Try to retrieve with given token (throws if invalid token)
- Optional<UserInfo> userInfo = getUserInfo(accessToken);
-
- if (userInfo.isPresent()) {
- // Take this detour to avoid code duplication and potentially hard to track conversion errors.
- userRecord = getUserRecord(userInfo.get());
- } else {
- // This should not happen - an error at the provider side will lead to an exception.
- logger.log(Level.WARNING,
- "User info retrieval from {0} returned empty optional but expected exception for token {1}.",
- List.of(getId(), accessToken).toArray()
- );
- return Optional.empty();
- }
- } catch (OAuth2Exception e) {
- logger.log(Level.FINE,
- "Could not retrieve user info with token {0} at provider {1}: {2}",
- List.of(accessToken, getId(), e.getMessage()).toArray());
- logger.log(Level.FINER, "Retrieval failed, details as follows: ", e);
- return Optional.empty();
- }
-
- return Optional.of(userRecord.getUserRecordIdentifier());
- }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
index 3bf2107e52b..d0da66c38e0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
@@ -215,7 +215,7 @@ public long retrieveSizeFromMedia() {
JsonArray dataArray = responseJson.getJsonArray("DATA");
if (dataArray != null && dataArray.size() != 0) {
//File found
- return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getJsonNumber("size").longValueExact();
+ return (long) dataArray.getJsonObject(0).getJsonNumber("size").longValueExact();
}
} else {
logger.warning("Response from " + get.getURI().toString() + " was "
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
index d2fdec7b323..5b9e496281f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
@@ -753,6 +753,12 @@ public Path getFileSystemPath() throws UnsupportedDataAccessOperationException {
@Override
public boolean exists() {
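+ // Resolve the main file key first; if it cannot be determined, treat
+ // the object as non-existent rather than failing outright: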
+ try {
+ key = getMainFileKey();
+ } catch (IOException e) {
+ logger.warning("Caught an IOException in S3AccessIO.exists(): " + e.getMessage());
+ return false;
+ }
String destinationKey = null;
if (dvObject instanceof DataFile) {
destinationKey = key;
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index a470f08f736..6b98848021c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -136,8 +136,6 @@ public class AddReplaceFileHelper{
private String newFileName; // step 30
private String newFileContentType; // step 30
private String newStorageIdentifier; // step 30
- private String newCheckSum; // step 30
- private ChecksumType newCheckSumType; //step 30
// -- Optional
private DataFile fileToReplace; // step 25
@@ -146,6 +144,7 @@ public class AddReplaceFileHelper{
private DatasetVersion clone;
List<DataFile> initialFileList;
List<DataFile> finalFileList;
+ private boolean trustSuppliedFileSizes;
// -----------------------------------
// Ingested files
@@ -610,15 +609,9 @@ private boolean runAddReplacePhase1(Dataset owner,
return false;
}
- if(optionalFileParams != null) {
- if(optionalFileParams.hasCheckSum()) {
- newCheckSum = optionalFileParams.getCheckSum();
- newCheckSumType = optionalFileParams.getCheckSumType();
- }
- }
msgt("step_030_createNewFilesViaIngest");
- if (!this.step_030_createNewFilesViaIngest()){
+ if (!this.step_030_createNewFilesViaIngest(optionalFileParams)){
return false;
}
@@ -1191,7 +1184,7 @@ private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile
}
- private boolean step_030_createNewFilesViaIngest(){
+ private boolean step_030_createNewFilesViaIngest(OptionalFileParams optionalFileParams){
if (this.hasError()){
return false;
@@ -1203,21 +1196,28 @@ private boolean step_030_createNewFilesViaIngest(){
//Don't repeatedly update the clone (losing changes) in multifile case
clone = workingVersion.cloneDatasetVersion();
}
+
+ Long suppliedFileSize = null;
+ String newCheckSum = null;
+ ChecksumType newCheckSumType = null;
+
+
+ if (optionalFileParams != null) {
+ if (optionalFileParams.hasCheckSum()) {
+ newCheckSum = optionalFileParams.getCheckSum();
+ newCheckSumType = optionalFileParams.getCheckSumType();
+ }
+ if (trustSuppliedFileSizes && optionalFileParams.hasFileSize()) {
+ suppliedFileSize = optionalFileParams.getFileSize();
+ }
+ }
+
try {
- /*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion,
- this.newFileInputStream,
- this.newFileName,
- this.newFileContentType,
- this.newStorageIdentifier,
- this.newCheckSum,
- this.newCheckSumType,
- this.systemConfig);*/
-
UploadSessionQuotaLimit quota = null;
if (systemConfig.isStorageQuotasEnforced()) {
quota = fileService.getUploadSessionQuotaLimit(dataset);
}
- Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType);
+ Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, suppliedFileSize);
CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
initialFileList = createDataFilesResult.getDataFiles();
@@ -2033,9 +2033,15 @@ public void setDuplicateFileWarning(String duplicateFileWarning) {
* @param jsonData - an array of jsonData entries (one per file) using the single add file jsonData format
* @param dataset
* @param authUser
+ * @param trustSuppliedFileSizes - whether to accept the fileSize values passed
+ * in jsonData (we don't want to trust the users of the S3 direct
+ * upload API with that information - we will verify the status of
+ * the files in the S3 bucket and confirm the sizes in the process;
+ * we do want GlobusService to be able to pass the file sizes, since
+ * they are obtained and verified via a Globus API lookup).
* @return
*/
- public Response addFiles(String jsonData, Dataset dataset, User authUser) {
+ public Response addFiles(String jsonData, Dataset dataset, User authUser, boolean trustSuppliedFileSizes) {
msgt("(addFilesToDataset) jsonData: " + jsonData.toString());
JsonArrayBuilder jarr = Json.createArrayBuilder();
@@ -2044,6 +2050,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) {
int totalNumberofFiles = 0;
int successNumberofFiles = 0;
+ this.trustSuppliedFileSizes = trustSuppliedFileSizes;
// -----------------------------------------------------------
// Read jsonData and Parse files information from jsondata :
// -----------------------------------------------------------
@@ -2176,6 +2183,10 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) {
.add("data", Json.createObjectBuilder().add("Files", jarr).add("Result", result)).build() ).build();
}
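+ /**
+ * Convenience overload: any fileSize values supplied in jsonData are not
+ * trusted and will be verified against the actual files in storage.
+ */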
+ public Response addFiles(String jsonData, Dataset dataset, User authUser) {
+ return addFiles(jsonData, dataset, authUser, false);
+ }
+
/**
* Replace multiple files with prepositioned replacements as listed in the
* jsonData. Works with direct upload, Globus, and other out-of-band methods.
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 959dbc4e262..54844160163 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -39,6 +39,12 @@
* - Provenance related information
*
* @author rmp553
+ * @todo (?) We may want to consider renaming this class to DataFileParams or
+ * DataFileInfo... it was originally created to encode some bits of info -
+ * the file "tags" specifically, that didn't fit in elsewhere in the normal
+ * workflow; but it's been expanded to cover pretty much everything else associated
+ * with DataFiles and it's not really "optional" anymore when, for example, used
+ * in the direct upload workflow. (?)
*/
public class OptionalFileParams {
@@ -76,6 +82,8 @@ public class OptionalFileParams {
public static final String MIME_TYPE_ATTR_NAME = "mimeType";
private String checkSumValue;
private ChecksumType checkSumType;
+ public static final String FILE_SIZE_ATTR_NAME = "fileSize";
+ private Long fileSize;
public static final String LEGACY_CHECKSUM_ATTR_NAME = "md5Hash";
public static final String CHECKSUM_OBJECT_NAME = "checksum";
public static final String CHECKSUM_OBJECT_TYPE = "@type";
@@ -268,6 +276,18 @@ public String getCheckSum() {
public ChecksumType getCheckSumType() {
return checkSumType;
}
+
+ public boolean hasFileSize() {
+ return fileSize != null;
+ }
+
+ public Long getFileSize() {
+ return fileSize;
+ }
+
+ public void setFileSize(long fileSize) {
+ this.fileSize = fileSize;
+ }
/**
* Set tags
@@ -416,7 +436,13 @@ else if ((jsonObj.has(CHECKSUM_OBJECT_NAME)) && (!jsonObj.get(CHECKSUM_OBJECT_NAME).isJsonNull())){
this.checkSumType = ChecksumType.fromString(((JsonObject) jsonObj.get(CHECKSUM_OBJECT_NAME)).get(CHECKSUM_OBJECT_TYPE).getAsString());
}
-
+ // -------------------------------
+ // get file size as a Long, if supplied
+ // -------------------------------
+ if ((jsonObj.has(FILE_SIZE_ATTR_NAME)) && (!jsonObj.get(FILE_SIZE_ATTR_NAME).isJsonNull())){
+
+ this.fileSize = jsonObj.get(FILE_SIZE_ATTR_NAME).getAsLong();
+ }
// -------------------------------
// get tags
// -------------------------------
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidFieldsCommandException.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidFieldsCommandException.java
new file mode 100644
index 00000000000..9bd1869f8a9
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidFieldsCommandException.java
@@ -0,0 +1,42 @@
+package edu.harvard.iq.dataverse.engine.command.exception;
+
+import edu.harvard.iq.dataverse.engine.command.Command;
+import java.util.Map;
+
+public class InvalidFieldsCommandException extends CommandException {
+
+ private final Map<String, String> fieldErrors;
+
+ /**
+ * Constructs a new InvalidFieldsCommandException with the specified detail message,
+ * command, and a map of field errors.
+ *
+ * @param message The detail message.
+ * @param aCommand The command where the exception was encountered.
+ * @param fieldErrors A map containing the fields as keys and the reasons for their errors as values.
+ */
+ public InvalidFieldsCommandException(String message, Command aCommand, Map<String, String> fieldErrors) {
+ super(message, aCommand);
+ this.fieldErrors = fieldErrors;
+ }
+
+ /**
+ * Gets the map of fields and their corresponding error messages.
+ *
+ * @return The map of field errors.
+ */
+ public Map<String, String> getFieldErrors() {
+ return fieldErrors;
+ }
+
+ /**
+ * Returns a string representation of this exception, including the
+ * message and details of the invalid fields and their errors.
+ *
+ * @return A string representation of this exception.
+ */
+ @Override
+ public String toString() {
+ return super.toString() + ", fieldErrors=" + fieldErrors;
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java
index a7881fc7b6e..2ca63c9c4aa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/PermissionException.java
@@ -3,6 +3,7 @@
import edu.harvard.iq.dataverse.DvObject;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.engine.command.Command;
+
import java.util.Set;
/**
@@ -12,22 +13,31 @@
* @author michael
*/
public class PermissionException extends CommandException {
-
- private final Set<Permission> required;
- private final DvObject dvObject;
-
- public PermissionException(String message, Command failedCommand, Set<Permission> required, DvObject aDvObject ) {
- super(message, failedCommand);
- this.required = required;
- dvObject = aDvObject;
- }
-
- public Set<Permission> getRequiredPermissions() {
- return required;
- }
-
- public DvObject getDvObject() {
- return dvObject;
- }
-
+
+ private final Set<Permission> required;
+ private final DvObject dvObject;
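+ // When true, API error handlers may expose this exception's detailed
+ // message to the client instead of a generic permission error.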
+ private final boolean isDetailedMessageRequired;
+
+ public PermissionException(String message, Command failedCommand, Set<Permission> required, DvObject dvObject, boolean isDetailedMessageRequired) {
+ super(message, failedCommand);
+ this.required = required;
+ this.dvObject = dvObject;
+ this.isDetailedMessageRequired = isDetailedMessageRequired;
+ }
+
+ public PermissionException(String message, Command failedCommand, Set<Permission> required, DvObject dvObject) {
+ this(message, failedCommand, required, dvObject, false);
+ }
+
+ public Set<Permission> getRequiredPermissions() {
+ return required;
+ }
+
+ public DvObject getDvObject() {
+ return dvObject;
+ }
+
+ public boolean isDetailedMessageRequired() {
+ return isDetailedMessageRequired;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java
index 40c2abf5d21..91f3a5b823c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java
@@ -19,13 +19,15 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand<Dataverse> {
private final List<DataverseFieldTypeInputLevel> inputLevels;
private final List<DatasetFieldType> facets;
protected final List<MetadataBlock> metadataBlocks;
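+ // When true, a null facets/inputLevels/metadataBlocks argument clears the
+ // corresponding relations instead of leaving them unchanged.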
+ private final boolean resetRelationsOnNullValues;
public AbstractWriteDataverseCommand(Dataverse dataverse,
Dataverse affectedDataverse,
DataverseRequest request,
List<DatasetFieldType> facets,
List<DataverseFieldTypeInputLevel> inputLevels,
- List<MetadataBlock> metadataBlocks) {
+ List<MetadataBlock> metadataBlocks,
+ boolean resetRelationsOnNullValues) {
super(request, affectedDataverse);
this.dataverse = dataverse;
if (facets != null) {
@@ -43,42 +45,61 @@ public AbstractWriteDataverseCommand(Dataverse dataverse,
} else {
this.metadataBlocks = null;
}
+ this.resetRelationsOnNullValues = resetRelationsOnNullValues;
}
@Override
public Dataverse execute(CommandContext ctxt) throws CommandException {
dataverse = innerExecute(ctxt);
+ processMetadataBlocks();
+ processFacets(ctxt);
+ processInputLevels(ctxt);
+
+ return ctxt.dataverses().save(dataverse);
+ }
+
+ private void processMetadataBlocks() {
if (metadataBlocks != null && !metadataBlocks.isEmpty()) {
dataverse.setMetadataBlockRoot(true);
dataverse.setMetadataBlocks(metadataBlocks);
+ } else if (resetRelationsOnNullValues) {
+ dataverse.setMetadataBlockRoot(false);
+ dataverse.clearMetadataBlocks();
}
+ }
+ private void processFacets(CommandContext ctxt) {
if (facets != null) {
ctxt.facets().deleteFacetsFor(dataverse);
-
+ dataverse.setDataverseFacets(new ArrayList<>());
+
if (!facets.isEmpty()) {
dataverse.setFacetRoot(true);
}
- int i = 0;
- for (DatasetFieldType df : facets) {
- ctxt.facets().create(i++, df, dataverse);
+ for (int i = 0; i < facets.size(); i++) {
+ ctxt.facets().create(i, facets.get(i), dataverse);
}
+ } else if (resetRelationsOnNullValues) {
+ ctxt.facets().deleteFacetsFor(dataverse);
+ dataverse.setFacetRoot(false);
}
+ }
+ private void processInputLevels(CommandContext ctxt) {
if (inputLevels != null) {
if (!inputLevels.isEmpty()) {
dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels);
}
ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse);
- for (DataverseFieldTypeInputLevel inputLevel : inputLevels) {
+ inputLevels.forEach(inputLevel -> {
inputLevel.setDataverse(dataverse);
ctxt.fieldTypeInputLevels().create(inputLevel);
- }
+ });
+ } else if (resetRelationsOnNullValues) {
+ ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse);
}
-
- return ctxt.dataverses().save(dataverse);
}
abstract protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
index 145cfb6199c..3728f3ee6ce 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
@@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created,
List<DatasetFieldType> facets,
List<DataverseFieldTypeInputLevel> inputLevels,
List<MetadataBlock> metadataBlocks) {
- super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks);
+ super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false);
}
@Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
index 76939751899..e9a2025b112 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
@@ -93,6 +93,10 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version,
this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, null, null);
}
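+ /**
+ * Variant that accepts a known file size (for example, one already verified
+ * via a Globus lookup), so the size does not have to be determined from the
+ * storage layer later.
+ */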
+ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, DataFile.ChecksumType newCheckSumType, Long newFileSize) {
+ this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, newFileSize, null);
+ }
+
// This version of the command must be used when files are created in the
// context of creating a brand new dataset (from the Add Dataset page):
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
index e6e8279a314..e378e2e2ef7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
@@ -131,7 +131,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasetversion.update.failure"), this);
} else {
- metadataUpdated = DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd);
+ metadataUpdated = !DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd).isEmpty();
publishedFmd.setLabel(draftFmd.getLabel());
publishedFmd.setDescription(draftFmd.getDescription());
publishedFmd.setCategories(draftFmd.getCategories());
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
index 1ac41105237..902bea7f833 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
@@ -11,6 +11,9 @@
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.workflow.Workflow;
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
+
+import jakarta.persistence.OptimisticLockException;
+
import java.util.Optional;
import java.util.logging.Logger;
import static java.util.stream.Collectors.joining;
@@ -105,10 +108,15 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
Optional<Workflow> prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset);
if ( prePubWf.isPresent() ) {
// We start a workflow
- theDataset = ctxt.em().merge(theDataset);
- ctxt.em().flush();
- ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true);
- return new PublishDatasetResult(theDataset, Status.Workflow);
+ try {
+ theDataset = ctxt.em().merge(theDataset);
+ ctxt.em().flush();
+ ctxt.workflows().start(prePubWf.get(),
+ buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true);
+ return new PublishDatasetResult(theDataset, Status.Workflow);
+ } catch (OptimisticLockException e) {
+ throw new CommandException(e.getMessage(), e, this);
+ }
} else{
// We will skip trying to register the global identifiers for datafiles
@@ -157,7 +165,12 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
lock.setInfo(info);
ctxt.datasets().addDatasetLock(theDataset, lock);
}
- theDataset = ctxt.em().merge(theDataset);
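+ // If the merge fails because another transaction updated the dataset
+ // concurrently, release the publication lock created above before
+ // reporting the failure: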
+ try {
+ theDataset = ctxt.em().merge(theDataset);
+ } catch (OptimisticLockException e) {
+ ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.finalizePublication);
+ throw new CommandException(e.getMessage(), e, this);
+ }
// The call to FinalizePublicationCommand has been moved to the new @onSuccess()
// method:
//ctxt.datasets().callFinalizePublishCommandAsynchronously(theDataset.getId(), ctxt, request, datasetExternallyReleased);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommand.java
new file mode 100644
index 00000000000..c7745c75aa9
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterOIDCUserCommand.java
@@ -0,0 +1,204 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.api.dto.UserDTO;
+import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
+import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
+import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationException;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
+import edu.harvard.iq.dataverse.engine.command.*;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
+import edu.harvard.iq.dataverse.engine.command.exception.InvalidFieldsCommandException;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@RequiredPermissions({})
+public class RegisterOIDCUserCommand extends AbstractVoidCommand {
+
+ private static final String FIELD_USERNAME = "username";
+ private static final String FIELD_FIRST_NAME = "firstName";
+ private static final String FIELD_LAST_NAME = "lastName";
+ private static final String FIELD_EMAIL_ADDRESS = "emailAddress";
+ private static final String FIELD_TERMS_ACCEPTED = "termsAccepted";
+
+ private final String bearerToken;
+ private final UserDTO userDTO;
+
+ public RegisterOIDCUserCommand(DataverseRequest aRequest, String bearerToken, UserDTO userDTO) {
+ super(aRequest, (DvObject) null);
+ this.bearerToken = bearerToken;
+ this.userDTO = userDTO;
+ }
+
+ @Override
+ protected void executeImpl(CommandContext ctxt) throws CommandException {
+ try {
+ OAuth2UserRecord oAuth2UserRecord = ctxt.authentication().verifyOIDCBearerTokenAndGetOAuth2UserRecord(bearerToken);
+ UserRecordIdentifier userRecordIdentifier = oAuth2UserRecord.getUserRecordIdentifier();
+
+ if (ctxt.authentication().lookupUser(userRecordIdentifier) != null) {
+ throw new IllegalCommandException(BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.userAlreadyRegisteredWithToken"), this);
+ }
+
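+ // With the feature flag enabled, values supplied in the JSON body may fill
+ // in claims missing from the OIDC provider; with it disabled, all claims
+ // must come from the provider and the JSON body must not set them.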
+ boolean provideMissingClaimsEnabled = FeatureFlags.API_BEARER_AUTH_PROVIDE_MISSING_CLAIMS.enabled();
+
+ updateUserDTO(oAuth2UserRecord, provideMissingClaimsEnabled);
+
+ AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(
+ userDTO.getFirstName(),
+ userDTO.getLastName(),
+ userDTO.getEmailAddress(),
+ userDTO.getAffiliation() != null ? userDTO.getAffiliation() : "",
+ userDTO.getPosition() != null ? userDTO.getPosition() : ""
+ );
+
+ validateUserFields(ctxt, provideMissingClaimsEnabled);
+
+ ctxt.authentication().createAuthenticatedUser(userRecordIdentifier, userDTO.getUsername(), userDisplayInfo, true);
+
+ } catch (AuthorizationException ex) {
+ throw new PermissionException(ex.getMessage(), this, null, null, true);
+ }
+ }
+
+ private void updateUserDTO(OAuth2UserRecord oAuth2UserRecord, boolean provideMissingClaimsEnabled) throws InvalidFieldsCommandException {
+ if (provideMissingClaimsEnabled) {
+ Map<String, String> fieldErrors = validateConflictingClaims(oAuth2UserRecord);
+ throwInvalidFieldsCommandExceptionIfErrorsExist(fieldErrors);
+ updateUserDTOWithClaims(oAuth2UserRecord);
+ } else {
+ Map<String, String> fieldErrors = validateUserDTOHasNoClaims();
+ throwInvalidFieldsCommandExceptionIfErrorsExist(fieldErrors);
+ overwriteUserDTOWithClaims(oAuth2UserRecord);
+ }
+ }
+
+ private Map<String, String> validateConflictingClaims(OAuth2UserRecord oAuth2UserRecord) {
+ Map<String, String> fieldErrors = new HashMap<>();
+
+ addFieldErrorIfConflict(FIELD_USERNAME, oAuth2UserRecord.getUsername(), userDTO.getUsername(), fieldErrors);
+ addFieldErrorIfConflict(FIELD_FIRST_NAME, oAuth2UserRecord.getDisplayInfo().getFirstName(), userDTO.getFirstName(), fieldErrors);
+ addFieldErrorIfConflict(FIELD_LAST_NAME, oAuth2UserRecord.getDisplayInfo().getLastName(), userDTO.getLastName(), fieldErrors);
+ addFieldErrorIfConflict(FIELD_EMAIL_ADDRESS, oAuth2UserRecord.getDisplayInfo().getEmailAddress(), userDTO.getEmailAddress(), fieldErrors);
+
+ return fieldErrors;
+ }
+
+ private void addFieldErrorIfConflict(String fieldName, String claimValue, String existingValue, Map<String, String> fieldErrors) {
+ if (claimValue != null && !claimValue.trim().isEmpty() && existingValue != null && !claimValue.equals(existingValue)) {
+ String errorMessage = BundleUtil.getStringFromBundle(
+ "registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider",
+ List.of(fieldName)
+ );
+ fieldErrors.put(fieldName, errorMessage);
+ }
+ }
+
+ private Map<String, String> validateUserDTOHasNoClaims() {
+ Map<String, String> fieldErrors = new HashMap<>();
+ if (userDTO.getUsername() != null) {
+ String errorMessage = BundleUtil.getStringFromBundle(
+ "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON",
+ List.of(FIELD_USERNAME)
+ );
+ fieldErrors.put(FIELD_USERNAME, errorMessage);
+ }
+ if (userDTO.getEmailAddress() != null) {
+ String errorMessage = BundleUtil.getStringFromBundle(
+ "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON",
+ List.of(FIELD_EMAIL_ADDRESS)
+ );
+ fieldErrors.put(FIELD_EMAIL_ADDRESS, errorMessage);
+ }
+ if (userDTO.getFirstName() != null) {
+ String errorMessage = BundleUtil.getStringFromBundle(
+ "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON",
+ List.of(FIELD_FIRST_NAME)
+ );
+ fieldErrors.put(FIELD_FIRST_NAME, errorMessage);
+ }
+ if (userDTO.getLastName() != null) {
+ String errorMessage = BundleUtil.getStringFromBundle(
+ "registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON",
+ List.of(FIELD_LAST_NAME)
+ );
+ fieldErrors.put(FIELD_LAST_NAME, errorMessage);
+ }
+ return fieldErrors;
+ }
+
+ private void updateUserDTOWithClaims(OAuth2UserRecord oAuth2UserRecord) {
+ userDTO.setUsername(getValueOrDefault(oAuth2UserRecord.getUsername(), userDTO.getUsername()));
+ userDTO.setFirstName(getValueOrDefault(oAuth2UserRecord.getDisplayInfo().getFirstName(), userDTO.getFirstName()));
+ userDTO.setLastName(getValueOrDefault(oAuth2UserRecord.getDisplayInfo().getLastName(), userDTO.getLastName()));
+ userDTO.setEmailAddress(getValueOrDefault(oAuth2UserRecord.getDisplayInfo().getEmailAddress(), userDTO.getEmailAddress()));
+ }
+
+ private void overwriteUserDTOWithClaims(OAuth2UserRecord oAuth2UserRecord) {
+ userDTO.setUsername(oAuth2UserRecord.getUsername());
+ userDTO.setFirstName(oAuth2UserRecord.getDisplayInfo().getFirstName());
+ userDTO.setLastName(oAuth2UserRecord.getDisplayInfo().getLastName());
+ userDTO.setEmailAddress(oAuth2UserRecord.getDisplayInfo().getEmailAddress());
+ }
+
+ private void throwInvalidFieldsCommandExceptionIfErrorsExist(Map<String, String> fieldErrors) throws InvalidFieldsCommandException {
+ if (!fieldErrors.isEmpty()) {
+ throw new InvalidFieldsCommandException(
+ BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.invalidFields"),
+ this,
+ fieldErrors
+ );
+ }
+ }
+
+ private String getValueOrDefault(String oidcValue, String dtoValue) {
+ return (oidcValue == null || oidcValue.trim().isEmpty()) ? dtoValue : oidcValue;
+ }
+
+ private void validateUserFields(CommandContext ctxt, boolean provideMissingClaimsEnabled) throws InvalidFieldsCommandException {
+ Map<String, String> fieldErrors = new HashMap<>();
+
+ if (!FeatureFlags.API_BEARER_AUTH_HANDLE_TOS_ACCEPTANCE_IN_IDP.enabled()) {
+ validateTermsAccepted(fieldErrors);
+ }
+
+ validateField(fieldErrors, FIELD_EMAIL_ADDRESS, userDTO.getEmailAddress(), ctxt, provideMissingClaimsEnabled);
+ validateField(fieldErrors, FIELD_USERNAME, userDTO.getUsername(), ctxt, provideMissingClaimsEnabled);
+ validateField(fieldErrors, FIELD_FIRST_NAME, userDTO.getFirstName(), ctxt, provideMissingClaimsEnabled);
+ validateField(fieldErrors, FIELD_LAST_NAME, userDTO.getLastName(), ctxt, provideMissingClaimsEnabled);
+
+ throwInvalidFieldsCommandExceptionIfErrorsExist(fieldErrors);
+ }
+
+ private void validateTermsAccepted(Map<String, String> fieldErrors) {
+ if (!userDTO.isTermsAccepted()) {
+ fieldErrors.put(FIELD_TERMS_ACCEPTED, BundleUtil.getStringFromBundle("registerOidcUserCommand.errors.userShouldAcceptTerms"));
+ }
+ }
+
+ private void validateField(Map<String, String> fieldErrors, String fieldName, String fieldValue, CommandContext ctxt, boolean provideMissingClaimsEnabled) {
+ if (fieldValue == null || fieldValue.isEmpty()) {
+ String errorKey = provideMissingClaimsEnabled ?
+ "registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired" :
+ "registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired";
+ fieldErrors.put(fieldName, BundleUtil.getStringFromBundle(errorKey, List.of(fieldName)));
+ } else if (isFieldInUse(ctxt, fieldName, fieldValue)) {
+ fieldErrors.put(fieldName, BundleUtil.getStringFromBundle("registerOidcUserCommand.errors." + fieldName + "InUse"));
+ }
+ }
+
+ private boolean isFieldInUse(CommandContext ctxt, String fieldName, String value) {
+ if (FIELD_EMAIL_ADDRESS.equals(fieldName)) {
+ return ctxt.authentication().getAuthenticatedUserByEmail(value) != null;
+ } else if (FIELD_USERNAME.equals(fieldName)) {
+ return ctxt.authentication().getAuthenticatedUser(value) != null;
+ }
+ return false;
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
index 55cc3708097..6dc4ab4d00d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
@@ -32,7 +32,7 @@ public UpdateDataverseCommand(Dataverse dataverse,
List<Dataverse> featuredDataverses,
DataverseRequest request,
List<DataverseFieldTypeInputLevel> inputLevels) {
- this(dataverse, facets, featuredDataverses, request, inputLevels, null, null);
+ this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false);
}
public UpdateDataverseCommand(Dataverse dataverse,
@@ -41,8 +41,9 @@ public UpdateDataverseCommand(Dataverse dataverse,
DataverseRequest request,
List<DataverseFieldTypeInputLevel> inputLevels,
List<MetadataBlock> metadataBlocks,
- DataverseDTO updatedDataverseDTO) {
- super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks);
+ DataverseDTO updatedDataverseDTO,
+ boolean resetRelationsOnNullValues) {
+ super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues);
if (featuredDataverses != null) {
this.featuredDataverseList = new ArrayList<>(featuredDataverses);
} else {
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
index 05ddbe83e78..8fab6a6704d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
@@ -5,11 +5,13 @@
import edu.harvard.iq.dataverse.ControlledVocabularyValue;
import edu.harvard.iq.dataverse.DatasetFieldConstant;
import edu.harvard.iq.dataverse.DvObjectContainer;
+import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
-import edu.harvard.iq.dataverse.api.dto.FieldDTO;
import edu.harvard.iq.dataverse.api.dto.FileDTO;
-import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
+import edu.harvard.iq.dataverse.api.dto.FieldDTO;
+import edu.harvard.iq.dataverse.api.dto.LicenseDTO;
+
import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE;
import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG;
@@ -313,8 +315,16 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO version) throws XMLStreamException {
XmlWriterUtil.writeFullElement(xmlw, "conditions", version.getConditions());
XmlWriterUtil.writeFullElement(xmlw, "disclaimer", version.getDisclaimer());
xmlw.writeEndElement(); //useStmt
-
+
/* any <notes> sections: */
+ if (version.getTermsOfUse() != null && !version.getTermsOfUse().trim().equals("")) {
+ xmlw.writeStartElement("notes");
+ xmlw.writeAttribute("type", NOTE_TYPE_TERMS_OF_USE);
+ xmlw.writeAttribute("level", LEVEL_DV);
+ xmlw.writeCharacters(version.getTermsOfUse());
+ xmlw.writeEndElement(); //notes
+ }
+
if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) {
xmlw.writeStartElement("notes");
xmlw.writeAttribute("type", NOTE_TYPE_TERMS_OF_ACCESS);
@@ -322,6 +332,19 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO version) throws XMLStreamException {
xmlw.writeCharacters(version.getTermsOfAccess());
xmlw.writeEndElement(); //notes
}
+
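+ // A standard license is exported as a <notes> element wrapping an HTML
+ // anchor (<a href="uri">name</a>), mirroring the format the DDI import
+ // code parses back into a license: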
+ LicenseDTO license = version.getLicense();
+ if (license != null) {
+ String name = license.getName();
+ String uri = license.getUri();
+ if ((name != null && !name.trim().equals("")) && (uri != null && !uri.trim().equals(""))) {
+ xmlw.writeStartElement("notes");
+ xmlw.writeAttribute("type", NOTE_TYPE_TERMS_OF_USE);
+ xmlw.writeAttribute("level", LEVEL_DV);
+ xmlw.writeCharacters("" + name + "");
+ xmlw.writeEndElement(); //notes
+ }
+ }
xmlw.writeEndElement(); //dataAccs
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
index ac3c81622fc..58992805dc8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
@@ -74,6 +74,7 @@
import edu.harvard.iq.dataverse.util.URLTokenUtil;
import edu.harvard.iq.dataverse.util.UrlSignerUtil;
import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import jakarta.json.JsonNumber;
import jakarta.json.JsonReader;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
@@ -284,6 +285,52 @@ private int makeDir(GlobusEndpoint endpoint, String dir) {
return result.status;
}
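+ /**
+ * Lists the contents of the given folder on the Globus endpoint, returning
+ * a map of file name to size so that sizes need not be looked up per file.
+ */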
+ private Map<String, Long> lookupFileSizes(GlobusEndpoint endpoint, String dir) {
+ MakeRequestResponse result;
+
+ try {
+ logger.fine("Attempting to look up the contents of the Globus folder "+dir);
+ URL url = new URL(
+ "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId()
+ + "/ls?path=" + dir);
+ result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null);
+
+ switch (result.status) {
+ case 200:
+ logger.fine("Looked up directory " + dir + " successfully.");
+ break;
+ default:
+ logger.warning("Status " + result.status + " received when looking up dir " + dir);
+ logger.fine("Response: " + result.jsonResponse);
+ return null;
+ }
+ } catch (MalformedURLException ex) {
+ // Misconfiguration
+ logger.warning("Failed to list the contents of the directory "+ dir + " on endpoint " + endpoint.getId());
+ return null;
+ }
+
+ Map<String, Long> ret = new HashMap<>();
+
+ JsonObject listObject = JsonUtil.getJsonObject(result.jsonResponse);
+ JsonArray dataArray = listObject.getJsonArray("DATA");
+
+ if (dataArray != null && !dataArray.isEmpty()) {
+ for (int i = 0; i < dataArray.size(); i++) {
+ String dataType = dataArray.getJsonObject(i).getString("DATA_TYPE", null);
+ if (dataType != null && dataType.equals("file")) {
+ // is it safe to assume that any entry with a valid "DATA_TYPE": "file"
+ // will also have valid "name" and "size" entries?
+ String fileName = dataArray.getJsonObject(i).getString("name");
+ long fileSize = dataArray.getJsonObject(i).getJsonNumber("size").longValueExact();
+ ret.put(fileName, fileSize);
+ }
+ }
+ }
+
+ return ret;
+ }
+
private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) {
Gson gson = new GsonBuilder().create();
MakeRequestResponse result = null;
@@ -938,9 +985,20 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, AuthenticatedUser authUser,
inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName);
}
+
+ Map<String, Long> fileSizeMap = null;
+
+ if (filesJsonArray.size() >= systemConfig.getGlobusBatchLookupSize()) {
+ // Look up the sizes of all the files in the dataset folder, to avoid
+ // looking them up one by one later:
+ // @todo: we should only be doing this if this is a managed store, probably (?)
+ GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+ fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath());
+ }
// calculateMissingMetadataFields: checksum, mimetype
JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, myLogger);
+
JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files");
logger.fine("Size: " + newfilesJsonArray.size());
logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0)));
@@ -964,20 +1022,26 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, AuthenticatedUser authUser,
if (newfileJsonObject != null) {
logger.fine("List Size: " + newfileJsonObject.size());
// if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) {
- JsonPatch path = Json.createPatchBuilder()
+ JsonPatch patch = Json.createPatchBuilder()
.add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build();
- fileJsonObject = path.apply(fileJsonObject);
- path = Json.createPatchBuilder()
+ fileJsonObject = patch.apply(fileJsonObject);
+ patch = Json.createPatchBuilder()
.add("/mimeType", newfileJsonObject.get(0).getString("mime")).build();
- fileJsonObject = path.apply(fileJsonObject);
+ fileJsonObject = patch.apply(fileJsonObject);
+ // If we already know the size of this file on the Globus end,
+ // we'll pass it to /addFiles, to avoid looking up file sizes
+ // one by one:
+ if (fileSizeMap != null && fileSizeMap.get(fileId) != null) {
+ Long uploadedFileSize = fileSizeMap.get(fileId);
+ myLogger.info("Found size for file " + fileId + ": " + uploadedFileSize + " bytes");
+ patch = Json.createPatchBuilder()
+ .add("/fileSize", Json.createValue(uploadedFileSize)).build();
+ fileJsonObject = patch.apply(fileJsonObject);
+ } else {
+ logger.fine("No file size entry found for file "+fileId);
+ }
addFilesJsonData.add(fileJsonObject);
countSuccess++;
- // } else {
- // globusLogger.info(fileName
- // + " will be skipped from adding to dataset by second API due to missing
- // values ");
- // countError++;
- // }
} else {
myLogger.info(fileName
+ " will be skipped from adding to dataset in the final AddReplaceFileHelper.addFiles() call. ");
@@ -1029,7 +1093,7 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, AuthenticatedUser authUser,
// The old code had 2 sec. of sleep, so ...
Thread.sleep(2000);
- Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser);
+ Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser, true);
if (addFilesResponse == null) {
logger.info("null response from addFiles call");
@@ -1211,7 +1275,7 @@ private GlobusTaskState globusStatusCheck(GlobusEndpoint endpoint, String taskId
return task;
}
- public JsonObject calculateMissingMetadataFields(List<String> inputList, Logger globusLogger)
+ private JsonObject calculateMissingMetadataFields(List<String> inputList, Logger globusLogger)
throws InterruptedException, ExecutionException, IOException {
List<CompletableFuture<FileDetailsHolder>> hashvalueCompletableFutures = inputList.stream()
@@ -1230,7 +1294,7 @@ public JsonObject calculateMissingMetadataFields(List<String> inputList, Logger globusLogger)
});
JsonArrayBuilder filesObject = (JsonArrayBuilder) completableFuture.get();
-
+
JsonObject output = Json.createObjectBuilder().add("files", filesObject).build();
return output;
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
index b42fd950528..71c498a4d0b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
@@ -344,10 +344,20 @@ public List<DataFile> saveAndAddFilesToDataset(DatasetVersion version,
try {
StorageIO<DataFile> dataAccess = DataAccess.getStorageIO(dataFile);
//Populate metadata
- dataAccess.open(DataAccessOption.READ_ACCESS);
- // (the .open() above makes a remote call to check if
- // the file exists and obtains its size)
- confirmedFileSize = dataAccess.getSize();
+
+ // There are direct upload sub-cases where the file size
+ // is already known at this point. For example, direct uploads
+ // to S3 that go through the jsf dataset page. Or the Globus
+ // uploads, where the file sizes are looked up in bulk on
+ // the completion of the remote upload task.
+ if (dataFile.getFilesize() >= 0) {
+ confirmedFileSize = dataFile.getFilesize();
+ } else {
+ dataAccess.open(DataAccessOption.READ_ACCESS);
+ // (the .open() above makes a remote call to check if
+ // the file exists and obtains its size)
+ confirmedFileSize = dataAccess.getSize();
+ }
// For directly-uploaded files, we will perform the file size
// limit and quota checks here. Perform them *again*, in
@@ -362,13 +372,16 @@ public List saveAndAddFilesToDataset(DatasetVersion version,
if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) {
//set file size
- logger.fine("Setting file size: " + confirmedFileSize);
- dataFile.setFilesize(confirmedFileSize);
+ if (dataFile.getFilesize() < 0) {
+ logger.fine("Setting file size: " + confirmedFileSize);
+ dataFile.setFilesize(confirmedFileSize);
+ }
if (dataAccess instanceof S3AccessIO) {
((S3AccessIO) dataAccess).removeTempTag();
}
savedSuccess = true;
+ logger.info("directly uploaded file successfully saved. file size: "+dataFile.getFilesize());
}
} catch (IOException ioex) {
logger.warning("Failed to get file size, storage id, or failed to remove the temp tag on the saved S3 object" + dataFile.getStorageIdentifier() + " ("
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java
index beb676f60d1..63b5bf03ea7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java
@@ -30,7 +30,7 @@ public class PrivateUrl {
public PrivateUrl(RoleAssignment roleAssignment, Dataset dataset, String dataverseSiteUrl) {
this.token = roleAssignment.getPrivateUrlToken();
- this.link = dataverseSiteUrl + "/privateurl.xhtml?token=" + token;
+ this.link = dataverseSiteUrl + "/previewurl.xhtml?token=" + token;
this.dataset = dataset;
this.roleAssignment = roleAssignment;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
index 9af4bb6af9e..17c622be9e2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
@@ -1,6 +1,10 @@
package edu.harvard.iq.dataverse.privateurl;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetServiceBean;
+import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
import edu.harvard.iq.dataverse.DataverseSession;
+import edu.harvard.iq.dataverse.PermissionsWrapper;
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import java.io.Serializable;
import java.util.logging.Logger;
@@ -20,8 +24,14 @@ public class PrivateUrlPage implements Serializable {
@EJB
PrivateUrlServiceBean privateUrlService;
+ @EJB
+ DatasetServiceBean datasetServiceBean;
@Inject
DataverseSession session;
+ @Inject
+ PermissionsWrapper permissionsWrapper;
+ @Inject
+ DataverseRequestServiceBean dvRequestService;
/**
* The unique string used to look up a PrivateUrlUser and the associated
@@ -34,7 +44,16 @@ public String init() {
PrivateUrlRedirectData privateUrlRedirectData = privateUrlService.getPrivateUrlRedirectDataFromToken(token);
String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true";
PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser();
- session.setUser(privateUrlUser);
+ boolean sessionUserCanViewUnpublishedDataset = false;
+ if (session.getUser().isAuthenticated()) {
+ Long datasetId = privateUrlUser.getDatasetId();
+ Dataset dataset = datasetServiceBean.find(datasetId);
+ sessionUserCanViewUnpublishedDataset = permissionsWrapper.canViewUnpublishedDataset(dvRequestService.getDataverseRequest(), dataset);
+ }
+ if (!sessionUserCanViewUnpublishedDataset) {
+ // Only reset the session user if the current user cannot already view this draft version
+ session.setUser(privateUrlUser);
+ }
logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo);
return draftDatasetPageToBeRedirectedTo;
} catch (Exception ex) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java
index 6e939c1bb6d..1310e0eb199 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java
@@ -1,19 +1,19 @@
/**
- * Private URL for unpublished datasets.
+ * Preview URL for unpublished datasets.
*
- * The Private URL feature has been implemented as a specialized role assignment
+ * The Preview (formerly Private) URL feature has been implemented as a specialized role assignment
* with an associated token that permits read-only access to the metadata and
* all files (regardless of if the files are restricted or not) of a draft
* version of a dataset.
*
- * As of this note, a second option - to create a Private URL that provides an
+ * As of this note, a second option - to create a Preview URL that provides an
* anonymized view of the dataset has been added. This option works the same as
* the original except that it hides author names in the citation block, hides
* the values for an admin specified list of metadata fields, disables citation
* downloads, and disables API access (except for file and file thumbnail
* downloads which are used by the UI).
*
- * The primary use case for a Private URL is for journal editors to send a link
+ * The primary use case for a Preview URL is for journal editors to send a link
* to reviewers of a dataset before publication. In most cases, these journal
* editors do not permit depositors to publish on their own, which is to say
* they only allow depositors to have the "Contributor" role on the datasets
@@ -24,42 +24,42 @@
* the depositor, who is in charge of both the security of the dataset and the
* timing of when the dataset is published.
*
- * A secondary use case for a Private URL is for depositors who have the ability
+ * A secondary use case for a Preview URL is for depositors who have the ability
* to manage permissions on their dataset (depositors who have the "Curator" or
* "Admin" role, which grants much more power than the "Contributor" role) to
* send a link to coauthors or other trusted parties to preview the dataset
* before the depositors publish the dataset on their own. For better security,
* these depositors could ask their coauthors to create Dataverse accounts and
- * assign roles to them directly, rather than using a Private URL which requires
+ * assign roles to them directly, rather than using a Preview URL which requires
* no username or password.
*
* As of this note, a second option aimed specifically at the review use case -
- * to create a Private URL that provides an anonymized view of the dataset - has
+ * to create a Preview URL that provides an anonymized view of the dataset - has
* been added. This option works the same as the original except that it hides
* author names in the citation block, hides the values for an admin specified
* list of metadata fields, disables citation downloads, and disables API access
* (except for file and file thumbnail downloads which are used by the UI).
*
- * The token associated with the Private URL role assignment that can be used
+ * The token associated with the Preview URL role assignment that can be used
* either in the GUI or, for the non-anonymized-access option, via the API to
* elevate privileges beyond what a "Guest" can see. The ability to use a
- * Private URL token via API was added mostly to facilitate automated testing of
- * the feature but the far more common case is expected to be use of the Private
+ * Preview URL token via API was added mostly to facilitate automated testing of
+ * the feature but the far more common case is expected to be use of the Preview
* URL token in a link that is clicked to open a browser, similar to links
* shared via Dropbox, Google, etc.
*
- * When reviewers click a Private URL their browser sessions are set to the
+ * When reviewers click a Preview URL their browser sessions are set to the
* "{@link edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser}" that
* has the "Member" role only on the dataset in question and redirected to that
* dataset, where they will see an indication in blue at the top of the page
* that they are viewing an unpublished dataset. If the reviewer happens to be
* logged into Dataverse already, clicking the link will log them out because
* the review is meant to be blind. Because the dataset is always in draft when
- * a Private URL is in effect, no downloads or any other activity by the
- * reviewer are logged to the guestbook. All reviewers click the same Private
+ * a Preview URL is in effect, no downloads or any other activity by the
+ * reviewer are logged to the guestbook. All reviewers click the same Preview
* URL containing the same token, and with the exception of an IP address being
* logged, it should be impossible to trace which reviewers have clicked a
- * Private URL. If the reviewer navigates to the home page, the session is set
+ * Preview URL. If the reviewer navigates to the home page, the session is set
* to the Guest user and they will see what a Guest would see.
*
* The "Member" role is used because it contains the necessary read-only
@@ -76,51 +76,51 @@
* version. A Member can also download restricted files that have been deleted
* from previously published versions.
*
- * Likewise, when a Private URL token is used via API, commands are executed
+ * Likewise, when a Preview URL token is used via API, commands are executed
* using the "PrivateUrlUser" that has the "Member" role only on the dataset in
* question. This means that read-only operations such as downloads of the
- * dataset's files are permitted. The Search API does not respect the Private
+ * dataset's files are permitted. The Search API does not respect the Preview
* URL token but you can download files using the Access API, and, with the
* non-anonymized-access option, download unpublished metadata using the Native
* API.
*
- * A Private URL cannot be created for a published version of a dataset. In the
+ * A Preview URL cannot be created for a published version of a dataset. In the
* GUI, you will be reminded of this fact with a popup. The API will explain
* this as well.
*
- * An anonymized-access Private URL can't be created if any published dataset
+ * An anonymized-access Preview URL can't be created if any published dataset
* version exists. The primary reason for this is that, since datasets have
* DOIs, the full metadata about published versions is available directly from
* the DOI provider. (While the metadata for that version could be somewhat
* different, in practice it would probably provide a means of identifying
* some/all of the authors).
*
- * If a draft dataset containing a Private URL is
- * published, the Private URL is deleted. This means that reviewers who click
+ * If a draft dataset containing a Preview URL is
+ * published, the Preview URL is deleted. This means that reviewers who click
* the link after publication will see a 404.
*
- * If a post-publication draft containing a Private URL is deleted, the Private
+ * If a post-publication draft containing a Preview URL is deleted, the Preview
* URL is deleted. This is to ensure that if a new draft is created in the
* future, a new token will be used.
*
- * The creation and deletion of a Private URL are limited to the "Curator" and
+ * The creation and deletion of a Preview URL are limited to the "Curator" and
* "Admin" roles because only those roles have the permission called
* "ManageDatasetPermissions", which is the permission used by the
* "AssignRoleCommand" and "RevokeRoleCommand" commands. If you have the
- * permission to create or delete a Private URL, the fact that a Private URL is
+ * permission to create or delete a Preview URL, the fact that a Preview URL is
* enabled for a dataset will be indicated in blue at the top of the page.
* Success messages are shown at the top of the page when you create or delete a
- * Private URL. In the GUI, deleting a Private URL is called "disabling" and you
+ * Preview URL. In the GUI, deleting a Preview URL is called "disabling" and you
* will be prompted for a confirmation. No matter what you call it the role is
- * revoked. You can also delete a Private URL by revoking the role.
+ * revoked. You can also delete a Preview URL by revoking the role.
*
* A "Contributor" does not have the "ManageDatasetPermissions" permission and
- * cannot see "Permissions" nor "Private URL" under the "Edit" menu of their
- * dataset. When a Curator or Admin has enabled a Private URL on a Contributor's
- * dataset, the Contributor does not see a visual indication that a Private URL
+ * cannot see "Permissions" nor "Preview URL" under the "Edit" menu of their
+ * dataset. When a Curator or Admin has enabled a Preview URL on a Contributor's
+ * dataset, the Contributor does not see a visual indication that a Preview URL
* has been enabled for their dataset.
*
- * There is no way for an "Admin" or "Curator" to see when a Private URL was
+ * There is no way for an "Admin" or "Curator" to see when a Preview URL was
* created or deleted for a dataset but someone who has access to the database
* can see that the following commands are logged to the "actionlogrecord"
* database table:
@@ -129,7 +129,7 @@
*
* {@link edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand}
* {@link edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand}
*
- * See also the Private URL To Unpublished Dataset BRD at
+ * See also the Preview URL To Unpublished Dataset BRD at
* https://docs.google.com/document/d/1FT47QkZKcmjSgRnePaJO2g1nzcotLyN3Yb2ORvBr6cs/edit?usp=sharing
*/
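
The Javadoc above notes that a non-anonymized Preview URL token also works via the API. A hedged sketch of such a call with java.net.http; the host, file id, and token are illustrative, and the exact endpoint form should be checked against the API Guide:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class PreviewUrlTokenSketch {
        public static void main(String[] args) throws Exception {
            String token = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"; // hypothetical Preview URL token
            // Download a draft file via the Access API, passing the token as the API key.
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("https://demo.dataverse.org/api/access/datafile/42?key=" + token))
                    .GET()
                    .build();
            HttpResponse<byte[]> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofByteArray());
            System.out.println("HTTP " + response.statusCode() + ", " + response.body().length + " bytes");
        }
    }
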
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index d0dcf3461cf..9b7998b0a8e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -135,6 +135,9 @@ public class IndexServiceBean {
@EJB
DatasetFieldServiceBean datasetFieldService;
+ @Inject
+ DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;
+
public static final String solrDocIdentifierDataverse = "dataverse_";
public static final String solrDocIdentifierFile = "datafile_";
public static final String solrDocIdentifierDataset = "dataset_";
@@ -1018,6 +1021,8 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long>
@@ -1296,7 +1299,6 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long>
+ List<DataVariable> variables = fileMetadata.getDataFile().getDataTable().getDataVariables();
+ Long observations = fileMetadata.getDataFile().getDataTable().getCaseQuantity();
+ datafileSolrInputDocument.addField(SearchFields.OBSERVATIONS, observations);
+ datafileSolrInputDocument.addField(SearchFields.VARIABLE_COUNT, variables.size());
Map<Long, VariableMetadata> variableMap = null;
List<VariableMetadata> variablesByMetadata = variableService.findVarMetByFileMetaId(fileMetadata.getId());
@@ -2230,8 +2237,7 @@ public List findPermissionsInSolrOnly() throws SearchException {
String dtype = dvObjectService.getDtype(id);
if (dtype == null) {
permissionInSolrOnly.add(docId);
- }
- if (dtype.equals(DType.Dataset.getDType())) {
+ } else if (dtype.equals(DType.Dataset.getDType())) {
List<String> states = datasetService.getVersionStates(id);
if (states != null) {
String latestState = states.get(states.size() - 1);
@@ -2252,7 +2258,7 @@ public List findPermissionsInSolrOnly() throws SearchException {
} else if (dtype.equals(DType.DataFile.getDType())) {
List<VersionState> states = dataFileService.findVersionStates(id);
Set<String> strings = states.stream().map(VersionState::toString).collect(Collectors.toSet());
- logger.fine("States for " + docId + ": " + String.join(", ", strings));
+ logger.finest("States for " + docId + ": " + String.join(", ", strings));
if (docId.endsWith("draft_permission")) {
if (!states.contains(VersionState.DRAFT)) {
permissionInSolrOnly.add(docId);
@@ -2266,7 +2272,7 @@ public List findPermissionsInSolrOnly() throws SearchException {
permissionInSolrOnly.add(docId);
} else {
if (!dataFileService.isInReleasedVersion(id)) {
- logger.fine("Adding doc " + docId + " to list of permissions in Solr only");
+ logger.finest("Adding doc " + docId + " to list of permissions in Solr only");
permissionInSolrOnly.add(docId);
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
index ef27a5eefaf..712f90186f5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
@@ -171,6 +171,7 @@ public class SearchFields {
public static final String FILE_CHECKSUM_TYPE = "fileChecksumType";
public static final String FILE_CHECKSUM_VALUE = "fileChecksumValue";
public static final String FILENAME_WITHOUT_EXTENSION = "fileNameWithoutExtension";
+ public static final String FILE_RESTRICTED = "fileRestricted";
/**
* Indexed as a string so we can facet on it.
*/
@@ -270,6 +271,8 @@ more targeted results for just datasets. The format is YYYY (i.e.
*/
public static final String DATASET_TYPE = "datasetType";
+ public static final String OBSERVATIONS = "observations";
+ public static final String VARIABLE_COUNT = "variableCount";
public static final String VARIABLE_NAME = "variableName";
public static final String VARIABLE_LABEL = "variableLabel";
public static final String LITERAL_QUESTION = "literalQuestion";
@@ -291,5 +294,6 @@ more targeted results for just datasets. The format is YYYY (i.e.
public static final String DATASET_VALID = "datasetValid";
public static final String DATASET_LICENSE = "license";
+ public static final String FILE_COUNT = "fileCount";
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
index de75c88009f..60bcc9f846e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
@@ -1,6 +1,7 @@
package edu.harvard.iq.dataverse.search;
import edu.harvard.iq.dataverse.*;
+import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.groups.Group;
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -18,6 +19,7 @@
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
+import java.util.EnumSet;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -75,6 +77,8 @@ public class SearchServiceBean {
SystemConfig systemConfig;
@EJB
SolrClientService solrClientService;
+ @EJB
+ PermissionServiceBean permissionService;
@Inject
ThumbnailServiceWrapper thumbnailServiceWrapper;
@@ -497,7 +501,8 @@ public SolrQueryResponse search(
Long retentionEndDate = (Long) solrDocument.getFieldValue(SearchFields.RETENTION_END_DATE);
//
Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID);
-
+ Long fileCount = (Long) solrDocument.getFieldValue(SearchFields.FILE_COUNT);
+
List<String> matchedFields = new ArrayList<>();
SolrSearchResult solrSearchResult = new SolrSearchResult(query, name);
@@ -570,6 +575,7 @@ public SolrQueryResponse search(
solrSearchResult.setDeaccessionReason(deaccessionReason);
solrSearchResult.setDvTree(dvTree);
solrSearchResult.setDatasetValid(datasetValid);
+ solrSearchResult.setFileCount(fileCount);
if (Boolean.TRUE.equals((Boolean) solrDocument.getFieldValue(SearchFields.IS_HARVESTED))) {
solrSearchResult.setHarvested(true);
@@ -675,6 +681,15 @@ public SolrQueryResponse search(
logger.info("Exception setting setFileChecksumType: " + ex);
}
solrSearchResult.setFileChecksumValue((String) solrDocument.getFieldValue(SearchFields.FILE_CHECKSUM_VALUE));
+
+ if (solrDocument.getFieldValue(SearchFields.FILE_RESTRICTED) != null) {
+ solrSearchResult.setFileRestricted((Boolean) solrDocument.getFieldValue(SearchFields.FILE_RESTRICTED));
+ }
+
+ if (solrSearchResult.getEntity() != null) {
+ solrSearchResult.setCanDownloadFile(permissionService.hasPermissionsFor(dataverseRequest, solrSearchResult.getEntity(), EnumSet.of(Permission.DownloadFile)));
+ }
+
solrSearchResult.setUnf((String) solrDocument.getFieldValue(SearchFields.UNF));
solrSearchResult.setDatasetVersionId(datasetVersionId);
List<String> fileCategories = (List<String>) solrDocument.getFieldValues(SearchFields.FILE_TAG);
@@ -686,6 +701,10 @@ public SolrQueryResponse search(
Collections.sort(tabularDataTags);
solrSearchResult.setTabularDataTags(tabularDataTags);
}
+ Long observations = (Long) solrDocument.getFieldValue(SearchFields.OBSERVATIONS);
+ solrSearchResult.setObservations(observations);
+ Long tabCount = (Long) solrDocument.getFieldValue(SearchFields.VARIABLE_COUNT);
+ solrSearchResult.setTabularDataCount(tabCount);
String filePID = (String) solrDocument.getFieldValue(SearchFields.FILE_PERSISTENT_ID);
if(null != filePID && !"".equals(filePID) && !"".equals("null")) {
solrSearchResult.setFilePersistentId(filePID);
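
The new reads above deliberately use boxed Long/Boolean: SolrDocument.getFieldValue() returns null when a document lacks the field (e.g. observations on a non-tabular file), and the SolrSearchResult setters tolerate null. A small sketch of why the boxed types matter:

    import org.apache.solr.common.SolrDocument;

    static Long readObservations(SolrDocument doc) {
        // Boxed Long: null when the field is absent, which is the common case.
        Long observations = (Long) doc.getFieldValue("observations");
        // Unboxing here (long n = observations) would throw NullPointerException
        // for most documents, so the value is passed along boxed and only
        // rendered when non-null.
        return observations;
    }
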
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
index 27900bac63f..2250a245dab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
@@ -78,6 +78,10 @@ public class SolrSearchResult {
private String citation;
private String citationHtml;
private String datasetType;
+ /**
+ * Only a Dataset can have a file count.
+ */
+ private Long fileCount;
/**
* Files and datasets might have a UNF. Dataverses don't.
*/
@@ -93,6 +97,8 @@ public class SolrSearchResult {
private String fileMd5;
private DataFile.ChecksumType fileChecksumType;
private String fileChecksumValue;
+ private Boolean fileRestricted;
+ private Boolean canDownloadFile;
private String dataverseAlias;
private String dataverseParentAlias;
private String dataverseParentName;
@@ -118,6 +124,8 @@ public class SolrSearchResult {
private String harvestingDescription = null;
private List fileCategories = null;
private List tabularDataTags = null;
+ private Long tabularDataCount;
+ private Long observations;
private String identifierOfDataverse = null;
private String nameOfDataverse = null;
@@ -456,10 +464,10 @@ public JsonObjectBuilder getJsonForMyData(boolean isValid) {
} // getJsonForMydata
public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) {
- return json(showRelevance, showEntityIds, showApiUrls, null, null);
+ return json(showRelevance, showEntityIds, showApiUrls, null);
}
- public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields, Long datasetFileCount) {
+ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields) {
if (this.type == null) {
return jsonObjectBuilder();
}
@@ -561,7 +569,12 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool
.add("citationHtml", this.citationHtml)
.add("identifier_of_dataverse", this.identifierOfDataverse)
.add("name_of_dataverse", this.nameOfDataverse)
- .add("citation", this.citation);
+ .add("citation", this.citation)
+ .add("restricted", this.fileRestricted)
+ .add("variables", this.tabularDataCount)
+ .add("observations", this.observations)
+ .add("canDownloadFile", this.canDownloadFile);
+
// Now that nullSafeJsonBuilder has been instantiated, check for null before adding to it!
if (showRelevance) {
nullSafeJsonBuilder.add("matches", getRelevance());
@@ -575,6 +588,12 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool
if (!getPublicationStatuses().isEmpty()) {
nullSafeJsonBuilder.add("publicationStatuses", getPublicationStatusesAsJSON());
}
+ if (this.fileCategories != null && !this.fileCategories.isEmpty()) {
+ nullSafeJsonBuilder.add("categories", JsonPrinter.asJsonArray(this.fileCategories));
+ }
+ if (this.tabularDataTags != null && !this.tabularDataTags.isEmpty()) {
+ nullSafeJsonBuilder.add("tabularTags", JsonPrinter.asJsonArray(this.tabularDataTags));
+ }
if (this.entity == null) {
@@ -597,7 +616,7 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool
subjects.add(subject);
}
nullSafeJsonBuilder.add("subjects", subjects);
- nullSafeJsonBuilder.add("fileCount", datasetFileCount);
+ nullSafeJsonBuilder.add("fileCount", this.fileCount);
nullSafeJsonBuilder.add("versionId", dv.getId());
nullSafeJsonBuilder.add("versionState", dv.getVersionState().toString());
if (this.isPublishedState()) {
@@ -952,6 +971,18 @@ public List getTabularDataTags() {
public void setTabularDataTags(List tabularDataTags) {
this.tabularDataTags = tabularDataTags;
}
+ public void setTabularDataCount(Long tabularDataCount) {
+ this.tabularDataCount = tabularDataCount;
+ }
+ public Long getTabularDataCount() {
+ return tabularDataCount;
+ }
+ public Long getObservations() {
+ return observations;
+ }
+ public void setObservations(Long observations) {
+ this.observations = observations;
+ }
public Map getParent() {
return parent;
@@ -1074,6 +1105,21 @@ public void setFileChecksumValue(String fileChecksumValue) {
this.fileChecksumValue = fileChecksumValue;
}
+ public Boolean getFileRestricted() {
+ return fileRestricted;
+ }
+
+ public void setFileRestricted(Boolean fileRestricted) {
+ this.fileRestricted = fileRestricted;
+ }
+ public Boolean getCanDownloadFile() {
+ return canDownloadFile;
+ }
+
+ public void setCanDownloadFile(Boolean canDownloadFile) {
+ this.canDownloadFile = canDownloadFile;
+ }
+
public String getNameSort() {
return nameSort;
}
@@ -1348,4 +1394,12 @@ public boolean isValid(Predicate<SolrSearchResult> canUpdateDataset) {
}
return !canUpdateDataset.test(this);
}
+
+ public Long getFileCount() {
+ return fileCount;
+ }
+
+ public void setFileCount(Long fileCount) {
+ this.fileCount = fileCount;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
index 20632c170e4..2242b0f51c6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
@@ -33,9 +33,32 @@ public enum FeatureFlags {
/**
* Enables API authentication via Bearer Token.
* @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth"
- * @since Dataverse @TODO:
+ * @since Dataverse 5.14
*/
API_BEARER_AUTH("api-bearer-auth"),
+ /**
+ * Enables sending the missing user claims in the request JSON provided during OIDC user registration
+ * (see API endpoint /users/register) when these claims are not returned by the identity provider
+ * but are necessary for registering the user in Dataverse.
+ *
+ * The value of this feature flag is only considered when the feature flag
+ * {@link #API_BEARER_AUTH} is enabled.
+ *
+ * @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth-provide-missing-claims"
+ * @since Dataverse @TODO:
+ */
+ API_BEARER_AUTH_PROVIDE_MISSING_CLAIMS("api-bearer-auth-provide-missing-claims"),
+ /**
+ * Specifies that Terms of Service acceptance is handled by the IdP, eliminating the need to include
+ * ToS acceptance boolean parameter (termsAccepted) in the OIDC user registration request body.
+ *
+ * The value of this feature flag is only considered when the feature flag
+ * {@link #API_BEARER_AUTH} is enabled.
+ *
+ * @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth-handle-tos-acceptance-in-idp"
+ * @since Dataverse @TODO:
+ */
+ API_BEARER_AUTH_HANDLE_TOS_ACCEPTANCE_IN_IDP("api-bearer-auth-handle-tos-acceptance-in-idp"),
/**
* For published (public) objects, don't use a join when searching Solr.
* Experimental! Requires a reindex with the following feature flag enabled,
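
Flags declared here are read at their use sites via FeatureFlags.<FLAG>.enabled(), as the JsonParser hunk below does for ToS handling. A minimal sketch of how registration logic might gate on the two new flags (the method name and shape are hypothetical; the real flow lives in the /users/register endpoint):

    static void checkRegistrationFlags(boolean termsAcceptedInBody) {
        if (!FeatureFlags.API_BEARER_AUTH.enabled()) {
            throw new IllegalStateException("api-bearer-auth is not enabled");
        }
        if (FeatureFlags.API_BEARER_AUTH_PROVIDE_MISSING_CLAIMS.enabled()) {
            // claims missing from the IdP may be read from the request JSON instead
        }
        if (!FeatureFlags.API_BEARER_AUTH_HANDLE_TOS_ACCEPTANCE_IN_IDP.enabled()
                && !termsAcceptedInBody) {
            // termsAccepted must be present and true in the request body
        }
    }
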
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
index 8ed96690e84..b5eb483c2c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
@@ -539,6 +539,12 @@ Whether Harvesting (OAI) service is enabled
*
*/
GlobusSingleFileTransfer,
+ /** The minimum number of files in a Globus upload task at which
+ * batch mode is used to look up the file information (primarily
+ * file sizes) on the remote endpoint, instead of performing
+ * individual per-file lookups.
+ */
+ GlobusBatchLookupSize,
/**
* Optional external executables to run on the metadata for dataverses
* and datasets being published; as an extra validation step, to
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
index 922e6ff5d28..771cf5fd0f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
@@ -111,7 +111,7 @@ public static ResourceBundle getResourceBundle(String propertyFileName, Locale c
ClassLoader loader = getClassLoader(filesRootDirectory);
bundle = ResourceBundle.getBundle(propertyFileName, currentLocale, loader);
} catch (MissingResourceException mre) {
- logger.warning("No property file named " + propertyFileName + "_" + currentLocale.getLanguage()
+ logger.fine("No property file named " + propertyFileName + "_" + currentLocale.getLanguage()
+ " found in " + filesRootDirectory + ", using untranslated values");
bundle = ResourceBundle.getBundle("propertyFiles/" + propertyFileName, currentLocale);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index 434b3bd8f8f..e769cacfdb1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -78,6 +78,7 @@ public class SystemConfig {
public static final long defaultZipDownloadLimit = 104857600L; // 100MB
private static final int defaultMultipleUploadFilesLimit = 1000;
private static final int defaultLoginSessionTimeout = 480; // = 8 hours
+ private static final int defaultGlobusBatchLookupSize = 50;
private String buildNumber = null;
@@ -954,6 +955,11 @@ public boolean isGlobusFileDownload() {
return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false));
}
+ public int getGlobusBatchLookupSize() {
+ String batchSizeOption = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusBatchLookupSize);
+ return getIntLimitFromStringOrDefault(batchSizeOption, defaultGlobusBatchLookupSize);
+ }
+
private Boolean getMethodAvailable(String method, boolean upload) {
String methods = settingsService.getValueForKey(
upload ? SettingsServiceBean.Key.UploadMethods : SettingsServiceBean.Key.DownloadMethods);
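
The new setting is a threshold rather than a toggle. A hedged sketch of how a caller might use it to decide between per-file and batched lookups; the decision shape is an assumption, since the Globus-side caller is not part of this excerpt:

    // Hypothetical caller: switch to one batched listing once the task is large enough.
    static void lookUpFileSizes(java.util.List<String> fileIds, SystemConfig systemConfig) {
        if (fileIds.size() >= systemConfig.getGlobusBatchLookupSize()) {
            // a single bulk listing of the remote Globus endpoint
        } else {
            // individual per-file lookups, cheaper for small tasks
        }
    }
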
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
index efbf36e53d9..308213b5cc0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
@@ -21,6 +21,7 @@
import edu.harvard.iq.dataverse.api.Util;
import edu.harvard.iq.dataverse.api.dto.DataverseDTO;
import edu.harvard.iq.dataverse.api.dto.FieldDTO;
+import edu.harvard.iq.dataverse.api.dto.UserDTO;
import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup;
import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddressRange;
@@ -31,6 +32,7 @@
import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
import edu.harvard.iq.dataverse.license.License;
import edu.harvard.iq.dataverse.license.LicenseServiceBean;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.workflow.Workflow;
@@ -49,6 +51,7 @@
import java.util.Map;
import java.util.Optional;
import java.util.Set;
+import java.util.function.Function;
import java.util.function.Consumer;
import java.util.logging.Logger;
import java.util.stream.Collectors;
@@ -76,11 +79,11 @@ public class JsonParser {
DatasetTypeServiceBean datasetTypeService;
HarvestingClient harvestingClient = null;
boolean allowHarvestingMissingCVV = false;
-
+
/**
* if lenient, we will accept alternate spellings for controlled vocabulary values
*/
- boolean lenient = false;
+ boolean lenient = false;
@Deprecated
public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService) {
@@ -92,7 +95,7 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB
public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, DatasetTypeServiceBean datasetTypeService) {
this(datasetFieldSvc, blockService, settingsService, licenseService, datasetTypeService, null);
}
-
+
public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, DatasetTypeServiceBean datasetTypeService, HarvestingClient harvestingClient) {
this.datasetFieldSvc = datasetFieldSvc;
this.blockService = blockService;
@@ -106,7 +109,7 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB
public JsonParser() {
this( null,null,null );
}
-
+
public boolean isLenient() {
return lenient;
}
@@ -282,11 +285,19 @@ public DataverseTheme parseDataverseTheme(JsonObject obj) {
return theme;
}
- private static String getMandatoryString(JsonObject jobj, String name) throws JsonParseException {
+ private static <T> T getMandatoryField(JsonObject jobj, String name, Function<String, T> getter) throws JsonParseException {
if (jobj.containsKey(name)) {
- return jobj.getString(name);
+ return getter.apply(name);
}
- throw new JsonParseException("Field " + name + " is mandatory");
+ throw new JsonParseException("Field '" + name + "' is mandatory");
+ }
+
+ private static String getMandatoryString(JsonObject jobj, String name) throws JsonParseException {
+ return getMandatoryField(jobj, name, jobj::getString);
+ }
+
+ private static Boolean getMandatoryBoolean(JsonObject jobj, String name) throws JsonParseException {
+ return getMandatoryField(jobj, name, jobj::getBoolean);
}
public IpGroup parseIpGroup(JsonObject obj) {
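
The Function-based helper above makes further mandatory accessors one-liners. For example, a hypothetical integer variant (not part of this diff) would follow the same pattern:

    // Hypothetical extension, shown only to illustrate the pattern.
    private static Integer getMandatoryInt(JsonObject jobj, String name) throws JsonParseException {
        return getMandatoryField(jobj, name, jobj::getInt);
    }
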
@@ -318,10 +329,10 @@ public IpGroup parseIpGroup(JsonObject obj) {
return retVal;
}
-
+
public MailDomainGroup parseMailDomainGroup(JsonObject obj) throws JsonParseException {
MailDomainGroup grp = new MailDomainGroup();
-
+
if (obj.containsKey("id")) {
grp.setId(obj.getJsonNumber("id").longValue());
}
@@ -345,7 +356,7 @@ public MailDomainGroup parseMailDomainGroup(JsonObject obj) throws JsonParseExce
} else {
throw new JsonParseException("Field domains is mandatory.");
}
-
+
return grp;
}
@@ -383,7 +394,7 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException {
throw new JsonParseException("Invalid dataset type: " + datasetTypeIn);
}
- DatasetVersion dsv = new DatasetVersion();
+ DatasetVersion dsv = new DatasetVersion();
dsv.setDataset(dataset);
dsv = parseDatasetVersion(obj.getJsonObject("datasetVersion"), dsv);
List versions = new ArrayList<>(1);
@@ -414,7 +425,7 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th
if (dsv.getId()==null) {
dsv.setId(parseLong(obj.getString("id", null)));
}
-
+
String versionStateStr = obj.getString("versionState", null);
if (versionStateStr != null) {
dsv.setVersionState(DatasetVersion.VersionState.valueOf(versionStateStr));
@@ -427,8 +438,8 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th
// Terms of Use related fields
TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
- License license = null;
-
+ License license = null;
+
try {
// This method will attempt to parse the license in the format
// in which it appears in our json exports, as a compound
@@ -447,7 +458,7 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th
// "license" : "CC0 1.0"
license = parseLicense(obj.getString("license", null));
}
-
+
if (license == null) {
terms.setLicense(license);
terms.setTermsOfUse(obj.getString("termsOfUse", null));
@@ -485,13 +496,13 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th
dsv.setFileMetadatas(parseFiles(filesJson, dsv));
}
return dsv;
- } catch (ParseException ex) {
+ } catch (ParseException ex) {
throw new JsonParseException(BundleUtil.getStringFromBundle("jsonparser.error.parsing.date", Arrays.asList(ex.getMessage())) , ex);
} catch (NumberFormatException ex) {
throw new JsonParseException(BundleUtil.getStringFromBundle("jsonparser.error.parsing.number", Arrays.asList(ex.getMessage())), ex);
}
}
-
+
private edu.harvard.iq.dataverse.license.License parseLicense(String licenseNameOrUri) throws JsonParseException {
if (licenseNameOrUri == null){
boolean safeDefaultIfKeyNotFound = true;
@@ -505,7 +516,7 @@ private edu.harvard.iq.dataverse.license.License parseLicense(String licenseName
if (license == null) throw new JsonParseException("Invalid license: " + licenseNameOrUri);
return license;
}
-
+
private edu.harvard.iq.dataverse.license.License parseLicense(JsonObject licenseObj) throws JsonParseException {
if (licenseObj == null){
boolean safeDefaultIfKeyNotFound = true;
@@ -515,12 +526,12 @@ private edu.harvard.iq.dataverse.license.License parseLicense(JsonObject license
return licenseService.getDefault();
}
}
-
+
String licenseName = licenseObj.getString("name", null);
String licenseUri = licenseObj.getString("uri", null);
-
- License license = null;
-
+
+ License license = null;
+
// If uri is provided, we'll try that first. This is an easier lookup
// method; the uri is always the same. The name may have been customized
// (translated) on this instance, so we may be dealing with such translated
@@ -530,17 +541,17 @@ private edu.harvard.iq.dataverse.license.License parseLicense(JsonObject license
if (licenseUri != null) {
license = licenseService.getByNameOrUri(licenseUri);
}
-
+
if (license != null) {
return license;
}
-
+
if (licenseName == null) {
- String exMsg = "Invalid or unsupported license section submitted"
+ String exMsg = "Invalid or unsupported license section submitted"
+ (licenseUri != null ? ": " + licenseUri : ".");
- throw new JsonParseException("Invalid or unsupported license section submitted.");
+ throw new JsonParseException("Invalid or unsupported license section submitted.");
}
-
+
license = licenseService.getByPotentiallyLocalizedName(licenseName);
if (license == null) {
throw new JsonParseException("Invalid or unsupported license: " + licenseName);
@@ -559,13 +570,13 @@ public List parseMetadataBlocks(JsonObject json) throws JsonParseE
}
return fields;
}
-
+
public List<DatasetField> parseMultipleFields(JsonObject json) throws JsonParseException {
JsonArray fieldsJson = json.getJsonArray("fields");
List<DatasetField> fields = parseFieldsFromArray(fieldsJson, false);
return fields;
}
-
+
public List<DatasetField> parseMultipleFieldsForDelete(JsonObject json) throws JsonParseException {
List<DatasetField> fields = new LinkedList<>();
for (JsonObject fieldJson : json.getJsonArray("fields").getValuesAs(JsonObject.class)) {
@@ -573,7 +584,7 @@ public List parseMultipleFieldsForDelete(JsonObject json) throws J
}
return fields;
}
-
+
private List<DatasetField> parseFieldsFromArray(JsonArray fieldsArray, Boolean testType) throws JsonParseException {
List<DatasetField> fields = new LinkedList<>();
for (JsonObject fieldJson : fieldsArray.getValuesAs(JsonObject.class)) {
@@ -585,18 +596,18 @@ private List parseFieldsFromArray(JsonArray fieldsArray, Boolean t
} catch (CompoundVocabularyException ex) {
DatasetFieldType fieldType = datasetFieldSvc.findByNameOpt(fieldJson.getString("typeName", ""));
if (lenient && (DatasetFieldConstant.geographicCoverage).equals(fieldType.getName())) {
- fields.add(remapGeographicCoverage( ex));
+ fields.add(remapGeographicCoverage( ex));
} else {
// if not lenient mode, re-throw exception
throw ex;
}
- }
+ }
}
return fields;
-
+
}
-
+
public List<FileMetadata> parseFiles(JsonArray metadatasJson, DatasetVersion dsv) throws JsonParseException {
List<FileMetadata> fileMetadatas = new LinkedList<>();
if (metadatasJson != null) {
@@ -610,7 +621,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv
fileMetadata.setDirectoryLabel(directoryLabel);
fileMetadata.setDescription(description);
fileMetadata.setDatasetVersion(dsv);
-
+
if ( filemetadataJson.containsKey("dataFile") ) {
DataFile dataFile = parseDataFile(filemetadataJson.getJsonObject("dataFile"));
dataFile.getFileMetadatas().add(fileMetadata);
@@ -623,7 +634,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv
dsv.getDataset().getFiles().add(dataFile);
}
}
-
+
fileMetadatas.add(fileMetadata);
fileMetadata.setCategories(getCategories(filemetadataJson, dsv.getDataset()));
}
@@ -631,19 +642,19 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv
return fileMetadatas;
}
-
+
public DataFile parseDataFile(JsonObject datafileJson) {
DataFile dataFile = new DataFile();
-
+
Timestamp timestamp = new Timestamp(new Date().getTime());
dataFile.setCreateDate(timestamp);
dataFile.setModificationTime(timestamp);
dataFile.setPermissionModificationTime(timestamp);
-
+
if ( datafileJson.containsKey("filesize") ) {
dataFile.setFilesize(datafileJson.getJsonNumber("filesize").longValueExact());
}
-
+
String contentType = datafileJson.getString("contentType", null);
if (contentType == null) {
contentType = "application/octet-stream";
@@ -706,21 +717,21 @@ public DataFile parseDataFile(JsonObject datafileJson) {
// TODO:
// unf (if available)... etc.?
-
+
dataFile.setContentType(contentType);
dataFile.setStorageIdentifier(storageIdentifier);
-
+
return dataFile;
}
/**
* Special processing for GeographicCoverage compound field:
* Handle parsing exceptions caused by invalid controlled vocabulary in the "country" field by
* putting the invalid data in "otherGeographicCoverage" in a new compound value.
- *
+ *
* @param ex - contains the invalid values to be processed
- * @return a compound DatasetField that contains the newly created values, in addition to
+ * @return a compound DatasetField that contains the newly created values, in addition to
* the original valid values.
- * @throws JsonParseException
+ * @throws JsonParseException
*/
private DatasetField remapGeographicCoverage(CompoundVocabularyException ex) throws JsonParseException {
List<HashSet<FieldDTO>> geoCoverageList = new ArrayList<>();
@@ -747,23 +758,23 @@ private DatasetField remapGeographicCoverage(CompoundVocabularyException ex) thr
}
return geoCoverageField;
}
-
-
+
+
public DatasetField parseFieldForDelete(JsonObject json) throws JsonParseException{
DatasetField ret = new DatasetField();
- DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", ""));
+ DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", ""));
if (type == null) {
throw new JsonParseException("Can't find type '" + json.getString("typeName", "") + "'");
}
return ret;
}
-
-
+
+
public DatasetField parseField(JsonObject json) throws JsonParseException{
return parseField(json, true);
}
-
-
+
+
public DatasetField parseField(JsonObject json, Boolean testType) throws JsonParseException {
if (json == null) {
return null;
@@ -771,7 +782,7 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar
DatasetField ret = new DatasetField();
DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", ""));
-
+
if (type == null) {
logger.fine("Can't find type '" + json.getString("typeName", "") + "'");
@@ -789,8 +800,8 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar
if (testType && type.isControlledVocabulary() && !json.getString("typeClass").equals("controlledVocabulary")) {
throw new JsonParseException("incorrect typeClass for field " + json.getString("typeName", "") + ", should be controlledVocabulary");
}
-
-
+
+
ret.setDatasetFieldType(type);
if (type.isCompound()) {
@@ -803,11 +814,11 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar
return ret;
}
-
+
public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json) throws JsonParseException {
parseCompoundValue(dsf, compoundType, json, true);
}
-
+
public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json, Boolean testType) throws JsonParseException {
List<ControlledVocabularyException> vocabExceptions = new ArrayList<>();
List<DatasetFieldCompoundValue> vals = new LinkedList<>();
@@ -829,7 +840,7 @@ public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType,
} catch(ControlledVocabularyException ex) {
vocabExceptions.add(ex);
}
-
+
if (f!=null) {
if (!compoundType.getChildDatasetFieldTypes().contains(f.getDatasetFieldType())) {
throw new JsonParseException("field " + f.getDatasetFieldType().getName() + " is not a child of " + compoundType.getName());
@@ -846,10 +857,10 @@ public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType,
order++;
}
-
+
} else {
-
+
DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue();
List<DatasetField> fields = new LinkedList<>();
JsonObject value = json.getJsonObject("value");
@@ -870,7 +881,7 @@ public void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType,
cv.setChildDatasetFields(fields);
vals.add(cv);
}
-
+
}
if (!vocabExceptions.isEmpty()) {
throw new CompoundVocabularyException( "Invalid controlled vocabulary in compound field ", vocabExceptions, vals);
@@ -909,7 +920,7 @@ public void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , JsonObj
try {json.getString("value");}
catch (ClassCastException cce) {
throw new JsonParseException("Invalid value submitted for " + dft.getName() + ". It should be a single value.");
- }
+ }
DatasetFieldValue datasetFieldValue = new DatasetFieldValue();
datasetFieldValue.setValue(json.getString("value", "").trim());
datasetFieldValue.setDatasetField(dsf);
@@ -923,7 +934,7 @@ public void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , JsonObj
dsf.setDatasetFieldValues(vals);
}
-
+
public Workflow parseWorkflow(JsonObject json) throws JsonParseException {
Workflow retVal = new Workflow();
validate("", json, "name", ValueType.STRING);
@@ -937,12 +948,12 @@ public Workflow parseWorkflow(JsonObject json) throws JsonParseException {
retVal.setSteps(steps);
return retVal;
}
-
+
public WorkflowStepData parseStepData( JsonObject json ) throws JsonParseException {
WorkflowStepData wsd = new WorkflowStepData();
validate("step", json, "provider", ValueType.STRING);
validate("step", json, "stepType", ValueType.STRING);
-
+
wsd.setProviderId(json.getString("provider"));
wsd.setStepType(json.getString("stepType"));
if ( json.containsKey("parameters") ) {
@@ -959,7 +970,7 @@ public WorkflowStepData parseStepData( JsonObject json ) throws JsonParseExcepti
}
return wsd;
}
-
+
private String jsonValueToString(JsonValue jv) {
switch ( jv.getValueType() ) {
case STRING: return ((JsonString)jv).getString();
@@ -1007,7 +1018,6 @@ public void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cv
if (cvv == null) {
if (allowHarvestingMissingCVV) {
// we need to process this as a primitive value
- logger.warning(">>>> Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'. Processing as primitive per setting override.");
parsePrimitiveValue(dsf, cvvType , json);
return;
} else {
@@ -1039,11 +1049,11 @@ Long parseLong(String str) throws NumberFormatException {
int parsePrimitiveInt(String str, int defaultValue) {
return str == null ? defaultValue : Integer.parseInt(str);
}
-
+
public String parseHarvestingClient(JsonObject obj, HarvestingClient harvestingClient) throws JsonParseException {
-
+
String dataverseAlias = obj.getString("dataverseAlias",null);
-
+
harvestingClient.setName(obj.getString("nickName",null));
harvestingClient.setHarvestStyle(obj.getString("style", "default"));
harvestingClient.setHarvestingUrl(obj.getString("harvestUrl",null));
@@ -1080,7 +1090,7 @@ private List getCategories(JsonObject filemetadataJson, Datase
}
return dataFileCategories;
}
-
+
/**
* Validate that a JSON object has a field of an expected type, or throw an
* informative exception.
@@ -1088,12 +1098,29 @@ private List getCategories(JsonObject filemetadataJson, Datase
* @param jobject
* @param fieldName
* @param expectedValueType
- * @throws JsonParseException
+ * @throws JsonParseException
*/
private void validate(String objectName, JsonObject jobject, String fieldName, ValueType expectedValueType) throws JsonParseException {
- if ( (!jobject.containsKey(fieldName))
+ if ( (!jobject.containsKey(fieldName))
|| (jobject.get(fieldName).getValueType()!=expectedValueType) ) {
throw new JsonParseException( objectName + " missing a field named '"+fieldName+"' of type " + expectedValueType );
}
}
+
+ public UserDTO parseUserDTO(JsonObject jobj) throws JsonParseException {
+ UserDTO userDTO = new UserDTO();
+
+ userDTO.setUsername(jobj.getString("username", null));
+ userDTO.setEmailAddress(jobj.getString("emailAddress", null));
+ userDTO.setFirstName(jobj.getString("firstName", null));
+ userDTO.setLastName(jobj.getString("lastName", null));
+ userDTO.setAffiliation(jobj.getString("affiliation", null));
+ userDTO.setPosition(jobj.getString("position", null));
+
+ if (!FeatureFlags.API_BEARER_AUTH_HANDLE_TOS_ACCEPTANCE_IN_IDP.enabled()) {
+ userDTO.setTermsAccepted(getMandatoryBoolean(jobj, "termsAccepted"));
+ }
+
+ return userDTO;
+ }
}
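
For reference, a sketch of the kind of registration payload parseUserDTO() consumes, built with jakarta.json; field values are illustrative, and termsAccepted is mandatory only while the handle-tos-acceptance-in-idp flag is off:

    import edu.harvard.iq.dataverse.api.dto.UserDTO;
    import jakarta.json.Json;
    import jakarta.json.JsonObject;

    static UserDTO buildAndParseExample() throws JsonParseException {
        JsonObject body = Json.createObjectBuilder()
                .add("username", "jdoe")                 // hypothetical values throughout
                .add("emailAddress", "jdoe@example.org")
                .add("firstName", "Jane")
                .add("lastName", "Doe")
                .add("termsAccepted", true)              // required unless the IdP handles ToS
                .build();
        return new JsonParser().parseUserDTO(body);
    }
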
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 6666a7f0e7d..06ccd2769cd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -56,6 +56,7 @@
import jakarta.ejb.Singleton;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
+import java.util.function.Predicate;
/**
* Convert objects to Json.
@@ -642,22 +643,31 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO
.add("displayName", metadataBlock.getDisplayName())
.add("displayOnCreate", metadataBlock.isDisplayOnCreate());
- Set<DatasetFieldType> datasetFieldTypes;
-
- if (ownerDataverse != null) {
- datasetFieldTypes = new TreeSet<>(datasetFieldService.findAllInMetadataBlockAndDataverse(
- metadataBlock, ownerDataverse, printOnlyDisplayedOnCreateDatasetFieldTypes));
- } else {
- datasetFieldTypes = printOnlyDisplayedOnCreateDatasetFieldTypes
- ? new TreeSet<>(datasetFieldService.findAllDisplayedOnCreateInMetadataBlock(metadataBlock))
- : new TreeSet<>(metadataBlock.getDatasetFieldTypes());
- }
-
JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder();
- for (DatasetFieldType datasetFieldType : datasetFieldTypes) {
- fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse));
+
+ Predicate<DatasetFieldType> isNoChild = element -> element.isChild() == false;
+ List<DatasetFieldType> childLessList = metadataBlock.getDatasetFieldTypes().stream().filter(isNoChild).toList();
+ Set<DatasetFieldType> datasetFieldTypesNoChildSorted = new TreeSet<>(childLessList);
+
+ for (DatasetFieldType datasetFieldType : datasetFieldTypesNoChildSorted) {
+
+ Long datasetFieldTypeId = datasetFieldType.getId();
+ boolean requiredAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(datasetFieldTypeId);
+ boolean includedAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeIncludedAsInputLevel(datasetFieldTypeId);
+ boolean isNotInputLevelInOwnerDataverse = ownerDataverse != null && !ownerDataverse.isDatasetFieldTypeInInputLevels(datasetFieldTypeId);
+
+ DatasetFieldType parentDatasetFieldType = datasetFieldType.getParentDatasetFieldType();
+ boolean isRequired = parentDatasetFieldType == null ? datasetFieldType.isRequired() : parentDatasetFieldType.isRequired();
+
+ boolean displayCondition = printOnlyDisplayedOnCreateDatasetFieldTypes
+ ? (datasetFieldType.isDisplayOnCreate() || isRequired || requiredAsInputLevelInOwnerDataverse)
+ : ownerDataverse == null || includedAsInputLevelInOwnerDataverse || isNotInputLevelInOwnerDataverse;
+
+ if (displayCondition) {
+ fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse));
+ }
}
-
+
jsonObjectBuilder.add("fields", fieldsBuilder);
return jsonObjectBuilder;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
index ef8ab39122f..21360fcd708 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
@@ -85,7 +85,10 @@ public NullSafeJsonBuilder add(String name, boolean value) {
delegate.add(name, value);
return this;
}
-
+ public NullSafeJsonBuilder add(String name, Boolean value) {
+ return (value != null) ? add(name, value.booleanValue()) : this;
+ }
+
@Override
public NullSafeJsonBuilder addNull(String name) {
delegate.addNull(name);
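
This Boolean overload is what lets SolrSearchResult.json() pass possibly-null fields such as restricted and canDownloadFile straight through: a null is silently skipped instead of throwing. A tiny usage sketch:

    NullSafeJsonBuilder builder = NullSafeJsonBuilder.jsonObjectBuilder();
    builder.add("restricted", (Boolean) null); // no-op: the key is simply omitted
    builder.add("restricted", Boolean.TRUE);   // delegates to add(String, boolean)
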
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 012b389ce32..2e1dbeae767 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -307,7 +307,13 @@ notification.typeDescription.WORKFLOW_FAILURE=External workflow run has failed
notification.typeDescription.STATUSUPDATED=Status of dataset has been updated
notification.typeDescription.DATASETCREATED=Dataset was created by user
notification.typeDescription.DATASETMENTIONED=Dataset was referenced in remote system
-
+notification.typeDescription.GLOBUSUPLOADCOMPLETED=Globus upload is completed
+notification.typeDescription.GLOBUSUPLOADCOMPLETEDWITHERRORS=Globus upload completed with errors
+notification.typeDescription.GLOBUSDOWNLOADCOMPLETED=Globus download is completed
+notification.typeDescription.GLOBUSDOWNLOADCOMPLETEDWITHERRORS=Globus download completed with errors
+notification.typeDescription.GLOBUSUPLOADLOCALFAILURE=Globus upload failed, internal error
+notification.typeDescription.GLOBUSUPLOADREMOTEFAILURE=Globus upload failed, remote transfer error
+notification.typeDescription.REQUESTEDFILEACCESS=File access requested
groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned.
user.message.signup.label=Create Account
user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files.
@@ -837,7 +843,8 @@ notification.email.datasetWasMentioned=Hello {0},
The {1} has just been
notification.email.datasetWasMentioned.subject={0}: A Dataset Relationship has been reported!
notification.email.globus.uploadCompleted.subject={0}: Files uploaded successfully via Globus and verified
notification.email.globus.downloadCompleted.subject={0}: Files downloaded successfully via Globus
-notification.email.globus.uploadCompletedWithErrors.subject={0}: Uploaded files via Globus with errors
+notification.email.globus.downloadCompletedWithErrors.subject={0}: Globus download task completed, errors encountered
+notification.email.globus.uploadCompletedWithErrors.subject={0}: Globus upload task completed with errors
notification.email.globus.uploadFailedRemotely.subject={0}: Failed to upload files via Globus
notification.email.globus.uploadFailedLocally.subject={0}: Failed to add files uploaded via Globus to dataset
# dataverse.xhtml
@@ -1457,7 +1464,7 @@ dataset.editBtn.itemLabel.metadata=Metadata
dataset.editBtn.itemLabel.terms=Terms
dataset.editBtn.itemLabel.permissions=Permissions
dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets
-dataset.editBtn.itemLabel.privateUrl=Private URL
+dataset.editBtn.itemLabel.privateUrl=Preview URL
dataset.editBtn.itemLabel.permissionsDataset=Dataset
dataset.editBtn.itemLabel.permissionsFile=Restricted Files
dataset.editBtn.itemLabel.deleteDataset=Delete Dataset
@@ -1669,6 +1676,8 @@ dataset.message.createFailure=The dataset could not be created.
dataset.message.termsFailure=The dataset terms could not be updated.
dataset.message.label.fileAccess=Publicly-accessible storage
dataset.message.publicInstall=Files in this dataset may be readable outside Dataverse, restricted and embargoed access are disabled
+dataset.message.parallelUpdateError=Changes cannot be saved. This dataset has been edited since this page was opened. To continue, copy your changes, refresh the page to see the recent updates, and re-enter any changes you want to save.
+dataset.message.parallelPublishError=Publishing is blocked. This dataset has been edited since this page was opened. To publish it, refresh the page to see the recent updates, and publish again.
dataset.metadata.publicationDate=Publication Date
dataset.metadata.publicationDate.tip=The publication date of a Dataset.
dataset.metadata.citationDate=Citation Date
@@ -1721,23 +1730,34 @@ dataset.transferUnrestricted=Click Continue to transfer the elligible files.
dataset.requestAccessToRestrictedFiles=You may request access to any restricted file(s) by clicking the Request Access button.
dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button.
dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0}
-dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared.
-dataset.privateurl.header=Unpublished Dataset Private URL
-dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide.
-dataset.privateurl.absent=Private URL has not been created.
-dataset.privateurl.createPrivateUrl=Create Private URL
+dataset.privateurl.infoMessageReviewer=You are viewing a preview of this unpublished dataset version.
+dataset.privateurl.header=Unpublished Dataset Preview URL
+dataset.privateurl.tip=To cite this data in publications, use the dataset's persistent ID instead of this URL. For more information about the Preview URL feature, please refer to the User Guide.
+dataset.privateurl.onlyone=Only one Preview URL can be active for a single dataset.
+dataset.privateurl.absent=Preview URL has not been created.
+dataset.privateurl.general.button.label=Create General Preview URL
+dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. They will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors.
+dataset.privateurl.general.title=General Preview
+dataset.privateurl.anonymous.title=Anonymous Preview
+dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL
+dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible.
+dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed, and users of the Anonymous Preview URL will be able to access them. They will not be able to see the name of the Dataverse this dataset is in, but they will be able to see the name of the repository, which might expose the dataset authors' identities.
+dataset.privateurl.createPrivateUrl=Create Preview URL
+dataset.privateurl.introduction=You can create a Preview URL and share it with others, who will not need a repository account to review this unpublished dataset version. Once the dataset is published or the URL is disabled, the URL will stop working and will lead to a "Page not found" page.
dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access
-dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published
-dataset.privateurl.disablePrivateUrl=Disable Private URL
-dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL
-dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset.
-dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets.
-dataset.privateurl.roleassigeeTitle=Private URL Enabled
+dataset.privateurl.createPrivateUrl.anonymized.unavailable=You won't be able to create an Anonymous Preview URL once a version of this dataset has been published.
+dataset.privateurl.disableGeneralPreviewUrl=Disable General Preview URL
+dataset.privateurl.disableAnonPreviewUrl=Disable Anonymous Preview URL
+dataset.privateurl.disableGeneralPreviewUrlConfirm=Yes, Disable General Preview URL
+dataset.privateurl.disableAnonPreviewUrlConfirm=Yes, Disable Anonymous Preview URL
+dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Preview URL? If you have shared the Preview URL with others, they will no longer be able to use it to access your unpublished dataset.
+dataset.privateurl.cannotCreate=A Preview URL can only be used with unpublished versions of datasets.
+dataset.privateurl.roleassigeeTitle=Preview URL Enabled
dataset.privateurl.createdSuccess=Success!
-dataset.privateurl.full=This Private URL provides full read access to the dataset
-dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset
-dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset.
-dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}.
+dataset.privateurl.full=This Preview URL provides full read access to the dataset
+dataset.privateurl.anonymized=This Preview URL provides access to the anonymized dataset
+dataset.privateurl.disabledSuccess=You have successfully disabled the Preview URL for this unpublished dataset.
+dataset.privateurl.noPermToCreate=To create a Preview URL you must have the following permissions: {0}.
dataset.externalstatus.header=Curation Status Changed
dataset.externalstatus.removed=Curation Status Removed
dataset.externalstatus.info=Curation Status is now "{0}"
@@ -2498,6 +2518,7 @@ dataset.version.file.changed=Files (Changed File Metadata: {0}
dataset.version.file.changed2=; Changed File Metadata: {0}
dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0}
dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0}
+dataset.version.compare.incorrect.order=Compare requires the older dataset version to be listed first.
#DataversePage.java
dataverse.item.required=Required
@@ -2719,8 +2740,8 @@ datasets.api.grant.role.assignee.has.role.error=User already has this role for t
datasets.api.revoke.role.not.found.error="Role assignment {0} not found"
datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2}
datasets.api.privateurl.error.datasetnotfound=Could not find dataset.
-datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset.
-datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft.
+datasets.api.privateurl.error.alreadyexists=Preview URL already exists for this dataset.
+datasets.api.privateurl.error.notdraft=Can't create Preview URL because the latest version of this dataset is not a draft.
datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published.
datasets.api.creationdate=Date Created
datasets.api.modificationdate=Last Modified Date
@@ -3062,3 +3083,27 @@ openapi.exception.invalid.format=Invalid format {0}, currently supported formats
openapi.exception=Supported format definition not found.
openapi.exception.unaligned=Unaligned parameters on Headers [{0}] and Request [{1}]
+#Users.java
+users.api.errors.bearerAuthFeatureFlagDisabled=This endpoint is only available when the bearer authentication feature flag is enabled.
+users.api.errors.bearerTokenRequired=Bearer token required.
+users.api.errors.jsonParseToUserDTO=Error parsing the POSTed User JSON: {0}
+users.api.userRegistered=User registered.
+
+#RegisterOidcUserCommand.java
+registerOidcUserCommand.errors.userAlreadyRegisteredWithToken=User is already registered with this token.
+registerOidcUserCommand.errors.invalidFields=The provided fields are invalid for registering a new user.
+registerOidcUserCommand.errors.userShouldAcceptTerms=The terms of use must be accepted.
+registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldAlreadyPresentInProvider=Unable to set {0} because it conflicts with an existing claim from the OIDC identity provider.
+registerOidcUserCommand.errors.provideMissingClaimsEnabled.fieldRequired=The field {0} must be included in the request JSON to register the user.
+registerOidcUserCommand.errors.provideMissingClaimsDisabled.unableToSetFieldViaJSON=Unable to set field {0} via JSON because the api-bearer-auth-provide-missing-claims feature flag is disabled.
+registerOidcUserCommand.errors.provideMissingClaimsDisabled.fieldRequired=The OIDC identity provider does not provide the user claim {0}, which is required for user registration. Please contact an administrator.
+registerOidcUserCommand.errors.emailAddressInUse=Email already in use.
+registerOidcUserCommand.errors.usernameInUse=Username already in use.
+
+#BearerTokenAuthMechanism.java
+bearerTokenAuthMechanism.errors.tokenValidatedButNoRegisteredUser=The bearer token was validated, but no user account is linked to it.
+
+#AuthenticationServiceBean.java
+authenticationServiceBean.errors.unauthorizedBearerToken=Unauthorized bearer token.
+authenticationServiceBean.errors.invalidBearerToken=Could not parse bearer token.
+authenticationServiceBean.errors.bearerTokenDetectedNoOIDCProviderConfigured=Bearer token detected, no OIDC provider configured.
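
The new bundle keys above follow the same {0}-style placeholder convention as the rest of Bundle.properties. As a minimal, self-contained sketch of how such a key resolves (using plain JDK APIs rather than Dataverse's own bundle wrapper, and assuming the properties file is on the classpath under the base name "Bundle"):

import java.text.MessageFormat;
import java.util.Locale;
import java.util.ResourceBundle;

public class BundleLookupSketch {
    public static void main(String[] args) {
        // Assumption: Bundle.properties is available on the classpath
        // under the base name "Bundle" for this standalone sketch.
        ResourceBundle bundle = ResourceBundle.getBundle("Bundle", Locale.ENGLISH);
        String pattern = bundle.getString("users.api.errors.jsonParseToUserDTO");
        // MessageFormat fills the {0}-style placeholders used throughout the file.
        System.out.println(MessageFormat.format(pattern, "unexpected token at line 3"));
    }
}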
diff --git a/src/main/resources/db/migration/V6.4.0.2.sql b/src/main/resources/db/migration/V6.4.0.2.sql
new file mode 100644
index 00000000000..bc4a85b278f
--- /dev/null
+++ b/src/main/resources/db/migration/V6.4.0.2.sql
@@ -0,0 +1,2 @@
+-- #10118
+ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text;
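
For context, V6.4.0.2.sql is a versioned Flyway migration: it is discovered by filename and applied exactly once, in version order, against the application database. A standalone sketch of that mechanism, assuming a reachable PostgreSQL instance with hypothetical connection details (this is not Dataverse's actual deploy-time wiring):

import org.flywaydb.core.Flyway;

public class MigrationSketch {
    public static void main(String[] args) {
        // Connection details below are hypothetical placeholders.
        Flyway flyway = Flyway.configure()
                .dataSource("jdbc:postgresql://localhost:5432/dvndb", "dataverse", "secret")
                .locations("classpath:db/migration") // where V6.4.0.2.sql and friends live
                .load();
        // Applies any pending versioned migrations in order
        // (e.g. V6.4.0.2 runs after V6.4.0.1 and before V6.4.0.3).
        flyway.migrate();
    }
}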
diff --git a/src/main/resources/db/migration/V6.4.0.3.sql b/src/main/resources/db/migration/V6.4.0.3.sql
index 792996414b4..307d8ed206c 100644
--- a/src/main/resources/db/migration/V6.4.0.3.sql
+++ b/src/main/resources/db/migration/V6.4.0.3.sql
@@ -1,4 +1,2 @@
--- Add these boolean flags to accommodate new harvesting client features
+-- Add this boolean flag to accommodate a new harvesting client feature
ALTER TABLE harvestingclient ADD COLUMN IF NOT EXISTS useOaiIdAsPid BOOLEAN DEFAULT FALSE;
-ALTER TABLE harvestingclient ADD COLUMN IF NOT EXISTS useListRecords BOOLEAN DEFAULT FALSE;
-ALTER TABLE harvestingclient ALTER COLUMN harvestingSet TYPE TEXT;
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 6de0f00e94e..9426884d349 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -20,6 +20,7 @@
 [XHTML markup in this hunk was lost in extraction]
@@ -834,7 +835,6 @@
 [XHTML markup in this hunk was lost in extraction]
@@ -995,7 +995,6 @@
 [XHTML markup for the new Preview URL popups was lost in extraction; the recoverable content is the set of bundle keys the popups reference:]
+ #{bundle['dataset.privateurl.introduction']}
+ #{bundle['dataset.privateurl.onlyone']}
+ #{bundle['dataset.privateurl.general.description']}
+ #{privateUrlLink}
+ #{bundle['dataset.privateurl.anonymous.description']}
+ #{bundle['dataset.privateurl.anonymous.description.paragraph.two']} #{bundle['dataset.privateurl.createPrivateUrl.anonymized.unavailable']}
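
The datasets.api.privateurl.* messages changed earlier in this diff are returned by the existing privateUrl API endpoint, which backs the renamed Preview URL feature. A hedged sketch of calling it from Java; the host, API token, and dataset id are hypothetical placeholders, and anonymizedAccess=true requests the anonymous variant:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PreviewUrlSketch {
    public static void main(String[] args) throws Exception {
        // Host, API token, and dataset id are hypothetical placeholders.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("https://dataverse.example.edu/api/datasets/42/privateUrl?anonymizedAccess=true"))
                .header("X-Dataverse-key", "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx")
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // On success, the JSON body includes the token backing the Preview URL;
        // error cases map to the datasets.api.privateurl.* bundle messages above.
        System.out.println(response.body());
    }
}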