diff --git a/.github/workflows/.trivyignore b/.github/workflows/.trivyignore
index 2a0a586..e69de29 100644
--- a/.github/workflows/.trivyignore
+++ b/.github/workflows/.trivyignore
@@ -1,3 +0,0 @@
-# September 20, 2024
-# Vulnerability in io.kubernetes java client, may be fixed in 21.0.1 but not sure
-CVE-2024-7254
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index ad733bc..1387523 100644
--- a/pom.xml
+++ b/pom.xml
@@ -16,10 +16,9 @@
Backend for orchestration service
17
- 4.4
- 21.0.0
+ 22.0.0
1.13
- 1.19.8
+ 1.20.3
5.2.0
dissco
https://sonarcloud.io
@@ -190,13 +189,13 @@
eu.dissco.orchestration.backend.maven.MavenRunner
- https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.3.0/data-mapping.json
- https://schemas.dissco.tech/schemas/developer-schema/data-mapping/0.3.0/data-mapping-request.json
- https://schemas.dissco.tech/schemas/fdo-type/machine-annotation-service/0.3.0/machine-annotation-service.json
- https://schemas.dissco.tech/schemas/developer-schema/machine-annotation-service/0.3.0/machine-annotation-service-request.json
- https://schemas.dissco.tech/schemas/fdo-type/source-system/0.3.0/source-system.json
- https://schemas.dissco.tech/schemas/developer-schema/source-system/0.3.0/source-system-request.json
- https://schemas.dissco.tech/schemas/fdo-type/create-update-tombstone-event/0.3.0/create-update-tombstone-event.json
+ https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.4.0/data-mapping.json
+ https://schemas.dissco.tech/schemas/developer-schema/data-mapping/0.4.0/data-mapping-request.json
+ https://schemas.dissco.tech/schemas/fdo-type/machine-annotation-service/0.4.0/machine-annotation-service.json
+ https://schemas.dissco.tech/schemas/developer-schema/machine-annotation-service/0.4.0/machine-annotation-service-request.json
+ https://schemas.dissco.tech/schemas/fdo-type/source-system/0.4.0/source-system.json
+ https://schemas.dissco.tech/schemas/developer-schema/source-system/0.4.0/source-system-request.json
+ https://schemas.dissco.tech/schemas/fdo-type/create-update-tombstone-event/0.4.0/create-update-tombstone-event.json
diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/DataMappingController.java b/src/main/java/eu/dissco/orchestration/backend/controller/DataMappingController.java
index 6692163..0f6e43c 100644
--- a/src/main/java/eu/dissco/orchestration/backend/controller/DataMappingController.java
+++ b/src/main/java/eu/dissco/orchestration/backend/controller/DataMappingController.java
@@ -1,5 +1,7 @@
package eu.dissco.orchestration.backend.controller;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.CREATOR;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.TOMBSTONER;
import static eu.dissco.orchestration.backend.utils.ControllerUtils.getAgent;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -47,10 +49,11 @@ public ResponseEntity createDataMapping(Authentication authentic
@RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest)
throws JsonProcessingException, ProcessingFailedException, ForbiddenException {
var dataMapping = getDataMappingRequestFromRequest(requestBody);
- var user = getAgent(authentication);
- log.info("Received create request for data mapping: {} from user: {}", dataMapping, user.getId());
+ var agent = getAgent(authentication, CREATOR);
+ log.info("Received create request for data mapping: {} from agent: {}", dataMapping,
+ agent.getId());
String path = appProperties.getBaseUrl() + servletRequest.getRequestURI();
- var result = service.createDataMapping(dataMapping, user, path);
+ var result = service.createDataMapping(dataMapping, agent, path);
return ResponseEntity.status(HttpStatus.CREATED).body(result);
}
@@ -61,10 +64,11 @@ public ResponseEntity updateDataMapping(Authentication authentic
throws JsonProcessingException, NotFoundException, ProcessingFailedException, ForbiddenException {
var dataMapping = getDataMappingRequestFromRequest(requestBody);
var id = prefix + '/' + suffix;
- var user = getAgent(authentication);
- log.info("Received update request for data mapping: {} from user: {}", dataMapping, user.getId());
+ var agent = getAgent(authentication, CREATOR);
+ log.info("Received update request for data mapping: {} from agent: {}", dataMapping,
+ agent.getId());
String path = appProperties.getBaseUrl() + servletRequest.getRequestURI();
- var result = service.updateDataMapping(id, dataMapping, user, path);
+ var result = service.updateDataMapping(id, dataMapping, agent, path);
if (result == null) {
return ResponseEntity.status(HttpStatus.NO_CONTENT).build();
} else {
@@ -78,8 +82,8 @@ public ResponseEntity tombstoneDataMapping(Authentication authentication,
@PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix)
throws NotFoundException, ProcessingFailedException, ForbiddenException {
String id = prefix + "/" + suffix;
- var agent = getAgent(authentication);
- log.info("Received delete request for mapping: {} from user: {}", id, agent.getId());
+ var agent = getAgent(authentication, TOMBSTONER);
+ log.info("Received delete request for mapping: {} from agent: {}", id, agent.getId());
service.tombstoneDataMapping(id, agent);
return ResponseEntity.status(HttpStatus.NO_CONTENT).build();
}
@@ -103,7 +107,8 @@ public ResponseEntity getDataMappings(
log.info("Received get request for mappings with pageNumber: {} and pageSzie: {}: ", pageNum,
pageSize);
String path = appProperties.getBaseUrl() + servletRequest.getRequestURI();
- return ResponseEntity.status(HttpStatus.OK).body(service.getDataMappings(pageNum, pageSize, path));
+ return ResponseEntity.status(HttpStatus.OK)
+ .body(service.getDataMappings(pageNum, pageSize, path));
}
private DataMappingRequest getDataMappingRequestFromRequest(JsonApiRequestWrapper requestBody)
diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java b/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java
index f748c19..229d447 100644
--- a/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java
+++ b/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java
@@ -1,5 +1,7 @@
package eu.dissco.orchestration.backend.controller;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.CREATOR;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.TOMBSTONER;
import static eu.dissco.orchestration.backend.utils.ControllerUtils.getAgent;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -48,11 +50,11 @@ public ResponseEntity createMachineAnnotationService(
@RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest)
throws JsonProcessingException, ProcessingFailedException, ForbiddenException {
var machineAnnotationService = getMachineAnnotation(requestBody);
- var user = getAgent(authentication);
- log.info("Received create request for machine annotation service: {} from user: {}",
- machineAnnotationService, user.getId());
+ var agent = getAgent(authentication, CREATOR);
+ log.info("Received create request for machine annotation service: {} from agent: {}",
+ machineAnnotationService, agent.getId());
String path = appProperties.getBaseUrl() + servletRequest.getRequestURI();
- var result = service.createMachineAnnotationService(machineAnnotationService, user, path);
+ var result = service.createMachineAnnotationService(machineAnnotationService, agent, path);
return ResponseEntity.status(HttpStatus.CREATED).body(result);
}
@@ -63,12 +65,12 @@ public ResponseEntity updateMachineAnnotationService(
@RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest)
throws JsonProcessingException, NotFoundException, ProcessingFailedException, ForbiddenException {
var machineAnnotationService = getMachineAnnotation(requestBody);
- var user = getAgent(authentication);
+ var agent = getAgent(authentication, CREATOR);
var id = prefix + '/' + suffix;
- log.info("Received update request for machine annotation service: {} from user: {}", id,
- user.getId());
+ log.info("Received update request for machine annotation service: {} from agent: {}", id,
+ agent.getId());
String path = appProperties.getBaseUrl() + servletRequest.getRequestURI();
- var result = service.updateMachineAnnotationService(id, machineAnnotationService, user, path);
+ var result = service.updateMachineAnnotationService(id, machineAnnotationService, agent, path);
if (result == null) {
return ResponseEntity.status(HttpStatus.NO_CONTENT).build();
} else {
@@ -82,8 +84,8 @@ public ResponseEntity tombstoneMachineAnnotationService(Authentication aut
@PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix)
throws NotFoundException, ProcessingFailedException, ForbiddenException {
String id = prefix + "/" + suffix;
- var agent = getAgent(authentication);
- log.info("Received delete request for machine annotation service: {} from user: {}", id,
+ var agent = getAgent(authentication, TOMBSTONER);
+ log.info("Received delete request for machine annotation service: {} from agent: {}", id,
agent.getId());
service.tombstoneMachineAnnotationService(id, agent);
return ResponseEntity.status(HttpStatus.NO_CONTENT).build();
diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java b/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java
index ab4fadf..fef303a 100644
--- a/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java
+++ b/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java
@@ -1,5 +1,7 @@
package eu.dissco.orchestration.backend.controller;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.CREATOR;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.TOMBSTONER;
import static eu.dissco.orchestration.backend.utils.ControllerUtils.getAgent;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -49,10 +51,10 @@ public ResponseEntity createSourceSystem(Authentication authenti
throws IOException, NotFoundException, ProcessingFailedException, ForbiddenException {
var sourceSystemRequest = getSourceSystemFromRequest(requestBody);
String path = appProperties.getBaseUrl() + servletRequest.getRequestURI();
- var user = getAgent(authentication);
- log.info("Received create request for source system: {} from user: {}", sourceSystemRequest,
- user.getId());
- var result = service.createSourceSystem(sourceSystemRequest, user, path);
+ var agent = getAgent(authentication, CREATOR);
+ log.info("Received create request for source system: {} from agent: {}", sourceSystemRequest,
+ agent.getId());
+ var result = service.createSourceSystem(sourceSystemRequest, agent, path);
return ResponseEntity.status(HttpStatus.CREATED).body(result);
}
@@ -64,10 +66,10 @@ public ResponseEntity updateSourceSystem(Authentication authenti
throws IOException, NotFoundException, ProcessingFailedException, ForbiddenException {
var sourceSystemRequest = getSourceSystemFromRequest(requestBody);
var id = prefix + '/' + suffix;
- var user = getAgent(authentication);
- log.info("Received update request for source system: {} from user: {}", id, user.getId());
+ var agent = getAgent(authentication, CREATOR);
+ log.info("Received update request for source system: {} from agent: {}", id, agent.getId());
String path = appProperties.getBaseUrl() + servletRequest.getRequestURI();
- var result = service.updateSourceSystem(id, sourceSystemRequest, user, path, trigger);
+ var result = service.updateSourceSystem(id, sourceSystemRequest, agent, path, trigger);
if (result == null) {
return ResponseEntity.status(HttpStatus.NO_CONTENT).build();
} else {
@@ -81,8 +83,8 @@ public ResponseEntity tombstoneSourceSystem(Authentication authentication,
@PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix)
throws NotFoundException, ProcessingFailedException, ForbiddenException {
String id = prefix + "/" + suffix;
- var agent = getAgent(authentication);
- log.info("Received delete request for mapping: {} from user: {}", id, agent.getId());
+ var agent = getAgent(authentication, TOMBSTONER);
+ log.info("Received delete request for mapping: {} from agent: {}", id, agent.getId());
service.tombstoneSourceSystem(id, agent);
return ResponseEntity.status(HttpStatus.NO_CONTENT).build();
}
diff --git a/src/main/java/eu/dissco/orchestration/backend/domain/AgentRoleType.java b/src/main/java/eu/dissco/orchestration/backend/domain/AgentRoleType.java
new file mode 100644
index 0000000..320b904
--- /dev/null
+++ b/src/main/java/eu/dissco/orchestration/backend/domain/AgentRoleType.java
@@ -0,0 +1,23 @@
+package eu.dissco.orchestration.backend.domain;
+
+import lombok.Getter;
+
+@Getter
+public enum AgentRoleType {
+
+ COLLECTOR("collector"),
+ DATA_TRANSLATOR("data-translator"),
+ CREATOR("creator"),
+ IDENTIFIER("identifier"),
+ GEOREFERENCER("georeferencer"),
+ RIGHTS_OWNER("rights-owner"),
+ PROCESSING_SERVICE("processing-service"),
+ SOURCE_SYSTEM("source-system"),
+ TOMBSTONER("tombstoner");
+
+ private final String name;
+
+ AgentRoleType(String name) {
+ this.name = name;
+ }
+}
diff --git a/src/main/java/eu/dissco/orchestration/backend/service/DataMappingService.java b/src/main/java/eu/dissco/orchestration/backend/service/DataMappingService.java
index b587fa4..11234ff 100644
--- a/src/main/java/eu/dissco/orchestration/backend/service/DataMappingService.java
+++ b/src/main/java/eu/dissco/orchestration/backend/service/DataMappingService.java
@@ -22,7 +22,7 @@
import eu.dissco.orchestration.backend.schema.DataMapping.OdsStatus;
import eu.dissco.orchestration.backend.schema.DataMappingRequest;
import eu.dissco.orchestration.backend.schema.DefaultMapping;
-import eu.dissco.orchestration.backend.schema.FieldMapping;
+import eu.dissco.orchestration.backend.schema.TermMapping;
import eu.dissco.orchestration.backend.web.HandleComponent;
import java.time.Instant;
import java.util.ArrayList;
@@ -51,9 +51,10 @@ private static boolean isEqual(DataMapping dataMapping,
return Objects.equals(dataMapping.getSchemaName(), currentDataMapping.getSchemaName()) &&
Objects.equals(dataMapping.getSchemaDescription(),
currentDataMapping.getSchemaDescription()) &&
- Objects.equals(dataMapping.getOdsDefaultMapping(),
- currentDataMapping.getOdsDefaultMapping()) &&
- Objects.equals(dataMapping.getOdsFieldMapping(), currentDataMapping.getOdsFieldMapping()) &&
+ Objects.equals(dataMapping.getOdsHasDefaultMapping(),
+ currentDataMapping.getOdsHasDefaultMapping()) &&
+ Objects.equals(dataMapping.getOdsHasTermMapping(),
+ currentDataMapping.getOdsHasTermMapping()) &&
Objects.equals(dataMapping.getOdsMappingDataStandard(),
currentDataMapping.getOdsMappingDataStandard());
}
@@ -61,7 +62,7 @@ private static boolean isEqual(DataMapping dataMapping,
private static List<DefaultMapping> buildDefaultMapping(
DataMappingRequest dataMappingRequest) {
var mappedList = new ArrayList<DefaultMapping>();
- for (var odsDefaultMapping : dataMappingRequest.getOdsDefaultMapping()) {
+ for (var odsDefaultMapping : dataMappingRequest.getOdsHasDefaultMapping()) {
var mappedOdsDefaultMapping = new DefaultMapping();
for (var property : odsDefaultMapping.getAdditionalProperties()
.entrySet()) {
@@ -72,21 +73,42 @@ private static List buildDefaultMapping(
return mappedList;
}
- private static List<FieldMapping> buildFieldMapping(
- DataMappingRequest dataMappingRequest) {
- var mappedList = new ArrayList<FieldMapping>();
- for (var odsDefaultMapping : dataMappingRequest.getOdsFieldMapping()) {
- var mappedOdsFieldMapping = new FieldMapping();
+ private static List<TermMapping> buildTermMapping(DataMappingRequest dataMappingRequest) {
+ var mappedList = new ArrayList<TermMapping>();
+ for (var odsDefaultMapping : dataMappingRequest.getOdsHasTermMapping()) {
+ var mappedOdsTermMapping = new TermMapping();
for (var property : odsDefaultMapping.getAdditionalProperties()
.entrySet()) {
- mappedOdsFieldMapping.setAdditionalProperty(property.getKey(), property.getValue());
+ mappedOdsTermMapping.setAdditionalProperty(property.getKey(), property.getValue());
}
- mappedList.add(mappedOdsFieldMapping);
+ mappedList.add(mappedOdsTermMapping);
}
return mappedList;
}
- public JsonApiWrapper createDataMapping(DataMappingRequest mappingRequest, Agent user,
+ private static DataMapping buildTombstoneDataMapping(DataMapping dataMapping,
+ Agent tombstoningAgent, Instant timestamp) {
+ return new DataMapping()
+ .withId(dataMapping.getId())
+ .withType(dataMapping.getType())
+ .withSchemaIdentifier(dataMapping.getSchemaIdentifier())
+ .withOdsFdoType(dataMapping.getOdsFdoType())
+ .withOdsStatus(OdsStatus.TOMBSTONE)
+ .withSchemaVersion(dataMapping.getSchemaVersion() + 1)
+ .withSchemaName(dataMapping.getSchemaName())
+ .withSchemaDescription(dataMapping.getSchemaDescription())
+ .withSchemaDateCreated(dataMapping.getSchemaDateCreated())
+ .withSchemaDateModified(Date.from(timestamp))
+ .withSchemaCreator(dataMapping.getSchemaCreator())
+ .withOdsHasDefaultMapping(dataMapping.getOdsHasDefaultMapping())
+ .withOdsHasTermMapping(dataMapping.getOdsHasTermMapping())
+ .withOdsMappingDataStandard(dataMapping.getOdsMappingDataStandard())
+ .withOdsHasTombstoneMetadata(buildTombstoneMetadata(tombstoningAgent,
+ "Data Mapping tombstoned by agent through the orchestration backend", timestamp));
+
+ }
+
+ public JsonApiWrapper createDataMapping(DataMappingRequest mappingRequest, Agent agent,
String path)
throws ProcessingFailedException {
var requestBody = fdoRecordService.buildCreateRequest(mappingRequest, ObjectType.DATA_MAPPING);
@@ -96,38 +118,38 @@ public JsonApiWrapper createDataMapping(DataMappingRequest mappingRequest, Agent
} catch (PidException e) {
throw new ProcessingFailedException(e.getMessage(), e);
}
- var dataMapping = buildDataMapping(mappingRequest, 1, user, handle,
+ var dataMapping = buildDataMapping(mappingRequest, 1, agent, handle,
Date.from(Instant.now()));
repository.createDataMapping(dataMapping);
- publishCreateEvent(dataMapping);
+ publishCreateEvent(dataMapping, agent);
return wrapSingleResponse(dataMapping, path);
}
private DataMapping buildDataMapping(DataMappingRequest dataMappingRequest, int version,
- Agent user, String handle, Date created) {
+ Agent agent, String handle, Date created) {
var id = HANDLE_PROXY + handle;
return new DataMapping()
.withId(id)
- .withOdsID(id)
+ .withSchemaIdentifier(id)
.withType(ObjectType.DATA_MAPPING.getFullName())
- .withOdsType(fdoProperties.getDataMappingType())
+ .withOdsFdoType(fdoProperties.getDataMappingType())
.withSchemaVersion(version)
- .withOdsStatus(OdsStatus.ODS_ACTIVE)
+ .withOdsStatus(OdsStatus.ACTIVE)
.withSchemaName(dataMappingRequest.getSchemaName())
.withSchemaDescription(dataMappingRequest.getSchemaDescription())
.withSchemaDateCreated(created)
.withSchemaDateModified(Date.from(Instant.now()))
- .withSchemaCreator(user)
- .withOdsDefaultMapping(buildDefaultMapping(dataMappingRequest))
- .withOdsFieldMapping(buildFieldMapping(dataMappingRequest))
+ .withSchemaCreator(agent)
+ .withOdsHasDefaultMapping(buildDefaultMapping(dataMappingRequest))
+ .withOdsHasTermMapping(buildTermMapping(dataMappingRequest))
.withOdsMappingDataStandard(OdsMappingDataStandard.fromValue(
dataMappingRequest.getOdsMappingDataStandard().value()));
}
- private void publishCreateEvent(DataMapping dataMapping)
+ private void publishCreateEvent(DataMapping dataMapping, Agent agent)
throws ProcessingFailedException {
try {
- kafkaPublisherService.publishCreateEvent(mapper.valueToTree(dataMapping));
+ kafkaPublisherService.publishCreateEvent(mapper.valueToTree(dataMapping), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish message to Kafka", e);
rollbackMappingCreation(dataMapping);
@@ -148,7 +170,7 @@ private void rollbackMappingCreation(DataMapping dataMapping) {
}
public JsonApiWrapper updateDataMapping(String id, DataMappingRequest dataMappingRequest,
- Agent user, String path) throws NotFoundException, ProcessingFailedException {
+ Agent agent, String path) throws NotFoundException, ProcessingFailedException {
var currentDataMappingOptional = repository.getActiveDataMapping(id);
if (currentDataMappingOptional.isEmpty()) {
throw new NotFoundException("Requested data mapping does not exist");
@@ -156,21 +178,21 @@ public JsonApiWrapper updateDataMapping(String id, DataMappingRequest dataMappin
var currentDataMapping = currentDataMappingOptional.get();
var dataMapping = buildDataMapping(dataMappingRequest,
currentDataMapping.getSchemaVersion() + 1,
- user, id, currentDataMapping.getSchemaDateCreated());
+ agent, id, currentDataMapping.getSchemaDateCreated());
if (isEqual(dataMapping, currentDataMapping)) {
return null;
} else {
repository.updateDataMapping(dataMapping);
- publishUpdateEvent(dataMapping, currentDataMappingOptional.get());
+ publishUpdateEvent(dataMapping, currentDataMappingOptional.get(), agent);
return wrapSingleResponse(dataMapping, path);
}
}
private void publishUpdateEvent(DataMapping dataMapping,
- DataMapping currentDataMapping) throws ProcessingFailedException {
+ DataMapping currentDataMapping, Agent agent) throws ProcessingFailedException {
try {
kafkaPublisherService.publishUpdateEvent(mapper.valueToTree(dataMapping),
- mapper.valueToTree(currentDataMapping));
+ mapper.valueToTree(currentDataMapping), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish message to Kafka", e);
rollbackToPreviousVersion(currentDataMapping);
@@ -193,10 +215,11 @@ public void tombstoneDataMapping(String id, Agent agent)
repository.tombstoneDataMapping(tombstoneDataMapping, timestamp);
try {
kafkaPublisherService.publishTombstoneEvent(mapper.valueToTree(tombstoneDataMapping),
- mapper.valueToTree(tombstoneDataMapping));
- } catch (JsonProcessingException e){
+ mapper.valueToTree(tombstoneDataMapping), agent);
+ } catch (JsonProcessingException e) {
log.error("Unable to publish tombstone event to provenance service", e);
- throw new ProcessingFailedException("Unable to publish tombstone event to provenance service", e);
+ throw new ProcessingFailedException(
+ "Unable to publish tombstone event to provenance service", e);
}
} else {
throw new NotFoundException("Requested data mapping " + id + " does not exist");
@@ -207,34 +230,12 @@ private void tombstoneHandle(String handle) throws ProcessingFailedException {
var request = fdoRecordService.buildTombstoneRequest(ObjectType.DATA_MAPPING, handle);
try {
handleComponent.tombstoneHandle(request, handle);
- } catch (PidException e){
+ } catch (PidException e) {
log.error("Unable to tombstone handle {}", handle, e);
throw new ProcessingFailedException("Unable to tombstone handle", e);
}
}
- private static DataMapping buildTombstoneDataMapping(DataMapping dataMapping, Agent tombstoningAgent,
- Instant timestamp) {
- return new DataMapping()
- .withId(dataMapping.getId())
- .withType(dataMapping.getType())
- .withOdsID(dataMapping.getOdsID())
- .withOdsType(dataMapping.getOdsType())
- .withOdsStatus(OdsStatus.ODS_TOMBSTONE)
- .withSchemaVersion(dataMapping.getSchemaVersion() + 1)
- .withSchemaName(dataMapping.getSchemaName())
- .withSchemaDescription(dataMapping.getSchemaDescription())
- .withSchemaDateCreated(dataMapping.getSchemaDateCreated())
- .withSchemaDateModified(Date.from(timestamp))
- .withSchemaCreator(dataMapping.getSchemaCreator())
- .withOdsDefaultMapping(dataMapping.getOdsDefaultMapping())
- .withOdsFieldMapping(dataMapping.getOdsFieldMapping())
- .withOdsMappingDataStandard(dataMapping.getOdsMappingDataStandard())
- .withOdsTombstoneMetadata(buildTombstoneMetadata(tombstoningAgent,
- "Data Mapping tombstoned by user through the orchestration backend", timestamp));
-
- }
-
protected Optional getActiveDataMapping(String id) {
return repository.getActiveDataMapping(id);
}
diff --git a/src/main/java/eu/dissco/orchestration/backend/service/FdoRecordService.java b/src/main/java/eu/dissco/orchestration/backend/service/FdoRecordService.java
index ec41468..182d5ff 100644
--- a/src/main/java/eu/dissco/orchestration/backend/service/FdoRecordService.java
+++ b/src/main/java/eu/dissco/orchestration/backend/service/FdoRecordService.java
@@ -40,7 +40,8 @@ public JsonNode buildTombstoneRequest(ObjectType type, String handle) {
.put("type", getFdoType(type))
.put("id", handle)
.set("attributes", mapper.createObjectNode()
- .put("tombstoneText", type.getFullName() + " tombstoned by user through the orchestration backend")));
+ .put("tombstoneText", type.getFullName()
+ + " tombstoned by agent through the orchestration backend")));
}
public JsonNode buildRollbackCreateRequest(String handle) {
diff --git a/src/main/java/eu/dissco/orchestration/backend/service/KafkaPublisherService.java b/src/main/java/eu/dissco/orchestration/backend/service/KafkaPublisherService.java
index 8f88776..772068b 100644
--- a/src/main/java/eu/dissco/orchestration/backend/service/KafkaPublisherService.java
+++ b/src/main/java/eu/dissco/orchestration/backend/service/KafkaPublisherService.java
@@ -3,6 +3,7 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dissco.orchestration.backend.schema.Agent;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
@@ -13,28 +14,28 @@
@RequiredArgsConstructor
public class KafkaPublisherService {
+ private static final String TOPIC = "createUpdateDeleteTopic";
private final KafkaTemplate kafkaTemplate;
private final ObjectMapper mapper;
private final ProvenanceService provenanceService;
- private static final String TOPIC = "createUpdateDeleteTopic";
- public void publishCreateEvent(JsonNode object)
+ public void publishCreateEvent(JsonNode object, Agent agent)
throws JsonProcessingException {
- var event = provenanceService.generateCreateEvent(object);
+ var event = provenanceService.generateCreateEvent(object, agent);
log.info("Publishing new create message to queue: {}", event);
kafkaTemplate.send(TOPIC, mapper.writeValueAsString(event));
}
- public void publishUpdateEvent(JsonNode object, JsonNode currentObject)
+ public void publishUpdateEvent(JsonNode object, JsonNode currentObject, Agent agent)
throws JsonProcessingException {
- var event = provenanceService.generateUpdateEvent(object, currentObject);
+ var event = provenanceService.generateUpdateEvent(object, currentObject, agent);
log.info("Publishing new update message to queue: {}", event);
kafkaTemplate.send(TOPIC, mapper.writeValueAsString(event));
}
- public void publishTombstoneEvent(JsonNode tombstoneObject, JsonNode currentObject)
+ public void publishTombstoneEvent(JsonNode tombstoneObject, JsonNode currentObject, Agent agent)
throws JsonProcessingException {
- var event = provenanceService.generateTombstoneEvent(tombstoneObject, currentObject);
+ var event = provenanceService.generateTombstoneEvent(tombstoneObject, currentObject, agent);
log.info("Publishing new tombstone message to queue: {}", event);
kafkaTemplate.send(TOPIC, mapper.writeValueAsString(event));
}
diff --git a/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java b/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java
index 589d3bf..107a9c5 100644
--- a/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java
+++ b/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java
@@ -27,8 +27,8 @@
import eu.dissco.orchestration.backend.schema.MachineAnnotationService;
import eu.dissco.orchestration.backend.schema.MachineAnnotationService.OdsStatus;
import eu.dissco.orchestration.backend.schema.MachineAnnotationServiceRequest;
-import eu.dissco.orchestration.backend.schema.OdsTargetDigitalObjectFilter;
-import eu.dissco.orchestration.backend.schema.OdsTargetDigitalObjectFilter__1;
+import eu.dissco.orchestration.backend.schema.OdsHasTargetDigitalObjectFilter;
+import eu.dissco.orchestration.backend.schema.OdsHasTargetDigitalObjectFilter__1;
import eu.dissco.orchestration.backend.schema.SchemaContactPoint;
import eu.dissco.orchestration.backend.schema.SchemaContactPoint__1;
import eu.dissco.orchestration.backend.web.HandleComponent;
@@ -92,18 +92,55 @@ private static SchemaContactPoint__1 buildContactPoint(SchemaContactPoint schema
.withSchemaTelephone(schemaContactPoint.getSchemaTelephone());
}
+ private static MachineAnnotationService buildTombstoneMachineAnnotationService(
+ MachineAnnotationService mas,
+ Agent tombstoningAgent, Instant timestamp) {
+ return new MachineAnnotationService()
+ .withId(mas.getId())
+ .withType(mas.getType())
+ .withSchemaIdentifier(mas.getSchemaIdentifier())
+ .withOdsFdoType(mas.getOdsFdoType())
+ .withOdsStatus(OdsStatus.TOMBSTONE)
+ .withSchemaVersion(mas.getSchemaVersion() + 1)
+ .withSchemaName(mas.getSchemaName())
+ .withSchemaDescription(mas.getSchemaDescription())
+ .withSchemaDateCreated(mas.getSchemaDateCreated())
+ .withSchemaDateModified(Date.from(timestamp))
+ .withSchemaCreator(mas.getSchemaCreator())
+ .withOdsContainerImage(mas.getOdsContainerImage())
+ .withOdsContainerTag(mas.getOdsContainerTag())
+ .withOdsHasTargetDigitalObjectFilter(mas.getOdsHasTargetDigitalObjectFilter())
+ .withSchemaCreativeWorkStatus(mas.getSchemaCreativeWorkStatus())
+ .withSchemaCodeRepository(mas.getSchemaCodeRepository())
+ .withSchemaProgrammingLanguage(mas.getSchemaProgrammingLanguage())
+ .withOdsServiceAvailability(mas.getOdsServiceAvailability())
+ .withSchemaMaintainer(mas.getSchemaMaintainer())
+ .withSchemaLicense(mas.getSchemaLicense())
+ .withSchemaContactPoint(mas.getSchemaContactPoint())
+ .withOdsSlaDocumentation(mas.getOdsSlaDocumentation())
+ .withOdsTopicName(mas.getOdsTopicName())
+ .withOdsMaxReplicas(mas.getOdsMaxReplicas())
+ .withOdsBatchingPermitted(mas.getOdsBatchingPermitted())
+ .withOdsTimeToLive(mas.getOdsTimeToLive())
+ .withOdsHasTombstoneMetadata(buildTombstoneMetadata(tombstoningAgent,
+ "Machine Annotation Service tombstoned by agent through the orchestration backend",
+ timestamp))
+ .withOdsHasEnvironmentalVariables(mas.getOdsHasEnvironmentalVariables())
+ .withOdsHasSecretVariables(mas.getOdsHasSecretVariables());
+ }
+
public JsonApiWrapper createMachineAnnotationService(
MachineAnnotationServiceRequest masRequest,
- Agent user, String path) throws ProcessingFailedException {
+ Agent agent, String path) throws ProcessingFailedException {
var requestBody = fdoRecordService.buildCreateRequest(masRequest, ObjectType.MAS);
try {
var handle = handleComponent.postHandle(requestBody);
setDefaultMas(masRequest, handle);
- var mas = buildMachineAnnotationService(masRequest, 1, user, handle,
+ var mas = buildMachineAnnotationService(masRequest, 1, agent, handle,
Instant.now());
repository.createMachineAnnotationService(mas);
createDeployment(mas);
- publishCreateEvent(mas);
+ publishCreateEvent(mas, agent);
return wrapSingleResponse(mas, path);
} catch (PidException e) {
throw new ProcessingFailedException(e.getMessage(), e);
@@ -111,44 +148,44 @@ public JsonApiWrapper createMachineAnnotationService(
}
private MachineAnnotationService buildMachineAnnotationService(
- MachineAnnotationServiceRequest mas, int version, Agent user, String handle,
+ MachineAnnotationServiceRequest mas, int version, Agent agent, String handle,
Instant created) {
var id = HANDLE_PROXY + handle;
return new MachineAnnotationService()
.withId(id)
- .withOdsID(id)
+ .withSchemaIdentifier(id)
.withType(ObjectType.MAS.getFullName())
- .withOdsType(fdoProperties.getMasType())
- .withOdsStatus(OdsStatus.ODS_ACTIVE)
+ .withOdsFdoType(fdoProperties.getMasType())
+ .withOdsStatus(OdsStatus.ACTIVE)
.withSchemaVersion(version)
.withSchemaName(mas.getSchemaName())
.withSchemaDescription(mas.getSchemaDescription())
.withSchemaDateCreated(Date.from(created))
.withSchemaDateModified(Date.from(Instant.now()))
- .withSchemaCreator(user)
+ .withSchemaCreator(agent)
.withOdsContainerTag(mas.getOdsContainerTag())
.withOdsContainerImage(mas.getOdsContainerImage())
- .withOdsTargetDigitalObjectFilter(buildTargetFilters(mas.getOdsTargetDigitalObjectFilter()))
+ .withOdsHasTargetDigitalObjectFilter(
+ buildTargetFilters(mas.getOdsHasTargetDigitalObjectFilter()))
.withSchemaCreativeWorkStatus(mas.getSchemaCreativeWorkStatus())
.withSchemaCodeRepository(mas.getSchemaCodeRepository())
.withSchemaProgrammingLanguage(mas.getSchemaProgrammingLanguage())
.withOdsServiceAvailability(mas.getOdsServiceAvailability())
.withSchemaMaintainer(mas.getSchemaMaintainer())
.withSchemaLicense(mas.getSchemaLicense())
- .withOdsDependency(mas.getOdsDependency())
.withSchemaContactPoint(buildContactPoint(mas.getSchemaContactPoint()))
.withOdsSlaDocumentation(mas.getOdsSlaDocumentation())
.withOdsTopicName(mas.getOdsTopicName())
.withOdsMaxReplicas(mas.getOdsMaxReplicas())
.withOdsBatchingPermitted(mas.getOdsBatchingPermitted())
.withOdsTimeToLive(mas.getOdsTimeToLive())
- .withOdsHasSecretVariable(mas.getOdsHasSecretVariable())
- .withOdsHasEnvironmentalVariable(mas.getOdsHasEnvironmentalVariable());
+ .withOdsHasSecretVariables(mas.getOdsHasSecretVariables())
+ .withOdsHasEnvironmentalVariables(mas.getOdsHasEnvironmentalVariables());
}
- private OdsTargetDigitalObjectFilter__1 buildTargetFilters(
- OdsTargetDigitalObjectFilter odsTargetDigitalObjectFilter) {
- var filter = new OdsTargetDigitalObjectFilter__1();
+ private OdsHasTargetDigitalObjectFilter__1 buildTargetFilters(
+ OdsHasTargetDigitalObjectFilter odsTargetDigitalObjectFilter) {
+ var filter = new OdsHasTargetDigitalObjectFilter__1();
for (var prop : odsTargetDigitalObjectFilter.getAdditionalProperties().entrySet()) {
filter.setAdditionalProperty(prop.getKey(), prop.getValue());
}
@@ -265,8 +302,8 @@ private Map getDeploymentTemplateProperties(MachineAnnotationSer
private List addMasKeys(MachineAnnotationService mas) {
var keyNode = new ArrayList();
- if (mas.getOdsHasEnvironmentalVariable() != null) {
- mas.getOdsHasEnvironmentalVariable().forEach(env -> {
+ if (mas.getOdsHasEnvironmentalVariables() != null) {
+ mas.getOdsHasEnvironmentalVariables().forEach(env -> {
if (env.getSchemaValue() instanceof String stringVal) {
keyNode.add(mapper.createObjectNode()
.put(NAME, env.getSchemaName())
@@ -284,8 +321,8 @@ private List addMasKeys(MachineAnnotationService mas) {
}
});
}
- if (mas.getOdsHasSecretVariable() != null) {
- mas.getOdsHasSecretVariable().forEach(secret -> keyNode.add(mapper.createObjectNode()
+ if (mas.getOdsHasSecretVariables() != null) {
+ mas.getOdsHasSecretVariables().forEach(secret -> keyNode.add(mapper.createObjectNode()
.put(NAME, secret.getSchemaName())
.set("valueFrom", mapper.createObjectNode()
.set("secretKeyRef", mapper.createObjectNode()
@@ -307,10 +344,10 @@ private String fillDeploymentTemplate(Map templateProperties,
return mapper.writeValueAsString(templateAsNode);
}
- private void publishCreateEvent(MachineAnnotationService mas)
+ private void publishCreateEvent(MachineAnnotationService mas, Agent agent)
throws ProcessingFailedException {
try {
- kafkaPublisherService.publishCreateEvent(mapper.valueToTree(mas));
+ kafkaPublisherService.publishCreateEvent(mapper.valueToTree(mas), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish message to Kafka", e);
rollbackMasCreation(mas, true, true);
@@ -354,21 +391,21 @@ private void rollbackMasCreation(MachineAnnotationService mas,
}
public JsonApiWrapper updateMachineAnnotationService(String id,
- MachineAnnotationServiceRequest masRequest, Agent user, String path)
+ MachineAnnotationServiceRequest masRequest, Agent agent, String path)
throws NotFoundException, ProcessingFailedException {
var currentMasOptional = repository.getActiveMachineAnnotationService(id);
if (currentMasOptional.isPresent()) {
var currentMas = currentMasOptional.get();
setDefaultMas(masRequest, id);
var machineAnnotationService = buildMachineAnnotationService(masRequest,
- currentMas.getSchemaVersion() + 1, user, id, Instant.now());
+ currentMas.getSchemaVersion() + 1, agent, id, Instant.now());
if (isEqual(machineAnnotationService, currentMas)) {
log.debug("No changes found for MAS");
return null;
} else {
repository.updateMachineAnnotationService(machineAnnotationService);
updateDeployment(machineAnnotationService, currentMas);
- publishUpdateEvent(machineAnnotationService, currentMas);
+ publishUpdateEvent(machineAnnotationService, currentMas, agent);
return wrapSingleResponse(machineAnnotationService, path);
}
} else {
@@ -381,8 +418,8 @@ private boolean isEqual(MachineAnnotationService mas, MachineAnnotationService c
Objects.equals(mas.getSchemaDescription(), currentMas.getSchemaDescription()) &&
Objects.equals(mas.getOdsContainerTag(), currentMas.getOdsContainerTag()) &&
Objects.equals(mas.getOdsContainerImage(), currentMas.getOdsContainerImage()) &&
- Objects.equals(mas.getOdsTargetDigitalObjectFilter(),
- currentMas.getOdsTargetDigitalObjectFilter()) &&
+ Objects.equals(mas.getOdsHasTargetDigitalObjectFilter(),
+ currentMas.getOdsHasTargetDigitalObjectFilter()) &&
Objects.equals(mas.getSchemaCreativeWorkStatus(),
currentMas.getSchemaCreativeWorkStatus()) &&
Objects.equals(mas.getSchemaCodeRepository(), currentMas.getSchemaCodeRepository())
@@ -391,7 +428,6 @@ private boolean isEqual(MachineAnnotationService mas, MachineAnnotationService c
Objects.equals(mas.getOdsServiceAvailability(), currentMas.getOdsServiceAvailability()) &&
Objects.equals(mas.getSchemaMaintainer(), currentMas.getSchemaMaintainer()) &&
Objects.equals(mas.getSchemaLicense(), currentMas.getSchemaLicense()) &&
- Objects.equals(mas.getOdsDependency(), currentMas.getOdsDependency()) &&
Objects.equals(mas.getSchemaContactPoint(), currentMas.getSchemaContactPoint()) &&
Objects.equals(mas.getOdsSlaDocumentation(), currentMas.getOdsSlaDocumentation()) &&
Objects.equals(mas.getOdsTopicName(), currentMas.getOdsTopicName()) &&
@@ -452,11 +488,11 @@ private void updateKedaResource(MachineAnnotationService mas,
}
private void publishUpdateEvent(MachineAnnotationService mas,
- MachineAnnotationService currentMas)
+ MachineAnnotationService currentMas, Agent agent)
throws ProcessingFailedException {
try {
kafkaPublisherService.publishUpdateEvent(mapper.valueToTree(mas),
- mapper.valueToTree(currentMas));
+ mapper.valueToTree(currentMas), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish message to Kafka", e);
rollbackToPreviousVersion(currentMas, true, true);
@@ -502,7 +538,7 @@ public void tombstoneMachineAnnotationService(String id, Agent agent)
repository.tombstoneMachineAnnotationService(tombstoneMas, timestamp);
try {
kafkaPublisherService.publishTombstoneEvent(mapper.valueToTree(tombstoneMas),
- mapper.valueToTree(mas));
+ mapper.valueToTree(mas), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish tombstone event to provenance service", e);
throw new ProcessingFailedException(
@@ -523,44 +559,6 @@ private void tombstoneHandle(String handle) throws ProcessingFailedException {
}
}
- private static MachineAnnotationService buildTombstoneMachineAnnotationService(
- MachineAnnotationService mas,
- Agent tombstoningAgent, Instant timestamp) {
- return new MachineAnnotationService()
- .withId(mas.getId())
- .withType(mas.getType())
- .withOdsID(mas.getOdsID())
- .withOdsType(mas.getOdsType())
- .withOdsStatus(OdsStatus.ODS_TOMBSTONE)
- .withSchemaVersion(mas.getSchemaVersion() + 1)
- .withSchemaName(mas.getSchemaName())
- .withSchemaDescription(mas.getSchemaDescription())
- .withSchemaDateCreated(mas.getSchemaDateCreated())
- .withSchemaDateModified(Date.from(timestamp))
- .withSchemaCreator(mas.getSchemaCreator())
- .withOdsContainerImage(mas.getOdsContainerImage())
- .withOdsContainerTag(mas.getOdsContainerTag())
- .withOdsTargetDigitalObjectFilter(mas.getOdsTargetDigitalObjectFilter())
- .withSchemaCreativeWorkStatus(mas.getSchemaCreativeWorkStatus())
- .withSchemaCodeRepository(mas.getSchemaCodeRepository())
- .withSchemaProgrammingLanguage(mas.getSchemaProgrammingLanguage())
- .withOdsServiceAvailability(mas.getOdsServiceAvailability())
- .withSchemaMaintainer(mas.getSchemaMaintainer())
- .withSchemaLicense(mas.getSchemaLicense())
- .withOdsDependency(mas.getOdsDependency())
- .withSchemaContactPoint(mas.getSchemaContactPoint())
- .withOdsSlaDocumentation(mas.getOdsSlaDocumentation())
- .withOdsTopicName(mas.getOdsTopicName())
- .withOdsMaxReplicas(mas.getOdsMaxReplicas())
- .withOdsBatchingPermitted(mas.getOdsBatchingPermitted())
- .withOdsTimeToLive(mas.getOdsTimeToLive())
- .withOdsTombstoneMetadata(buildTombstoneMetadata(tombstoningAgent,
- "Machine Annotation Service tombstoned by user through the orchestration backend",
- timestamp))
- .withOdsHasEnvironmentalVariable(mas.getOdsHasEnvironmentalVariable())
- .withOdsHasSecretVariable(mas.getOdsHasSecretVariable());
- }
-
private void deleteDeployment(MachineAnnotationService currentMas)
throws ProcessingFailedException {
var name = getName(currentMas.getId());
diff --git a/src/main/java/eu/dissco/orchestration/backend/service/ProvenanceService.java b/src/main/java/eu/dissco/orchestration/backend/service/ProvenanceService.java
index 18d071b..d7db92f 100644
--- a/src/main/java/eu/dissco/orchestration/backend/service/ProvenanceService.java
+++ b/src/main/java/eu/dissco/orchestration/backend/service/ProvenanceService.java
@@ -1,14 +1,20 @@
package eu.dissco.orchestration.backend.service;
-import com.fasterxml.jackson.core.JsonProcessingException;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.CREATOR;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.PROCESSING_SERVICE;
+import static eu.dissco.orchestration.backend.schema.Agent.Type.PROV_PERSON;
+import static eu.dissco.orchestration.backend.schema.Agent.Type.PROV_SOFTWARE_AGENT;
+import static eu.dissco.orchestration.backend.utils.AgentUtils.createAgent;
+import static eu.dissco.orchestration.backend.utils.ControllerUtils.ORCID;
+
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fge.jsonpatch.diff.JsonDiff;
import eu.dissco.orchestration.backend.properties.ApplicationProperties;
import eu.dissco.orchestration.backend.schema.Agent;
-import eu.dissco.orchestration.backend.schema.Agent.Type;
import eu.dissco.orchestration.backend.schema.CreateUpdateTombstoneEvent;
+import eu.dissco.orchestration.backend.schema.Identifier.DctermsType;
import eu.dissco.orchestration.backend.schema.OdsChangeValue;
import eu.dissco.orchestration.backend.schema.ProvActivity;
import eu.dissco.orchestration.backend.schema.ProvEntity;
@@ -30,31 +36,43 @@ public class ProvenanceService {
private final ObjectMapper mapper;
private final ApplicationProperties properties;
- public CreateUpdateTombstoneEvent generateCreateEvent(JsonNode digitalObject)
- throws JsonProcessingException {
- return generateCreateUpdateTombStoneEvent(digitalObject, ProvActivity.Type.ODS_CREATE, null);
+ private static String getRdfsComment(ProvActivity.Type activityType) {
+ switch (activityType) {
+ case ODS_CREATE -> {
+ return "Object newly created";
+ }
+ case ODS_UPDATE -> {
+ return "Object updated";
+ }
+ case ODS_TOMBSTONE -> {
+ return "Object tombstoned";
+ }
+ }
+ return null;
+ }
+
+ public CreateUpdateTombstoneEvent generateCreateEvent(JsonNode digitalObject, Agent agent) {
+ return generateCreateUpdateTombStoneEvent(digitalObject, ProvActivity.Type.ODS_CREATE, null,
+ agent);
}
public CreateUpdateTombstoneEvent generateTombstoneEvent(JsonNode tombstoneObject,
- JsonNode currentObject)
- throws JsonProcessingException {
+ JsonNode currentObject, Agent agent) {
var patch = createJsonPatch(tombstoneObject, currentObject);
return generateCreateUpdateTombStoneEvent(tombstoneObject, ProvActivity.Type.ODS_TOMBSTONE,
- patch);
+ patch, agent);
}
private CreateUpdateTombstoneEvent generateCreateUpdateTombStoneEvent(
- JsonNode digitalObject, ProvActivity.Type activityType, JsonNode jsonPatch)
- throws JsonProcessingException {
+ JsonNode digitalObject, ProvActivity.Type activityType, JsonNode jsonPatch, Agent agent) {
var entityID =
digitalObject.get("@id").asText() + "/" + digitalObject.get("schema:version").asText();
var activityID = UUID.randomUUID().toString();
- Agent creator = mapper.treeToValue(digitalObject.get("schema:creator"), Agent.class);
return new CreateUpdateTombstoneEvent()
.withId(entityID)
.withType("ods:CreateUpdateTombstoneEvent")
- .withOdsID(entityID)
- .withOdsType(properties.getCreateUpdateTombstoneEventType())
+ .withDctermsIdentifier(entityID)
+ .withOdsFdoType(properties.getCreateUpdateTombstoneEventType())
.withProvActivity(new ProvActivity()
.withId(activityID)
.withType(activityType)
@@ -62,14 +80,14 @@ private CreateUpdateTombstoneEvent generateCreateUpdateTombStoneEvent(
.withProvEndedAtTime(Date.from(Instant.now()))
.withProvWasAssociatedWith(List.of(
new ProvWasAssociatedWith()
- .withId(creator.getId())
- .withProvHadRole(ProvHadRole.ODS_REQUESTOR),
+ .withId(agent.getId())
+ .withProvHadRole(ProvHadRole.REQUESTOR),
new ProvWasAssociatedWith()
- .withId(creator.getId())
- .withProvHadRole(ProvHadRole.ODS_APPROVER),
+ .withId(agent.getId())
+ .withProvHadRole(ProvHadRole.APPROVER),
new ProvWasAssociatedWith()
.withId(properties.getPid())
- .withProvHadRole(ProvHadRole.ODS_GENERATOR)))
+ .withProvHadRole(ProvHadRole.GENERATOR)))
.withProvUsed(entityID)
.withRdfsComment(getRdfsComment(activityType)))
.withProvEntity(new ProvEntity()
@@ -77,27 +95,10 @@ private CreateUpdateTombstoneEvent generateCreateUpdateTombStoneEvent(
.withType(digitalObject.get("@type").textValue())
.withProvValue(mapEntityToProvValue(digitalObject))
.withProvWasGeneratedBy(activityID))
- .withOdsHasProvAgent(List.of(creator,
- new Agent()
- .withType(Type.AS_APPLICATION)
- .withId(properties.getPid())
- .withSchemaName(properties.getName())
- ));
- }
-
- private static String getRdfsComment(ProvActivity.Type activityType) {
- switch (activityType) {
- case ODS_CREATE -> {
- return "Object newly created";
- }
- case ODS_UPDATE -> {
- return "Object updated";
- }
- case ODS_TOMBSTONE -> {
- return "Object tombstoned";
- }
- }
- return null;
+ .withOdsHasAgents(
+ List.of(createAgent(agent.getSchemaName(), agent.getId(), CREATOR, ORCID, PROV_PERSON),
+ createAgent(properties.getName(), properties.getPid(),
+ PROCESSING_SERVICE, DctermsType.DOI.value(), PROV_SOFTWARE_AGENT)));
}
private List mapJsonPatch(JsonNode jsonPatch) {
@@ -109,10 +110,10 @@ private List mapJsonPatch(JsonNode jsonPatch) {
}
public CreateUpdateTombstoneEvent generateUpdateEvent(JsonNode digitalObject,
- JsonNode currentDigitalObject) throws JsonProcessingException {
+ JsonNode currentDigitalObject, Agent agent) {
var jsonPatch = createJsonPatch(digitalObject, currentDigitalObject);
return generateCreateUpdateTombStoneEvent(digitalObject, ProvActivity.Type.ODS_UPDATE,
- jsonPatch);
+ jsonPatch, agent);
}
private ProvValue mapEntityToProvValue(JsonNode jsonNode) {
diff --git a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java
index 8efc9f2..3667772 100644
--- a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java
+++ b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java
@@ -68,11 +68,6 @@ public class SourceSystemService {
private final Random random;
private final FdoProperties fdoProperties;
- @PostConstruct
- public void setup() throws ApiException {
- updateCronsToImageTag();
- }
-
private static String getSuffix(String sourceSystemId) {
return sourceSystemId.substring(sourceSystemId.lastIndexOf('/') + 1).toLowerCase();
}
@@ -113,6 +108,36 @@ private static boolean isEquals(SourceSystem sourceSystem, SourceSystem currentS
currentSourceSystem.getOdsMaximumRecords());
}
+ private static SourceSystem buildTombstoneSourceSystem(SourceSystem sourceSystem,
+ Agent tombstoningAgent,
+ Instant timestamp) {
+ return new SourceSystem()
+ .withId(sourceSystem.getId())
+ .withType(sourceSystem.getType())
+ .withSchemaIdentifier(sourceSystem.getSchemaIdentifier())
+ .withOdsFdoType(sourceSystem.getOdsFdoType())
+ .withOdsStatus(OdsStatus.TOMBSTONE)
+ .withSchemaVersion(sourceSystem.getSchemaVersion() + 1)
+ .withSchemaName(sourceSystem.getSchemaName())
+ .withSchemaDescription(sourceSystem.getSchemaDescription())
+ .withSchemaDateCreated(sourceSystem.getSchemaDateCreated())
+ .withSchemaDateModified(Date.from(timestamp))
+ .withSchemaCreator(sourceSystem.getSchemaCreator())
+ .withSchemaUrl(sourceSystem.getSchemaUrl())
+ .withLtcCollectionManagementSystem(sourceSystem.getLtcCollectionManagementSystem())
+ .withOdsTranslatorType(sourceSystem.getOdsTranslatorType())
+ .withOdsMaximumRecords(sourceSystem.getOdsMaximumRecords())
+ .withOdsDataMappingID(sourceSystem.getOdsDataMappingID())
+ .withOdsHasTombstoneMetadata(
+ buildTombstoneMetadata(tombstoningAgent,
+ "Source System tombstoned by agent through the orchestration backend", timestamp));
+ }
+
+ @PostConstruct
+ public void setup() throws ApiException {
+ updateCronsToImageTag();
+ }
+
private void updateCronsToImageTag() throws ApiException {
log.info("Updating all cron jobs to use image tag: {}", jobProperties.getImage());
var cronJobs = batchV1Api.listNamespacedCronJob(jobProperties.getNamespace()).execute();
@@ -136,21 +161,21 @@ private void updateCronsToImageTag() throws ApiException {
}
private SourceSystem buildSourceSystem(
- SourceSystemRequest sourceSystemRequest, int version, Agent user, String handle,
+ SourceSystemRequest sourceSystemRequest, int version, Agent agent, String handle,
Date created) {
var id = HANDLE_PROXY + handle;
return new SourceSystem()
.withId(id)
- .withOdsID(id)
+ .withSchemaIdentifier(id)
.withType(ObjectType.SOURCE_SYSTEM.getFullName())
- .withOdsType(fdoProperties.getSourceSystemType())
+ .withOdsFdoType(fdoProperties.getSourceSystemType())
.withSchemaVersion(version)
- .withOdsStatus(OdsStatus.ODS_ACTIVE)
+ .withOdsStatus(OdsStatus.ACTIVE)
.withSchemaName(sourceSystemRequest.getSchemaName())
.withSchemaDescription(sourceSystemRequest.getSchemaDescription())
.withSchemaDateCreated(created)
.withSchemaDateModified(Date.from(Instant.now()))
- .withSchemaCreator(user)
+ .withSchemaCreator(agent)
.withSchemaUrl(sourceSystemRequest.getSchemaUrl())
.withOdsDataMappingID(sourceSystemRequest.getOdsDataMappingID())
.withOdsTranslatorType(
@@ -159,17 +184,17 @@ private SourceSystem buildSourceSystem(
.withLtcCollectionManagementSystem(sourceSystemRequest.getLtcCollectionManagementSystem());
}
- public JsonApiWrapper createSourceSystem(SourceSystemRequest sourceSystemRequest, Agent user,
+ public JsonApiWrapper createSourceSystem(SourceSystemRequest sourceSystemRequest, Agent agent,
String path)
throws NotFoundException, ProcessingFailedException {
validateMappingExists(sourceSystemRequest.getOdsDataMappingID());
String handle = createHandle(sourceSystemRequest);
- var sourceSystem = buildSourceSystem(sourceSystemRequest, 1, user, handle,
+ var sourceSystem = buildSourceSystem(sourceSystemRequest, 1, agent, handle,
Date.from(Instant.now()));
repository.createSourceSystem(sourceSystem);
createCronJob(sourceSystem);
createTranslatorJob(sourceSystem, true);
- publishCreateEvent(sourceSystem);
+ publishCreateEvent(sourceSystem, agent);
return wrapSingleResponse(sourceSystem, path);
}
@@ -227,10 +252,10 @@ private V1CronJob setCronJobProperties(SourceSystem sourceSystem)
return k8sCron;
}
- private void publishCreateEvent(SourceSystem sourceSystem)
+ private void publishCreateEvent(SourceSystem sourceSystem, Agent agent)
throws ProcessingFailedException {
try {
- kafkaPublisherService.publishCreateEvent(mapper.valueToTree(sourceSystem));
+ kafkaPublisherService.publishCreateEvent(mapper.valueToTree(sourceSystem), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish message to Kafka", e);
rollbackSourceSystemCreation(sourceSystem, true);
@@ -267,7 +292,7 @@ private void validateMappingExists(String mappingId) throws NotFoundException {
}
public JsonApiWrapper updateSourceSystem(String id, SourceSystemRequest sourceSystemRequest,
- Agent user, String path, boolean trigger)
+ Agent agent, String path, boolean trigger)
throws NotFoundException, ProcessingFailedException {
var currentSourceSystemOptional = repository.getActiveSourceSystem(id);
if (currentSourceSystemOptional.isEmpty()) {
@@ -276,7 +301,7 @@ public JsonApiWrapper updateSourceSystem(String id, SourceSystemRequest sourceSy
}
var currentSourceSystem = currentSourceSystemOptional.get();
var sourceSystem = buildSourceSystem(sourceSystemRequest,
- currentSourceSystem.getSchemaVersion() + 1, user, id,
+ currentSourceSystem.getSchemaVersion() + 1, agent, id,
currentSourceSystem.getSchemaDateCreated());
if (isEquals(sourceSystem, currentSourceSystem)) {
log.info(
@@ -290,7 +315,7 @@ public JsonApiWrapper updateSourceSystem(String id, SourceSystemRequest sourceSy
log.info("Translator Job requested for updated source system: {}", id);
triggerTranslatorForUpdatedSourceSystem(sourceSystem, currentSourceSystem);
}
- publishUpdateEvent(sourceSystem, currentSourceSystem);
+ publishUpdateEvent(sourceSystem, currentSourceSystem, agent);
return wrapSingleResponse(sourceSystem, path);
}
@@ -319,10 +344,10 @@ private void updateCronJob(SourceSystem sourceSystem, SourceSystem currentSource
}
private void publishUpdateEvent(SourceSystem newSourceSystem,
- SourceSystem currentSourceSystem) throws ProcessingFailedException {
+ SourceSystem currentSourceSystem, Agent agent) throws ProcessingFailedException {
try {
kafkaPublisherService.publishUpdateEvent(mapper.valueToTree(newSourceSystem),
- mapper.valueToTree(currentSourceSystem));
+ mapper.valueToTree(currentSourceSystem), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish message to Kafka", e);
rollbackToPreviousVersion(currentSourceSystem, true);
@@ -379,10 +404,12 @@ public void tombstoneSourceSystem(String id, Agent agent)
var tombstoneSourceSystem = buildTombstoneSourceSystem(sourceSystem, agent, timestamp);
repository.tombstoneSourceSystem(tombstoneSourceSystem, timestamp);
try {
- kafkaPublisherService.publishTombstoneEvent(mapper.valueToTree(tombstoneSourceSystem), mapper.valueToTree(sourceSystem));
+ kafkaPublisherService.publishTombstoneEvent(mapper.valueToTree(tombstoneSourceSystem),
+ mapper.valueToTree(sourceSystem), agent);
} catch (JsonProcessingException e) {
log.error("Unable to publish tombstone event to provenance service", e);
- throw new ProcessingFailedException("Unable to publish tombstone event to provenance service", e);
+ throw new ProcessingFailedException(
+ "Unable to publish tombstone event to provenance service", e);
}
log.info("Delete request for source system: {} was successful", id);
} else {
@@ -394,36 +421,12 @@ private void tombstoneHandle(String handle) throws ProcessingFailedException {
var request = fdoRecordService.buildTombstoneRequest(ObjectType.SOURCE_SYSTEM, handle);
try {
handleComponent.tombstoneHandle(request, handle);
- } catch (PidException e){
+ } catch (PidException e) {
log.error("Unable to tombstone handle {}", handle, e);
throw new ProcessingFailedException("Unable to tombstone handle", e);
}
}
- private static SourceSystem buildTombstoneSourceSystem(SourceSystem sourceSystem, Agent tombstoningAgent,
- Instant timestamp) {
- return new SourceSystem()
- .withId(sourceSystem.getId())
- .withType(sourceSystem.getType())
- .withOdsID(sourceSystem.getOdsID())
- .withOdsType(sourceSystem.getOdsType())
- .withOdsStatus(OdsStatus.ODS_TOMBSTONE)
- .withSchemaVersion(sourceSystem.getSchemaVersion() + 1)
- .withSchemaName(sourceSystem.getSchemaName())
- .withSchemaDescription(sourceSystem.getSchemaDescription())
- .withSchemaDateCreated(sourceSystem.getSchemaDateCreated())
- .withSchemaDateModified(Date.from(timestamp))
- .withSchemaCreator(sourceSystem.getSchemaCreator())
- .withSchemaUrl(sourceSystem.getSchemaUrl())
- .withLtcCollectionManagementSystem(sourceSystem.getLtcCollectionManagementSystem())
- .withOdsTranslatorType(sourceSystem.getOdsTranslatorType())
- .withOdsMaximumRecords(sourceSystem.getOdsMaximumRecords())
- .withOdsDataMappingID(sourceSystem.getOdsDataMappingID())
- .withOdsTombstoneMetadata(
- buildTombstoneMetadata(tombstoningAgent,
- "Source System tombstoned by user through the orchestration backend", timestamp));
- }
-
private JsonApiListWrapper wrapResponse(List sourceSystems, int pageNum,
int pageSize, String path) {
boolean hasNext = sourceSystems.size() > pageSize;
diff --git a/src/main/java/eu/dissco/orchestration/backend/utils/AgentUtils.java b/src/main/java/eu/dissco/orchestration/backend/utils/AgentUtils.java
new file mode 100644
index 0000000..1d31372
--- /dev/null
+++ b/src/main/java/eu/dissco/orchestration/backend/utils/AgentUtils.java
@@ -0,0 +1,50 @@
+package eu.dissco.orchestration.backend.utils;
+
+import static eu.dissco.orchestration.backend.schema.Identifier.DctermsType.DOI;
+import static eu.dissco.orchestration.backend.schema.Identifier.OdsGupriLevel.GLOBALLY_UNIQUE_STABLE_PERSISTENT_RESOLVABLE_FDO_COMPLIANT;
+import static eu.dissco.orchestration.backend.schema.Identifier.OdsIdentifierStatus.PREFERRED;
+import static eu.dissco.orchestration.backend.utils.ControllerUtils.ORCID;
+
+import eu.dissco.orchestration.backend.domain.AgentRoleType;
+import eu.dissco.orchestration.backend.schema.Agent;
+import eu.dissco.orchestration.backend.schema.Agent.Type;
+import eu.dissco.orchestration.backend.schema.Identifier;
+import eu.dissco.orchestration.backend.schema.Identifier.DctermsType;
+import eu.dissco.orchestration.backend.schema.OdsHasRole;
+import java.util.List;
+
+public class AgentUtils {
+
+ private AgentUtils() {
+ }
+
+ public static Agent createAgent(String name, String pid, AgentRoleType role, String idTitle,
+ Type agentType) {
+ var agent = new Agent()
+ .withType(agentType)
+ .withId(pid)
+ .withSchemaName(name)
+ .withSchemaIdentifier(pid)
+ .withOdsHasRoles(List.of(new OdsHasRole().withType("schema:Role")
+ .withSchemaRoleName(role.getName())));
+ if (pid != null) {
+ var identifier = new Identifier()
+ .withType("ods:Identifier")
+ .withId(pid)
+ .withDctermsIdentifier(pid)
+ .withOdsIsPartOfLabel(false)
+ .withOdsIdentifierStatus(PREFERRED)
+ .withOdsGupriLevel(
+ GLOBALLY_UNIQUE_STABLE_PERSISTENT_RESOLVABLE_FDO_COMPLIANT);
+ if (DOI.value().equals(idTitle)) {
+ identifier.withDctermsType(DOI);
+ identifier.withDctermsTitle("DOI");
+ } else if (ORCID.equals(idTitle)) {
+ identifier.withDctermsType(DctermsType.URL);
+ identifier.withDctermsTitle("ORCID");
+ }
+ agent.setOdsHasIdentifiers(List.of(identifier));
+ }
+ return agent;
+ }
+}
diff --git a/src/main/java/eu/dissco/orchestration/backend/utils/ControllerUtils.java b/src/main/java/eu/dissco/orchestration/backend/utils/ControllerUtils.java
index 5f28c39..3c5d58f 100644
--- a/src/main/java/eu/dissco/orchestration/backend/utils/ControllerUtils.java
+++ b/src/main/java/eu/dissco/orchestration/backend/utils/ControllerUtils.java
@@ -1,5 +1,8 @@
package eu.dissco.orchestration.backend.utils;
+import static eu.dissco.orchestration.backend.utils.AgentUtils.createAgent;
+
+import eu.dissco.orchestration.backend.domain.AgentRoleType;
import eu.dissco.orchestration.backend.exception.ForbiddenException;
import eu.dissco.orchestration.backend.schema.Agent;
import eu.dissco.orchestration.backend.schema.Agent.Type;
@@ -9,26 +12,29 @@
@Slf4j
public class ControllerUtils {
- private ControllerUtils(){}
- public static Agent getAgent(Authentication authentication) throws ForbiddenException {
+ public static final String ORCID = "orcid";
+
+ private ControllerUtils() {
+ }
+
+ public static Agent getAgent(Authentication authentication, AgentRoleType roleType)
+ throws ForbiddenException {
var claims = ((Jwt) authentication.getPrincipal()).getClaims();
- if (claims.containsKey("orcid")) {
+ if (claims.containsKey(ORCID)) {
StringBuilder fullName = new StringBuilder();
- if (claims.containsKey("given_name")){
+ if (claims.containsKey("given_name")) {
fullName.append(claims.get("given_name"));
}
- if (claims.containsKey("family_name")){
+ if (claims.containsKey("family_name")) {
if (!fullName.isEmpty()) {
fullName.append(" ");
}
fullName.append(claims.get("family_name"));
}
var nameString = fullName.toString().isEmpty() ? null : fullName.toString();
- return new Agent()
- .withType(Type.SCHEMA_PERSON)
- .withSchemaName(nameString)
- .withId((String) claims.get("orcid"));
+ return createAgent(nameString, (String) claims.get(ORCID), roleType, ORCID,
+ Type.SCHEMA_PERSON);
} else {
log.error("Missing ORCID in token");
throw new ForbiddenException("No ORCID provided");
diff --git a/src/main/java/eu/dissco/orchestration/backend/utils/TombstoneUtils.java b/src/main/java/eu/dissco/orchestration/backend/utils/TombstoneUtils.java
index 7016d2f..21f27e7 100644
--- a/src/main/java/eu/dissco/orchestration/backend/utils/TombstoneUtils.java
+++ b/src/main/java/eu/dissco/orchestration/backend/utils/TombstoneUtils.java
@@ -4,6 +4,7 @@
import eu.dissco.orchestration.backend.schema.TombstoneMetadata;
import java.time.Instant;
import java.util.Date;
+import java.util.List;
public class TombstoneUtils {
@@ -11,10 +12,11 @@ private TombstoneUtils() {
// This is a utility class
}
- public static TombstoneMetadata buildTombstoneMetadata(Agent agent, String text, Instant timestamp) {
+ public static TombstoneMetadata buildTombstoneMetadata(Agent agent, String text,
+ Instant timestamp) {
return new TombstoneMetadata()
.withType("ods:TombstoneMetadata")
- .withOdsTombstonedByAgent(agent)
+ .withOdsHasAgents(List.of(agent))
.withOdsTombstoneDate(Date.from(timestamp))
.withOdsTombstoneText(text);
}
diff --git a/src/main/resources/json-schema/create-update-tombstone-event.json b/src/main/resources/json-schema/create-update-tombstone-event.json
index a7c1dc5..890fe02 100644
--- a/src/main/resources/json-schema/create-update-tombstone-event.json
+++ b/src/main/resources/json-schema/create-update-tombstone-event.json
@@ -1,7 +1,7 @@
{
- "$id": "https://schemas.dissco.tech/schemas/fdo-type/create-update-delete-tombstone-event/0.3.0/create-update-delete-tombstone-event.json",
+ "$id": "https://schemas.dissco.tech/schemas/fdo-type/create-update-delete-tombstone-event/0.4.0/create-update-delete-tombstone-event.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "Create Update Tombstone Event Version 0.3.0",
+ "$comment": "Create Update Tombstone Event Version 0.4.0",
"title": "CreateUpdateTombstoneEvent",
"type": "object",
"properties": {
@@ -18,7 +18,7 @@
"description": "The type of the object, in this case ods:CreateUpdateTombstoneEvent",
"const": "ods:CreateUpdateTombstoneEvent"
},
- "ods:ID": {
+ "dcterms:identifier": {
"type": "string",
"description": "The unique identifier of the Create Update Tombstone Event, this includes the version of the object",
"examples": [
@@ -26,17 +26,18 @@
"https://doi.org/TEST/PGD-QGK-S0R/1"
]
},
- "ods:type": {
+ "ods:fdoType": {
"type": "string",
"description": "The DOI to the FDO type of the object",
"pattern": "^https:\/\/doi\\.org\/[\\w\\.]+/[\\w\\.]+",
"examples": [
- "https://doi.org/10.15468/1a2b3c"
+ "https://doi.org/21.T11148/bbad8c4e101e8af01115",
+ "https://doi.org/21.T11148/894b1e6cad57e921764e"
]
},
"prov:Activity": {
"type": "object",
- "description": "Based on the prov-ontology Activity Class, https://www.w3.org/ns/prov#Activity",
+ "description": "This class describes the activity that was performed on the Digital Object, based on the prov-ontology Activity Class",
"properties": {
"@id": {
"type": "string",
@@ -47,7 +48,7 @@
]
},
"@type": {
- "description": "The type of the activity",
+ "description": "The type of the activity that was performed on the object",
"enum": [
"ods:Create",
"ods:Update",
@@ -56,13 +57,13 @@
},
"prov:wasAssociatedWith": {
"type": "array",
- "description": "Contains zero a reference to one or more prov:Agent objects that were associated with the Activity",
+ "description": "Contains a reference to zero or more prov:Agent objects that were associated with the Activity",
"items": {
"type": "object",
"properties": {
"@id": {
"type": "string",
- "description": "The unique identifier of the prov:Agent that was associated with the Activity (as indicated at @id in the Activity Class), http://www.w3.org/ns/prov#wasAssociatedWith",
+ "description": "The unique identifier of the prov:Agent that was associated with the Activity",
"examples": [
"https://orcid.org/0000-0002-1825-0097",
"https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
@@ -70,15 +71,16 @@
},
"prov:hadRole": {
"enum": [
- "ods:Approver",
- "ods:Requestor",
- "ods:Generator"
+ "Approver",
+ "Requestor",
+ "Generator"
],
"description": "The role of the agent in the activity"
}
},
"required": [
- "@id"
+ "@id",
+ "prov:hadRole"
],
"additionalProperties": false
}
@@ -86,14 +88,15 @@
"prov:endedAtTime": {
"type": "string",
"format": "date-time",
- "description": "The date and time when the activity ended, in ISO 8601 format, http://www.w3.org/ns/prov#endedAtTime",
+ "description": "The date and time when the activity ended. This is also the moment the CreateUpdateTombstoneEvent is created, following the ISO Date Time Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX",
"examples": [
- "2024-06-11T09:14:00.233Z"
+ "2024-06-11T09:14:00.233Z",
+ "2022-04-23T12:13:00.789Z"
]
},
"prov:used": {
"type": "string",
- "description": "The unique identifier of the prov:Entity that was used in the Activity (as indicated at @id in the Activity Class), http://www.w3.org/ns/prov#used",
+ "description": "The unique identifier of the Digital Object that was the object of the Activity (as indicated at @id in the Activity Class)",
"examples": [
"https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX/1",
"https://doi.org/10.15468/1a2b3c/3"
@@ -101,9 +104,10 @@
},
"rdfs:comment": {
"type": "string",
- "description": "A comment about the activity, https://www.w3.org/2000/01/rdf-schema#comment",
+ "description": "A comment about the activity",
"examples": [
- "This activity was created by the user"
+ "Digital Specimen was updated by user XXX at 2024-02-23T15:12:00.131Z",
+ "Annotation was created by user YYY at 2021-06-01T12:00:00.111Z"
]
},
"ods:changeValue": {
@@ -111,7 +115,7 @@
"items": {
"type": "object"
},
- "description": "The json patch object that describes the changes made to the object as opposed to the previous version, empty if this is a ods:Create or ods:Tombstone activity",
+ "description": "The json patch object that describes the changes made to the object as opposed to the previous version, empty if this is a ods:Create",
"examples": [
[
{
@@ -139,7 +143,7 @@
},
"prov:Entity": {
"type": "object",
- "description": "Based on the prov-ontology Entity Class, https://www.w3.org/ns/prov#Entity",
+ "description": "The Digital Object on which the Activity took place, the resulting Digital Object is stored as the `prov:value`",
"properties": {
"@id": {
"type": "string",
@@ -159,25 +163,29 @@
},
"prov:value": {
"type": "object",
- "description": "The full object that is the result of the prov:Activity, the structure of this object is defined by the `@type` field. When the `prov:Activity` is of type `ods:Tombstone`, this field is empty",
+ "description": "The full object that is the result of the prov:Activity, the structure of this object is defined by the `@type` field.",
"examples": [
{
"@id": "https://hdl.handle.net/20.5000.1025/ABC-DEF-GHI",
"@type": "ods:Mapping",
- "ods:ID": "https://hdl.handle.net/20.5000.1025/ABC-DEF-GHI",
- "ods:type": "https://doi.org/10.15468/1a2b3c",
+ "schema:identifier": "https://hdl.handle.net/20.5000.1025/ABC-DEF-GHI",
+ "ods:fdoType": "https://doi.org/10.15468/1a2b3c",
"schema:version": 1,
"schema:name": "Default Mapping for Herbarium of University of Coimbra (COI)",
"schema:description": "The default mapping for the Herbarium of the University of Coimbra (COI) to the Darwin Core standard.",
"schema:dateCreated": "2021-06-01T12:00:00.111Z",
- "ods:creator": "e2befba6-9324-4bb4-9f41-d7dfae4a44b0",
+ "schema:creator": {
+ "@id": "https://orcid.org/0000-0002-5669-2769",
+ "@type": "schema:Person",
+ "schema:name": "John Doe"
+ },
"ods:Mapping": {
"ods:DefaultMapping": [
{
"ods:physicalSpecimenIDType": "Global"
},
{
- "ods:type": "ZoologyVertebrateSpecimen"
+ "ods:fdoType": "ZoologyVertebrateSpecimen"
},
{
"ods:organisationId": "https://ror.org/05xg72x27"
@@ -212,17 +220,18 @@
"required": [
"@id",
"@type",
+ "dcterms:identifier",
"prov:wasGeneratedBy"
],
"additionalProperties": false
},
- "ods:hasProvAgent": {
+ "ods:hasAgents": {
"type": "array",
- "description": "Contains zero or more prov:Agent objects",
+ "description": "Contains zero or more prov:Agent objects, connected based on the identifier with the `prov:Activity`",
"items": {
"type": "object",
- "description": "Based on the prov-ontology Agent Class, https://www.w3.org/ns/prov#Agent",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/agent.json"
+ "description": "Based on the prov-ontology Agent Class",
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/agent.json"
}
}
}
diff --git a/src/main/resources/json-schema/data-mapping-request.json b/src/main/resources/json-schema/data-mapping-request.json
index 07f4817..d25b610 100644
--- a/src/main/resources/json-schema/data-mapping-request.json
+++ b/src/main/resources/json-schema/data-mapping-request.json
@@ -1,29 +1,31 @@
{
- "$id": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.3.0/data-mapping-request.json",
+ "$id": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.4.0/data-mapping-request.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "Data Mapping Request Version 0.3.0",
+ "$comment": "Data Mapping Request Version 0.4.0",
"title": "DataMapping",
"type": "object",
"properties": {
"schema:name": {
"type": "string",
- "description": "Name of the Mapping as provided by the user, https://schema.org/name",
+ "description": "Name of the Mapping as provided by the user",
"examples": [
- "Default Mapping for Herbarium of University of Coimbra (COI)"
+ "Default Mapping for Herbarium of University of Coimbra (COI)",
+ "Mapping for Naturalis Darwin Core Archives"
]
},
"schema:description": {
"type": "string",
- "description": "Description of the Mapping as provided by the user, https://schema.org/description",
+ "description": "Description of the Mapping as provided by the user",
"examples": [
- "The default mapping for the Herbarium of the University of Coimbra (COI) to the Darwin Core standard."
+ "The default mapping for the Herbarium of the University of Coimbra (COI) to the Darwin Core standard.",
+ "A mapping that can be used for all Naturalis datasets"
]
},
- "ods:DefaultMapping": {
+ "ods:hasDefaultMapping": {
"type": "array",
"description": "A mapping object setting default values for ods terms. These default mappings will be set for each record in the dataset. Key is the ods term, value the default value to use",
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.3.0/default-mapping.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.4.0/default-mapping.json"
},
"examples": [
[
@@ -39,11 +41,11 @@
]
]
},
- "ods:FieldMapping": {
+ "ods:hasTermMapping": {
"type": "array",
"description": "Setting an explicit mapping for an ods term to a term from the incoming dataset. Key is the ods term, value the incoming data standard term",
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.3.0/field-mapping.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.4.0/term-mapping.json"
},
"examples": [
[
@@ -56,10 +58,15 @@
"ods:mappingDataStandard": {
"description": "Type of data standard we are mapping to",
"enum": [
- "dwc",
- "abcd",
- "abcdefg"
+ "DwC",
+ "ABCD",
+ "ABCDEFG"
]
+ },
+ "ods:hasTombstoneMetadata": {
+ "type": "object",
+ "description": "Object containing the tombstone metadata of the object. Only present when ods:status is ods:Tombstone",
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/tombstone-metadata.json"
}
},
"required": [
diff --git a/src/main/resources/json-schema/data-mapping.json b/src/main/resources/json-schema/data-mapping.json
index ac57eb3..b1a9b81 100644
--- a/src/main/resources/json-schema/data-mapping.json
+++ b/src/main/resources/json-schema/data-mapping.json
@@ -1,16 +1,17 @@
{
- "$id": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.3.0/data-mapping.json",
+ "$id": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.4.0/data-mapping.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "Data Mapping Version 0.3.0",
+ "$comment": "Data Mapping Version 0.4.0",
"title": "DataMapping",
"type": "object",
"properties": {
"@id": {
"type": "string",
- "description": "The unique identifier (handle) of the Mapping object",
+ "description": "The unique identifier (handle) of the Data Mapping object",
"pattern": "^https:\/\/hdl\\.handle\\.net\/[\\w.]+\/(.){3}-(.){3}-(.){3}",
"examples": [
- "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
+ "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX",
+ "https://hdl.handle.net/20.5000.1025/XXX-YYY-XXX"
]
},
"@type": {
@@ -18,52 +19,61 @@
"description": "The type of the object, in this case ods:DataMapping",
"const": "ods:DataMapping"
},
- "ods:ID": {
+ "schema:identifier": {
"type": "string",
- "description": "Handle of the Mapping",
+ "description": "The unique identifier (handle) of the Data Mapping object",
"pattern": "^https:\/\/hdl\\.handle\\.net\/[\\w.]+\/(.){3}-(.){3}-(.){3}",
"examples": [
- "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
+ "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX",
+ "https://hdl.handle.net/20.5000.1025/XXX-YYY-XXX"
]
},
- "ods:type": {
+ "ods:fdoType": {
"type": "string",
"description": "The DOI to the FDO type of the object",
"pattern": "^https:\/\/doi\\.org\/[\\w\\.]+/[\\w\\.]+",
"examples": [
- "https://doi.org/10.15468/1a2b3c"
+ "https://doi.org/21.T11148/bbad8c4e101e8af01115",
+ "https://doi.org/21.T11148/894b1e6cad57e921764e"
]
},
"ods:status": {
"enum": [
- "ods:Draft",
- "ods:Active",
- "ods:Tombstone"
+ "Draft",
+ "Active",
+ "Tombstone"
],
- "description": "The status of the Digital Object"
+ "description": "The status of the Digital Object. A digital object can be in Draft, when it is not published yet. Active when it is published and the object is active and Tombstone which means the object has been archived."
},
"schema:version": {
"type": "integer",
"minimum": 1,
- "description": "Version of the Mapping, https://schema.org/version"
+ "description": "The version of the object, each change generates a new version. The version starts at 1 and each change will increment the version number with 1",
+ "examples": [
+ 1,
+ 2,
+ 3
+ ]
},
"schema:name": {
"type": "string",
- "description": "Name of the Mapping as provided by the user, https://schema.org/name",
+ "description": "Name of the Mapping as provided by the user",
"examples": [
- "Default Mapping for Herbarium of University of Coimbra (COI)"
+ "Default Mapping for Herbarium of University of Coimbra (COI)",
+ "Mapping for Naturalis Darwin Core Archives"
]
},
"schema:description": {
"type": "string",
- "description": "Description of the Mapping as provided by the user, https://schema.org/description",
+ "description": "Description of the Mapping as provided by the user",
"examples": [
- "The default mapping for the Herbarium of the University of Coimbra (COI) to the Darwin Core standard."
+ "The default mapping for the Herbarium of the University of Coimbra (COI) to the Darwin Core standard.",
+ "A mapping that can be used for all Naturalis datasets"
]
},
"schema:dateCreated": {
"type": "string",
- "description": "Timestamp of creation. Internally generated, https://schema.org/dateCreated",
+ "description": "Timestamp of creation. Internally generated, follows the ISO Date Time Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX",
"format": "date-time",
"examples": [
"2021-06-01T12:00:00.445Z"
@@ -71,7 +81,7 @@
},
"schema:dateModified": {
"type": "string",
- "description": "Timestamp of last modification. Internally generated, https://schema.org/dateModified",
+ "description": "Timestamp of last modification. Internally generated, follows the ISO Date Time Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX",
"format": "date-time",
"examples": [
"2021-06-06T13:22:00.445Z"
@@ -79,10 +89,10 @@
},
"schema:creator": {
"type": "object",
- "description": "Contains an ods:Agent object",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/agent.json"
+ "description": "The creator of the Data Mapping, generally this will also be the only person able to change the data mapping",
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/agent.json"
},
- "ods:DefaultMapping": {
+ "ods:hasDefaultMapping": {
"type": "array",
"description": "A mapping object setting default values for ods terms. These default mappings will be set for each record in the dataset. Key is the ods term, value the default value to use",
"examples": [
@@ -91,7 +101,7 @@
"ods:physicalSpecimenIDType": "Global"
},
{
- "ods:type": "ZoologyVertebrateSpecimen"
+ "ods:fdoType": "ZoologyVertebrateSpecimen"
},
{
"ods:organisationID": "https://ror.org/05xg72x27"
@@ -99,10 +109,10 @@
]
],
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.3.0/default-mapping.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.4.0/default-mapping.json"
}
},
- "ods:FieldMapping": {
+ "ods:hasTermMapping": {
"type": "array",
"description": "Setting an explicit mapping for an ods term to a term from the incoming dataset. Key is the ods term, value the incoming data standard term",
"examples": [
@@ -113,27 +123,28 @@
]
],
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.3.0/field-mapping.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/data-mapping/0.4.0/term-mapping.json"
}
},
"ods:mappingDataStandard": {
- "description": "Type of data standard we are mapping to",
+ "type": "string",
+ "description": "The data standard that is being mapped from",
"enum": [
- "dwc",
- "abcd",
- "abcdefg"
+ "DwC",
+ "ABCD",
+ "ABCDEFG"
]
},
- "ods:TombstoneMetadata": {
+ "ods:hasTombstoneMetadata": {
"type": "object",
"description": "Object containing the tombstone metadata of the object. Only present when ods:status is ods:Tombstone",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/tombstone-metadata.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/tombstone-metadata.json"
}
},
"required": [
"@type",
- "ods:ID",
- "ods:type",
+ "schema:identifier",
+ "ods:fdoType",
"schema:version",
"schema:dateCreated",
"schema:dateModified",
diff --git a/src/main/resources/json-schema/machine-annotation-service-request.json b/src/main/resources/json-schema/machine-annotation-service-request.json
index 4ec332a..5c878d5 100644
--- a/src/main/resources/json-schema/machine-annotation-service-request.json
+++ b/src/main/resources/json-schema/machine-annotation-service-request.json
@@ -1,21 +1,21 @@
{
- "$id": "https://schemas.dissco.tech/schemas/developer-schema/machine-annotation-service/0.3.0/machine-annotation-service-request.json",
+ "$id": "https://schemas.dissco.tech/schemas/developer-schema/machine-annotation-service/0.4.0/machine-annotation-service-request.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "Machine Annotation Service RequestVersion 0.3.0",
+ "$comment": "Machine Annotation Service Request Version 0.4.0",
"title": "MachineAnnotationServiceRequest",
"type": "object",
"description": "Automated annotation services that enhance biodiversity data",
"properties": {
"schema:name": {
"type": "string",
- "description": "Name of the Machine Annotation Service as provided by the user, https://schema.org/name",
+ "description": "Name of the Machine Annotation Service as provided by the user",
"examples": [
"GBIF Linker Service"
]
},
"schema:description": {
"type": "string",
- "description": "Description of the Machine Annotation Service as provided by the user, https://schema.org/description",
+ "description": "Description of the Machine Annotation Service as provided by the user",
"examples": [
"A service that links GBIF records to a DiSSCo Digital Specimen. It creates an EntityRelationship indicating the relationship"
]
@@ -23,21 +23,21 @@
"ods:containerImage": {
"type": "string",
"description": "URI of the image of the containerized application",
- "example": [
+ "examples": [
"public.ecr.aws/dissco/mindat-georeferencing"
]
},
"ods:containerTag": {
"type": "string",
"description": "Tag of the image",
- "example": [
+ "examples": [
"sha-cb76994"
]
},
- "ods:TargetDigitalObjectFilter": {
+ "ods:hasTargetDigitalObjectFilter": {
"type": "object",
"description": "Filters describing the criteria that must be met in order to apply the MAS. No filters implies the MAS may run on *any* digital object. Field names are given in JSON paths, and accepted values for that field are provided as an arrays",
- "example": [
+ "examples": [
{
"$.ods:type": [
"https://doi.org/21.T11148/894b1e6cad57e921764e"
@@ -57,8 +57,8 @@
},
"schema:creativeWorkStatus": {
"type": "string",
- "description": "The current status of the service, https://schema.org/creativeWorkStatus",
- "example": [
+ "description": "The current status of the service",
+ "examples": [
"Alpha",
"Production"
]
@@ -66,12 +66,12 @@
"schema:codeRepository": {
"type": "string",
"format": "url",
- "description": "Link to code base of MAS, https://schema.org/codeRepository"
+ "description": "Link to code base of MAS"
},
"schema:programmingLanguage": {
"type": "string",
- "description": "The programming language of the MAS, https://schema.org/programmingLanguage",
- "example": [
+ "description": "The programming language of the MAS",
+ "examples": [
"Python"
]
},
@@ -82,13 +82,15 @@
"schema:maintainer": {
"type": "object",
"description": "Party maintaining the code, could be an schema:Organisation or a schema:Person",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/agent.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/agent.json"
},
"schema:license": {
"type": "string",
"format": "url",
"description": "License of the service, https://schema.org/license",
- "example": "https://opensource.org/licenses/Apache-2.0"
+ "examples": [
+ "https://opensource.org/licenses/Apache-2.0"
+ ]
},
"ods:dependency": {
"type": "array",
@@ -149,23 +151,23 @@
"minimum": 3600,
"description": "Time in milliseconds the MAS message may remain in the Kafka Queue before being marked as timed out. Min 1 hour."
},
- "ods:TombstoneMetadata": {
+ "ods:hasTombstoneMetadata": {
"type": "object",
"description": "Object containing the tombstone metadata of the object",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/tombstone-metadata.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/tombstone-metadata.json"
},
- "ods:hasEnvironmentalVariable": {
+ "ods:hasEnvironmentalVariables": {
"type": "array",
"description": "Environmental variables to supply to the MAS, non-sensitive",
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/environmental-variable.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/environmental-variable.json"
}
},
- "ods:hasSecretVariable": {
+ "ods:hasSecretVariables": {
"type": "array",
"description": "Secret variables to supply to the MAS",
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/secret-variable.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/secret-variable.json"
}
}
},
@@ -174,7 +176,7 @@
"ods:containerImage",
"ods:containerTag",
"ods:batchingPermitted",
- "ods:TargetDigitalObjectFilter"
+ "ods:hasTargetDigitalObjectFilter"
],
"additionalProperties": false
}
diff --git a/src/main/resources/json-schema/machine-annotation-service.json b/src/main/resources/json-schema/machine-annotation-service.json
index 4aea567..44a675f 100644
--- a/src/main/resources/json-schema/machine-annotation-service.json
+++ b/src/main/resources/json-schema/machine-annotation-service.json
@@ -1,7 +1,7 @@
{
- "$id": "https://schemas.dissco.tech/schemas/fdo-type/machine-annotation-service/0.3.0/machine-annotation-service.json",
+ "$id": "https://schemas.dissco.tech/schemas/fdo-type/machine-annotation-service/0.4.0/machine-annotation-service.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "Machine Annotation Service Version 0.3.0",
+ "$comment": "Machine Annotation Service Version 0.4.0",
"title": "MachineAnnotationService",
"type": "object",
"description": "Automated annotation services that enhance biodiversity data",
@@ -11,7 +11,8 @@
"description": "The unique identifier (handle) of the Machine Annotation Service object",
"pattern": "^https:\/\/hdl\\.handle\\.net\/[\\w.]+\/(.){3}-(.){3}-(.){3}",
"examples": [
- "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
+ "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX",
+ "https://hdl.handle.net/20.5000.1025/JH1-C37-E7F"
]
},
"@type": {
@@ -19,34 +20,36 @@
"description": "The type of the object, in this case ods:MachineAnnotationService",
"const": "ods:MachineAnnotationService"
},
- "ods:ID": {
+ "schema:identifier": {
"type": "string",
"description": "Handle of the Machine Annotation Service",
"pattern": "^https:\/\/hdl\\.handle\\.net\/[\\w.]+\/(.){3}-(.){3}-(.){3}",
"examples": [
- "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
+ "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX",
+ "https://hdl.handle.net/20.5000.1025/JH1-C37-E7F"
]
},
- "ods:type": {
+ "ods:fdoType": {
"type": "string",
"description": "The DOI to the FDO type of the object",
"pattern": "^https:\/\/doi\\.org\/[\\w\\.]+/[\\w\\.]+",
"examples": [
- "https://doi.org/10.15468/1a2b3c"
+ "https://doi.org/21.T11148/417a4f472f60f7974c12"
]
},
"ods:status": {
+ "type": "string",
"enum": [
- "ods:Draft",
- "ods:Active",
- "ods:Tombstone"
+ "Draft",
+ "Active",
+ "Tombstone"
],
- "description": "The status of the Digital Object"
+ "description": "The status of the Digital Object. A digital object can be in Draft, when it is not published yet. Active when it is published and the object is active and Tombstone which means the object has been archived."
},
"schema:version": {
"type": "integer",
"minimum": 1,
- "description": "Version of the Machine Annotation Service, https://schema.org/version"
+ "description": "The version of the object, each change generates a new version. The version starts at 1 and each change will increment the version number with 1"
},
"schema:name": {
"type": "string",
@@ -64,7 +67,7 @@
},
"schema:dateCreated": {
"type": "string",
- "description": "Timestamp of creation. Internally generated, https://schema.org/dateCreated",
+ "description": "Timestamp of creation. Internally generated, following the ISO Date Time Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX",
"format": "date-time",
"examples": [
"2021-06-01T12:00:000Z"
@@ -72,7 +75,7 @@
},
"schema:dateModified": {
"type": "string",
- "description": "Timestamp of last modification. Internally generated, https://schema.org/dateModified",
+ "description": "Timestamp of last modification. Internally generated, following the ISO Date Time Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX",
"format": "date-time",
"examples": [
"2021-06-06T13:22:00.445Z"
@@ -80,27 +83,27 @@
},
"schema:creator": {
"type": "object",
- "description": "Contains an ods:Agent object",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/agent.json"
+ "description": "Contains information about the creator of this MachineAnnotationService Digital Object, the agent creating this record in DiSSCo's system",
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/agent.json"
},
"ods:containerImage": {
"type": "string",
- "description": "URI of the image of the containerized application",
- "example": [
+ "description": "The URI to the location of the image in a public container repository. Should only contain the image name and not the tag",
+ "examples": [
"public.ecr.aws/dissco/mindat-georeferencing"
]
},
"ods:containerTag": {
"type": "string",
- "description": "Tag of the image",
- "example": [
+ "description": "The image tag of the container image. This should not be `latest` but point to a specific version. On an update of the application a new image tag should be generated and the Digital Object should be updated",
+ "examples": [
"sha-cb76994"
]
},
- "ods:TargetDigitalObjectFilter": {
+ "ods:hasTargetDigitalObjectFilter": {
"type": "object",
- "description": "Filters describing the criteria that must be met in order to apply the MAS. No filters implies the MAS may run on *any* digital object. Field names are given in JSON paths, and accepted values for that field are provided as an arrays",
- "example": [
+ "description": "Filters describing the criteria that a Digital Object must meet in order to apply the MAS. No filters implies the MAS may run on *any* digital object. Field names are given in JSON paths, and accepted values for that field are provided as an arrays. An `*` can be used as a wildcard, indicating any value is seen as valid",
+ "examples": [
{
"$['ods:type']": [
"https://doi.org/21.T11148/894b1e6cad57e921764e"
@@ -112,7 +115,7 @@
"Other Geodiversity",
"Unclassified"
],
- "$['ods:hasEvent'][*]['ods:Location']['dwc:locality']": [
+ "$['ods:hasEvents'][*]['ods:hasLocation']['dwc:locality']": [
"*"
]
}
@@ -120,8 +123,8 @@
},
"schema:creativeWorkStatus": {
"type": "string",
- "description": "The current status of the service, https://schema.org/creativeWorkStatus",
- "example": [
+ "description": "The current status of the service in terms of its service lifecycle",
+ "examples": [
"Alpha",
"Production"
]
@@ -129,115 +132,130 @@
"schema:codeRepository": {
"type": "string",
"format": "url",
- "description": "Link to code base of MAS, https://schema.org/codeRepository"
+ "description": "Link to the repository where the un-compiled, human readable code and related code is located (SVN, GitHub, CodePlex)",
+ "examples": [
+ "https://github.com/rtdeb/GeoPick",
+ "https://github.com/DiSSCo/dissco-core-backend"
+ ]
},
"schema:programmingLanguage": {
"type": "string",
- "description": "The programming language of the MAS, https://schema.org/programmingLanguage",
- "example": [
- "Python"
+ "description": "The primary programming language used by the MAS",
+ "examples": [
+ "Python",
+ "Java 17"
]
},
"ods:serviceAvailability": {
"type": "string",
- "description": "Availability commitment of the service provider as described in the SLA"
+ "description": "Availability commitment in uptime percentage of the service provider as described in the SLA",
+ "examples": [
+ "99.9%",
+ "95.0%"
+ ]
},
"schema:maintainer": {
"type": "object",
"description": "Party maintaining the code, could be an schema:Organisation or a schema:Person",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/agent.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/agent.json"
},
"schema:license": {
"type": "string",
"format": "url",
- "description": "License of the service, https://schema.org/license",
- "example": "https://opensource.org/licenses/Apache-2.0"
- },
- "ods:dependency": {
- "type": "array",
- "description": "Handles from other MAS that this MAS depends on",
- "items": {
- "type": "string",
- "description": "Handle of the Machine Annotation Service that this MAS depends on",
- "pattern": "^https:\/\/hdl\\.handle\\.net\/[\\w.]+\/(.){3}-(.){3}-(.){3}"
- },
+ "description": "A license document which applies to this Machine Annotation Service",
"examples": [
- [
- "https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
- ]
+ "https://opensource.org/license/Apache-2.0",
+ "https://opensource.org/license/mit"
]
},
"schema:ContactPoint": {
"type": "object",
- "description": "The contact point for support and information for the MAS, https://schema.org/ContactPoint",
+ "description": "The contact point for support and information for the Machine Annotation Service",
"properties": {
"schema:description": {
"type": "string",
- "description": "Description of the contact point, https://schema.org/description"
+ "description": "General description of the contact point"
},
"schema:email": {
"type": "string",
"format": "email",
- "description": "Email of the contact point, https://schema.org/email"
+ "description": "Email of the contact point"
},
"schema:url": {
"type": "string",
"format": "url",
- "description": "Email of the contact point, https://schema.org/email"
+ "description": "URL of the contact point, this could be a link to a contact form or a link to a github issues page"
},
"schema:telephone": {
"type": "string",
- "description": "Telephone number of the contact point, https://schema.org/telephone"
+ "description": "Telephone number of the contact point"
}
}
},
"ods:slaDocumentation": {
"type": "string",
- "description": "Link to SLA documentation"
+ "format": "url",
+ "description": "Link to SLA documentation of the Machine Annotation Service",
+ "examples": [
+ "https://www.dissco.tech/sla/mas"
+ ]
},
"ods:topicName": {
"type": "string",
- "description": "Kafka topic through which the MAS receives messages. Defaults to PID of MAS"
+ "description": "Kafka topic through which the MAS receives messages. Defaults to PID of the Machine Annotation Service",
+ "examples": [
+ "p3r-7ff-s4n",
+ "fsj-g4m-l47"
+ ]
},
"ods:maxReplicas": {
"type": "integer",
- "description": "The maximum amount of this MAS that can simultaneously run without causing issues"
+ "description": "The maximum amount of this MAS that can simultaneously run without causing issues",
+ "minimum": 1,
+ "examples": [
+ 3
+ ]
},
"ods:batchingPermitted": {
"type": "boolean",
- "description": "Whether or not this MAS can create Batch Annotations. MAS outputs must then comply with batchMetadata, see https://schemas.dissco.tech/schemas/annotations/0.1.0/annotation-event.json"
+ "description": "Whether or not this Machine Annotation Service can create Batch Annotations. Machine Annotation Service outputs must then comply with batchMetadata, see https://schemas.dissco.tech/schemas/annotations/0.1.0/annotation-event.json"
},
"ods:timeToLive": {
"type": "integer",
"minimum": 3600,
- "description": "Time in milliseconds the MAS message may remain in the Kafka Queue before being marked as timed out. Min 1 hour."
+ "description": "Time in milliseconds the MAS message may remain in the Kafka Queue before being marked as timed out. Min 1 hour.",
+ "examples": [
+ 86400000,
+ 43200000
+ ]
},
- "ods:TombstoneMetadata": {
+ "ods:hasTombstoneMetadata": {
"type": "object",
"description": "Object containing the tombstone metadata of the object",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/tombstone-metadata.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/tombstone-metadata.json"
},
- "ods:hasEnvironmentalVariable": {
+ "ods:hasEnvironmentalVariables": {
"type": "array",
- "description": "Environmental variables to supply to the MAS, non-sensitive",
+ "description": "Environmental variables to supply to the Machine Annotation Service, non-sensitive",
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/environmental-variable.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/environmental-variable.json"
}
},
- "ods:hasSecretVariable": {
+ "ods:hasSecretVariables": {
"type": "array",
- "description": "Secret variables to supply to the MAS",
+ "description": "Secret variables to supply to the Machine Annotation Service, sensitive",
"items": {
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/secret-variable.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/secret-variable.json"
}
}
},
"required": [
"@type",
- "ods:ID",
- "ods:type",
+ "schema:identifier",
+ "ods:fdoType",
"schema:name",
"schema:dateCreated",
+ "schema:creator",
"schema:dateModified",
"ods:containerImage",
"ods:containerTag",
diff --git a/src/main/resources/json-schema/source-system-request.json b/src/main/resources/json-schema/source-system-request.json
index 12f03e7..6f6a617 100644
--- a/src/main/resources/json-schema/source-system-request.json
+++ b/src/main/resources/json-schema/source-system-request.json
@@ -1,20 +1,20 @@
{
- "$id": "https://schemas.dissco.tech/schemas/fdo-type/source-system/0.3.0/source-system-request.json",
+ "$id": "https://schemas.dissco.tech/schemas/fdo-type/source-system/0.4.0/source-system-request.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "Source System Request Version 0.3.0",
+ "$comment": "Source System Request Version 0.4.0",
"title": "SourceSystemRequest",
"type": "object",
"properties": {
"schema:name": {
"type": "string",
- "description": "Name of the Source System as provided by the user, https://schema.org/name",
+ "description": "Name of the Source System as provided by the user",
"examples": [
"Herbarium of University of Coimbra (COI)"
]
},
"schema:description": {
"type": "string",
- "description": "Description of the Source System as provided by the user, https://schema.org/description",
+ "description": "Description of the Source System as provided by the user",
"examples": [
"The herbarium collection of the University of Coimbra is one of the oldest in Portugal, with specimens dating back to the 18th century."
]
@@ -53,13 +53,18 @@
"examples": [
"https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
]
+ },
+ "ods:hasTombstoneMetadata": {
+ "type": "object",
+ "description": "Object containing the tombstone metadata of the object. Only present when ods:status is ods:Tombstone",
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/tombstone-metadata.json"
}
},
"required": [
"schema:name",
- "schema:uri",
+ "schema:url",
"ods:translatorType",
- "ods:mappingID"
+ "ods:dataMappingID"
],
"additionalProperties": false
}
diff --git a/src/main/resources/json-schema/source-system.json b/src/main/resources/json-schema/source-system.json
index 26e3fa2..14a9afb 100644
--- a/src/main/resources/json-schema/source-system.json
+++ b/src/main/resources/json-schema/source-system.json
@@ -1,7 +1,7 @@
{
- "$id": "https://schemas.dissco.tech/schemas/fdo-type/source-system/0.3.0/source-system.json",
+ "$id": "https://schemas.dissco.tech/schemas/fdo-type/source-system/0.4.0/source-system.json",
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "Source System Version 0.3.0",
+ "$comment": "Source System Version 0.4.0",
"title": "SourceSystem",
"type": "object",
"properties": {
@@ -18,15 +18,15 @@
"description": "The type of the object, in this case ods:SourceSystem",
"const": "ods:SourceSystem"
},
- "ods:ID": {
+ "schema:identifier": {
"type": "string",
- "description": "Handle of the Source System",
+ "description": "The unique identifier (handle) of the Source System object",
"pattern": "^https:\/\/hdl\\.handle\\.net\/[\\w.]+\/(.){3}-(.){3}-(.){3}",
"examples": [
"https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
]
},
- "ods:type": {
+ "ods:fdoType": {
"type": "string",
"description": "The DOI to the FDO type of the object",
"pattern": "^https:\/\/doi\\.org/[\\w\\.]+/[\\w\\.]+",
@@ -35,28 +35,35 @@
]
},
"ods:status": {
+ "type": "string",
"enum": [
- "ods:Draft",
- "ods:Active",
- "ods:Tombstone"
+ "Draft",
+ "Active",
+ "Tombstone"
],
- "description": "The status of the Digital Object"
+ "description": "The status of the Digital Object. A digital object can be in Draft, when it is not published yet. Active when it is published and the object is active and Tombstone which means the object has been archived."
},
"schema:version": {
"type": "integer",
"minimum": 1,
- "description": "Version of the Source System, https://schema.org/version"
+ "description": "The version of the object, each change generates a new version. The version starts at 1 and each change will increment the version number with 1",
+ "examples": [
+ 1,
+ 2,
+ 3
+ ]
},
"schema:name": {
"type": "string",
- "description": "Name of the Source System as provided by the user, https://schema.org/name",
+ "description": "Name of the Source System as provided by the user",
"examples": [
- "Herbarium of University of Coimbra (COI)"
+ "Herbarium of University of Coimbra (COI)",
+ "Naturalis Biodiversity Center (NL) - Vermes"
]
},
"schema:description": {
"type": "string",
- "description": "Description of the Source System as provided by the user, https://schema.org/description",
+ "description": "Description of the Source System as provided by the user",
"examples": [
"The herbarium collection of the University of Coimbra is one of the oldest in Portugal, with specimens dating back to the 18th century."
]
@@ -71,7 +78,7 @@
},
"schema:dateModified": {
"type": "string",
- "description": "Timestamp of last modification. Internally generated, https://schema.org/dateModified",
+ "description": "Timestamp of last modification. Internally generated, follows the ISO Date Time Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX",
"format": "date-time",
"examples": [
"2021-06-06T13:22:00.445Z"
@@ -80,7 +87,7 @@
"schema:creator": {
"type": "object",
"description": "Contains an ods:Agent object",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/agent.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/agent.json"
},
"schema:url": {
"type": "string",
@@ -92,12 +99,13 @@
},
"ltc:collectionManagementSystem": {
"type": "string",
- "description": "The collection management system that the source system is using, https://rs.tdwg.org/ltc/terms/collectionManagementSystem",
+ "description": "The collection management system that the source system is using",
"examples": [
"Specify 7"
]
},
"ods:translatorType": {
+ "type": "string",
"description": "The serialisation of the data the endpoint provides indicating what type of Translator is required",
"enum": [
"dwca",
@@ -117,16 +125,16 @@
"https://hdl.handle.net/20.5000.1025/XXX-XXX-XXX"
]
},
- "ods:TombstoneMetadata": {
+ "ods:hasTombstoneMetadata": {
"type": "object",
"description": "Object containing the tombstone metadata of the object. Only present when ods:status is ods:Tombstone",
- "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.3.0/tombstone-metadata.json"
+ "$ref": "https://schemas.dissco.tech/schemas/fdo-type/shared-model/0.4.0/tombstone-metadata.json"
}
},
"required": [
"@type",
- "ods:ID",
- "ods:type",
+ "schema:identifier",
+ "ods:fdoType",
"schema:version",
"schema:dateCreated",
"schema:dateModified",
diff --git a/src/test/java/eu/dissco/orchestration/backend/controller/ControllerUtilsTest.java b/src/test/java/eu/dissco/orchestration/backend/controller/ControllerUtilsTest.java
index 2bda144..17a1d16 100644
--- a/src/test/java/eu/dissco/orchestration/backend/controller/ControllerUtilsTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/controller/ControllerUtilsTest.java
@@ -1,5 +1,6 @@
package eu.dissco.orchestration.backend.controller;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.CREATOR;
import static eu.dissco.orchestration.backend.testutils.TestUtils.OBJECT_CREATOR;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertThrows;
@@ -9,6 +10,7 @@
import eu.dissco.orchestration.backend.exception.ForbiddenException;
import eu.dissco.orchestration.backend.schema.Agent;
import eu.dissco.orchestration.backend.schema.Agent.Type;
+import eu.dissco.orchestration.backend.utils.AgentUtils;
import eu.dissco.orchestration.backend.utils.ControllerUtils;
import java.util.HashMap;
import java.util.Map;
@@ -29,6 +31,30 @@ class ControllerUtilsTest {
@Mock
private Authentication authentication;
+ private static Stream claimsAndNames() {
+ return Stream.of(
+ Arguments.of(
+ new HashMap(Map.of(
+ "orcid", OBJECT_CREATOR,
+ "family_name", "Attenborough",
+ "given_name", "David")),
+ AgentUtils.createAgent("David Attenborough", OBJECT_CREATOR, CREATOR, "orcid",
+ Type.SCHEMA_PERSON)
+ ),
+ Arguments.of(new HashMap(Map.of(
+ "orcid", OBJECT_CREATOR,
+ "given_name", "David")),
+ AgentUtils.createAgent("David", OBJECT_CREATOR, CREATOR, "orcid",
+ Type.SCHEMA_PERSON)
+ ),
+ Arguments.of(new HashMap(Map.of(
+ "orcid", OBJECT_CREATOR,
+ "family_name", "Attenborough")),
+ AgentUtils.createAgent("Attenborough", OBJECT_CREATOR, CREATOR, "orcid",
+ Type.SCHEMA_PERSON)
+ ));
+ }
+
@ParameterizedTest
@MethodSource("claimsAndNames")
void testGetAgentFullName(Map claims, Agent expected)
@@ -37,7 +63,7 @@ void testGetAgentFullName(Map claims, Agent expected)
givenAuthentication(claims);
// When
- var agent = ControllerUtils.getAgent(authentication);
+ var agent = ControllerUtils.getAgent(authentication, CREATOR);
// Then
assertThat(agent).isEqualTo(expected);
@@ -52,41 +78,9 @@ void testNoOrcid() {
givenAuthentication(claims);
// When / Then
- assertThrows(ForbiddenException.class, () -> ControllerUtils.getAgent(authentication));
- }
-
-
- private static Stream claimsAndNames() {
- return Stream.of(
- Arguments.of(
- new HashMap(Map.of(
- "orcid", OBJECT_CREATOR,
- "family_name", "Attenborough",
- "given_name", "David")),
- new Agent()
- .withId(OBJECT_CREATOR)
- .withSchemaName("David Attenborough")
- .withType(Type.SCHEMA_PERSON)
- ),
- Arguments.of(new HashMap(Map.of(
- "orcid", OBJECT_CREATOR,
- "given_name", "David")),
- new Agent()
- .withId(OBJECT_CREATOR)
- .withSchemaName("David")
- .withType(Type.SCHEMA_PERSON)
- ),
- Arguments.of(new HashMap(Map.of(
- "orcid", OBJECT_CREATOR,
- "family_name", "Attenborough")),
- new Agent()
- .withId(OBJECT_CREATOR)
- .withSchemaName("Attenborough")
- .withType(Type.SCHEMA_PERSON)
- ));
+ assertThrows(ForbiddenException.class, () -> ControllerUtils.getAgent(authentication, CREATOR));
}
-
private void givenAuthentication(Map claims) {
var principal = mock(Jwt.class);
given(authentication.getPrincipal()).willReturn(principal);
diff --git a/src/test/java/eu/dissco/orchestration/backend/controller/DataMappingControllerTest.java b/src/test/java/eu/dissco/orchestration/backend/controller/DataMappingControllerTest.java
index fd39298..c2fc4ca 100644
--- a/src/test/java/eu/dissco/orchestration/backend/controller/DataMappingControllerTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/controller/DataMappingControllerTest.java
@@ -88,7 +88,8 @@ void testUpdateDataMapping() throws Exception {
givenDataMappingSingleJsonApiWrapper());
// When
- var result = controller.updateDataMapping(authentication, PREFIX, SUFFIX, requestBody, mockRequest);
+ var result = controller.updateDataMapping(authentication, PREFIX, SUFFIX, requestBody,
+ mockRequest);
// Then
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
@@ -101,7 +102,8 @@ void testUpdateDataMappingNoChanges() throws Exception {
var requestBody = givenDataMappingRequestJson();
// When
- var result = controller.updateDataMapping(authentication, PREFIX, SUFFIX, requestBody, mockRequest);
+ var result = controller.updateDataMapping(authentication, PREFIX, SUFFIX, requestBody,
+ mockRequest);
// Then
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.NO_CONTENT);
diff --git a/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java b/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java
index 7c4d721..c15f813 100644
--- a/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java
@@ -37,9 +37,9 @@
@ExtendWith(MockitoExtension.class)
class MachineAnnotationServiceControllerTest {
+ MockHttpServletRequest mockRequest = new MockHttpServletRequest();
@Mock
private Authentication authentication;
- MockHttpServletRequest mockRequest = new MockHttpServletRequest();
@Mock
private MachineAnnotationServiceService service;
@Mock
diff --git a/src/test/java/eu/dissco/orchestration/backend/repository/DataMappingRepositoryIT.java b/src/test/java/eu/dissco/orchestration/backend/repository/DataMappingRepositoryIT.java
index 45efdd8..c19675d 100644
--- a/src/test/java/eu/dissco/orchestration/backend/repository/DataMappingRepositoryIT.java
+++ b/src/test/java/eu/dissco/orchestration/backend/repository/DataMappingRepositoryIT.java
@@ -159,8 +159,8 @@ void testGetDataMappingsLastPage() throws JsonProcessingException {
void testTombstoneDataMapping() throws JsonProcessingException {
// Given
var dataMapping = givenDataMapping(HANDLE, 1);
- dataMapping.setOdsStatus(OdsStatus.ODS_TOMBSTONE);
- dataMapping.setOdsTombstoneMetadata(givenTombstoneMetadata(ObjectType.DATA_MAPPING));
+ dataMapping.setOdsStatus(OdsStatus.TOMBSTONE);
+ dataMapping.setOdsHasTombstoneMetadata(givenTombstoneMetadata(ObjectType.DATA_MAPPING));
postDataMappings(List.of(dataMapping));
// When
diff --git a/src/test/java/eu/dissco/orchestration/backend/repository/MachineAnnotationServiceRepositoryIT.java b/src/test/java/eu/dissco/orchestration/backend/repository/MachineAnnotationServiceRepositoryIT.java
index cce32e2..c8e87f6 100644
--- a/src/test/java/eu/dissco/orchestration/backend/repository/MachineAnnotationServiceRepositoryIT.java
+++ b/src/test/java/eu/dissco/orchestration/backend/repository/MachineAnnotationServiceRepositoryIT.java
@@ -140,7 +140,7 @@ void testGetMassSecondPage() {
void testDeleteMas() {
// Given
var mas = givenMas();
- mas.setOdsTombstoneMetadata(givenTombstoneMetadata(ObjectType.MAS));
+ mas.setOdsHasTombstoneMetadata(givenTombstoneMetadata(ObjectType.MAS));
postMass(List.of(mas));
// When
diff --git a/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java b/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java
index d50a9eb..b0f0254 100644
--- a/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java
+++ b/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java
@@ -123,8 +123,8 @@ void testGetSourceSystemsSecondPage() throws JsonProcessingException {
void testTombstoneSourceSystem() throws JsonProcessingException {
// Given
var sourceSystem = givenSourceSystem();
- sourceSystem.withOdsStatus(OdsStatus.ODS_TOMBSTONE);
- sourceSystem.withOdsTombstoneMetadata(givenTombstoneMetadata(ObjectType.SOURCE_SYSTEM));
+ sourceSystem.withOdsStatus(OdsStatus.TOMBSTONE);
+ sourceSystem.withOdsHasTombstoneMetadata(givenTombstoneMetadata(ObjectType.SOURCE_SYSTEM));
postSourceSystem(List.of(sourceSystem));
// When
diff --git a/src/test/java/eu/dissco/orchestration/backend/service/DataMappingServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/DataMappingServiceTest.java
index 935fecf..30f31a2 100644
--- a/src/test/java/eu/dissco/orchestration/backend/service/DataMappingServiceTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/service/DataMappingServiceTest.java
@@ -57,6 +57,7 @@
@ExtendWith(MockitoExtension.class)
class DataMappingServiceTest {
+ Clock updatedClock = Clock.fixed(UPDATED, ZoneOffset.UTC);
private DataMappingService service;
@Mock
private FdoRecordService fdoRecordService;
@@ -68,10 +69,8 @@ class DataMappingServiceTest {
private DataMappingRepository repository;
@Mock
private FdoProperties fdoProperties;
-
private MockedStatic mockedStatic;
private MockedStatic mockedClock;
- Clock updatedClock = Clock.fixed(UPDATED, ZoneOffset.UTC);
@BeforeEach
void setup() {
@@ -102,7 +101,7 @@ void testCreateDataMapping() throws Exception {
then(fdoRecordService).should().buildCreateRequest(dataMapping, ObjectType.DATA_MAPPING);
then(repository).should().createDataMapping(givenDataMapping(HANDLE, 1));
then(kafkaPublisherService).should()
- .publishCreateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 1)));
+ .publishCreateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 1)), givenAgent());
}
@Test
@@ -112,7 +111,7 @@ void testCreateDataMappingKafkaFails() throws Exception {
given(fdoProperties.getDataMappingType()).willReturn(DATA_MAPPING_TYPE_DOI);
given(handleComponent.postHandle(any())).willReturn(BARE_HANDLE);
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
- .publishCreateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 1)));
+ .publishCreateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 1)), givenAgent());
// When
assertThrowsExactly(ProcessingFailedException.class,
@@ -144,7 +143,7 @@ void testCreateDataMappingKafkaAndRollbackFails() throws Exception {
given(fdoProperties.getDataMappingType()).willReturn(DATA_MAPPING_TYPE_DOI);
given(handleComponent.postHandle(any())).willReturn(BARE_HANDLE);
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
- .publishCreateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 1)));
+ .publishCreateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 1)), givenAgent());
willThrow(PidException.class).given(handleComponent).rollbackHandleCreation(any());
// When
@@ -177,7 +176,7 @@ void testUpdateDataMapping() throws Exception {
then(repository).should().updateDataMapping(givenDataMapping(HANDLE, 2));
then(kafkaPublisherService).should()
.publishUpdateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 2)),
- MAPPER.valueToTree(prevDataMapping));
+ MAPPER.valueToTree(prevDataMapping), givenAgent());
}
@Test
@@ -189,7 +188,7 @@ void testUpdateDataMappingKafkaFails() throws Exception {
given(fdoProperties.getDataMappingType()).willReturn(DATA_MAPPING_TYPE_DOI);
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
.publishUpdateEvent(MAPPER.valueToTree(givenDataMapping(HANDLE, 2)),
- MAPPER.valueToTree(prevDataMapping));
+ MAPPER.valueToTree(prevDataMapping), givenAgent());
// When
assertThrowsExactly(ProcessingFailedException.class,
@@ -271,7 +270,7 @@ void testTombstoneDataMapping() throws Exception {
// Then
then(repository).should().tombstoneDataMapping(givenTombstoneDataMapping(), UPDATED);
then(handleComponent).should().tombstoneHandle(any(), eq(BARE_HANDLE));
- then(kafkaPublisherService).should().publishTombstoneEvent(any(), any());
+ then(kafkaPublisherService).should().publishTombstoneEvent(any(), any(), eq(givenAgent()));
}
@Test
@@ -289,7 +288,8 @@ void testTombstoneDataMappingKafkaFailed() throws Exception {
// Given
given(repository.getActiveDataMapping(BARE_HANDLE)).willReturn(
Optional.of(givenDataMapping(HANDLE, 1)));
- doThrow(JsonProcessingException.class).when(kafkaPublisherService).publishTombstoneEvent(any(), any());
+ doThrow(JsonProcessingException.class).when(kafkaPublisherService)
+ .publishTombstoneEvent(any(), any(), eq(givenAgent()));
// Then
assertThrowsExactly(ProcessingFailedException.class,
diff --git a/src/test/java/eu/dissco/orchestration/backend/service/KafkaPublisherServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/KafkaPublisherServiceTest.java
index 8062fba..69fe169 100644
--- a/src/test/java/eu/dissco/orchestration/backend/service/KafkaPublisherServiceTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/service/KafkaPublisherServiceTest.java
@@ -1,6 +1,7 @@
package eu.dissco.orchestration.backend.service;
import static eu.dissco.orchestration.backend.testutils.TestUtils.MAPPER;
+import static eu.dissco.orchestration.backend.testutils.TestUtils.givenAgent;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.BDDMockito.then;
@@ -35,7 +36,7 @@ void testPublishCreateEvent() throws JsonProcessingException {
// Given
// When
- service.publishCreateEvent(MAPPER.valueToTree(TestUtils.givenMas()));
+ service.publishCreateEvent(MAPPER.valueToTree(TestUtils.givenMas()), givenAgent());
// Then
then(kafkaTemplate).should().send(eq("createUpdateDeleteTopic"), anyString());
@@ -47,7 +48,7 @@ void testPublishUpdateEvent() throws JsonProcessingException {
// When
service.publishUpdateEvent(MAPPER.valueToTree(TestUtils.givenMas()),
- MAPPER.createObjectNode());
+ MAPPER.createObjectNode(), givenAgent());
// Then
then(kafkaTemplate).should().send(eq("createUpdateDeleteTopic"), anyString());
@@ -59,7 +60,7 @@ void testPublishTombstoneEvent() throws Exception {
// When
service.publishTombstoneEvent(MAPPER.valueToTree(TestUtils.givenTombstoneMas()),
- MAPPER.valueToTree(TestUtils.givenMas()));
+ MAPPER.valueToTree(TestUtils.givenMas()), givenAgent());
// Then
then(kafkaTemplate).should().send(eq("createUpdateDeleteTopic"), anyString());
diff --git a/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java
index f0e4eb7..0ad3b77 100644
--- a/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java
@@ -86,6 +86,7 @@ class MachineAnnotationServiceServiceTest {
private final Configuration configuration = new Configuration(Configuration.VERSION_2_3_32);
private final KubernetesProperties kubernetesProperties = new KubernetesProperties();
+ Clock updatedClock = Clock.fixed(UPDATED, ZoneOffset.UTC);
@Mock
private MachineAnnotationServiceRepository repository;
@Mock
@@ -103,11 +104,19 @@ class MachineAnnotationServiceServiceTest {
@Mock
private FdoProperties fdoProperties;
private MachineAnnotationServiceService service;
-
private MockedStatic mockedStatic;
private MockedStatic mockedClock;
- Clock updatedClock = Clock.fixed(UPDATED, ZoneOffset.UTC);
+ private static Stream masKeys() {
+ return Stream.of(
+ Arguments.of(null, null),
+ Arguments.of(givenMasEnvironment(), givenMasSecrets()),
+ Arguments.of(List.of(new EnvironmentalVariable("name", 1)),
+ givenMasSecrets()),
+ Arguments.of(List.of(new EnvironmentalVariable("name", true)),
+ givenMasSecrets())
+ );
+ }
@BeforeEach
void setup() throws IOException {
@@ -136,16 +145,16 @@ void testCreateMas(List masEnv,
List masSecret) throws Exception {
// Given
var expectedMas = givenMas()
- .withOdsHasEnvironmentalVariable(masEnv)
- .withOdsHasSecretVariable(masSecret);
+ .withOdsHasEnvironmentalVariables(masEnv)
+ .withOdsHasSecretVariables(masSecret);
var expected = new JsonApiWrapper(new JsonApiData(
expectedMas.getId(),
ObjectType.MAS,
flattenMas(expectedMas)
), new JsonApiLinks(MAS_PATH));
var masRequest = givenMasRequest()
- .withOdsHasEnvironmentalVariable(masEnv)
- .withOdsHasSecretVariable(masSecret);
+ .withOdsHasEnvironmentalVariables(masEnv)
+ .withOdsHasSecretVariables(masSecret);
given(handleComponent.postHandle(any())).willReturn(BARE_HANDLE);
given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092");
given(properties.getNamespace()).willReturn("namespace");
@@ -171,7 +180,7 @@ void testCreateMas(List masEnv,
.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(),
any(Object.class));
then(kafkaPublisherService).should()
- .publishCreateEvent(MAPPER.valueToTree(expectedMas));
+ .publishCreateEvent(MAPPER.valueToTree(expectedMas), givenAgent());
}
@ParameterizedTest
@@ -219,7 +228,7 @@ void testCreateMasMinimum(Integer maxReplicas) throws Exception {
.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(),
any(Object.class));
then(kafkaPublisherService).should()
- .publishCreateEvent(MAPPER.valueToTree(mas));
+ .publishCreateEvent(MAPPER.valueToTree(mas), givenAgent());
}
@Test
@@ -302,7 +311,7 @@ void testCreateMasKafkaFails() throws Exception {
given(properties.getNamespace()).willReturn("namespace");
given(properties.getRunningEndpoint()).willReturn("https://dev.dissco.tech/api/running");
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
- .publishCreateEvent(MAPPER.valueToTree(givenMas()));
+ .publishCreateEvent(MAPPER.valueToTree(givenMas()), givenAgent());
var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class);
given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class)))
.willReturn(createDeploy);
@@ -351,7 +360,7 @@ void testCreateMasKafkaAndPidFails() throws Exception {
given(properties.getNamespace()).willReturn("namespace");
given(properties.getRunningEndpoint()).willReturn("https://dev.dissco.tech/api/running");
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
- .publishCreateEvent(MAPPER.valueToTree(givenMas()));
+ .publishCreateEvent(MAPPER.valueToTree(givenMas()), givenAgent());
willThrow(PidException.class).given(handleComponent).rollbackHandleCreation(any());
var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class);
given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class)))
@@ -391,7 +400,6 @@ void testCreateMasKafkaAndPidFails() throws Exception {
eq(SUFFIX.toLowerCase() + "-scaled-object"));
}
-
@Test
void testUpdateMas() throws Exception {
// Given
@@ -429,7 +437,8 @@ void testUpdateMas() throws Exception {
.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(),
any(Object.class));
then(kafkaPublisherService).should()
- .publishUpdateEvent(MAPPER.valueToTree(givenMas(2)), MAPPER.valueToTree(prevMas.get()));
+ .publishUpdateEvent(MAPPER.valueToTree(givenMas(2)), MAPPER.valueToTree(prevMas.get()),
+ givenAgent());
}
@Test
@@ -512,7 +521,8 @@ void testUpdateMasKafkaFails() throws Exception {
given(properties.getNamespace()).willReturn("namespace");
given(properties.getRunningEndpoint()).willReturn("https://dev.dissco.tech/api/running");
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
- .publishUpdateEvent(MAPPER.valueToTree(givenMas(2)), MAPPER.valueToTree(prevMas.get()));
+ .publishUpdateEvent(MAPPER.valueToTree(givenMas(2)), MAPPER.valueToTree(prevMas.get()),
+ givenAgent());
var replaceDeploy = mock(APIreplaceNamespacedDeploymentRequest.class);
given(appsV1Api.replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"),
eq("namespace"), any(V1Deployment.class))).willReturn(replaceDeploy);
@@ -653,7 +663,7 @@ void testTombstoneMas()
.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(),
eq(SUFFIX.toLowerCase() + "-scaled-object"));
then(handleComponent).should().tombstoneHandle(any(), eq(BARE_HANDLE));
- then(kafkaPublisherService).should().publishTombstoneEvent(any(), any());
+ then(kafkaPublisherService).should().publishTombstoneEvent(any(), any(), eq(givenAgent()));
}
@Test
@@ -669,7 +679,7 @@ void testTombstoneMasKafkaFailed() throws Exception {
given(customObjectsApi.deleteNamespacedCustomObject("keda.sh", "v1alpha1", "namespace",
"scaledobjects", "gw0-pop-xsl-scaled-object")).willReturn(deleteCustom);
doThrow(JsonProcessingException.class).when(kafkaPublisherService)
- .publishTombstoneEvent(any(), any());
+ .publishTombstoneEvent(any(), any(), eq(givenAgent()));
given(fdoRecordService.buildTombstoneRequest(ObjectType.MAS, BARE_HANDLE)).willReturn(
givenTombstoneRequestMas());
@@ -741,17 +751,6 @@ void testDeleteKedaFails() throws ApiException {
then(kafkaPublisherService).shouldHaveNoInteractions();
}
- private static Stream masKeys() {
- return Stream.of(
- Arguments.of(null, null),
- Arguments.of(givenMasEnvironment(), givenMasSecrets()),
- Arguments.of(List.of(new EnvironmentalVariable("name", 1)),
- givenMasSecrets()),
- Arguments.of(List.of(new EnvironmentalVariable("name", true)),
- givenMasSecrets())
- );
- }
-
private Optional buildOptionalPrev() {
return Optional.of(givenMas(1, "Another MAS", TTL));
}
diff --git a/src/test/java/eu/dissco/orchestration/backend/service/ProvenanceServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/ProvenanceServiceTest.java
index 02aa864..1f883a8 100644
--- a/src/test/java/eu/dissco/orchestration/backend/service/ProvenanceServiceTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/service/ProvenanceServiceTest.java
@@ -1,5 +1,7 @@
package eu.dissco.orchestration.backend.service;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.CREATOR;
+import static eu.dissco.orchestration.backend.domain.AgentRoleType.PROCESSING_SERVICE;
import static eu.dissco.orchestration.backend.testutils.TestUtils.APP_HANDLE;
import static eu.dissco.orchestration.backend.testutils.TestUtils.APP_NAME;
import static eu.dissco.orchestration.backend.testutils.TestUtils.HANDLE;
@@ -7,6 +9,7 @@
import static eu.dissco.orchestration.backend.testutils.TestUtils.OBJECT_CREATOR;
import static eu.dissco.orchestration.backend.testutils.TestUtils.TTL;
import static eu.dissco.orchestration.backend.testutils.TestUtils.UPDATED;
+import static eu.dissco.orchestration.backend.testutils.TestUtils.givenAgent;
import static eu.dissco.orchestration.backend.testutils.TestUtils.givenDataMapping;
import static eu.dissco.orchestration.backend.testutils.TestUtils.givenMas;
import static eu.dissco.orchestration.backend.testutils.TestUtils.givenSourceSystem;
@@ -17,14 +20,15 @@
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
-import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import eu.dissco.orchestration.backend.domain.ObjectType;
import eu.dissco.orchestration.backend.properties.ApplicationProperties;
import eu.dissco.orchestration.backend.schema.Agent;
import eu.dissco.orchestration.backend.schema.Agent.Type;
+import eu.dissco.orchestration.backend.schema.Identifier.DctermsType;
import eu.dissco.orchestration.backend.schema.MachineAnnotationService.OdsStatus;
import eu.dissco.orchestration.backend.schema.OdsChangeValue;
+import eu.dissco.orchestration.backend.utils.AgentUtils;
import java.util.Date;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
@@ -42,41 +46,63 @@ class ProvenanceServiceTest {
private static List givenExpectedAgents() {
return List.of(
- new Agent()
- .withId(OBJECT_CREATOR)
- .withType(Type.SCHEMA_PERSON),
- new Agent()
- .withId(APP_HANDLE)
- .withType(Type.AS_APPLICATION)
- .withSchemaName(APP_NAME)
+ AgentUtils.createAgent(null, OBJECT_CREATOR, CREATOR,
+ "orcid", Type.PROV_PERSON),
+ AgentUtils.createAgent(APP_NAME, APP_HANDLE, PROCESSING_SERVICE,
+ DctermsType.DOI.value(), Type.PROV_SOFTWARE_AGENT)
);
}
+ private static List givenChangeValueTombstone(ObjectType objectType) {
+ return List.of(
+ givenOdsChangeValue("add", "/ods:hasTombstoneMetadata", givenTombstoneMetadata(objectType)),
+ givenOdsChangeValue("replace", "/ods:status", OdsStatus.TOMBSTONE),
+ givenOdsChangeValue("replace", "/schema:version", 2),
+ givenOdsChangeValue("replace", "/schema:dateModified", Date.from(UPDATED))
+ );
+ }
+
+ private static List givenChangeValueUpdate() {
+ return List.of(
+ givenOdsChangeValue("replace", "/schema:version", 2),
+ givenOdsChangeValue("replace", "/schema:name", "A Machine Annotation Service")
+ );
+ }
+
+ private static OdsChangeValue givenOdsChangeValue(String op, String path, Object value) {
+ return new OdsChangeValue()
+ .withAdditionalProperty("op", op)
+ .withAdditionalProperty("path", path)
+ .withAdditionalProperty("value", MAPPER.convertValue(value, new TypeReference<>() {
+ }));
+ }
+
@BeforeEach
void setup() {
this.service = new ProvenanceService(MAPPER, properties);
}
@Test
- void testGenerateCreateEvent() throws JsonProcessingException {
+ void testGenerateCreateEvent() {
// Given
given(properties.getName()).willReturn(APP_NAME);
given(properties.getPid()).willReturn(APP_HANDLE);
var machineAnnotationService = givenMas();
// When
- var event = service.generateCreateEvent(MAPPER.valueToTree(machineAnnotationService));
+ var event = service.generateCreateEvent(MAPPER.valueToTree(machineAnnotationService),
+ givenAgent());
// Then
- assertThat(event.getOdsID()).isEqualTo(HANDLE + "/" + "1");
+ assertThat(event.getDctermsIdentifier()).isEqualTo(HANDLE + "/" + "1");
assertThat(event.getProvActivity().getOdsChangeValue()).isNull();
assertThat(event.getProvActivity().getRdfsComment()).isEqualTo("Object newly created");
assertThat(event.getProvEntity().getProvValue()).isNotNull();
- assertThat(event.getOdsHasProvAgent()).isEqualTo(givenExpectedAgents());
+ assertThat(event.getOdsHasAgents()).isEqualTo(givenExpectedAgents());
}
@Test
- void testGenerateUpdateEvent() throws JsonProcessingException {
+ void testGenerateUpdateEvent() {
// Given
given(properties.getName()).willReturn(APP_NAME);
given(properties.getPid()).willReturn(APP_HANDLE);
@@ -85,18 +111,18 @@ void testGenerateUpdateEvent() throws JsonProcessingException {
// When
var event = service.generateUpdateEvent(MAPPER.valueToTree(machineAnnotationService),
- MAPPER.valueToTree(prevMachineAnnotationService));
+ MAPPER.valueToTree(prevMachineAnnotationService), givenAgent());
// Then
- assertThat(event.getOdsID()).isEqualTo(HANDLE + "/" + "2");
+ assertThat(event.getDctermsIdentifier()).isEqualTo(HANDLE + "/" + "2");
assertThat(event.getProvActivity().getRdfsComment()).isEqualTo("Object updated");
assertThat(event.getProvActivity().getOdsChangeValue()).isEqualTo(givenChangeValueUpdate());
assertThat(event.getProvEntity().getProvValue()).isNotNull();
- assertThat(event.getOdsHasProvAgent()).isEqualTo(givenExpectedAgents());
+ assertThat(event.getOdsHasAgents()).isEqualTo(givenExpectedAgents());
}
@Test
- void testGenerateTombstoneEventMas() throws Exception {
+ void testGenerateTombstoneEventMas() {
// Given
given(properties.getName()).willReturn(APP_NAME);
given(properties.getPid()).willReturn(APP_HANDLE);
@@ -104,19 +130,19 @@ void testGenerateTombstoneEventMas() throws Exception {
var tombstoneMas = MAPPER.valueToTree(givenTombstoneMas());
// When
- var event = service.generateTombstoneEvent(tombstoneMas, originalMas);
+ var event = service.generateTombstoneEvent(tombstoneMas, originalMas, givenAgent());
// Then
- assertThat(event.getOdsID()).isEqualTo(HANDLE + "/" + "2");
+ assertThat(event.getDctermsIdentifier()).isEqualTo(HANDLE + "/" + "2");
assertThat(event.getProvActivity().getOdsChangeValue()).isEqualTo(
givenChangeValueTombstone(ObjectType.MAS));
assertThat(event.getProvEntity().getProvValue()).isNotNull();
assertThat(event.getProvActivity().getRdfsComment()).isEqualTo("Object tombstoned");
- assertThat(event.getOdsHasProvAgent()).isEqualTo(givenExpectedAgents());
+ assertThat(event.getOdsHasAgents()).isEqualTo(givenExpectedAgents());
}
@Test
- void testGenerateTombstoneEventSourceSystem() throws Exception {
+ void testGenerateTombstoneEventSourceSystem() {
// Given
given(properties.getName()).willReturn(APP_NAME);
given(properties.getPid()).willReturn(APP_HANDLE);
@@ -124,18 +150,19 @@ void testGenerateTombstoneEventSourceSystem() throws Exception {
var tombstoneSourceSystem = MAPPER.valueToTree(givenTombstoneSourceSystem());
// When
- var event = service.generateTombstoneEvent(tombstoneSourceSystem, originalSourceSystem);
+ var event = service.generateTombstoneEvent(tombstoneSourceSystem, originalSourceSystem,
+ givenAgent());
// Then
- assertThat(event.getOdsID()).isEqualTo(HANDLE + "/" + "2");
+ assertThat(event.getDctermsIdentifier()).isEqualTo(HANDLE + "/" + "2");
assertThat(event.getProvActivity().getOdsChangeValue()).hasSameElementsAs(
givenChangeValueTombstone(ObjectType.SOURCE_SYSTEM));
assertThat(event.getProvEntity().getProvValue()).isNotNull();
- assertThat(event.getOdsHasProvAgent()).isEqualTo(givenExpectedAgents());
+ assertThat(event.getOdsHasAgents()).isEqualTo(givenExpectedAgents());
}
@Test
- void testGenerateTombstoneEventDataMapping() throws Exception {
+ void testGenerateTombstoneEventDataMapping() {
// Given
given(properties.getName()).willReturn(APP_NAME);
given(properties.getPid()).willReturn(APP_HANDLE);
@@ -143,38 +170,14 @@ void testGenerateTombstoneEventDataMapping() throws Exception {
var tombstoneDataMapping = MAPPER.valueToTree(givenTombstoneDataMapping());
// When
- var event = service.generateTombstoneEvent(tombstoneDataMapping, originalDataMapping);
+ var event = service.generateTombstoneEvent(tombstoneDataMapping, originalDataMapping,
+ givenAgent());
// Then
- assertThat(event.getOdsID()).isEqualTo(HANDLE + "/" + "2");
+ assertThat(event.getDctermsIdentifier()).isEqualTo(HANDLE + "/" + "2");
assertThat(event.getProvActivity().getOdsChangeValue()).hasSameElementsAs(
givenChangeValueTombstone(ObjectType.DATA_MAPPING));
assertThat(event.getProvEntity().getProvValue()).isNotNull();
- assertThat(event.getOdsHasProvAgent()).isEqualTo(givenExpectedAgents());
- }
-
-
- private static List givenChangeValueTombstone(ObjectType objectType) {
- return List.of(
- givenOdsChangeValue("add", "/ods:TombstoneMetadata", givenTombstoneMetadata(objectType)),
- givenOdsChangeValue("replace", "/ods:status", OdsStatus.ODS_TOMBSTONE),
- givenOdsChangeValue("replace", "/schema:version", 2),
- givenOdsChangeValue("replace", "/schema:dateModified", Date.from(UPDATED))
- );
- }
-
- private static List givenChangeValueUpdate() {
- return List.of(
- givenOdsChangeValue("replace", "/schema:version", 2),
- givenOdsChangeValue("replace", "/schema:name", "A Machine Annotation Service")
- );
- }
-
- private static OdsChangeValue givenOdsChangeValue(String op, String path, Object value) {
- return new OdsChangeValue()
- .withAdditionalProperty("op", op)
- .withAdditionalProperty("path", path)
- .withAdditionalProperty("value", MAPPER.convertValue(value, new TypeReference<>() {
- }));
+ assertThat(event.getOdsHasAgents()).isEqualTo(givenExpectedAgents());
}
}
diff --git a/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java
index 6042799..2c59b94 100644
--- a/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java
+++ b/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java
@@ -96,7 +96,7 @@ class SourceSystemServiceTest {
private final Configuration configuration = new Configuration(Configuration.VERSION_2_3_32);
private final Random random = new Random();
-
+ Clock updatedClock = Clock.fixed(UPDATED, ZoneOffset.UTC);
private SourceSystemService service;
@Mock
private KafkaPublisherService kafkaPublisherService;
@@ -112,11 +112,8 @@ class SourceSystemServiceTest {
private BatchV1Api batchV1Api;
@Mock
private FdoProperties fdoProperties;
-
private MockedStatic mockedStatic;
private MockedStatic mockedClock;
- Clock updatedClock = Clock.fixed(UPDATED, ZoneOffset.UTC);
-
private static V1CronJob getV1CronJob(String image, String name) {
var container = new V1Container();
@@ -181,7 +178,7 @@ void testCreateSourceSystem() throws Exception {
assertThat(result).isEqualTo(expected);
then(repository).should().createSourceSystem(givenSourceSystem());
then(kafkaPublisherService).should()
- .publishCreateEvent(MAPPER.valueToTree(givenSourceSystem()));
+ .publishCreateEvent(MAPPER.valueToTree(givenSourceSystem()), givenAgent());
}
@Test
@@ -265,7 +262,7 @@ void testCreateSourceSystemKafkaFails() throws Exception {
given(
batchV1Api.createNamespacedJob(eq(NAMESPACE), any(V1Job.class))).willReturn(createJob);
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
- .publishCreateEvent(MAPPER.valueToTree(givenSourceSystem()));
+ .publishCreateEvent(MAPPER.valueToTree(givenSourceSystem()), givenAgent());
// When
assertThrowsExactly(ProcessingFailedException.class,
@@ -300,7 +297,8 @@ void testCreateSourceSystemKafkaAndRollbackFails() throws Exception {
given(dataMappingService.getActiveDataMapping(sourceSystem.getOdsDataMappingID())).willReturn(
Optional.of(givenDataMapping(sourceSystem.getOdsDataMappingID(), 1)));
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
- .publishCreateEvent(MAPPER.valueToTree(givenSourceSystem(OdsTranslatorType.DWCA)));
+ .publishCreateEvent(MAPPER.valueToTree(givenSourceSystem(OdsTranslatorType.DWCA)),
+ givenAgent());
willThrow(PidException.class).given(handleComponent).rollbackHandleCreation(any());
var createCron = mock(APIcreateNamespacedCronJobRequest.class);
given(batchV1Api.createNamespacedCronJob(eq(NAMESPACE), any(V1CronJob.class)))
@@ -339,7 +337,8 @@ void testRunSourceSystemById() {
@ValueSource(booleans = {true, false})
void testUpdateSourceSystem(boolean triggerTranslator) throws Exception {
var sourceSystem = givenSourceSystemRequest();
- var prevSourceSystem = Optional.of(givenSourceSystem(OdsTranslatorType.DWCA).withOdsMaximumRecords(25));
+ var prevSourceSystem = Optional.of(
+ givenSourceSystem(OdsTranslatorType.DWCA).withOdsMaximumRecords(25));
given(fdoProperties.getSourceSystemType()).willReturn(SOURCE_SYSTEM_TYPE_DOI);
var expected = givenSourceSystemSingleJsonApiWrapper(2);
given(repository.getActiveSourceSystem(BARE_HANDLE)).willReturn(prevSourceSystem);
@@ -366,7 +365,7 @@ void testUpdateSourceSystem(boolean triggerTranslator) throws Exception {
}
then(kafkaPublisherService).should()
.publishUpdateEvent(MAPPER.valueToTree(givenSourceSystem(2)),
- MAPPER.valueToTree(prevSourceSystem.get()));
+ MAPPER.valueToTree(prevSourceSystem.get()), givenAgent());
}
@Test
@@ -398,7 +397,7 @@ void testUpdateSourceSystemKafkaFails() throws Exception {
given(fdoProperties.getSourceSystemType()).willReturn(SOURCE_SYSTEM_TYPE_DOI);
willThrow(JsonProcessingException.class).given(kafkaPublisherService)
.publishUpdateEvent(MAPPER.valueToTree(givenSourceSystem(2)),
- MAPPER.valueToTree(prevSourceSystem.get()));
+ MAPPER.valueToTree(prevSourceSystem.get()), givenAgent());
var updateCron = mock(APIreplaceNamespacedCronJobRequest.class);
given(batchV1Api.replaceNamespacedCronJob(anyString(), eq(NAMESPACE), any(V1CronJob.class)))
.willReturn(updateCron);
@@ -552,7 +551,7 @@ void testTombstoneSourceSystem() throws Exception {
// Then
then(repository).should().tombstoneSourceSystem(givenTombstoneSourceSystem(), UPDATED);
then(handleComponent).should().tombstoneHandle(any(), eq(BARE_HANDLE));
- then(kafkaPublisherService).should().publishTombstoneEvent(any(), any());
+ then(kafkaPublisherService).should().publishTombstoneEvent(any(), any(), eq(givenAgent()));
}
@Test
diff --git a/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java b/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java
index a1835f6..27150ea 100644
--- a/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java
+++ b/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java
@@ -3,6 +3,7 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dissco.orchestration.backend.domain.AgentRoleType;
import eu.dissco.orchestration.backend.domain.ObjectType;
import eu.dissco.orchestration.backend.domain.jsonapi.JsonApiData;
import eu.dissco.orchestration.backend.domain.jsonapi.JsonApiLinks;
@@ -17,11 +18,10 @@
import eu.dissco.orchestration.backend.schema.DataMappingRequest.OdsMappingDataStandard;
import eu.dissco.orchestration.backend.schema.DefaultMapping;
import eu.dissco.orchestration.backend.schema.EnvironmentalVariable;
-import eu.dissco.orchestration.backend.schema.FieldMapping;
import eu.dissco.orchestration.backend.schema.MachineAnnotationService;
import eu.dissco.orchestration.backend.schema.MachineAnnotationServiceRequest;
-import eu.dissco.orchestration.backend.schema.OdsTargetDigitalObjectFilter;
-import eu.dissco.orchestration.backend.schema.OdsTargetDigitalObjectFilter__1;
+import eu.dissco.orchestration.backend.schema.OdsHasTargetDigitalObjectFilter;
+import eu.dissco.orchestration.backend.schema.OdsHasTargetDigitalObjectFilter__1;
import eu.dissco.orchestration.backend.schema.SchemaContactPoint;
import eu.dissco.orchestration.backend.schema.SchemaContactPoint__1;
import eu.dissco.orchestration.backend.schema.SecretVariable;
@@ -29,7 +29,9 @@
import eu.dissco.orchestration.backend.schema.SourceSystem.OdsStatus;
import eu.dissco.orchestration.backend.schema.SourceSystemRequest;
import eu.dissco.orchestration.backend.schema.SourceSystemRequest.OdsTranslatorType;
+import eu.dissco.orchestration.backend.schema.TermMapping;
import eu.dissco.orchestration.backend.schema.TombstoneMetadata;
+import eu.dissco.orchestration.backend.utils.AgentUtils;
import java.net.URI;
import java.time.Instant;
import java.util.ArrayList;
@@ -144,10 +146,10 @@ public static SourceSystem givenSourceSystem(String id, int version,
SourceSystem.OdsTranslatorType translatorType) {
return new SourceSystem()
.withId(id)
- .withOdsID(id)
+ .withSchemaIdentifier(id)
.withType("ods:SourceSystem")
- .withOdsType(SOURCE_SYSTEM_TYPE_DOI)
- .withOdsStatus(OdsStatus.ODS_ACTIVE)
+ .withOdsFdoType(SOURCE_SYSTEM_TYPE_DOI)
+ .withOdsStatus(OdsStatus.ACTIVE)
.withSchemaVersion(version)
.withSchemaDateCreated(Date.from(CREATED))
.withSchemaDateModified(Date.from(CREATED))
@@ -159,12 +161,12 @@ public static SourceSystem givenSourceSystem(String id, int version,
.withOdsDataMappingID(HANDLE_ALT);
}
- public static SourceSystem givenTombstoneSourceSystem(){
+ public static SourceSystem givenTombstoneSourceSystem() {
return givenSourceSystem()
- .withOdsStatus(OdsStatus.ODS_TOMBSTONE)
+ .withOdsStatus(OdsStatus.TOMBSTONE)
.withSchemaDateModified(Date.from(UPDATED))
.withSchemaVersion(2)
- .withOdsTombstoneMetadata(givenTombstoneMetadata(ObjectType.SOURCE_SYSTEM));
+ .withOdsHasTombstoneMetadata(givenTombstoneMetadata(ObjectType.SOURCE_SYSTEM));
}
public static SourceSystemRequest givenSourceSystemRequest() {
@@ -215,31 +217,31 @@ public static DataMapping givenDataMapping(String id, int version) {
public static DataMapping givenDataMapping(String id, int version, String name) {
return new DataMapping()
.withId(id)
- .withOdsID(id)
+ .withSchemaIdentifier(id)
.withType("ods:DataMapping")
- .withOdsType(DATA_MAPPING_TYPE_DOI)
+ .withOdsFdoType(DATA_MAPPING_TYPE_DOI)
.withSchemaVersion(version)
- .withOdsStatus(DataMapping.OdsStatus.ODS_ACTIVE)
+ .withOdsStatus(DataMapping.OdsStatus.ACTIVE)
.withSchemaName(name)
.withSchemaDescription(OBJECT_DESCRIPTION)
.withSchemaDateCreated(Date.from(CREATED))
.withSchemaDateModified(Date.from(CREATED))
- .withOdsDefaultMapping(List.of(
+ .withOdsHasDefaultMapping(List.of(
new DefaultMapping().withAdditionalProperty("ods:organisationID",
"https://ror.org/05xg72x27")))
- .withOdsFieldMapping(List.of(
- new FieldMapping().withAdditionalProperty("ods:physicalSpecimenID",
+ .withOdsHasTermMapping(List.of(
+ new TermMapping().withAdditionalProperty("ods:physicalSpecimenID",
"dwc:catalogNumber")))
.withSchemaCreator(givenAgent())
- .withOdsMappingDataStandard(DataMapping.OdsMappingDataStandard.DWC);
+ .withOdsMappingDataStandard(DataMapping.OdsMappingDataStandard.DW_C);
}
- public static DataMapping givenTombstoneDataMapping(){
+ public static DataMapping givenTombstoneDataMapping() {
return givenDataMapping()
- .withOdsStatus(DataMapping.OdsStatus.ODS_TOMBSTONE)
+ .withOdsStatus(DataMapping.OdsStatus.TOMBSTONE)
.withSchemaDateModified(Date.from(UPDATED))
.withSchemaVersion(2)
- .withOdsTombstoneMetadata(givenTombstoneMetadata(ObjectType.DATA_MAPPING));
+ .withOdsHasTombstoneMetadata(givenTombstoneMetadata(ObjectType.DATA_MAPPING));
}
@@ -247,13 +249,13 @@ public static DataMappingRequest givenDataMappingRequest() {
return new DataMappingRequest()
.withSchemaName(OBJECT_NAME)
.withSchemaDescription(OBJECT_DESCRIPTION)
- .withOdsDefaultMapping(List.of(
+ .withOdsHasDefaultMapping(List.of(
new DefaultMapping().withAdditionalProperty("ods:organisationID",
"https://ror.org/05xg72x27")))
- .withOdsFieldMapping(List.of(
- new FieldMapping().withAdditionalProperty("ods:physicalSpecimenID",
+ .withOdsHasTermMapping(List.of(
+ new TermMapping().withAdditionalProperty("ods:physicalSpecimenID",
"dwc:catalogNumber")))
- .withOdsMappingDataStandard(OdsMappingDataStandard.DWC);
+ .withOdsMappingDataStandard(OdsMappingDataStandard.DW_C);
}
public static JsonApiListWrapper givenMappingResponse(List dataMappings,
@@ -281,8 +283,8 @@ public static MachineAnnotationServiceRequest givenMasRequest() {
.withSchemaName(MAS_NAME)
.withOdsContainerImage("public.ecr.aws/dissco/fancy-mas")
.withOdsContainerTag("sha-54289")
- .withOdsTargetDigitalObjectFilter(
- new OdsTargetDigitalObjectFilter().withAdditionalProperty("ods:topicDiscipline",
+ .withOdsHasTargetDigitalObjectFilter(
+ new OdsHasTargetDigitalObjectFilter().withAdditionalProperty("ods:topicDiscipline",
"botany"))
.withSchemaDescription("A fancy mas making all dreams come true")
.withSchemaCreativeWorkStatus("Definitely production ready")
@@ -297,8 +299,8 @@ public static MachineAnnotationServiceRequest givenMasRequest() {
.withOdsMaxReplicas(5)
.withOdsBatchingPermitted(false)
.withOdsTimeToLive(TTL)
- .withOdsHasSecretVariable(givenMasSecrets())
- .withOdsHasEnvironmentalVariable(givenMasEnvironment());
+ .withOdsHasSecretVariables(givenMasSecrets())
+ .withOdsHasEnvironmentalVariables(givenMasEnvironment());
}
public static List givenMasEnvironment() {
@@ -329,19 +331,19 @@ public static MachineAnnotationService givenMas(String id, int version, String n
Integer ttl) {
return new MachineAnnotationService()
.withId(id)
- .withOdsID(id)
+ .withSchemaIdentifier(id)
.withType("ods:MachineAnnotationService")
- .withOdsType("https://hdl.handle.net/21.T11148/22e71a0015cbcfba8ffa")
+ .withOdsFdoType("https://hdl.handle.net/21.T11148/22e71a0015cbcfba8ffa")
.withSchemaVersion(version)
- .withOdsStatus(MachineAnnotationService.OdsStatus.ODS_ACTIVE)
+ .withOdsStatus(MachineAnnotationService.OdsStatus.ACTIVE)
.withSchemaDateCreated(Date.from(CREATED))
.withSchemaDateModified(Date.from(CREATED))
.withSchemaCreator(givenAgent())
.withSchemaName(name)
.withOdsContainerImage("public.ecr.aws/dissco/fancy-mas")
.withOdsContainerTag("sha-54289")
- .withOdsTargetDigitalObjectFilter(
- new OdsTargetDigitalObjectFilter__1().withAdditionalProperty("ods:topicDiscipline",
+ .withOdsHasTargetDigitalObjectFilter(
+ new OdsHasTargetDigitalObjectFilter__1().withAdditionalProperty("ods:topicDiscipline",
"botany"))
.withSchemaDescription("A fancy mas making all dreams come true")
.withSchemaCreativeWorkStatus("Definitely production ready")
@@ -350,22 +352,21 @@ public static MachineAnnotationService givenMas(String id, int version, String n
.withOdsServiceAvailability("99.99%")
.withSchemaMaintainer(givenAgent())
.withSchemaLicense("https://www.apache.org/licenses/LICENSE-2.0")
- .withOdsDependency(List.of())
.withSchemaContactPoint(new SchemaContactPoint__1().withSchemaEmail("dontmail@dissco.eu"))
.withOdsTopicName("fancy-topic-name")
.withOdsMaxReplicas(5)
.withOdsBatchingPermitted(false)
.withOdsTimeToLive(ttl)
- .withOdsHasEnvironmentalVariable(givenMasEnvironment())
- .withOdsHasSecretVariable(givenMasSecrets());
+ .withOdsHasEnvironmentalVariables(givenMasEnvironment())
+ .withOdsHasSecretVariables(givenMasSecrets());
}
- public static MachineAnnotationService givenTombstoneMas(){
+ public static MachineAnnotationService givenTombstoneMas() {
return givenMas()
- .withOdsStatus(MachineAnnotationService.OdsStatus.ODS_TOMBSTONE)
+ .withOdsStatus(MachineAnnotationService.OdsStatus.TOMBSTONE)
.withSchemaVersion(2)
.withSchemaDateModified(Date.from(UPDATED))
- .withOdsTombstoneMetadata(givenTombstoneMetadata(ObjectType.MAS));
+ .withOdsHasTombstoneMetadata(givenTombstoneMetadata(ObjectType.MAS));
}
public static JsonNode givenMasHandleRequest() throws Exception {
@@ -398,7 +399,7 @@ public static JsonNode givenMappingHandleRequest() throws Exception {
"data": {
"type": "https://doi.org/21.T11148/ce794a6f4df42eb7e77e",
"attributes": {
- "sourceDataStandard": "dwc"
+ "sourceDataStandard": "DwC"
}
}
}""");
@@ -416,10 +417,10 @@ public static JsonNode givenTombstoneRequestMas() throws JsonProcessingException
return MAPPER.readTree("""
{
"data": {
- "id": "20.5000.1025/GW0-POP-XSL",
"type": "https://doi.org/21.T11148/22e71a0015cbcfba8ffa",
+ "id": "20.5000.1025/GW0-POP-XSL",
"attributes": {
- "tombstoneText": "ods:MachineAnnotationService tombstoned by user through the orchestration backend"
+ "tombstoneText": "ods:MachineAnnotationService tombstoned by agent through the orchestration backend"
}
}
}
@@ -428,24 +429,25 @@ public static JsonNode givenTombstoneRequestMas() throws JsonProcessingException
public static TombstoneMetadata givenTombstoneMetadata(ObjectType objectType) {
var message = new StringBuilder();
- if (objectType.equals(ObjectType.MAS)){
+ if (objectType.equals(ObjectType.MAS)) {
message.append("Machine Annotation Service ");
- } else if (objectType.equals(ObjectType.DATA_MAPPING)){
+ } else if (objectType.equals(ObjectType.DATA_MAPPING)) {
message.append("Data Mapping ");
} else {
message.append("Source System ");
}
- message.append("tombstoned by user through the orchestration backend");
+ message.append("tombstoned by agent through the orchestration backend");
return new TombstoneMetadata()
.withType("ods:TombstoneMetadata")
- .withOdsTombstonedByAgent(givenAgent())
+ .withOdsHasAgents(List.of(givenAgent()))
.withOdsTombstoneDate(Date.from(UPDATED))
.withOdsTombstoneText(message.toString());
}
- public static Agent givenAgent(){
- return new Agent().withType(Type.SCHEMA_PERSON).withId(OBJECT_CREATOR);
+ public static Agent givenAgent() {
+ return AgentUtils.createAgent(null, OBJECT_CREATOR, AgentRoleType.CREATOR,
+ "orcid", Type.SCHEMA_PERSON);
}
// Token