From b3d64763e8ab949a077bbd11dbeba55f047d2d91 Mon Sep 17 00:00:00 2001 From: Sam Leeflang Date: Tue, 27 Feb 2024 10:45:17 +0100 Subject: [PATCH 1/4] Trigger ingestion cron and ingestion job - Update libraries --- .github/workflows/.trivyignore | 8 +- pom.xml | 18 +- ...ion.java => DiSSCoBackendApplication.java} | 4 +- .../MachineAnnotationServiceController.java | 7 +- .../backend/controller/MappingController.java | 5 +- .../controller/SourceSystemController.java | 21 +- .../controller/TranslatorController.java | 91 ---- .../backend/database/jooq/Indexes.java | 27 - .../backend/database/jooq/Keys.java | 3 - .../backend/database/jooq/Public.java | 7 - .../backend/database/jooq/Tables.java | 6 - .../database/jooq/enums/TranslatorType.java | 56 ++ .../backend/database/jooq/tables/Handles.java | 226 -------- .../database/jooq/tables/SourceSystem.java | 20 +- .../jooq/tables/records/HandlesRecord.java | 515 ------------------ .../tables/records/SourceSystemRecord.java | 58 +- .../backend/domain/SourceSystem.java | 3 + .../backend/domain/TranslatorRequest.java | 13 - .../backend/domain/TranslatorResponse.java | 15 - .../backend/domain/TranslatorType.java | 5 - .../exception/ProcessingFailedException.java | 2 +- .../properties/TranslatorJobProperties.java | 7 + .../repository/SourceSystemRepository.java | 3 + .../MachineAnnotationServiceService.java | 47 +- .../backend/service/MappingService.java | 10 +- .../backend/service/SourceSystemService.java | 237 +++++++- .../backend/service/TranslatorService.java | 147 ----- src/main/resources/jooq-configuration.xml | 2 +- .../resources/templates/biocase-cron-job.ftl | 49 ++ .../templates/biocase-translator-job.ftl | 47 ++ .../resources/templates/dwca-cron-job.ftl | 59 ++ .../templates/dwca-translator-job.ftl | 22 +- .../templates/geocase-translator-job.ftl | 32 -- ...achineAnnotationServiceControllerTest.java | 2 +- .../repository/SourceSystemRepositoryIT.java | 5 + .../MachineAnnotationServiceServiceTest.java | 202 ++++--- 
.../service/SourceSystemServiceTest.java | 182 ++++++- .../backend/testutils/TestUtils.java | 5 +- .../resources/db/migration/V1__init_db.sql | 5 +- 39 files changed, 921 insertions(+), 1252 deletions(-) rename src/main/java/eu/dissco/orchestration/backend/{DemoApplication.java => DiSSCoBackendApplication.java} (76%) delete mode 100644 src/main/java/eu/dissco/orchestration/backend/controller/TranslatorController.java delete mode 100644 src/main/java/eu/dissco/orchestration/backend/database/jooq/Indexes.java create mode 100644 src/main/java/eu/dissco/orchestration/backend/database/jooq/enums/TranslatorType.java delete mode 100644 src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/Handles.java delete mode 100644 src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/HandlesRecord.java delete mode 100644 src/main/java/eu/dissco/orchestration/backend/domain/TranslatorRequest.java delete mode 100644 src/main/java/eu/dissco/orchestration/backend/domain/TranslatorResponse.java delete mode 100644 src/main/java/eu/dissco/orchestration/backend/domain/TranslatorType.java delete mode 100644 src/main/java/eu/dissco/orchestration/backend/service/TranslatorService.java create mode 100644 src/main/resources/templates/biocase-cron-job.ftl create mode 100644 src/main/resources/templates/biocase-translator-job.ftl create mode 100644 src/main/resources/templates/dwca-cron-job.ftl delete mode 100644 src/main/resources/templates/geocase-translator-job.ftl diff --git a/.github/workflows/.trivyignore b/.github/workflows/.trivyignore index 27a5b47..c41ad4e 100644 --- a/.github/workflows/.trivyignore +++ b/.github/workflows/.trivyignore @@ -1,4 +1,4 @@ -# Date: November 14, 2023 -# Issue: openssl: Incorrect cipher key and IV length processing https://avd.aquasec.com/nvd/cve-2023-5363 -# Solution: Docker image needs an update -CVE-2023-5363 \ No newline at end of file +# Date: Feb 27, 2024 +# Notes: Issue with libexpat, parsing large tokens can trigger a 
denial of service +# Needs to be fixed in Docker Image. +CVE-2023-52425 \ No newline at end of file diff --git a/pom.xml b/pom.xml index d8c7746..873bebe 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ org.springframework.boot spring-boot-starter-parent - 3.2.0 + 3.2.3 eu.dissco.core @@ -16,17 +16,17 @@ Backend for orchestration service 17 - 4.3 - 19.0.0 - 5.2.0 + 4.4 + 1.26.0 + 20.0.0 1.13 + 1.19.6 + 5.2.0 dissco - 1.4.12 https://sonarcloud.io ../app-it/target/site/jacoco-aggregate/jacoco.xml - 1.17.6 @@ -115,6 +115,12 @@ json-patch ${json-patch.version} + + + org.apache.commons + commons-compress + ${apachecommons-compress.version} + org.junit.jupiter diff --git a/src/main/java/eu/dissco/orchestration/backend/DemoApplication.java b/src/main/java/eu/dissco/orchestration/backend/DiSSCoBackendApplication.java similarity index 76% rename from src/main/java/eu/dissco/orchestration/backend/DemoApplication.java rename to src/main/java/eu/dissco/orchestration/backend/DiSSCoBackendApplication.java index b6e5503..7aa1430 100644 --- a/src/main/java/eu/dissco/orchestration/backend/DemoApplication.java +++ b/src/main/java/eu/dissco/orchestration/backend/DiSSCoBackendApplication.java @@ -6,10 +6,10 @@ @SpringBootApplication @ConfigurationPropertiesScan -public class DemoApplication { +public class DiSSCoBackendApplication { public static void main(String[] args) { - SpringApplication.run(DemoApplication.class, args); + SpringApplication.run(DiSSCoBackendApplication.class, args); } } diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java b/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java index c41fbc9..25dc41a 100644 --- a/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java +++ b/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java @@ -8,6 +8,7 @@ import 
eu.dissco.orchestration.backend.domain.jsonapi.JsonApiRequestWrapper; import eu.dissco.orchestration.backend.domain.jsonapi.JsonApiWrapper; import eu.dissco.orchestration.backend.exception.NotFoundException; +import eu.dissco.orchestration.backend.exception.ProcessingFailedException; import eu.dissco.orchestration.backend.properties.ApplicationProperties; import eu.dissco.orchestration.backend.service.MachineAnnotationServiceService; import jakarta.servlet.http.HttpServletRequest; @@ -43,7 +44,7 @@ public class MachineAnnotationServiceController { public ResponseEntity createMachineAnnotationService( Authentication authentication, @RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest) - throws JsonProcessingException, TransformerException { + throws JsonProcessingException, TransformerException, ProcessingFailedException { var machineAnnotationService = getMachineAnnotation(requestBody); var userId = authentication.getName(); log.info("Received create request for machine annotation service: {} from user: {}", @@ -58,7 +59,7 @@ public ResponseEntity updateMachineAnnotationService( Authentication authentication, @PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix, @RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest) - throws JsonProcessingException, NotFoundException { + throws JsonProcessingException, NotFoundException, ProcessingFailedException { var machineAnnotationService = getMachineAnnotation(requestBody); var userId = authentication.getName(); var id = prefix + '/' + suffix; @@ -77,7 +78,7 @@ public ResponseEntity updateMachineAnnotationService( @DeleteMapping(value = "/{prefix}/{suffix}", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity deleteMachineAnnotationService(Authentication authentication, @PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix) - throws NotFoundException { + throws NotFoundException, ProcessingFailedException { 
String id = prefix + "/" + suffix; log.info("Received delete request for machine annotation service: {} from user: {}", id, authentication.getName()); diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/MappingController.java b/src/main/java/eu/dissco/orchestration/backend/controller/MappingController.java index 49098c4..874149e 100644 --- a/src/main/java/eu/dissco/orchestration/backend/controller/MappingController.java +++ b/src/main/java/eu/dissco/orchestration/backend/controller/MappingController.java @@ -8,6 +8,7 @@ import eu.dissco.orchestration.backend.domain.jsonapi.JsonApiRequestWrapper; import eu.dissco.orchestration.backend.domain.jsonapi.JsonApiWrapper; import eu.dissco.orchestration.backend.exception.NotFoundException; +import eu.dissco.orchestration.backend.exception.ProcessingFailedException; import eu.dissco.orchestration.backend.properties.ApplicationProperties; import eu.dissco.orchestration.backend.service.MappingService; import jakarta.servlet.http.HttpServletRequest; @@ -45,7 +46,7 @@ public class MappingController { @PostMapping(consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity createMapping(Authentication authentication, @RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest) - throws JsonProcessingException { + throws JsonProcessingException, ProcessingFailedException { var mapping = getMappingFromRequest(requestBody); var userId = authentication.getName(); log.info("Received create request for mapping: {} from user: {}", mapping, userId); @@ -58,7 +59,7 @@ public ResponseEntity createMapping(Authentication authenticatio public ResponseEntity updateMapping(Authentication authentication, @PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix, @RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest) - throws JsonProcessingException, NotFoundException { + throws JsonProcessingException, 
NotFoundException, ProcessingFailedException { var mapping = getMappingFromRequest(requestBody); var id = prefix + '/' + suffix; var userId = authentication.getName(); diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java b/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java index d14e20f..8947b81 100644 --- a/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java +++ b/src/main/java/eu/dissco/orchestration/backend/controller/SourceSystemController.java @@ -8,9 +8,13 @@ import eu.dissco.orchestration.backend.domain.jsonapi.JsonApiRequestWrapper; import eu.dissco.orchestration.backend.domain.jsonapi.JsonApiWrapper; import eu.dissco.orchestration.backend.exception.NotFoundException; +import eu.dissco.orchestration.backend.exception.ProcessingFailedException; import eu.dissco.orchestration.backend.properties.ApplicationProperties; import eu.dissco.orchestration.backend.service.SourceSystemService; +import freemarker.template.TemplateException; +import io.kubernetes.client.openapi.ApiException; import jakarta.servlet.http.HttpServletRequest; +import java.io.IOException; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.http.HttpStatus; @@ -41,7 +45,7 @@ public class SourceSystemController { @PostMapping(consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity createSourceSystem(Authentication authentication, @RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest) - throws JsonProcessingException, NotFoundException { + throws IOException, NotFoundException, ProcessingFailedException { var sourceSystem = getSourceSystemFromRequest(requestBody); String path = appProperties.getBaseUrl() + servletRequest.getRequestURI(); var userId = authentication.getName(); @@ -54,7 +58,7 @@ public ResponseEntity createSourceSystem(Authentication 
authenti public ResponseEntity updateSourceSystem(Authentication authentication, @PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix, @RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest) - throws JsonProcessingException, NotFoundException { + throws IOException, NotFoundException, ProcessingFailedException { var sourceSystem = getSourceSystemFromRequest(requestBody); var id = prefix + '/' + suffix; var userId = authentication.getName(); @@ -72,7 +76,7 @@ public ResponseEntity updateSourceSystem(Authentication authenti @DeleteMapping(value = "/{prefix}/{suffix}", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity deleteSourceSystem(Authentication authentication, @PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix) - throws NotFoundException { + throws NotFoundException, ProcessingFailedException { String id = prefix + "/" + suffix; log.info("Received delete request for mapping: {} from user: {}", id, authentication.getName()); @@ -91,6 +95,17 @@ public ResponseEntity getSourceSystemById(@PathVariable("prefix" return ResponseEntity.ok(sourceSystem); } + @ResponseStatus(HttpStatus.OK) + @PostMapping(value = "/{prefix}/{suffix}/run", produces = MediaType.APPLICATION_JSON_VALUE) + public ResponseEntity scheduleRunSourceSystemById( + @PathVariable("prefix") String prefix, @PathVariable("suffix") String suffix) + throws ProcessingFailedException { + var id = prefix + '/' + suffix; + log.info("Received a request to start a new run for Source System: {}", id); + service.runSourceSystemById(id); + return ResponseEntity.accepted().build(); + } + @GetMapping("") public ResponseEntity getSourceSystems( @RequestParam(value = "pageNumber", defaultValue = "1") int pageNum, diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/TranslatorController.java b/src/main/java/eu/dissco/orchestration/backend/controller/TranslatorController.java deleted file mode 100644 index 
d0594af..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/controller/TranslatorController.java +++ /dev/null @@ -1,91 +0,0 @@ -package eu.dissco.orchestration.backend.controller; - -import eu.dissco.orchestration.backend.domain.TranslatorRequest; -import eu.dissco.orchestration.backend.domain.TranslatorResponse; -import eu.dissco.orchestration.backend.exception.NotFoundException; -import eu.dissco.orchestration.backend.service.TranslatorService; -import freemarker.template.TemplateException; -import io.kubernetes.client.openapi.ApiException; -import java.io.IOException; -import java.util.List; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.http.ResponseEntity; -import org.springframework.security.core.Authentication; -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -@Slf4j -@RestController -@RequestMapping("/translator") -@RequiredArgsConstructor -public class TranslatorController { - - private final TranslatorService service; - private static final String ERROR_MSG = "Application threw error with message: {}"; - - @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE) - public ResponseEntity> getAll() { - try { - return ResponseEntity.ok(service.getAll()); - } catch (ApiException e) { - log.error(ERROR_MSG, e.getResponseBody()); - return new ResponseEntity<>(HttpStatus.valueOf(e.getCode())); - } - } - - @GetMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE) - public ResponseEntity get(@PathVariable String id) 
{ - try { - var optionalResponse = service.get(id); - return optionalResponse.map(ResponseEntity::ok) - .orElseGet(() -> new ResponseEntity<>(HttpStatus.NOT_FOUND)); - } catch (ApiException e) { - log.error(ERROR_MSG, e.getResponseBody()); - return new ResponseEntity<>(HttpStatus.valueOf(e.getCode())); - } - } - - @PostMapping(produces = MediaType.APPLICATION_JSON_VALUE) - public ResponseEntity scheduleTranslator(Authentication authentication, - @RequestBody TranslatorRequest request) - throws TemplateException, IOException { - try { - log.info("Received new post request: {} by user: {}", request, - getNameFromToken(authentication)); - var result = service.createTranslator(request); - return new ResponseEntity<>(result, HttpStatus.CREATED); - } catch (ApiException e) { - log.error(ERROR_MSG, e.getResponseBody()); - return new ResponseEntity<>(HttpStatus.valueOf(e.getCode())); - } - } - - private String getNameFromToken(Authentication authentication) { - return authentication.getName(); - } - - @DeleteMapping(value = "/{id}") - public ResponseEntity deleteTranslator(Authentication authentication, - @PathVariable String id) { - log.info("Received delete request for job: {} from user: {}", id, - getNameFromToken(authentication)); - try { - service.deleteJob(id); - return new ResponseEntity<>(HttpStatus.NO_CONTENT); - } catch (ApiException e) { - log.error(ERROR_MSG, e.getResponseBody()); - return new ResponseEntity<>(HttpStatus.valueOf(e.getCode())); - } catch (NotFoundException e) { - log.error("Job was not found on the cluster"); - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - } - } -} diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Indexes.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/Indexes.java deleted file mode 100644 index 16e452b..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Indexes.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * This file is generated by jOOQ. 
- */ -package eu.dissco.orchestration.backend.database.jooq; - - -import eu.dissco.orchestration.backend.database.jooq.tables.Handles; - -import org.jooq.Index; -import org.jooq.OrderField; -import org.jooq.impl.DSL; -import org.jooq.impl.Internal; - - -/** - * A class modelling indexes of tables in public. - */ -@SuppressWarnings({ "all", "unchecked", "rawtypes" }) -public class Indexes { - - // ------------------------------------------------------------------------- - // INDEX definitions - // ------------------------------------------------------------------------- - - public static final Index DATAINDEX = Internal.createIndex(DSL.name("dataindex"), Handles.HANDLES, new OrderField[] { Handles.HANDLES.DATA }, false); - public static final Index HANDLEINDEX = Internal.createIndex(DSL.name("handleindex"), Handles.HANDLES, new OrderField[] { Handles.HANDLES.HANDLE }, false); -} diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Keys.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/Keys.java index 23dba70..002f172 100644 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Keys.java +++ b/src/main/java/eu/dissco/orchestration/backend/database/jooq/Keys.java @@ -4,11 +4,9 @@ package eu.dissco.orchestration.backend.database.jooq; -import eu.dissco.orchestration.backend.database.jooq.tables.Handles; import eu.dissco.orchestration.backend.database.jooq.tables.MachineAnnotationServices; import eu.dissco.orchestration.backend.database.jooq.tables.Mapping; import eu.dissco.orchestration.backend.database.jooq.tables.SourceSystem; -import eu.dissco.orchestration.backend.database.jooq.tables.records.HandlesRecord; import eu.dissco.orchestration.backend.database.jooq.tables.records.MachineAnnotationServicesRecord; import eu.dissco.orchestration.backend.database.jooq.tables.records.MappingRecord; import eu.dissco.orchestration.backend.database.jooq.tables.records.SourceSystemRecord; @@ -30,7 +28,6 @@ public class Keys { // 
UNIQUE and PRIMARY KEY definitions // ------------------------------------------------------------------------- - public static final UniqueKey HANDLES_PKEY = Internal.createUniqueKey(Handles.HANDLES, DSL.name("handles_pkey"), new TableField[] { Handles.HANDLES.HANDLE, Handles.HANDLES.IDX }, true); public static final UniqueKey MACHINE_ANNOTATION_SERVICES_PKEY = Internal.createUniqueKey(MachineAnnotationServices.MACHINE_ANNOTATION_SERVICES, DSL.name("machine_annotation_services_pkey"), new TableField[] { MachineAnnotationServices.MACHINE_ANNOTATION_SERVICES.ID }, true); public static final UniqueKey NEW_MAPPING_PK = Internal.createUniqueKey(Mapping.MAPPING, DSL.name("new_mapping_pk"), new TableField[] { Mapping.MAPPING.ID, Mapping.MAPPING.VERSION }, true); public static final UniqueKey NEW_SOURCE_SYSTEM_PKEY = Internal.createUniqueKey(SourceSystem.SOURCE_SYSTEM, DSL.name("new_source_system_pkey"), new TableField[] { SourceSystem.SOURCE_SYSTEM.ID }, true); diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Public.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/Public.java index 8a00851..1b9ac66 100644 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Public.java +++ b/src/main/java/eu/dissco/orchestration/backend/database/jooq/Public.java @@ -4,7 +4,6 @@ package eu.dissco.orchestration.backend.database.jooq; -import eu.dissco.orchestration.backend.database.jooq.tables.Handles; import eu.dissco.orchestration.backend.database.jooq.tables.MachineAnnotationServices; import eu.dissco.orchestration.backend.database.jooq.tables.Mapping; import eu.dissco.orchestration.backend.database.jooq.tables.SourceSystem; @@ -30,11 +29,6 @@ public class Public extends SchemaImpl { */ public static final Public PUBLIC = new Public(); - /** - * The table public.handles. - */ - public final Handles HANDLES = Handles.HANDLES; - /** * The table public.machine_annotation_services. 
*/ @@ -66,7 +60,6 @@ public Catalog getCatalog() { @Override public final List> getTables() { return Arrays.asList( - Handles.HANDLES, MachineAnnotationServices.MACHINE_ANNOTATION_SERVICES, Mapping.MAPPING, SourceSystem.SOURCE_SYSTEM diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Tables.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/Tables.java index b3cc42d..5221622 100644 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/Tables.java +++ b/src/main/java/eu/dissco/orchestration/backend/database/jooq/Tables.java @@ -4,7 +4,6 @@ package eu.dissco.orchestration.backend.database.jooq; -import eu.dissco.orchestration.backend.database.jooq.tables.Handles; import eu.dissco.orchestration.backend.database.jooq.tables.MachineAnnotationServices; import eu.dissco.orchestration.backend.database.jooq.tables.Mapping; import eu.dissco.orchestration.backend.database.jooq.tables.SourceSystem; @@ -16,11 +15,6 @@ @SuppressWarnings({ "all", "unchecked", "rawtypes" }) public class Tables { - /** - * The table public.handles. - */ - public static final Handles HANDLES = Handles.HANDLES; - /** * The table public.machine_annotation_services. */ diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/enums/TranslatorType.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/enums/TranslatorType.java new file mode 100644 index 0000000..d7dd4fb --- /dev/null +++ b/src/main/java/eu/dissco/orchestration/backend/database/jooq/enums/TranslatorType.java @@ -0,0 +1,56 @@ +/* + * This file is generated by jOOQ. + */ +package eu.dissco.orchestration.backend.database.jooq.enums; + + +import eu.dissco.orchestration.backend.database.jooq.Public; + +import org.jooq.Catalog; +import org.jooq.EnumType; +import org.jooq.Schema; + + +/** + * This class is generated by jOOQ. 
+ */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public enum TranslatorType implements EnumType { + + biocase("biocase"), + + dwca("dwca"); + + private final String literal; + + private TranslatorType(String literal) { + this.literal = literal; + } + + @Override + public Catalog getCatalog() { + return getSchema().getCatalog(); + } + + @Override + public Schema getSchema() { + return Public.PUBLIC; + } + + @Override + public String getName() { + return "translator_type"; + } + + @Override + public String getLiteral() { + return literal; + } + + /** + * Lookup a value of this EnumType by its literal + */ + public static TranslatorType lookupLiteral(String literal) { + return EnumType.lookupLiteral(TranslatorType.class, literal); + } +} diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/Handles.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/Handles.java deleted file mode 100644 index d05d3f4..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/Handles.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * This file is generated by jOOQ. 
- */ -package eu.dissco.orchestration.backend.database.jooq.tables; - - -import eu.dissco.orchestration.backend.database.jooq.Indexes; -import eu.dissco.orchestration.backend.database.jooq.Keys; -import eu.dissco.orchestration.backend.database.jooq.Public; -import eu.dissco.orchestration.backend.database.jooq.tables.records.HandlesRecord; - -import java.util.Arrays; -import java.util.List; -import java.util.function.Function; - -import org.jooq.Field; -import org.jooq.ForeignKey; -import org.jooq.Function12; -import org.jooq.Index; -import org.jooq.Name; -import org.jooq.Record; -import org.jooq.Records; -import org.jooq.Row12; -import org.jooq.Schema; -import org.jooq.SelectField; -import org.jooq.Table; -import org.jooq.TableField; -import org.jooq.TableOptions; -import org.jooq.UniqueKey; -import org.jooq.impl.DSL; -import org.jooq.impl.SQLDataType; -import org.jooq.impl.TableImpl; - - -/** - * This class is generated by jOOQ. - */ -@SuppressWarnings({ "all", "unchecked", "rawtypes" }) -public class Handles extends TableImpl { - - private static final long serialVersionUID = 1L; - - /** - * The reference instance of public.handles - */ - public static final Handles HANDLES = new Handles(); - - /** - * The class holding records for this type - */ - @Override - public Class getRecordType() { - return HandlesRecord.class; - } - - /** - * The column public.handles.handle. - */ - public final TableField HANDLE = createField(DSL.name("handle"), SQLDataType.BLOB.nullable(false), this, ""); - - /** - * The column public.handles.idx. - */ - public final TableField IDX = createField(DSL.name("idx"), SQLDataType.INTEGER.nullable(false), this, ""); - - /** - * The column public.handles.type. - */ - public final TableField TYPE = createField(DSL.name("type"), SQLDataType.BLOB, this, ""); - - /** - * The column public.handles.data. - */ - public final TableField DATA = createField(DSL.name("data"), SQLDataType.BLOB, this, ""); - - /** - * The column public.handles.ttl_type. 
- */ - public final TableField TTL_TYPE = createField(DSL.name("ttl_type"), SQLDataType.SMALLINT, this, ""); - - /** - * The column public.handles.ttl. - */ - public final TableField TTL = createField(DSL.name("ttl"), SQLDataType.INTEGER, this, ""); - - /** - * The column public.handles.timestamp. - */ - public final TableField TIMESTAMP = createField(DSL.name("timestamp"), SQLDataType.BIGINT, this, ""); - - /** - * The column public.handles.refs. - */ - public final TableField REFS = createField(DSL.name("refs"), SQLDataType.CLOB, this, ""); - - /** - * The column public.handles.admin_read. - */ - public final TableField ADMIN_READ = createField(DSL.name("admin_read"), SQLDataType.BOOLEAN, this, ""); - - /** - * The column public.handles.admin_write. - */ - public final TableField ADMIN_WRITE = createField(DSL.name("admin_write"), SQLDataType.BOOLEAN, this, ""); - - /** - * The column public.handles.pub_read. - */ - public final TableField PUB_READ = createField(DSL.name("pub_read"), SQLDataType.BOOLEAN, this, ""); - - /** - * The column public.handles.pub_write. - */ - public final TableField PUB_WRITE = createField(DSL.name("pub_write"), SQLDataType.BOOLEAN, this, ""); - - private Handles(Name alias, Table aliased) { - this(alias, aliased, null); - } - - private Handles(Name alias, Table aliased, Field[] parameters) { - super(alias, null, aliased, parameters, DSL.comment(""), TableOptions.table()); - } - - /** - * Create an aliased public.handles table reference - */ - public Handles(String alias) { - this(DSL.name(alias), HANDLES); - } - - /** - * Create an aliased public.handles table reference - */ - public Handles(Name alias) { - this(alias, HANDLES); - } - - /** - * Create a public.handles table reference - */ - public Handles() { - this(DSL.name("handles"), null); - } - - public Handles(Table child, ForeignKey key) { - super(child, key, HANDLES); - } - - @Override - public Schema getSchema() { - return aliased() ? 
null : Public.PUBLIC; - } - - @Override - public List getIndexes() { - return Arrays.asList(Indexes.DATAINDEX, Indexes.HANDLEINDEX); - } - - @Override - public UniqueKey getPrimaryKey() { - return Keys.HANDLES_PKEY; - } - - @Override - public Handles as(String alias) { - return new Handles(DSL.name(alias), this); - } - - @Override - public Handles as(Name alias) { - return new Handles(alias, this); - } - - @Override - public Handles as(Table alias) { - return new Handles(alias.getQualifiedName(), this); - } - - /** - * Rename this table - */ - @Override - public Handles rename(String name) { - return new Handles(DSL.name(name), null); - } - - /** - * Rename this table - */ - @Override - public Handles rename(Name name) { - return new Handles(name, null); - } - - /** - * Rename this table - */ - @Override - public Handles rename(Table name) { - return new Handles(name.getQualifiedName(), null); - } - - // ------------------------------------------------------------------------- - // Row12 type methods - // ------------------------------------------------------------------------- - - @Override - public Row12 fieldsRow() { - return (Row12) super.fieldsRow(); - } - - /** - * Convenience mapping calling {@link SelectField#convertFrom(Function)}. - */ - public SelectField mapping(Function12 from) { - return convertFrom(Records.mapping(from)); - } - - /** - * Convenience mapping calling {@link SelectField#convertFrom(Class, - * Function)}. 
- */ - public SelectField mapping(Class toType, Function12 from) { - return convertFrom(toType, Records.mapping(from)); - } -} diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/SourceSystem.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/SourceSystem.java index 4d15ab9..6f4f3b5 100644 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/SourceSystem.java +++ b/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/SourceSystem.java @@ -6,6 +6,7 @@ import eu.dissco.orchestration.backend.database.jooq.Keys; import eu.dissco.orchestration.backend.database.jooq.Public; +import eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType; import eu.dissco.orchestration.backend.database.jooq.tables.records.SourceSystemRecord; import java.time.Instant; @@ -13,11 +14,11 @@ import org.jooq.Field; import org.jooq.ForeignKey; -import org.jooq.Function9; +import org.jooq.Function10; import org.jooq.Name; import org.jooq.Record; import org.jooq.Records; -import org.jooq.Row9; +import org.jooq.Row10; import org.jooq.Schema; import org.jooq.SelectField; import org.jooq.Table; @@ -95,6 +96,11 @@ public Class getRecordType() { */ public final TableField CREATOR = createField(DSL.name("creator"), SQLDataType.CLOB.nullable(false).defaultValue(DSL.field(DSL.raw("'0000-0002-5669-2769'::text"), SQLDataType.CLOB)), this, ""); + /** + * The column public.source_system.translator_type. 
+ */ + public final TableField TRANSLATOR_TYPE = createField(DSL.name("translator_type"), SQLDataType.VARCHAR.asEnumDataType(eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType.class), this, ""); + private SourceSystem(Name alias, Table aliased) { this(alias, aliased, null); } @@ -178,18 +184,18 @@ public SourceSystem rename(Table name) { } // ------------------------------------------------------------------------- - // Row9 type methods + // Row10 type methods // ------------------------------------------------------------------------- @Override - public Row9 fieldsRow() { - return (Row9) super.fieldsRow(); + public Row10 fieldsRow() { + return (Row10) super.fieldsRow(); } /** * Convenience mapping calling {@link SelectField#convertFrom(Function)}. */ - public SelectField mapping(Function9 from) { + public SelectField mapping(Function10 from) { return convertFrom(Records.mapping(from)); } @@ -197,7 +203,7 @@ public SelectField mapping(Function9 SelectField mapping(Class toType, Function9 from) { + public SelectField mapping(Class toType, Function10 from) { return convertFrom(toType, Records.mapping(from)); } } diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/HandlesRecord.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/HandlesRecord.java deleted file mode 100644 index 61b43b5..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/HandlesRecord.java +++ /dev/null @@ -1,515 +0,0 @@ -/* - * This file is generated by jOOQ. - */ -package eu.dissco.orchestration.backend.database.jooq.tables.records; - - -import eu.dissco.orchestration.backend.database.jooq.tables.Handles; - -import org.jooq.Field; -import org.jooq.Record12; -import org.jooq.Record2; -import org.jooq.Row12; -import org.jooq.impl.UpdatableRecordImpl; - - -/** - * This class is generated by jOOQ. 
- */ -@SuppressWarnings({ "all", "unchecked", "rawtypes" }) -public class HandlesRecord extends UpdatableRecordImpl implements Record12 { - - private static final long serialVersionUID = 1L; - - /** - * Setter for public.handles.handle. - */ - public void setHandle(byte[] value) { - set(0, value); - } - - /** - * Getter for public.handles.handle. - */ - public byte[] getHandle() { - return (byte[]) get(0); - } - - /** - * Setter for public.handles.idx. - */ - public void setIdx(Integer value) { - set(1, value); - } - - /** - * Getter for public.handles.idx. - */ - public Integer getIdx() { - return (Integer) get(1); - } - - /** - * Setter for public.handles.type. - */ - public void setType(byte[] value) { - set(2, value); - } - - /** - * Getter for public.handles.type. - */ - public byte[] getType() { - return (byte[]) get(2); - } - - /** - * Setter for public.handles.data. - */ - public void setData(byte[] value) { - set(3, value); - } - - /** - * Getter for public.handles.data. - */ - public byte[] getData() { - return (byte[]) get(3); - } - - /** - * Setter for public.handles.ttl_type. - */ - public void setTtlType(Short value) { - set(4, value); - } - - /** - * Getter for public.handles.ttl_type. - */ - public Short getTtlType() { - return (Short) get(4); - } - - /** - * Setter for public.handles.ttl. - */ - public void setTtl(Integer value) { - set(5, value); - } - - /** - * Getter for public.handles.ttl. - */ - public Integer getTtl() { - return (Integer) get(5); - } - - /** - * Setter for public.handles.timestamp. - */ - public void setTimestamp(Long value) { - set(6, value); - } - - /** - * Getter for public.handles.timestamp. - */ - public Long getTimestamp() { - return (Long) get(6); - } - - /** - * Setter for public.handles.refs. - */ - public void setRefs(String value) { - set(7, value); - } - - /** - * Getter for public.handles.refs. - */ - public String getRefs() { - return (String) get(7); - } - - /** - * Setter for public.handles.admin_read. 
- */ - public void setAdminRead(Boolean value) { - set(8, value); - } - - /** - * Getter for public.handles.admin_read. - */ - public Boolean getAdminRead() { - return (Boolean) get(8); - } - - /** - * Setter for public.handles.admin_write. - */ - public void setAdminWrite(Boolean value) { - set(9, value); - } - - /** - * Getter for public.handles.admin_write. - */ - public Boolean getAdminWrite() { - return (Boolean) get(9); - } - - /** - * Setter for public.handles.pub_read. - */ - public void setPubRead(Boolean value) { - set(10, value); - } - - /** - * Getter for public.handles.pub_read. - */ - public Boolean getPubRead() { - return (Boolean) get(10); - } - - /** - * Setter for public.handles.pub_write. - */ - public void setPubWrite(Boolean value) { - set(11, value); - } - - /** - * Getter for public.handles.pub_write. - */ - public Boolean getPubWrite() { - return (Boolean) get(11); - } - - // ------------------------------------------------------------------------- - // Primary key information - // ------------------------------------------------------------------------- - - @Override - public Record2 key() { - return (Record2) super.key(); - } - - // ------------------------------------------------------------------------- - // Record12 type implementation - // ------------------------------------------------------------------------- - - @Override - public Row12 fieldsRow() { - return (Row12) super.fieldsRow(); - } - - @Override - public Row12 valuesRow() { - return (Row12) super.valuesRow(); - } - - @Override - public Field field1() { - return Handles.HANDLES.HANDLE; - } - - @Override - public Field field2() { - return Handles.HANDLES.IDX; - } - - @Override - public Field field3() { - return Handles.HANDLES.TYPE; - } - - @Override - public Field field4() { - return Handles.HANDLES.DATA; - } - - @Override - public Field field5() { - return Handles.HANDLES.TTL_TYPE; - } - - @Override - public Field field6() { - return Handles.HANDLES.TTL; - } - - @Override - 
public Field field7() { - return Handles.HANDLES.TIMESTAMP; - } - - @Override - public Field field8() { - return Handles.HANDLES.REFS; - } - - @Override - public Field field9() { - return Handles.HANDLES.ADMIN_READ; - } - - @Override - public Field field10() { - return Handles.HANDLES.ADMIN_WRITE; - } - - @Override - public Field field11() { - return Handles.HANDLES.PUB_READ; - } - - @Override - public Field field12() { - return Handles.HANDLES.PUB_WRITE; - } - - @Override - public byte[] component1() { - return getHandle(); - } - - @Override - public Integer component2() { - return getIdx(); - } - - @Override - public byte[] component3() { - return getType(); - } - - @Override - public byte[] component4() { - return getData(); - } - - @Override - public Short component5() { - return getTtlType(); - } - - @Override - public Integer component6() { - return getTtl(); - } - - @Override - public Long component7() { - return getTimestamp(); - } - - @Override - public String component8() { - return getRefs(); - } - - @Override - public Boolean component9() { - return getAdminRead(); - } - - @Override - public Boolean component10() { - return getAdminWrite(); - } - - @Override - public Boolean component11() { - return getPubRead(); - } - - @Override - public Boolean component12() { - return getPubWrite(); - } - - @Override - public byte[] value1() { - return getHandle(); - } - - @Override - public Integer value2() { - return getIdx(); - } - - @Override - public byte[] value3() { - return getType(); - } - - @Override - public byte[] value4() { - return getData(); - } - - @Override - public Short value5() { - return getTtlType(); - } - - @Override - public Integer value6() { - return getTtl(); - } - - @Override - public Long value7() { - return getTimestamp(); - } - - @Override - public String value8() { - return getRefs(); - } - - @Override - public Boolean value9() { - return getAdminRead(); - } - - @Override - public Boolean value10() { - return getAdminWrite(); - } - - 
@Override - public Boolean value11() { - return getPubRead(); - } - - @Override - public Boolean value12() { - return getPubWrite(); - } - - @Override - public HandlesRecord value1(byte[] value) { - setHandle(value); - return this; - } - - @Override - public HandlesRecord value2(Integer value) { - setIdx(value); - return this; - } - - @Override - public HandlesRecord value3(byte[] value) { - setType(value); - return this; - } - - @Override - public HandlesRecord value4(byte[] value) { - setData(value); - return this; - } - - @Override - public HandlesRecord value5(Short value) { - setTtlType(value); - return this; - } - - @Override - public HandlesRecord value6(Integer value) { - setTtl(value); - return this; - } - - @Override - public HandlesRecord value7(Long value) { - setTimestamp(value); - return this; - } - - @Override - public HandlesRecord value8(String value) { - setRefs(value); - return this; - } - - @Override - public HandlesRecord value9(Boolean value) { - setAdminRead(value); - return this; - } - - @Override - public HandlesRecord value10(Boolean value) { - setAdminWrite(value); - return this; - } - - @Override - public HandlesRecord value11(Boolean value) { - setPubRead(value); - return this; - } - - @Override - public HandlesRecord value12(Boolean value) { - setPubWrite(value); - return this; - } - - @Override - public HandlesRecord values(byte[] value1, Integer value2, byte[] value3, byte[] value4, Short value5, Integer value6, Long value7, String value8, Boolean value9, Boolean value10, Boolean value11, Boolean value12) { - value1(value1); - value2(value2); - value3(value3); - value4(value4); - value5(value5); - value6(value6); - value7(value7); - value8(value8); - value9(value9); - value10(value10); - value11(value11); - value12(value12); - return this; - } - - // ------------------------------------------------------------------------- - // Constructors - // ------------------------------------------------------------------------- - - /** - * 
Create a detached HandlesRecord - */ - public HandlesRecord() { - super(Handles.HANDLES); - } - - /** - * Create a detached, initialised HandlesRecord - */ - public HandlesRecord(byte[] handle, Integer idx, byte[] type, byte[] data, Short ttlType, Integer ttl, Long timestamp, String refs, Boolean adminRead, Boolean adminWrite, Boolean pubRead, Boolean pubWrite) { - super(Handles.HANDLES); - - setHandle(handle); - setIdx(idx); - setType(type); - setData(data); - setTtlType(ttlType); - setTtl(ttl); - setTimestamp(timestamp); - setRefs(refs); - setAdminRead(adminRead); - setAdminWrite(adminWrite); - setPubRead(pubRead); - setPubWrite(pubWrite); - resetChangedOnNotNull(); - } -} diff --git a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/SourceSystemRecord.java b/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/SourceSystemRecord.java index d7604e6..a1df8ad 100644 --- a/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/SourceSystemRecord.java +++ b/src/main/java/eu/dissco/orchestration/backend/database/jooq/tables/records/SourceSystemRecord.java @@ -4,14 +4,15 @@ package eu.dissco.orchestration.backend.database.jooq.tables.records; +import eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType; import eu.dissco.orchestration.backend.database.jooq.tables.SourceSystem; import java.time.Instant; import org.jooq.Field; import org.jooq.Record1; -import org.jooq.Record9; -import org.jooq.Row9; +import org.jooq.Record10; +import org.jooq.Row10; import org.jooq.impl.UpdatableRecordImpl; @@ -19,7 +20,7 @@ * This class is generated by jOOQ. 
*/ @SuppressWarnings({ "all", "unchecked", "rawtypes" }) -public class SourceSystemRecord extends UpdatableRecordImpl implements Record9 { +public class SourceSystemRecord extends UpdatableRecordImpl implements Record10 { private static final long serialVersionUID = 1L; @@ -149,6 +150,20 @@ public String getCreator() { return (String) get(8); } + /** + * Setter for public.source_system.translator_type. + */ + public void setTranslatorType(TranslatorType value) { + set(9, value); + } + + /** + * Getter for public.source_system.translator_type. + */ + public TranslatorType getTranslatorType() { + return (TranslatorType) get(9); + } + // ------------------------------------------------------------------------- // Primary key information // ------------------------------------------------------------------------- @@ -159,17 +174,17 @@ public Record1 key() { } // ------------------------------------------------------------------------- - // Record9 type implementation + // Record10 type implementation // ------------------------------------------------------------------------- @Override - public Row9 fieldsRow() { - return (Row9) super.fieldsRow(); + public Row10 fieldsRow() { + return (Row10) super.fieldsRow(); } @Override - public Row9 valuesRow() { - return (Row9) super.valuesRow(); + public Row10 valuesRow() { + return (Row10) super.valuesRow(); } @Override @@ -217,6 +232,11 @@ public Field field9() { return SourceSystem.SOURCE_SYSTEM.CREATOR; } + @Override + public Field field10() { + return SourceSystem.SOURCE_SYSTEM.TRANSLATOR_TYPE; + } + @Override public String component1() { return getId(); @@ -262,6 +282,11 @@ public String component9() { return getCreator(); } + @Override + public TranslatorType component10() { + return getTranslatorType(); + } + @Override public String value1() { return getId(); @@ -307,6 +332,11 @@ public String value9() { return getCreator(); } + @Override + public TranslatorType value10() { + return getTranslatorType(); + } + @Override 
public SourceSystemRecord value1(String value) { setId(value); @@ -362,7 +392,13 @@ public SourceSystemRecord value9(String value) { } @Override - public SourceSystemRecord values(String value1, String value2, String value3, String value4, Instant value5, Instant value6, String value7, Integer value8, String value9) { + public SourceSystemRecord value10(TranslatorType value) { + setTranslatorType(value); + return this; + } + + @Override + public SourceSystemRecord values(String value1, String value2, String value3, String value4, Instant value5, Instant value6, String value7, Integer value8, String value9, TranslatorType value10) { value1(value1); value2(value2); value3(value3); @@ -372,6 +408,7 @@ public SourceSystemRecord values(String value1, String value2, String value3, St value7(value7); value8(value8); value9(value9); + value10(value10); return this; } @@ -389,7 +426,7 @@ public SourceSystemRecord() { /** * Create a detached, initialised SourceSystemRecord */ - public SourceSystemRecord(String id, String name, String endpoint, String description, Instant created, Instant deleted, String mappingId, Integer version, String creator) { + public SourceSystemRecord(String id, String name, String endpoint, String description, Instant created, Instant deleted, String mappingId, Integer version, String creator, TranslatorType translatorType) { super(SourceSystem.SOURCE_SYSTEM); setId(id); @@ -401,6 +438,7 @@ public SourceSystemRecord(String id, String name, String endpoint, String descri setMappingId(mappingId); setVersion(version); setCreator(creator); + setTranslatorType(translatorType); resetChangedOnNotNull(); } } diff --git a/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java b/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java index 5f0474d..ab86e92 100644 --- a/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java +++ b/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java @@ -1,5 +1,6 
@@ package eu.dissco.orchestration.backend.domain; +import eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType; import jakarta.validation.constraints.NotBlank; public record SourceSystem( @@ -8,6 +9,8 @@ public record SourceSystem( @NotBlank String endpoint, String description, + + TranslatorType translatorType, @NotBlank String mappingId ) { diff --git a/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorRequest.java b/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorRequest.java deleted file mode 100644 index a798a2b..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorRequest.java +++ /dev/null @@ -1,13 +0,0 @@ -package eu.dissco.orchestration.backend.domain; - -import java.util.List; - -public record TranslatorRequest( - String sourceSystemId, - TranslatorType translatorType, - String query, - Integer itemsPerRequest, - List enrichmentList -) { - -} diff --git a/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorResponse.java b/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorResponse.java deleted file mode 100644 index 75d0f78..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorResponse.java +++ /dev/null @@ -1,15 +0,0 @@ -package eu.dissco.orchestration.backend.domain; - -import java.time.Instant; -import lombok.Builder; -import lombok.Value; - -@Value -@Builder -public class TranslatorResponse { - - String jobName; - String jobStatus; - Instant startTime; - Instant completedAt; -} diff --git a/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorType.java b/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorType.java deleted file mode 100644 index 98e3dcb..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/domain/TranslatorType.java +++ /dev/null @@ -1,5 +0,0 @@ -package eu.dissco.orchestration.backend.domain; - -public enum TranslatorType { - BIOCASE, DWCA -} diff --git 
a/src/main/java/eu/dissco/orchestration/backend/exception/ProcessingFailedException.java b/src/main/java/eu/dissco/orchestration/backend/exception/ProcessingFailedException.java index 085be38..b3b1e4b 100644 --- a/src/main/java/eu/dissco/orchestration/backend/exception/ProcessingFailedException.java +++ b/src/main/java/eu/dissco/orchestration/backend/exception/ProcessingFailedException.java @@ -1,6 +1,6 @@ package eu.dissco.orchestration.backend.exception; -public class ProcessingFailedException extends RuntimeException { +public class ProcessingFailedException extends Exception { public ProcessingFailedException(String message, Throwable e) { super(message, e); diff --git a/src/main/java/eu/dissco/orchestration/backend/properties/TranslatorJobProperties.java b/src/main/java/eu/dissco/orchestration/backend/properties/TranslatorJobProperties.java index 011621e..00f3040 100644 --- a/src/main/java/eu/dissco/orchestration/backend/properties/TranslatorJobProperties.java +++ b/src/main/java/eu/dissco/orchestration/backend/properties/TranslatorJobProperties.java @@ -1,5 +1,6 @@ package eu.dissco.orchestration.backend.properties; +import jakarta.validation.constraints.NotBlank; import lombok.Data; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.stereotype.Component; @@ -11,10 +12,16 @@ @ConfigurationProperties("translator-job") public class TranslatorJobProperties { + @NotBlank private String image = "public.ecr.aws/dissco/dissco-core-translator:latest"; + @NotBlank private String kafkaHost = "kafka.kafka.svc.cluster.local:9092"; + @NotBlank private String kafkaTopic = "digital-specimen"; + @NotBlank + private String namespace = "default"; + } diff --git a/src/main/java/eu/dissco/orchestration/backend/repository/SourceSystemRepository.java b/src/main/java/eu/dissco/orchestration/backend/repository/SourceSystemRepository.java index 2e9279b..0b732c6 100644 --- 
a/src/main/java/eu/dissco/orchestration/backend/repository/SourceSystemRepository.java +++ b/src/main/java/eu/dissco/orchestration/backend/repository/SourceSystemRepository.java @@ -29,6 +29,7 @@ public int createSourceSystem(SourceSystemRecord sourceSystemRecord) { .set(SOURCE_SYSTEM.ENDPOINT, sourceSystemRecord.sourceSystem().endpoint()) .set(SOURCE_SYSTEM.DESCRIPTION, sourceSystemRecord.sourceSystem().description()) .set(SOURCE_SYSTEM.MAPPING_ID, sourceSystemRecord.sourceSystem().mappingId()) + .set(SOURCE_SYSTEM.TRANSLATOR_TYPE, sourceSystemRecord.sourceSystem().translatorType()) .execute(); } @@ -41,6 +42,7 @@ public int updateSourceSystem(SourceSystemRecord sourceSystemRecord) { .set(SOURCE_SYSTEM.ENDPOINT, sourceSystemRecord.sourceSystem().endpoint()) .set(SOURCE_SYSTEM.DESCRIPTION, sourceSystemRecord.sourceSystem().description()) .set(SOURCE_SYSTEM.MAPPING_ID, sourceSystemRecord.sourceSystem().mappingId()) + .set(SOURCE_SYSTEM.TRANSLATOR_TYPE, sourceSystemRecord.sourceSystem().translatorType()) .where(SOURCE_SYSTEM.ID.eq(sourceSystemRecord.id())).execute(); } @@ -88,6 +90,7 @@ private SourceSystemRecord mapToSourceSystemRecord( row.get(SOURCE_SYSTEM.NAME), row.get(SOURCE_SYSTEM.ENDPOINT), row.get(SOURCE_SYSTEM.DESCRIPTION), + row.get(SOURCE_SYSTEM.TRANSLATOR_TYPE), row.get(SOURCE_SYSTEM.MAPPING_ID))); } diff --git a/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java b/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java index f426742..f92d8c8 100644 --- a/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java +++ b/src/main/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceService.java @@ -68,7 +68,7 @@ private static String getName(String pid) { } public JsonApiWrapper createMachineAnnotationService(MachineAnnotationService mas, String userId, - String path) { + String path) throws ProcessingFailedException { var requestBody 
= fdoRecordService.buildCreateRequest(mas, ObjectType.MAS); try { var handle = handleComponent.postHandle(requestBody); @@ -94,7 +94,8 @@ private void setDefaultMas(MachineAnnotationService mas, String handle) { } } - private void createDeployment(MachineAnnotationServiceRecord masRecord) { + private void createDeployment(MachineAnnotationServiceRecord masRecord) + throws ProcessingFailedException { var successfulDeployment = false; try { successfulDeployment = deployMasToCluster(masRecord, true); @@ -112,7 +113,7 @@ private void deployKedaToCluster(MachineAnnotationServiceRecord masRecord) var keda = createKedaFiles(masRecord, name); customObjectsApi.createNamespacedCustomObject(kubernetesProperties.getKedaGroup(), kubernetesProperties.getKedaVersion(), properties.getNamespace(), - kubernetesProperties.getKedaResource(), keda, null, null, null); + kubernetesProperties.getKedaResource(), keda).execute(); } catch (TemplateException | IOException e) { log.error("Failed to create keda scaledObject files for: {}", masRecord, e); throw new KubernetesFailedException("Failed to deploy keda to cluster"); @@ -130,10 +131,10 @@ private boolean deployMasToCluster(MachineAnnotationServiceRecord masRecord, boo var deployment = getV1Deployment(masRecord, shortPid); if (create) { appsV1Api.createNamespacedDeployment(properties.getNamespace(), - deployment, null, null, null, null); + deployment).execute(); } else { appsV1Api.replaceNamespacedDeployment(shortPid + DEPLOYMENT, - properties.getNamespace(), deployment, null, null, null, null); + properties.getNamespace(), deployment).execute(); } } catch (IOException | TemplateException e) { log.error("Failed to create deployment files for: {}", masRecord, e); @@ -198,7 +199,8 @@ private StringWriter fillDeploymentTemplate(Map templateProperti return writer; } - private void publishCreateEvent(String handle, MachineAnnotationServiceRecord masRecord) { + private void publishCreateEvent(String handle, MachineAnnotationServiceRecord 
masRecord) + throws ProcessingFailedException { try { kafkaPublisherService.publishCreateEvent(handle, mapper.valueToTree(masRecord), SUBJECT_TYPE); } catch (JsonProcessingException e) { @@ -223,7 +225,7 @@ private void rollbackMasCreation(MachineAnnotationServiceRecord masRecord, if (rollbackDeployment) { try { appsV1Api.deleteNamespacedDeployment(name + DEPLOYMENT, - properties.getNamespace(), null, null, null, null, null, null); + properties.getNamespace()).execute(); } catch (ApiException e) { log.error( "Fatal exception, unable to rollback kubernetes deployment for: {} error message with code: {} and message: {}", @@ -234,8 +236,7 @@ private void rollbackMasCreation(MachineAnnotationServiceRecord masRecord, try { customObjectsApi.deleteNamespacedCustomObject(kubernetesProperties.getKedaGroup(), kubernetesProperties.getKedaVersion(), properties.getNamespace(), - kubernetesProperties.getKedaResource(), name + SCALED_OBJECT, null, - null, null, null, null); + kubernetesProperties.getKedaResource(), name + SCALED_OBJECT).execute(); } catch (ApiException e) { log.error( "Fatal exception, unable to rollback kubernetes keda for: {} error message with code: {} and message: {}", @@ -246,7 +247,7 @@ private void rollbackMasCreation(MachineAnnotationServiceRecord masRecord, public JsonApiWrapper updateMachineAnnotationService(String id, MachineAnnotationService mas, String userId, String path) - throws NotFoundException { + throws NotFoundException, ProcessingFailedException { var currentMasOptional = repository.getActiveMachineAnnotationService(id); if (currentMasOptional.isPresent()) { var currentMasRecord = currentMasOptional.get(); @@ -267,7 +268,7 @@ public JsonApiWrapper updateMachineAnnotationService(String id, } private void updateDeployment(MachineAnnotationServiceRecord newMasRecord, - MachineAnnotationServiceRecord currentMasRecord) { + MachineAnnotationServiceRecord currentMasRecord) throws ProcessingFailedException { var successfulDeployment = false; try { 
successfulDeployment = deployMasToCluster(newMasRecord, false); @@ -280,13 +281,12 @@ private void updateDeployment(MachineAnnotationServiceRecord newMasRecord, private void updateKedaResource(MachineAnnotationServiceRecord masRecord, MachineAnnotationServiceRecord rollbackRecord) - throws KubernetesFailedException { + throws KubernetesFailedException, ProcessingFailedException { var name = getName(masRecord.id()); try { customObjectsApi.deleteNamespacedCustomObject(kubernetesProperties.getKedaGroup(), kubernetesProperties.getKedaVersion(), properties.getNamespace(), - kubernetesProperties.getKedaResource(), name + SCALED_OBJECT, null, - null, null, null, null); + kubernetesProperties.getKedaResource(), name + SCALED_OBJECT).execute(); } catch (ApiException e) { log.error( "Deletion of kubernetes keda failed for record: {}, with code: {} and message: {}", @@ -318,7 +318,7 @@ private void updateKedaResource(MachineAnnotationServiceRecord masRecord, } private void publishUpdateEvent(MachineAnnotationServiceRecord newMasRecord, - MachineAnnotationServiceRecord currentMasRecord) { + MachineAnnotationServiceRecord currentMasRecord) throws ProcessingFailedException { JsonNode jsonPatch = JsonDiff.asJson(mapper.valueToTree(newMasRecord.mas()), mapper.valueToTree(currentMasRecord.mas())); try { @@ -333,7 +333,7 @@ private void publishUpdateEvent(MachineAnnotationServiceRecord newMasRecord, } private void rollbackToPreviousVersion(MachineAnnotationServiceRecord currentMasRecord, - boolean rollbackDeployment, boolean rollbackKeda) { + boolean rollbackDeployment, boolean rollbackKeda) throws ProcessingFailedException { repository.updateMachineAnnotationService(currentMasRecord); if (rollbackDeployment) { try { @@ -355,7 +355,8 @@ private void rollbackToPreviousVersion(MachineAnnotationServiceRecord currentMas } } - public void deleteMachineAnnotationService(String id) throws NotFoundException { + public void deleteMachineAnnotationService(String id) + throws 
NotFoundException, ProcessingFailedException { var currentMasOptional = repository.getActiveMachineAnnotationService(id); if (currentMasOptional.isPresent()) { var deleted = Instant.now(); @@ -366,11 +367,11 @@ public void deleteMachineAnnotationService(String id) throws NotFoundException { } } - private void deleteDeployment(MachineAnnotationServiceRecord currentMasRecord) { + private void deleteDeployment(MachineAnnotationServiceRecord currentMasRecord) + throws ProcessingFailedException { var name = getName(currentMasRecord.id()); try { - appsV1Api.deleteNamespacedDeployment(name + DEPLOYMENT, - properties.getNamespace(), null, null, null, null, null, null); + appsV1Api.deleteNamespacedDeployment(name + DEPLOYMENT, properties.getNamespace()).execute(); } catch (ApiException e) { log.error( "Deletion of kubernetes deployment failed for record: {}, with code: {} and message: {}", @@ -381,8 +382,7 @@ private void deleteDeployment(MachineAnnotationServiceRecord currentMasRecord) { try { customObjectsApi.deleteNamespacedCustomObject(kubernetesProperties.getKedaGroup(), kubernetesProperties.getKedaVersion(), properties.getNamespace(), - kubernetesProperties.getKedaResource(), name + SCALED_OBJECT, - null, null, null, null, null); + kubernetesProperties.getKedaResource(), name + SCALED_OBJECT).execute(); } catch (ApiException e) { log.error( "Deletion of kubernetes keda failed for record: {}, with code: {} and message: {}", @@ -417,8 +417,7 @@ private JsonApiWrapper wrapSingleResponse(String id, MachineAnnotationServiceRec } private JsonApiListWrapper wrapResponse(List masRecords, - int pageNum, - int pageSize, String path) { + int pageNum, int pageSize, String path) { boolean hasNext = masRecords.size() > pageSize; masRecords = hasNext ? 
masRecords.subList(0, pageSize) : masRecords; var linksNode = new JsonApiLinks(pageSize, pageNum, hasNext, path); diff --git a/src/main/java/eu/dissco/orchestration/backend/service/MappingService.java b/src/main/java/eu/dissco/orchestration/backend/service/MappingService.java index 061959a..3c2d776 100644 --- a/src/main/java/eu/dissco/orchestration/backend/service/MappingService.java +++ b/src/main/java/eu/dissco/orchestration/backend/service/MappingService.java @@ -38,7 +38,8 @@ public class MappingService { private final MappingRepository repository; private final ObjectMapper mapper; - public JsonApiWrapper createMapping(Mapping mapping, String userId, String path) { + public JsonApiWrapper createMapping(Mapping mapping, String userId, String path) + throws ProcessingFailedException { var requestBody = fdoRecordService.buildCreateRequest(mapping, ObjectType.MAPPING); String handle = null; try { @@ -52,7 +53,8 @@ public JsonApiWrapper createMapping(Mapping mapping, String userId, String path) return wrapSingleResponse(handle, mappingRecord, path); } - private void publishCreateEvent(String handle, MappingRecord mappingRecord) { + private void publishCreateEvent(String handle, MappingRecord mappingRecord) + throws ProcessingFailedException { try { kafkaPublisherService.publishCreateEvent(handle, mapper.valueToTree(mappingRecord), SUBJECT_TYPE); @@ -76,7 +78,7 @@ private void rollbackMappingCreation(MappingRecord mappingRecord) { } public JsonApiWrapper updateMapping(String id, Mapping mapping, String userId, String path) - throws NotFoundException { + throws NotFoundException, ProcessingFailedException { var currentVersion = repository.getActiveMapping(id); if (currentVersion.isEmpty()) { throw new NotFoundException("Requested mapping does not exist"); @@ -94,7 +96,7 @@ public JsonApiWrapper updateMapping(String id, Mapping mapping, String userId, S } private void publishUpdateEvent(MappingRecord newMappingRecord, - MappingRecord currentMappingRecord) { + 
MappingRecord currentMappingRecord) throws ProcessingFailedException { JsonNode jsonPatch = JsonDiff.asJson(mapper.valueToTree(newMappingRecord.mapping()), mapper.valueToTree(currentMappingRecord.mapping())); try { diff --git a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java index 3bfbfbc..d2bd24f 100644 --- a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java +++ b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java @@ -5,6 +5,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.github.fge.jsonpatch.diff.JsonDiff; +import eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType; +import eu.dissco.orchestration.backend.domain.Enrichment; import eu.dissco.orchestration.backend.domain.HandleType; import eu.dissco.orchestration.backend.domain.ObjectType; import eu.dissco.orchestration.backend.domain.SourceSystem; @@ -17,18 +19,34 @@ import eu.dissco.orchestration.backend.exception.PidAuthenticationException; import eu.dissco.orchestration.backend.exception.PidCreationException; import eu.dissco.orchestration.backend.exception.ProcessingFailedException; +import eu.dissco.orchestration.backend.properties.TranslatorJobProperties; import eu.dissco.orchestration.backend.repository.SourceSystemRepository; import eu.dissco.orchestration.backend.web.HandleComponent; +import freemarker.template.Configuration; +import freemarker.template.TemplateException; +import io.kubernetes.client.openapi.ApiException; +import io.kubernetes.client.openapi.apis.BatchV1Api; +import io.kubernetes.client.openapi.models.V1Container; +import io.kubernetes.client.openapi.models.V1CronJob; +import io.kubernetes.client.openapi.models.V1EnvVar; +import io.kubernetes.client.openapi.models.V1Job; +import java.io.IOException; +import java.io.StringWriter; 
import java.time.Instant; +import java.util.HashMap; import java.util.List; +import java.util.Map; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.RandomStringUtils; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; @Slf4j @Service @RequiredArgsConstructor public class SourceSystemService { + public static final String SUBJECT_TYPE = "SourceSystem"; private final FdoRecordService fdoRecordService; private final HandleComponent handleComponent; @@ -36,36 +54,104 @@ public class SourceSystemService { private final MappingService mappingService; private final KafkaPublisherService kafkaPublisherService; private final ObjectMapper mapper; + @Qualifier("yamlMapper") + private final ObjectMapper yamlMapper; + private final TranslatorJobProperties jobProperties; + private final Configuration configuration; + private final BatchV1Api batchV1Api; - public JsonApiWrapper createSourceSystem(SourceSystem sourceSystem, String userId, String path) - throws NotFoundException { - String handle = null; - var request = fdoRecordService.buildCreateRequest(sourceSystem, ObjectType.SOURCE_SYSTEM); - try { - handle = handleComponent.postHandle(request); - } catch (PidAuthenticationException | PidCreationException e) { - throw new ProcessingFailedException(e.getMessage(), e); + private static String getSuffix(String sourceSystemId) { + return sourceSystemId.substring(sourceSystemId.indexOf('/') + 1).toLowerCase(); + } + + private static String generateJobName(SourceSystemRecord sourceSystem, boolean isCron) { + var name = sourceSystem.sourceSystem().translatorType().name().toLowerCase() + "-" + getSuffix( + sourceSystem.id()) + "-translator-service"; + if (!isCron) { + name = name + "-" + RandomStringUtils.randomAlphabetic(6).toLowerCase(); } + return name; + } + + public JsonApiWrapper createSourceSystem(SourceSystem sourceSystem, String userId, String path) + throws 
NotFoundException, ProcessingFailedException { validateMappingExists(sourceSystem.mappingId()); + String handle = createHandle(sourceSystem); var sourceSystemRecord = new SourceSystemRecord(handle, 1, userId, Instant.now(), null, sourceSystem); repository.createSourceSystem(sourceSystemRecord); + createCronJob(sourceSystemRecord); + createTranslatorJob(sourceSystemRecord, true); publishCreateEvent(handle, sourceSystemRecord); return wrapSingleResponse(handle, sourceSystemRecord, path); } - private void publishCreateEvent(String handle, SourceSystemRecord sourceSystemRecord) { + private String createHandle(SourceSystem sourceSystem) throws ProcessingFailedException { + var request = fdoRecordService.buildCreateRequest(sourceSystem, ObjectType.SOURCE_SYSTEM); + try { + return handleComponent.postHandle(request); + } catch (PidAuthenticationException | PidCreationException e) { + throw new ProcessingFailedException(e.getMessage(), e); + } + } + + private void createTranslatorJob(SourceSystemRecord sourceSystemRecord, boolean rollback) + throws ProcessingFailedException { + try { + triggerTranslatorJob(sourceSystemRecord); + } catch (IOException | TemplateException | ApiException e) { + logException(sourceSystemRecord, e); + if (rollback) { + rollbackSourceSystemCreation(sourceSystemRecord, true); + } + throw new ProcessingFailedException("Failed to deploy job to cluster", e); + } + } + + private void createCronJob(SourceSystemRecord sourceSystemRecord) + throws ProcessingFailedException { + try { + deployCronJob(sourceSystemRecord); + } catch (IOException | TemplateException | ApiException e) { + logException(sourceSystemRecord, e); + rollbackSourceSystemCreation(sourceSystemRecord, false); + throw new ProcessingFailedException("Failed to create new source system", e); + } + } + + private void deployCronJob(SourceSystemRecord sourceSystemRecord) + throws IOException, TemplateException, ApiException { + var k8sCron = setCronJobProperties(sourceSystemRecord); + 
batchV1Api.createNamespacedCronJob(jobProperties.getNamespace(), k8sCron).execute(); + log.info("Successfully published cronJob: {} to Kubernetes for source system: {}", + k8sCron.getMetadata().getName(), sourceSystemRecord.id()); + } + + private V1CronJob setCronJobProperties(SourceSystemRecord sourceSystemRecord) + throws IOException, TemplateException { + var jobProps = getTemplateProperties(sourceSystemRecord, true); + var job = fillTemplate(jobProps, sourceSystemRecord.sourceSystem().translatorType(), true); + var k8sCron = yamlMapper.readValue(job.toString(), V1CronJob.class); + addEnrichmentService( + k8sCron.getSpec().getJobTemplate().getSpec().getTemplate().getSpec().getContainers().get(0), + List.of()); + return k8sCron; + } + + private void publishCreateEvent(String handle, SourceSystemRecord sourceSystemRecord) + throws ProcessingFailedException { try { kafkaPublisherService.publishCreateEvent(handle, mapper.valueToTree(sourceSystemRecord), SUBJECT_TYPE); } catch (JsonProcessingException e) { log.error("Unable to publish message to Kafka", e); - rollbackSourceSystemCreation(sourceSystemRecord); + rollbackSourceSystemCreation(sourceSystemRecord, true); throw new ProcessingFailedException("Failed to create new machine annotation service", e); } } - private void rollbackSourceSystemCreation(SourceSystemRecord sourceSystemRecord) { + private void rollbackSourceSystemCreation(SourceSystemRecord sourceSystemRecord, + boolean removeCron) { var request = fdoRecordService.buildRollbackCreateRequest(sourceSystemRecord.id()); try { handleComponent.rollbackHandleCreation(request); @@ -75,6 +161,14 @@ private void rollbackSourceSystemCreation(SourceSystemRecord sourceSystemRecord) sourceSystemRecord.id(), e); } repository.rollbackSourceSystemCreation(sourceSystemRecord.id()); + if (removeCron) { + try { + batchV1Api.deleteNamespacedCronJob(generateJobName(sourceSystemRecord, true), + jobProperties.getNamespace()).execute(); + } catch (ApiException e) { + 
log.error("Unable to delete cronJob for source system: {}", sourceSystemRecord.id(), e); + } + } } private void validateMappingExists(String mappingId) throws NotFoundException { @@ -85,7 +179,8 @@ private void validateMappingExists(String mappingId) throws NotFoundException { } public JsonApiWrapper updateSourceSystem(String id, SourceSystem sourceSystem, String userId, - String path) throws NotFoundException { + String path) + throws NotFoundException, ProcessingFailedException { var currentSourceSystemOptional = repository.getActiveSourceSystem(id); if (currentSourceSystemOptional.isEmpty()) { throw new NotFoundException( @@ -98,12 +193,35 @@ public JsonApiWrapper updateSourceSystem(String id, SourceSystem sourceSystem, S var sourceSystemRecord = new SourceSystemRecord(id, currentSourceSystem.version() + 1, userId, Instant.now(), null, sourceSystem); repository.updateSourceSystem(sourceSystemRecord); + updateCronJob(sourceSystemRecord, currentSourceSystem); publishUpdateEvent(sourceSystemRecord, currentSourceSystem); return wrapSingleResponse(id, sourceSystemRecord, path); } + private void updateCronJob(SourceSystemRecord sourceSystemRecord, + SourceSystemRecord currentSourceSystem) throws ProcessingFailedException { + try { + var cronjob = setCronJobProperties(sourceSystemRecord); + batchV1Api.replaceNamespacedCronJob(generateJobName(currentSourceSystem, true), + jobProperties.getNamespace(), cronjob).execute(); + } catch (IOException | TemplateException | ApiException e) { + logException(sourceSystemRecord, e); + rollbackToPreviousVersion(currentSourceSystem, false); + throw new ProcessingFailedException("Failed to update new source system", e); + } + } + + private static void logException(SourceSystemRecord sourceSystemRecord, Exception e) { + if (e instanceof IOException || e instanceof TemplateException) { + log.error("Failed to create translator template for: {}", sourceSystemRecord, e); + } else if (e instanceof ApiException apiException){ + 
log.error("Failed to deploy kubernetes deployment to cluster with code: {} and message: {}", + apiException.getCode(), apiException.getResponseBody()); + } + } + private void publishUpdateEvent(SourceSystemRecord newSourceSystemRecord, - SourceSystemRecord currentSourceSystemRecord) { + SourceSystemRecord currentSourceSystemRecord) throws ProcessingFailedException { JsonNode jsonPatch = JsonDiff.asJson(mapper.valueToTree(newSourceSystemRecord.sourceSystem()), mapper.valueToTree(currentSourceSystemRecord.sourceSystem())); try { @@ -112,13 +230,24 @@ private void publishUpdateEvent(SourceSystemRecord newSourceSystemRecord, jsonPatch, SUBJECT_TYPE); } catch (JsonProcessingException e) { log.error("Unable to publish message to Kafka", e); - rollbackToPreviousVersion(currentSourceSystemRecord); + rollbackToPreviousVersion(currentSourceSystemRecord, true); throw new ProcessingFailedException("Failed to create new machine annotation service", e); } } - private void rollbackToPreviousVersion(SourceSystemRecord currentSourceSystemRecord) { + private void rollbackToPreviousVersion(SourceSystemRecord currentSourceSystemRecord, + boolean rollbackCron) { repository.updateSourceSystem(currentSourceSystemRecord); + if (rollbackCron) { + try { + var cronjob = setCronJobProperties(currentSourceSystemRecord); + batchV1Api.replaceNamespacedCronJob(generateJobName(currentSourceSystemRecord, true), + jobProperties.getNamespace(), cronjob).execute(); + } catch (IOException | TemplateException | ApiException e) { + log.error("Fatal error, unable to rollback to previous cronjob, manual action necessary", + e); + } + } } public JsonApiWrapper getSourceSystemById(String id, String path) { @@ -140,11 +269,18 @@ private JsonApiWrapper wrapSingleResponse(String id, SourceSystemRecord sourceSy ); } - public void deleteSourceSystem(String id) throws NotFoundException { + public void deleteSourceSystem(String id) throws NotFoundException, ProcessingFailedException { var result = 
repository.getActiveSourceSystem(id); if (result.isPresent()) { var deleted = Instant.now(); + try { + batchV1Api.deleteNamespacedCronJob(generateJobName(result.get(), true), + jobProperties.getNamespace()).execute(); + } catch (ApiException e) { + throw new ProcessingFailedException("Failed to delete cronJob for source system: " + id, e); + } repository.deleteSourceSystem(id, deleted); + log.info("Delete request for source system: {} was successful", id); } else { throw new NotFoundException("Requested source system: " + id + " does not exist"); } @@ -174,4 +310,73 @@ private JsonNode flattenSourceSystemRecord(SourceSystemRecord sourceSystemRecord } return sourceSystemNode; } + + public void runSourceSystemById(String id) throws ProcessingFailedException { + var sourceSystemRecord = repository.getSourceSystem(id); + createTranslatorJob(sourceSystemRecord, false); + } + + private void triggerTranslatorJob(SourceSystemRecord sourceSystemRecord) + throws IOException, TemplateException, ApiException { + var jobProps = getTemplateProperties(sourceSystemRecord, false); + var job = fillTemplate(jobProps, sourceSystemRecord.sourceSystem().translatorType(), false); + var k8sJob = yamlMapper.readValue(job.toString(), V1Job.class); + addEnrichmentService(k8sJob.getSpec().getTemplate().getSpec().getContainers().get(0), + List.of()); + batchV1Api.createNamespacedJob(jobProperties.getNamespace(), k8sJob).execute(); + log.info("Successfully published job: {} to Kubernetes for source system: {}", + k8sJob.getMetadata().getName(), sourceSystemRecord.id()); + } + + private Map getTemplateProperties(SourceSystemRecord sourceSystem, + boolean isCronJob) { + var map = new HashMap(); + var jobName = generateJobName(sourceSystem, isCronJob); + map.put("image", jobProperties.getImage()); + map.put("sourceSystemId", sourceSystem.id()); + map.put("jobName", jobName); + map.put("containerName", jobName); + map.put("kafkaHost", jobProperties.getKafkaHost()); + map.put("kafkaTopic", 
jobProperties.getKafkaTopic()); + if (isCronJob) { + map.put("cron", "0 0 * * *"); + } + return map; + } + + private StringWriter fillTemplate(Map templateProperties, + TranslatorType translatorType, boolean isCron) throws IOException, TemplateException { + var writer = new StringWriter(); + var templateFile = determineTemplate(translatorType, isCron); + var template = configuration.getTemplate(templateFile); + template.process(templateProperties, writer); + return writer; + } + + private String determineTemplate(TranslatorType translatorType, boolean isCron) { + if (isCron) { + return switch (translatorType) { + case dwca -> "dwca-cron-job.ftl"; + case biocase -> "biocase-cron-job.ftl"; + }; + } else { + return switch (translatorType) { + case dwca -> "dwca-translator-job.ftl"; + case biocase -> "biocase-translator-job.ftl"; + }; + } + } + + private void addEnrichmentService(V1Container container, List enrichmentList) { + for (int i = 0; i < enrichmentList.size(); i++) { + var envName = new V1EnvVar(); + envName.setName("ENRICHMENT_LIST_" + i + "_NAME"); + envName.setValue(enrichmentList.get(i).getName()); + container.addEnvItem(envName); + var envImageOnly = new V1EnvVar(); + envImageOnly.setName("ENRICHMENT_LIST_" + i + "_IMAGE_ONLY"); + envImageOnly.setValue(enrichmentList.get(i).getImageOnly()); + container.addEnvItem(envImageOnly); + } + } } diff --git a/src/main/java/eu/dissco/orchestration/backend/service/TranslatorService.java b/src/main/java/eu/dissco/orchestration/backend/service/TranslatorService.java deleted file mode 100644 index 8222136..0000000 --- a/src/main/java/eu/dissco/orchestration/backend/service/TranslatorService.java +++ /dev/null @@ -1,147 +0,0 @@ -package eu.dissco.orchestration.backend.service; - -import com.fasterxml.jackson.databind.ObjectMapper; -import eu.dissco.orchestration.backend.domain.Enrichment; -import eu.dissco.orchestration.backend.domain.TranslatorRequest; -import eu.dissco.orchestration.backend.domain.TranslatorResponse; 
-import eu.dissco.orchestration.backend.domain.TranslatorType; -import eu.dissco.orchestration.backend.exception.NotFoundException; -import eu.dissco.orchestration.backend.properties.TranslatorJobProperties; -import freemarker.template.Configuration; -import freemarker.template.TemplateException; -import io.kubernetes.client.openapi.ApiException; -import io.kubernetes.client.openapi.apis.BatchV1Api; -import io.kubernetes.client.openapi.models.V1EnvVar; -import io.kubernetes.client.openapi.models.V1Job; -import io.kubernetes.client.openapi.models.V1JobStatus; -import java.io.IOException; -import java.io.StringWriter; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.stereotype.Service; - -@Slf4j -@Service -public class TranslatorService { - - private static final String NAMESPACE = "default"; - - private final TranslatorJobProperties jobProperties; - private final Configuration configuration; - private final ObjectMapper mapper; - private final BatchV1Api batchV1Api; - - public TranslatorService( - TranslatorJobProperties jobProperties, Configuration configuration, - @Qualifier("yamlMapper") ObjectMapper mapper, BatchV1Api batchV1Api) { - this.jobProperties = jobProperties; - this.configuration = configuration; - this.mapper = mapper; - this.batchV1Api = batchV1Api; - } - - private static String getSuffix(String sourceSystemId) { - return sourceSystemId.substring(sourceSystemId.indexOf('/') + 1).toLowerCase(); - } - - public TranslatorResponse createTranslator(TranslatorRequest request) - throws TemplateException, IOException, ApiException { - var jobProps = getTemplateProperties(request); - var job = fillTemplate(jobProps, request.translatorType()); - var k8sJob = mapper.readValue(job.toString(), V1Job.class); - addEnrichmentService(k8sJob, 
request.enrichmentList()); - var result = batchV1Api.createNamespacedJob(NAMESPACE, k8sJob, "true", null, - null, null); - return mapToResponse(result); - } - - private void addEnrichmentService(V1Job k8sJob, List enrichmentList) { - var container = k8sJob.getSpec().getTemplate().getSpec().getContainers().get(0); - for (int i = 0; i < enrichmentList.size(); i++) { - var envName = new V1EnvVar(); - envName.setName("ENRICHMENT_LIST_" + i + "_NAME"); - envName.setValue(enrichmentList.get(i).getName()); - container.addEnvItem(envName); - var envImageOnly = new V1EnvVar(); - envImageOnly.setName("ENRICHMENT_LIST_" + i + "_IMAGE_ONLY"); - envImageOnly.setValue(enrichmentList.get(i).getImageOnly()); - container.addEnvItem(envImageOnly); - } - } - - private TranslatorResponse mapToResponse(V1Job result) { - return TranslatorResponse.builder(). - jobName(Objects.requireNonNull(result.getMetadata()).getName()) - .jobStatus(determineStatus(Objects.requireNonNull(result.getStatus()))) - .completedAt( - result.getStatus().getCompletionTime() != null ? result.getStatus().getCompletionTime() - .toInstant() : null) - .startTime(result.getStatus().getStartTime() != null ? 
result.getStatus().getStartTime() - .toInstant() : null) - .build(); - } - - private String determineStatus(V1JobStatus status) { - if (status.getActive() != null) { - return "Active"; - } else if (status.getFailed() != null) { - return "Failed"; - } else if (status.getSucceeded() != null) { - return "Completed"; - } - return "Unknown"; - } - - private Map getTemplateProperties(TranslatorRequest request) { - var map = new HashMap(); - map.put("image", jobProperties.getImage()); - map.put("sourceSystemId", request.sourceSystemId()); - map.put("jobName", "job-" + getSuffix(request.sourceSystemId())); - map.put("containerName", "container-" + getSuffix(request.sourceSystemId())); - map.put("kafkaHost", jobProperties.getKafkaHost()); - map.put("kafkaTopic", jobProperties.getKafkaTopic()); - return map; - } - - private StringWriter fillTemplate(Map templateProperties, - TranslatorType translatorType) throws IOException, TemplateException { - var writer = new StringWriter(); - var templateFile = determineTemplate(translatorType); - var template = configuration.getTemplate(templateFile); - template.process(templateProperties, writer); - return writer; - } - - private String determineTemplate(TranslatorType translatorType) { - return switch (translatorType) { - case DWCA -> "dwca-translator-job.ftl"; - case BIOCASE -> "biocase-translator-job.ftl"; - }; - } - - public List getAll() throws ApiException { - return batchV1Api.listNamespacedJob(NAMESPACE, null, null, null, null, null, null, null, null, - null, null, null).getItems().stream().map(this::mapToResponse).toList(); - } - - public Optional get(String id) throws ApiException { - return batchV1Api.listNamespacedJob(NAMESPACE, null, null, null, null, null, null, null, null, - null, null, null).getItems().stream().filter(Objects::nonNull) - .filter(v1Job -> v1Job.getMetadata().getName().equals(id)) - .map(this::mapToResponse).findAny(); - } - - public void deleteJob(String id) throws ApiException, NotFoundException { - 
var job = get(id); - if (job.isPresent()) { - batchV1Api.deleteNamespacedJob(id, NAMESPACE, null, null, null, false, null, null); - } else { - throw new NotFoundException("Job with id: " + id + " not found in Kubernetes"); - } - } -} diff --git a/src/main/resources/jooq-configuration.xml b/src/main/resources/jooq-configuration.xml index 3b0e7dc..33735f4 100644 --- a/src/main/resources/jooq-configuration.xml +++ b/src/main/resources/jooq-configuration.xml @@ -13,7 +13,7 @@ org.jooq.meta.postgres.PostgresDatabase public - mapping | source_system | handles | machine_annotation_services + mapping | source_system | machine_annotation_services | translator_type INSTANT diff --git a/src/main/resources/templates/biocase-cron-job.ftl b/src/main/resources/templates/biocase-cron-job.ftl new file mode 100644 index 0000000..fbc150e --- /dev/null +++ b/src/main/resources/templates/biocase-cron-job.ftl @@ -0,0 +1,49 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + name: ${jobName} +spec: + schedule: ${cron} + jobTemplate: + spec: + template: + spec: + restartPolicy: Never + containers: + - name: ${containerName} + image: ${image} + resources: + requests: + memory: 2G + limits: + memory: 2G + env: + - name: spring.profiles.active + value: biocase + - name: kafka.host + value: ${kafkaHost} + - name: kafka.topic + value: ${kafkaTopic} + - name: webclient.sourceSystemId + value: ${sourceSystemId} + - name: spring.datasource.url + value: jdbc:postgresql://terraform-20230822143945532600000001.cbppwfnjypll.eu-west-2.rds.amazonaws.com:5432/dissco + - name: spring.datasource.username + valueFrom: + secretKeyRef: + name: db-secrets + key: db-username + - name: spring.datasource.password + valueFrom: + secretKeyRef: + name: db-secrets + key: db-password + - name: fdo.digital-media-object-type + value: https://doi.org/21.T11148/bbad8c4e101e8af01115 + - name: fdo.digital-specimen-type + value: https://doi.org/21.T11148/894b1e6cad57e921764e + - name: JAVA_OPTS + value: -server 
-XX:+useContainerSupport -XX:MaxRAMPercentage=75 --illegal-access=deny + securityContext: + runAsNonRoot: true + allowPrivilegeEscalation: false diff --git a/src/main/resources/templates/biocase-translator-job.ftl b/src/main/resources/templates/biocase-translator-job.ftl new file mode 100644 index 0000000..7d30fe4 --- /dev/null +++ b/src/main/resources/templates/biocase-translator-job.ftl @@ -0,0 +1,47 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: ${jobName} +spec: + backoffLimit: 2 + template: + spec: + restartPolicy: Never + containers: + - name: ${containerName} + image: ${image} + resources: + requests: + memory: 2G + limits: + memory: 2G + env: + - name: spring.profiles.active + value: biocase + - name: kafka.host + value: ${kafkaHost} + - name: kafka.topic + value: ${kafkaTopic} + - name: webclient.sourceSystemId + value: ${sourceSystemId} + - name: spring.datasource.url + value: jdbc:postgresql://terraform-20230822143945532600000001.cbppwfnjypll.eu-west-2.rds.amazonaws.com:5432/dissco + - name: spring.datasource.username + valueFrom: + secretKeyRef: + name: db-secrets + key: db-username + - name: spring.datasource.password + valueFrom: + secretKeyRef: + name: db-secrets + key: db-password + - name: fdo.digital-media-object-type + value: https://doi.org/21.T11148/bbad8c4e101e8af01115 + - name: fdo.digital-specimen-type + value: https://doi.org/21.T11148/894b1e6cad57e921764e + - name: JAVA_OPTS + value: -server -XX:+useContainerSupport -XX:MaxRAMPercentage=75 --illegal-access=deny + securityContext: + runAsNonRoot: true + allowPrivilegeEscalation: false diff --git a/src/main/resources/templates/dwca-cron-job.ftl b/src/main/resources/templates/dwca-cron-job.ftl new file mode 100644 index 0000000..5db51d8 --- /dev/null +++ b/src/main/resources/templates/dwca-cron-job.ftl @@ -0,0 +1,59 @@ +apiVersion: batch/v1 +kind: CronJob +metadata: + name: ${jobName} +spec: + schedule: ${cron} + jobTemplate: + spec: + template: + spec: + restartPolicy: Never + 
containers: + - name: ${containerName} + image: ${image} + resources: + requests: + memory: 2G + limits: + memory: 2G + env: + - name: spring.profiles.active + value: dwca + - name: dwca.download-file + value: /temp/darwin.zip + - name: dwca.temp-folder + value: /temp/darwin + - name: kafka.host + value: ${kafkaHost} + - name: kafka.topic + value: ${kafkaTopic} + - name: webclient.sourceSystemId + value: ${sourceSystemId} + - name: spring.datasource.url + value: jdbc:postgresql://terraform-20230822143945532600000001.cbppwfnjypll.eu-west-2.rds.amazonaws.com:5432/dissco + - name: spring.datasource.username + valueFrom: + secretKeyRef: + name: db-secrets + key: db-username + - name: spring.datasource.password + valueFrom: + secretKeyRef: + name: db-secrets + key: db-password + - name: fdo.digital-media-object-type + value: https://doi.org/21.T11148/bbad8c4e101e8af01115 + - name: fdo.digital-specimen-type + value: https://doi.org/21.T11148/894b1e6cad57e921764e + - name: JAVA_OPTS + value: -server -XX:+useContainerSupport -XX:MaxRAMPercentage=75 --illegal-access=deny + securityContext: + runAsNonRoot: true + allowPrivilegeEscalation: false + volumeMounts: + - mountPath: /temp + name: temp-volume + volumes: + - name: temp-volume + emptyDir: { } \ No newline at end of file diff --git a/src/main/resources/templates/dwca-translator-job.ftl b/src/main/resources/templates/dwca-translator-job.ftl index 53178b4..f866405 100644 --- a/src/main/resources/templates/dwca-translator-job.ftl +++ b/src/main/resources/templates/dwca-translator-job.ftl @@ -12,10 +12,12 @@ spec: image: ${image} resources: requests: - memory: 512M + memory: 2G limits: - memory: 512M + memory: 2G env: + - name: spring.profiles.active + value: dwca - name: dwca.download-file value: /temp/darwin.zip - name: dwca.temp-folder @@ -27,17 +29,23 @@ spec: - name: webclient.sourceSystemId value: ${sourceSystemId} - name: spring.datasource.url - value: 
jdbc:postgresql://database-1.cbppwfnjypll.eu-west-2.rds.amazonaws.com/dissco + value: jdbc:postgresql://terraform-20230822143945532600000001.cbppwfnjypll.eu-west-2.rds.amazonaws.com:5432/dissco - name: spring.datasource.username valueFrom: secretKeyRef: - name: db-user-pass - key: username + name: db-secrets + key: db-username - name: spring.datasource.password valueFrom: secretKeyRef: - name: db-user-pass - key: password + name: db-secrets + key: db-password + - name: fdo.digital-media-object-type + value: https://doi.org/21.T11148/bbad8c4e101e8af01115 + - name: fdo.digital-specimen-type + value: https://doi.org/21.T11148/894b1e6cad57e921764e + - name: JAVA_OPTS + value: -server -XX:+useContainerSupport -XX:MaxRAMPercentage=75 --illegal-access=deny securityContext: runAsNonRoot: true allowPrivilegeEscalation: false diff --git a/src/main/resources/templates/geocase-translator-job.ftl b/src/main/resources/templates/geocase-translator-job.ftl deleted file mode 100644 index 257bfee..0000000 --- a/src/main/resources/templates/geocase-translator-job.ftl +++ /dev/null @@ -1,32 +0,0 @@ -apiVersion: batch/v1 -kind: Job -metadata: - name: ${jobName} -spec: - backoffLimit: 2 - template: - spec: - restartPolicy: Never - containers: - - name: ${containerName} - image: ${image} - env: - - name: spring.profiles.active - value: geoCase - - name: webclient.endpoint - value: ${endpoint} - - name: webclient.query-params - value: ${query} - - name: opends.service-name - value: ${serviceName} - <#if itemsPerRequest??> - - name: webclient.items-per-request - value: ${itemsPerRequest} - - - name: kafka.host - value: ${kafkaHost} - - name: kafka.topic - value: ${kafkaTopic} - securityContext: - runAsNonRoot: true - allowPrivilegeEscalation: false diff --git a/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java b/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java index 5ed64b3..c137cba 
100644 --- a/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java +++ b/src/test/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceControllerTest.java @@ -118,7 +118,7 @@ void testGetMass() { } @Test - void testDeleteMas() throws NotFoundException { + void testDeleteMas() throws Exception { // When var result = controller.deleteMachineAnnotationService(authentication, PREFIX, SUFFIX); diff --git a/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java b/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java index f6ccdd5..ca453bb 100644 --- a/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java +++ b/src/test/java/eu/dissco/orchestration/backend/repository/SourceSystemRepositoryIT.java @@ -1,6 +1,7 @@ package eu.dissco.orchestration.backend.repository; import static eu.dissco.orchestration.backend.database.jooq.Tables.SOURCE_SYSTEM; +import static eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType.biocase; import static eu.dissco.orchestration.backend.testutils.TestUtils.CREATED; import static eu.dissco.orchestration.backend.testutils.TestUtils.HANDLE; import static eu.dissco.orchestration.backend.testutils.TestUtils.HANDLE_ALT; @@ -62,6 +63,7 @@ void testUpdateSourceSystem() { "new name", SS_ENDPOINT, OBJECT_DESCRIPTION, + biocase, HANDLE_ALT )); @@ -199,6 +201,7 @@ private SourceSystem givenSourceSystemWithId(String endPoint) { OBJECT_NAME, endPoint, OBJECT_DESCRIPTION, + biocase, HANDLE_ALT ); } @@ -230,6 +233,7 @@ null, new SourceSystem( row.get(SOURCE_SYSTEM.NAME), row.get(SOURCE_SYSTEM.ENDPOINT), row.get(SOURCE_SYSTEM.DESCRIPTION), + row.get(SOURCE_SYSTEM.TRANSLATOR_TYPE), row.get(SOURCE_SYSTEM.MAPPING_ID))); } @@ -243,6 +247,7 @@ private void postSourceSystem(List ssRecords) { .set(SOURCE_SYSTEM.NAME, sourceSystemRecord.sourceSystem().name()) .set(SOURCE_SYSTEM.ENDPOINT, 
sourceSystemRecord.sourceSystem().endpoint()) .set(SOURCE_SYSTEM.DESCRIPTION, sourceSystemRecord.sourceSystem().description()) + .set(SOURCE_SYSTEM.TRANSLATOR_TYPE, sourceSystemRecord.sourceSystem().translatorType()) .set(SOURCE_SYSTEM.MAPPING_ID, sourceSystemRecord.sourceSystem().mappingId()) .set(SOURCE_SYSTEM.DELETED, sourceSystemRecord.deleted()) .set(SOURCE_SYSTEM.CREATED, sourceSystemRecord.created())); diff --git a/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java index 0a07f24..593db41 100644 --- a/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java +++ b/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java @@ -20,6 +20,7 @@ import static org.mockito.BDDMockito.given; import static org.mockito.BDDMockito.then; import static org.mockito.BDDMockito.willThrow; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; import static org.mockito.Mockito.times; @@ -39,7 +40,12 @@ import freemarker.template.Configuration; import io.kubernetes.client.openapi.ApiException; import io.kubernetes.client.openapi.apis.AppsV1Api; +import io.kubernetes.client.openapi.apis.AppsV1Api.APIcreateNamespacedDeploymentRequest; +import io.kubernetes.client.openapi.apis.AppsV1Api.APIdeleteNamespacedDeploymentRequest; +import io.kubernetes.client.openapi.apis.AppsV1Api.APIreplaceNamespacedDeploymentRequest; import io.kubernetes.client.openapi.apis.CustomObjectsApi; +import io.kubernetes.client.openapi.apis.CustomObjectsApi.APIcreateNamespacedCustomObjectRequest; +import io.kubernetes.client.openapi.apis.CustomObjectsApi.APIdeleteNamespacedCustomObjectRequest; import io.kubernetes.client.openapi.models.V1Deployment; import java.io.File; import java.io.IOException; @@ -88,9 +94,9 @@ void setup() throws IOException { initFreeMaker(); 
var kedaTemplate = configuration.getTemplate("keda-template.ftl"); var deploymentTemplate = configuration.getTemplate("mas-template.ftl"); - service = new MachineAnnotationServiceService(handleComponent, fdoRecordService, kafkaPublisherService, repository, - appsV1Api, customObjectsApi, kedaTemplate, deploymentTemplate, MAPPER, properties, - kubernetesProperties); + service = new MachineAnnotationServiceService(handleComponent, fdoRecordService, + kafkaPublisherService, repository, appsV1Api, customObjectsApi, kedaTemplate, + deploymentTemplate, MAPPER, properties, kubernetesProperties); } private void initFreeMaker() throws IOException { @@ -111,6 +117,12 @@ void testCreateMas() throws Exception { given(handleComponent.postHandle(any())).willReturn(HANDLE); given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092"); given(properties.getNamespace()).willReturn("namespace"); + var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class); + given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class))) + .willReturn(createDeploy); + var createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); + given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), any(Object.class))).willReturn(createCustom); // When var result = service.createMachineAnnotationService(mas, OBJECT_CREATOR, MAS_PATH); @@ -120,11 +132,10 @@ void testCreateMas() throws Exception { then(fdoRecordService).should().buildCreateRequest(mas, ObjectType.MAS); then(repository).should().createMachineAnnotationService(givenMasRecord()); then(appsV1Api).should() - .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class)); then(customObjectsApi).should() .createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - any(Object.class), eq(null), eq(null), 
eq(null)); + any(Object.class)); then(kafkaPublisherService).should() .publishCreateEvent(HANDLE, MAPPER.valueToTree(givenMasRecord()), SUBJECT_TYPE); } @@ -147,8 +158,10 @@ void testCreateMasDeployFails() throws Exception { given(handleComponent.postHandle(any())).willReturn(HANDLE); given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092"); given(properties.getNamespace()).willReturn("namespace"); - given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class), eq(null), - eq(null), eq(null), eq(null))).willThrow(new ApiException()); + var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class); + given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class))) + .willReturn(createDeploy); + given(createDeploy.execute()).willThrow(new ApiException()); // When assertThrowsExactly(ProcessingFailedException.class, @@ -168,9 +181,16 @@ void testCreateKedaFails() throws Exception { given(handleComponent.postHandle(any())).willReturn(HANDLE); given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092"); given(properties.getNamespace()).willReturn("namespace"); + var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class); + given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class))) + .willReturn(createDeploy); + var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); + given(appsV1Api.deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), + eq("namespace"))).willReturn(deleteDeploy); + var createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), - anyString(), - any(Object.class), eq(null), eq(null), eq(null))).willThrow(new ApiException()); + anyString(), any(Object.class))).willReturn(createCustom); + given(createCustom.execute()).willThrow(new ApiException()); // When assertThrowsExactly(ProcessingFailedException.class, 
@@ -179,13 +199,11 @@ void testCreateKedaFails() throws Exception { // Then then(repository).should().createMachineAnnotationService(givenMasRecord()); then(appsV1Api).should() - .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class)); then(handleComponent).should().rollbackHandleCreation(any()); then(repository).should().rollbackMasCreation(HANDLE); then(appsV1Api).should() - .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - eq(null), eq(null), eq(null), eq(null), eq(null), eq(null)); + .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace")); } @Test @@ -198,6 +216,18 @@ void testCreateMasKafkaFails() throws Exception { willThrow(JsonProcessingException.class).given(kafkaPublisherService) .publishCreateEvent(HANDLE, MAPPER.valueToTree(givenMasRecord()), SUBJECT_TYPE); + var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class); + given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class))) + .willReturn(createDeploy); + var createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); + given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), any(Object.class))).willReturn(createCustom); + var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); + given(appsV1Api.deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), + eq("namespace"))).willReturn(deleteDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), eq(SUFFIX.toLowerCase() + "-scaled-object"))).willReturn(deleteCustom); // When assertThrowsExactly(ProcessingFailedException.class, @@ -210,21 +240,18 @@ void testCreateMasKafkaFails() throws Exception { 
then(handleComponent).should().rollbackHandleCreation(any()); then(appsV1Api).should() - .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class)); then(customObjectsApi).should() .createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - any(Object.class), eq(null), eq(null), eq(null)); + any(Object.class)); then(fdoRecordService).should().buildRollbackCreateRequest(HANDLE); then(handleComponent).should().rollbackHandleCreation(any()); then(repository).should().rollbackMasCreation(HANDLE); then(appsV1Api).should() - .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - eq(null), eq(null), eq(null), eq(null), eq(null), eq(null)); + .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace")); then(customObjectsApi).should() .deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - eq(SUFFIX.toLowerCase() + "-scaled-object"), eq(null), eq(null), eq(null), eq(null), - eq(null)); + eq(SUFFIX.toLowerCase() + "-scaled-object")); } @Test @@ -238,7 +265,18 @@ void testCreateMasKafkaAndPidFails() throws Exception { .publishCreateEvent(HANDLE, MAPPER.valueToTree(givenMasRecord()), SUBJECT_TYPE); willThrow(PidCreationException.class).given(handleComponent).rollbackHandleCreation(any()); - + var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class); + given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class))) + .willReturn(createDeploy); + var createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); + given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), any(Object.class))).willReturn(createCustom); + var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); + 
given(appsV1Api.deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), + eq("namespace"))).willReturn(deleteDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), eq(SUFFIX.toLowerCase() + "-scaled-object"))).willReturn(deleteCustom); // When assertThrowsExactly(ProcessingFailedException.class, @@ -251,21 +289,18 @@ void testCreateMasKafkaAndPidFails() throws Exception { then(handleComponent).should().rollbackHandleCreation(any()); then(appsV1Api).should() - .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class)); then(customObjectsApi).should() .createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - any(Object.class), eq(null), eq(null), eq(null)); + any(Object.class)); then(fdoRecordService).should().buildRollbackCreateRequest(HANDLE); then(handleComponent).should().rollbackHandleCreation(any()); then(repository).should().rollbackMasCreation(HANDLE); then(appsV1Api).should() - .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - eq(null), eq(null), eq(null), eq(null), eq(null), eq(null)); + .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace")); then(customObjectsApi).should() .deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - eq(SUFFIX.toLowerCase() + "-scaled-object"), eq(null), eq(null), eq(null), eq(null), - eq(null)); + eq(SUFFIX.toLowerCase() + "-scaled-object")); } @@ -278,6 +313,15 @@ void testUpdateMas() throws Exception { given(repository.getActiveMachineAnnotationService(HANDLE)).willReturn(prevRecord); given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092"); given(properties.getNamespace()).willReturn("namespace"); + var 
replaceDeploy = mock(APIreplaceNamespacedDeploymentRequest.class); + given(appsV1Api.replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), + eq("namespace"), any(V1Deployment.class))).willReturn(replaceDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), eq(SUFFIX.toLowerCase() + "-scaled-object"))).willReturn(deleteCustom); + var createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); + given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), any(Object.class))).willReturn(createCustom); // When var result = service.updateMachineAnnotationService(HANDLE, mas, OBJECT_CREATOR, MAS_PATH); @@ -287,15 +331,13 @@ void testUpdateMas() throws Exception { then(repository).should().updateMachineAnnotationService(givenMasRecord(2)); then(appsV1Api).should() .replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + any(V1Deployment.class)); then(customObjectsApi).should() .deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - eq(SUFFIX.toLowerCase() + "-scaled-object"), eq(null), eq(null), eq(null), eq(null), - eq(null)); + eq(SUFFIX.toLowerCase() + "-scaled-object")); then(customObjectsApi).should() .createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - any(Object.class), eq(null), eq(null), eq(null)); + any(Object.class)); then(kafkaPublisherService).should() .publishUpdateEvent(HANDLE, MAPPER.valueToTree(givenMasRecord(2)), givenJsonPatch(), SUBJECT_TYPE); @@ -309,10 +351,16 @@ void testUpdateMasDeployFails() throws Exception { given(repository.getActiveMachineAnnotationService(HANDLE)).willReturn(prevRecord); given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092"); 
given(properties.getNamespace()).willReturn("namespace"); + var replaceDeploy = mock(APIreplaceNamespacedDeploymentRequest.class); + given(appsV1Api.replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), + eq("namespace"), any(V1Deployment.class))).willReturn(replaceDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), eq(SUFFIX.toLowerCase() + "-scaled-object"))).willReturn(deleteCustom); + var createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), - anyString(), - any(Object.class), eq(null), eq(null), eq(null))).willThrow(new ApiException()) - .willReturn(null); + anyString(), any(Object.class))).willReturn(createCustom); + given(createCustom.execute()).willThrow(new ApiException()); // When assertThrowsExactly(ProcessingFailedException.class, @@ -323,30 +371,28 @@ void testUpdateMasDeployFails() throws Exception { then(repository).should().updateMachineAnnotationService(prevRecord.get()); then(appsV1Api).should(times(2)) .replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + any(V1Deployment.class)); then(customObjectsApi).should() .deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - eq(SUFFIX.toLowerCase() + "-scaled-object"), eq(null), eq(null), eq(null), eq(null), - eq(null)); + eq(SUFFIX.toLowerCase() + "-scaled-object")); then(customObjectsApi).should(times(2)) .createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - any(Object.class), eq(null), eq(null), eq(null)); + any(Object.class)); then(kafkaPublisherService).shouldHaveNoInteractions(); } @Test - void testUpdateKedaFails() throws Exception { + void testUpdateKedaFails() 
throws ApiException { // Given var prevRecord = buildOptionalPrevRecord(); var mas = givenMas(); given(repository.getActiveMachineAnnotationService(HANDLE)).willReturn(prevRecord); given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092"); given(properties.getNamespace()).willReturn("namespace"); + var replaceDeploy = mock(APIreplaceNamespacedDeploymentRequest.class); given(appsV1Api.replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), - eq("namespace"), - any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null))).willThrow(new ApiException()); + eq("namespace"), any(V1Deployment.class))).willReturn(replaceDeploy); + given(replaceDeploy.execute()).willThrow(new ApiException()); // When assertThrowsExactly(ProcessingFailedException.class, @@ -357,8 +403,7 @@ void testUpdateKedaFails() throws Exception { then(repository).should().updateMachineAnnotationService(prevRecord.get()); then(appsV1Api).should() .replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + any(V1Deployment.class)); then(customObjectsApi).shouldHaveNoInteractions(); then(kafkaPublisherService).shouldHaveNoInteractions(); } @@ -374,6 +419,15 @@ void testUpdateMasKafkaFails() throws Exception { willThrow(JsonProcessingException.class).given(kafkaPublisherService) .publishUpdateEvent(HANDLE, MAPPER.valueToTree(givenMasRecord(2)), givenJsonPatch(), SUBJECT_TYPE); + var replaceDeploy = mock(APIreplaceNamespacedDeploymentRequest.class); + given(appsV1Api.replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), + eq("namespace"), any(V1Deployment.class))).willReturn(replaceDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), eq(SUFFIX.toLowerCase() + "-scaled-object"))).willReturn(deleteCustom); + var 
createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); + given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), + anyString(), any(Object.class))).willReturn(createCustom); // When assertThrowsExactly(ProcessingFailedException.class, @@ -383,15 +437,13 @@ void testUpdateMasKafkaFails() throws Exception { then(repository).should().updateMachineAnnotationService(givenMasRecord(2)); then(appsV1Api).should(times(2)) .replaceNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + any(V1Deployment.class)); then(customObjectsApi).should(times(2)) .deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - eq(SUFFIX.toLowerCase() + "-scaled-object"), eq(null), eq(null), eq(null), eq(null), - eq(null)); + eq(SUFFIX.toLowerCase() + "-scaled-object")); then(customObjectsApi).should(times(2)) .createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - any(Object.class), eq(null), eq(null), eq(null)); + any(Object.class)); then(repository).should().updateMachineAnnotationService(prevRecord.get()); } @@ -545,6 +597,12 @@ void testDeletedMas() { // Given given(repository.getActiveMachineAnnotationService(HANDLE)).willReturn( Optional.of(givenMasRecord())); + var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); + given(appsV1Api.deleteNamespacedDeployment(SUFFIX.toLowerCase() + "-deployment", + null)).willReturn(deleteDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject("keda.sh", "v1alpha1", null, + "scaledobjects", "gw0-pop-xsl-scaled-object")).willReturn(deleteCustom); // When / Then assertDoesNotThrow(() -> service.deleteMachineAnnotationService(HANDLE)); @@ -561,11 +619,17 @@ void testDeletedMasNotFound() { } @Test - void testDeleteMas() throws ApiException, NotFoundException { 
+ void testDeleteMas() throws NotFoundException, ProcessingFailedException { // Given given(repository.getActiveMachineAnnotationService(HANDLE)).willReturn( Optional.of(givenMasRecord())); given(properties.getNamespace()).willReturn("namespace"); + var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); + given(appsV1Api.deleteNamespacedDeployment(SUFFIX.toLowerCase() + "-deployment", + "namespace")).willReturn(deleteDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject("keda.sh", "v1alpha1", "namespace", + "scaledobjects", "gw0-pop-xsl-scaled-object")).willReturn(deleteCustom); // When service.deleteMachineAnnotationService(HANDLE); @@ -573,12 +637,10 @@ void testDeleteMas() throws ApiException, NotFoundException { // Then then(repository).should().deleteMachineAnnotationService(HANDLE, Instant.now()); then(appsV1Api).should() - .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace"), - eq(null), eq(null), eq(null), eq(null), eq(null), eq(null)); + .deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), eq("namespace")); then(customObjectsApi).should() .deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), - eq(SUFFIX.toLowerCase() + "-scaled-object"), eq(null), eq(null), eq(null), eq(null), - eq(null)); + eq(SUFFIX.toLowerCase() + "-scaled-object")); then(kafkaPublisherService).shouldHaveNoInteractions(); } @@ -588,8 +650,10 @@ void testDeleteDeployFails() throws ApiException { given(repository.getActiveMachineAnnotationService(HANDLE)).willReturn( Optional.of(givenMasRecord())); given(properties.getNamespace()).willReturn("namespace"); + var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); given(appsV1Api.deleteNamespacedDeployment(SUFFIX.toLowerCase() + "-deployment", - "namespace", null, null, null, null, null, null)).willThrow(new ApiException()); + 
"namespace")).willReturn(deleteDeploy); + given(deleteDeploy.execute()).willThrow(new ApiException()); // When assertThrowsExactly(ProcessingFailedException.class, @@ -610,10 +674,16 @@ void testDeleteKedaFails() throws ApiException { Optional.of(givenMasRecord())); given(properties.getKafkaHost()).willReturn("kafka.svc.cluster.local:9092"); given(properties.getNamespace()).willReturn("namespace"); - given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), - anyString(), - eq(SUFFIX.toLowerCase() + "-scaled-object"), eq(null), eq(null), eq(null), eq(null), - eq(null))).willThrow(new ApiException()); + var createDeploy = mock(APIcreateNamespacedDeploymentRequest.class); + given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class))) + .willReturn(createDeploy); + var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); + given(appsV1Api.deleteNamespacedDeployment(SUFFIX.toLowerCase() + "-deployment", + "namespace")).willReturn(deleteDeploy); + var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); + given(customObjectsApi.deleteNamespacedCustomObject("keda.sh", "v1alpha1", "namespace", + "scaledobjects", "gw0-pop-xsl-scaled-object")).willReturn(deleteCustom); + given(deleteCustom.execute()).willThrow(new ApiException()); // When assertThrowsExactly(ProcessingFailedException.class, @@ -624,11 +694,9 @@ void testDeleteKedaFails() throws ApiException { then(repository).should() .createMachineAnnotationService(any(MachineAnnotationServiceRecord.class)); then(appsV1Api).should().deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), - eq("namespace"), - eq(null), eq(null), eq(null), eq(null), eq(null), eq(null)); + eq("namespace")); then(appsV1Api).should() - .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class), eq(null), eq(null), - eq(null), eq(null)); + .createNamespacedDeployment(eq("namespace"), any(V1Deployment.class)); 
then(kafkaPublisherService).shouldHaveNoInteractions(); } diff --git a/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java index ef8de83..ad478cc 100644 --- a/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java +++ b/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java @@ -1,5 +1,7 @@ package eu.dissco.orchestration.backend.service; +import static eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType.biocase; +import static eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType.dwca; import static eu.dissco.orchestration.backend.service.SourceSystemService.SUBJECT_TYPE; import static eu.dissco.orchestration.backend.testutils.TestUtils.CREATED; import static eu.dissco.orchestration.backend.testutils.TestUtils.HANDLE; @@ -18,13 +20,18 @@ import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertThrowsExactly; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.BDDMockito.given; import static org.mockito.BDDMockito.then; import static org.mockito.BDDMockito.willThrow; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import eu.dissco.orchestration.backend.domain.HandleType; import eu.dissco.orchestration.backend.domain.SourceSystem; import eu.dissco.orchestration.backend.domain.SourceSystemRecord; @@ -34,8 +41,20 @@ import eu.dissco.orchestration.backend.exception.NotFoundException; import 
eu.dissco.orchestration.backend.exception.PidCreationException; import eu.dissco.orchestration.backend.exception.ProcessingFailedException; +import eu.dissco.orchestration.backend.properties.TranslatorJobProperties; import eu.dissco.orchestration.backend.repository.SourceSystemRepository; import eu.dissco.orchestration.backend.web.HandleComponent; +import freemarker.template.Configuration; +import io.kubernetes.client.openapi.ApiException; +import io.kubernetes.client.openapi.apis.BatchV1Api; +import io.kubernetes.client.openapi.apis.BatchV1Api.APIcreateNamespacedCronJobRequest; +import io.kubernetes.client.openapi.apis.BatchV1Api.APIcreateNamespacedJobRequest; +import io.kubernetes.client.openapi.apis.BatchV1Api.APIdeleteNamespacedCronJobRequest; +import io.kubernetes.client.openapi.apis.BatchV1Api.APIreplaceNamespacedCronJobRequest; +import io.kubernetes.client.openapi.models.V1CronJob; +import io.kubernetes.client.openapi.models.V1Job; +import java.io.File; +import java.io.IOException; import java.time.Clock; import java.time.Instant; import java.time.ZoneOffset; @@ -53,6 +72,14 @@ @ExtendWith(MockitoExtension.class) class SourceSystemServiceTest { + private static final String NAMESPACE = "default"; + + private final ObjectMapper yamlMapper = new ObjectMapper( + new YAMLFactory()).findAndRegisterModules(); + private final TranslatorJobProperties jobProperties = new TranslatorJobProperties(); + + private final Configuration configuration = new Configuration(Configuration.VERSION_2_3_32); + private SourceSystemService service; @Mock private KafkaPublisherService kafkaPublisherService; @@ -64,15 +91,23 @@ class SourceSystemServiceTest { private HandleComponent handleComponent; @Mock private MappingService mappingService; + @Mock + private BatchV1Api batchV1Api; + private MockedStatic mockedStatic; private MockedStatic mockedClock; @BeforeEach - void setup() { + void setup() throws IOException { service = new SourceSystemService(builder, handleComponent, 
repository, mappingService, - kafkaPublisherService, MAPPER); + kafkaPublisherService, MAPPER, yamlMapper, jobProperties, configuration, batchV1Api); initTime(); + initFreeMaker(); + } + + private void initFreeMaker() throws IOException { + configuration.setDirectoryForTemplateLoading(new File("src/main/resources/templates/")); } @AfterEach @@ -89,6 +124,12 @@ void testCreateSourceSystem() throws Exception { given(handleComponent.postHandle(any())).willReturn(HANDLE); given(mappingService.getActiveMapping(sourceSystem.mappingId())).willReturn( Optional.of(givenMappingRecord(sourceSystem.mappingId(), 1))); + var createCron = mock(APIcreateNamespacedCronJobRequest.class); + given(batchV1Api.createNamespacedCronJob(eq(NAMESPACE), any(V1CronJob.class))) + .willReturn(createCron); + var createJob = mock(APIcreateNamespacedJobRequest.class); + given( + batchV1Api.createNamespacedJob(eq(NAMESPACE), any(V1Job.class))).willReturn(createJob); // When var result = service.createSourceSystem(sourceSystem, OBJECT_CREATOR, SYSTEM_PATH); @@ -110,6 +151,57 @@ void testCreateSourceSystemMappingNotFound() { () -> service.createSourceSystem(sourceSystem, OBJECT_CREATOR, SYSTEM_PATH)); } + @Test + void testCreateSourceSystemCronFails() throws Exception { + // Given + var sourceSystem = givenSourceSystem(); + given(handleComponent.postHandle(any())).willReturn(HANDLE); + given(mappingService.getActiveMapping(sourceSystem.mappingId())).willReturn( + Optional.of(givenMappingRecord(sourceSystem.mappingId(), 1))); + var createCron = mock(APIcreateNamespacedCronJobRequest.class); + given(batchV1Api.createNamespacedCronJob(eq(NAMESPACE), any(V1CronJob.class))) + .willReturn(createCron); + given(createCron.execute()).willThrow(ApiException.class); + + // When + assertThrowsExactly(ProcessingFailedException.class, + () -> service.createSourceSystem(sourceSystem, OBJECT_CREATOR, SYSTEM_PATH)); + + // Then + then(repository).should().createSourceSystem(givenSourceSystemRecord()); + 
then(builder).should().buildRollbackCreateRequest(HANDLE); + then(handleComponent).should().rollbackHandleCreation(any()); + then(repository).should().rollbackSourceSystemCreation(HANDLE); + } + + @Test + void testCreateSourceSystemTranslatorJobFails() throws Exception { + // Given + var sourceSystem = givenSourceSystem(); + given(handleComponent.postHandle(any())).willReturn(HANDLE); + given(mappingService.getActiveMapping(sourceSystem.mappingId())).willReturn( + Optional.of(givenMappingRecord(sourceSystem.mappingId(), 1))); + var createCron = mock(APIcreateNamespacedCronJobRequest.class); + given(batchV1Api.createNamespacedCronJob(eq(NAMESPACE), any(V1CronJob.class))) + .willReturn(createCron); + var deleteCron = mock(APIdeleteNamespacedCronJobRequest.class); + given(batchV1Api.deleteNamespacedCronJob(anyString(), eq(NAMESPACE))).willReturn(deleteCron); + var createJob = mock(APIcreateNamespacedJobRequest.class); + given( + batchV1Api.createNamespacedJob(eq(NAMESPACE), any(V1Job.class))).willReturn(createJob); + given(createJob.execute()).willThrow(ApiException.class); + + // When + assertThrowsExactly(ProcessingFailedException.class, + () -> service.createSourceSystem(sourceSystem, OBJECT_CREATOR, SYSTEM_PATH)); + + // Then + then(repository).should().createSourceSystem(givenSourceSystemRecord()); + then(builder).should().buildRollbackCreateRequest(HANDLE); + then(handleComponent).should().rollbackHandleCreation(any()); + then(repository).should().rollbackSourceSystemCreation(HANDLE); + } + @Test void testCreateSourceSystemKafkaFails() throws Exception { // Given @@ -117,6 +209,14 @@ void testCreateSourceSystemKafkaFails() throws Exception { given(handleComponent.postHandle(any())).willReturn(HANDLE); given(mappingService.getActiveMapping(sourceSystem.mappingId())).willReturn( Optional.of(givenMappingRecord(sourceSystem.mappingId(), 1))); + var createCron = mock(APIcreateNamespacedCronJobRequest.class); + given(batchV1Api.createNamespacedCronJob(eq(NAMESPACE), 
any(V1CronJob.class))) + .willReturn(createCron); + var deleteCron = mock(APIdeleteNamespacedCronJobRequest.class); + given(batchV1Api.deleteNamespacedCronJob(anyString(), eq(NAMESPACE))).willReturn(deleteCron); + var createJob = mock(APIcreateNamespacedJobRequest.class); + given( + batchV1Api.createNamespacedJob(eq(NAMESPACE), any(V1Job.class))).willReturn(createJob); willThrow(JsonProcessingException.class).given(kafkaPublisherService) .publishCreateEvent(HANDLE, MAPPER.valueToTree(givenSourceSystemRecord()), SUBJECT_TYPE); @@ -135,9 +235,11 @@ void testCreateSourceSystemKafkaFails() throws Exception { void testCreateMasHandleFails() throws Exception { // Given var sourceSystem = givenSourceSystem(); + given(mappingService.getActiveMapping(sourceSystem.mappingId())).willReturn( + Optional.of(givenMappingRecord(sourceSystem.mappingId(), 1))); willThrow(PidCreationException.class).given(handleComponent).postHandle(any()); - // Then + // When / Then assertThrowsExactly(ProcessingFailedException.class, () -> service.createSourceSystem(sourceSystem, OBJECT_CREATOR, MAPPING_PATH)); } @@ -152,6 +254,14 @@ void testCreateSourceSystemKafkaAndRollbackFails() throws Exception { willThrow(JsonProcessingException.class).given(kafkaPublisherService) .publishCreateEvent(HANDLE, MAPPER.valueToTree(givenSourceSystemRecord()), SUBJECT_TYPE); willThrow(PidCreationException.class).given(handleComponent).rollbackHandleCreation(any()); + var createCron = mock(APIcreateNamespacedCronJobRequest.class); + given(batchV1Api.createNamespacedCronJob(eq(NAMESPACE), any(V1CronJob.class))) + .willReturn(createCron); + var deleteCron = mock(APIdeleteNamespacedCronJobRequest.class); + given(batchV1Api.deleteNamespacedCronJob(anyString(), eq(NAMESPACE))).willReturn(deleteCron); + var createJob = mock(APIcreateNamespacedJobRequest.class); + given( + batchV1Api.createNamespacedJob(eq(NAMESPACE), any(V1Job.class))).willReturn(createJob); // When assertThrowsExactly(ProcessingFailedException.class, @@ 
-164,6 +274,19 @@ void testCreateSourceSystemKafkaAndRollbackFails() throws Exception { then(repository).should().rollbackSourceSystemCreation(HANDLE); } + @Test + void testRunSourceSystemById() { + // Given + var sourceSystemRecord = givenSourceSystemRecord(); + given(repository.getSourceSystem(HANDLE)).willReturn(sourceSystemRecord); + var createJob = mock(APIcreateNamespacedJobRequest.class); + given( + batchV1Api.createNamespacedJob(eq(NAMESPACE), any(V1Job.class))).willReturn(createJob); + + // Then + assertDoesNotThrow(() -> service.runSourceSystemById(HANDLE)); + } + @Test void testUpdateSourceSystem() throws Exception { var sourceSystem = givenSourceSystem(); @@ -172,10 +295,13 @@ void testUpdateSourceSystem() throws Exception { 1, OBJECT_CREATOR, CREATED, - null, new SourceSystem("name", "endpoint", "description", "id") + null, new SourceSystem("name", "endpoint", "description", biocase, "id") )); var expected = givenSourceSystemSingleJsonApiWrapper(2); given(repository.getActiveSourceSystem(HANDLE)).willReturn(prevRecord); + var updateCron = mock(APIreplaceNamespacedCronJobRequest.class); + given(batchV1Api.replaceNamespacedCronJob(anyString(), eq(NAMESPACE), any(V1CronJob.class))) + .willReturn(updateCron); // When var result = service.updateSourceSystem(HANDLE, sourceSystem, OBJECT_CREATOR, SYSTEM_PATH); @@ -188,6 +314,31 @@ null, new SourceSystem("name", "endpoint", "description", "id") givenJsonPatch(), SUBJECT_TYPE); } + @Test + void testUpdateSourceSystemCronJobFails() throws Exception { + var sourceSystem = givenSourceSystem(); + var prevRecord = Optional.of(new SourceSystemRecord( + HANDLE, + 1, + OBJECT_CREATOR, + CREATED, + null, new SourceSystem("name", "endpoint", "description", biocase, "id") + )); + given(repository.getActiveSourceSystem(HANDLE)).willReturn(prevRecord); + var updateCron = mock(APIreplaceNamespacedCronJobRequest.class); + given(batchV1Api.replaceNamespacedCronJob(anyString(), eq(NAMESPACE), any(V1CronJob.class))) + 
.willReturn(updateCron); + given(updateCron.execute()).willThrow(ApiException.class); + + // When + assertThrowsExactly(ProcessingFailedException.class, + () -> service.updateSourceSystem(HANDLE, sourceSystem, OBJECT_CREATOR, SYSTEM_PATH)); + + // Then + then(repository).should().updateSourceSystem(givenSourceSystemRecord(2)); + then(repository).should().updateSourceSystem(prevRecord.get()); + } + @Test void testUpdateSourceSystemKafkaFails() throws Exception { var sourceSystem = givenSourceSystem(); @@ -196,12 +347,15 @@ void testUpdateSourceSystemKafkaFails() throws Exception { 1, OBJECT_CREATOR, CREATED, - null, new SourceSystem("name", "endpoint", "description", "id") + null, new SourceSystem("name", "endpoint", "description", biocase, "id") )); given(repository.getActiveSourceSystem(HANDLE)).willReturn(prevRecord); willThrow(JsonProcessingException.class).given(kafkaPublisherService) .publishUpdateEvent(HANDLE, MAPPER.valueToTree(givenSourceSystemRecord(2)), givenJsonPatch(), SUBJECT_TYPE); + var updateCron = mock(APIreplaceNamespacedCronJobRequest.class); + given(batchV1Api.replaceNamespacedCronJob(anyString(), eq(NAMESPACE), any(V1CronJob.class))) + .willReturn(updateCron); // When assertThrowsExactly(ProcessingFailedException.class, @@ -320,10 +474,28 @@ void testDeleteSourceSystem() { // Given given(repository.getActiveSourceSystem(HANDLE)).willReturn( Optional.of(givenSourceSystemRecord())); + var deleteCron = mock(APIdeleteNamespacedCronJobRequest.class); + given(batchV1Api.deleteNamespacedCronJob(anyString(), eq(NAMESPACE))) + .willReturn(deleteCron); + // Then assertDoesNotThrow(() -> service.deleteSourceSystem(HANDLE)); } + @Test + void testDeleteSourceSystemCronFailed() throws ApiException { + // Given + given(repository.getActiveSourceSystem(HANDLE)).willReturn( + Optional.of(givenSourceSystemRecord())); + var deleteCron = mock(APIdeleteNamespacedCronJobRequest.class); + given(batchV1Api.deleteNamespacedCronJob(anyString(), eq(NAMESPACE))) + 
.willReturn(deleteCron); + given(deleteCron.execute()).willThrow(ApiException.class); + + // Then + assertThrowsExactly(ProcessingFailedException.class, () -> service.deleteSourceSystem(HANDLE)); + } + @Test void testDeleteSourceSystemNotFound() { // Given diff --git a/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java b/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java index 27a658e..42383ec 100644 --- a/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java +++ b/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java @@ -3,6 +3,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; +import eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType; import eu.dissco.orchestration.backend.domain.HandleType; import eu.dissco.orchestration.backend.domain.MachineAnnotationService; import eu.dissco.orchestration.backend.domain.MachineAnnotationServiceRecord; @@ -112,7 +113,8 @@ public static SourceSystemRecord givenSourceSystemRecord(int version) { version, OBJECT_CREATOR, CREATED, - null, givenSourceSystem() + null, + givenSourceSystem() ); } @@ -121,6 +123,7 @@ public static SourceSystem givenSourceSystem() { OBJECT_NAME, SS_ENDPOINT, OBJECT_DESCRIPTION, + TranslatorType.biocase, HANDLE_ALT ); } diff --git a/src/test/resources/db/migration/V1__init_db.sql b/src/test/resources/db/migration/V1__init_db.sql index 47019f8..29ae0ee 100644 --- a/src/test/resources/db/migration/V1__init_db.sql +++ b/src/test/resources/db/migration/V1__init_db.sql @@ -1,3 +1,5 @@ +create type translator_type as enum ('biocase', 'dwca'); + create table source_system ( id text not null @@ -10,7 +12,8 @@ create table source_system deleted timestamp with time zone, mapping_id text not null, version integer not null, - creator text not null + creator text not null, + translator_type translator_type not null ); 
create table mapping From 581ad2843a8205324276befec9447d1a39c3ce1a Mon Sep 17 00:00:00 2001 From: Sam Leeflang Date: Tue, 27 Feb 2024 11:13:30 +0100 Subject: [PATCH 2/4] Sonar review --- .../MachineAnnotationServiceServiceTest.java | 12 +++++----- .../service/SourceSystemServiceTest.java | 24 ++++++++++--------- .../backend/testutils/TestUtils.java | 19 +++++++++++---- 3 files changed, 34 insertions(+), 21 deletions(-) diff --git a/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java index 593db41..8bd8bb4 100644 --- a/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java +++ b/src/test/java/eu/dissco/orchestration/backend/service/MachineAnnotationServiceServiceTest.java @@ -185,8 +185,8 @@ void testCreateKedaFails() throws Exception { given(appsV1Api.createNamespacedDeployment(eq("namespace"), any(V1Deployment.class))) .willReturn(createDeploy); var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); - given(appsV1Api.deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), - eq("namespace"))).willReturn(deleteDeploy); + given(appsV1Api.deleteNamespacedDeployment(SUFFIX.toLowerCase() + "-deployment", + "namespace")).willReturn(deleteDeploy); var createCustom = mock(APIcreateNamespacedCustomObjectRequest.class); given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), any(Object.class))).willReturn(createCustom); @@ -223,8 +223,8 @@ void testCreateMasKafkaFails() throws Exception { given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), any(Object.class))).willReturn(createCustom); var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); - given(appsV1Api.deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), - 
eq("namespace"))).willReturn(deleteDeploy); + given(appsV1Api.deleteNamespacedDeployment(SUFFIX.toLowerCase() + "-deployment", + "namespace")).willReturn(deleteDeploy); var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), eq(SUFFIX.toLowerCase() + "-scaled-object"))).willReturn(deleteCustom); @@ -272,8 +272,8 @@ void testCreateMasKafkaAndPidFails() throws Exception { given(customObjectsApi.createNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), any(Object.class))).willReturn(createCustom); var deleteDeploy = mock(APIdeleteNamespacedDeploymentRequest.class); - given(appsV1Api.deleteNamespacedDeployment(eq(SUFFIX.toLowerCase() + "-deployment"), - eq("namespace"))).willReturn(deleteDeploy); + given(appsV1Api.deleteNamespacedDeployment(SUFFIX.toLowerCase() + "-deployment", + "namespace")).willReturn(deleteDeploy); var deleteCustom = mock(APIdeleteNamespacedCustomObjectRequest.class); given(customObjectsApi.deleteNamespacedCustomObject(anyString(), anyString(), eq("namespace"), anyString(), eq(SUFFIX.toLowerCase() + "-scaled-object"))).willReturn(deleteCustom); diff --git a/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java b/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java index ad478cc..87c195f 100644 --- a/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java +++ b/src/test/java/eu/dissco/orchestration/backend/service/SourceSystemServiceTest.java @@ -1,6 +1,5 @@ package eu.dissco.orchestration.backend.service; -import static eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType.biocase; import static eu.dissco.orchestration.backend.database.jooq.enums.TranslatorType.dwca; import static eu.dissco.orchestration.backend.service.SourceSystemService.SUBJECT_TYPE; import static 
eu.dissco.orchestration.backend.testutils.TestUtils.CREATED; @@ -247,12 +246,13 @@ void testCreateMasHandleFails() throws Exception { @Test void testCreateSourceSystemKafkaAndRollbackFails() throws Exception { // Given - var sourceSystem = givenSourceSystem(); + var sourceSystem = givenSourceSystem(dwca); given(handleComponent.postHandle(any())).willReturn(HANDLE); given(mappingService.getActiveMapping(sourceSystem.mappingId())).willReturn( Optional.of(givenMappingRecord(sourceSystem.mappingId(), 1))); willThrow(JsonProcessingException.class).given(kafkaPublisherService) - .publishCreateEvent(HANDLE, MAPPER.valueToTree(givenSourceSystemRecord()), SUBJECT_TYPE); + .publishCreateEvent(HANDLE, MAPPER.valueToTree(givenSourceSystemRecord(dwca)), + SUBJECT_TYPE); willThrow(PidCreationException.class).given(handleComponent).rollbackHandleCreation(any()); var createCron = mock(APIcreateNamespacedCronJobRequest.class); given(batchV1Api.createNamespacedCronJob(eq(NAMESPACE), any(V1CronJob.class))) @@ -268,7 +268,7 @@ void testCreateSourceSystemKafkaAndRollbackFails() throws Exception { () -> service.createSourceSystem(sourceSystem, OBJECT_CREATOR, SYSTEM_PATH)); // Then - then(repository).should().createSourceSystem(givenSourceSystemRecord()); + then(repository).should().createSourceSystem(givenSourceSystemRecord(dwca)); then(builder).should().buildRollbackCreateRequest(HANDLE); then(handleComponent).should().rollbackHandleCreation(any()); then(repository).should().rollbackSourceSystemCreation(HANDLE); @@ -295,7 +295,7 @@ void testUpdateSourceSystem() throws Exception { 1, OBJECT_CREATOR, CREATED, - null, new SourceSystem("name", "endpoint", "description", biocase, "id") + null, new SourceSystem("name", "endpoint", "description", dwca, "id") )); var expected = givenSourceSystemSingleJsonApiWrapper(2); given(repository.getActiveSourceSystem(HANDLE)).willReturn(prevRecord); @@ -322,7 +322,7 @@ void testUpdateSourceSystemCronJobFails() throws Exception { 1, OBJECT_CREATOR, 
CREATED, - null, new SourceSystem("name", "endpoint", "description", biocase, "id") + null, new SourceSystem("name", "endpoint", "description", dwca, "id") )); given(repository.getActiveSourceSystem(HANDLE)).willReturn(prevRecord); var updateCron = mock(APIreplaceNamespacedCronJobRequest.class); @@ -347,7 +347,7 @@ void testUpdateSourceSystemKafkaFails() throws Exception { 1, OBJECT_CREATOR, CREATED, - null, new SourceSystem("name", "endpoint", "description", biocase, "id") + null, new SourceSystem("name", "endpoint", "description", dwca, "id") )); given(repository.getActiveSourceSystem(HANDLE)).willReturn(prevRecord); willThrow(JsonProcessingException.class).given(kafkaPublisherService) @@ -367,10 +367,12 @@ null, new SourceSystem("name", "endpoint", "description", biocase, "id") } private JsonNode givenJsonPatch() throws JsonProcessingException { - return MAPPER.readTree("[{\"op\":\"replace\",\"path\":\"/mappingId\",\"value\":\"id\"}," - + "{\"op\":\"replace\",\"path\":\"/endpoint\",\"value\":\"endpoint\"},{\"op\":\"replace\"" - + ",\"path\":\"/name\",\"value\":\"name\"},{\"op\":\"replace\",\"path\":\"/description\"" - + ",\"value\":\"description\"}]"); + return MAPPER.readTree( + "[{\"op\":\"replace\",\"path\":\"/mappingId\",\"value\":\"id\"},{\"op\":\"replace\"," + + "\"path\":\"/endpoint\",\"value\":\"endpoint\"},{\"op\":\"replace\"," + + "\"path\":\"/translatorType\",\"value\":\"dwca\"},{\"op\":\"replace\"," + + "\"path\":\"/name\",\"value\":\"name\"},{\"op\":\"replace\",\"path\":\"/description\"," + + "\"value\":\"description\"}]"); } @Test diff --git a/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java b/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java index 42383ec..f752c64 100644 --- a/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java +++ b/src/test/java/eu/dissco/orchestration/backend/testutils/TestUtils.java @@ -104,26 +104,37 @@ public static JsonApiRequestWrapper givenMappingRequest() { } 
public static SourceSystemRecord givenSourceSystemRecord() { - return givenSourceSystemRecord(1); + return givenSourceSystemRecord(1, TranslatorType.biocase); } - public static SourceSystemRecord givenSourceSystemRecord(int version) { + return givenSourceSystemRecord(version, TranslatorType.biocase); + } + + public static SourceSystemRecord givenSourceSystemRecord(TranslatorType translatorType) { + return givenSourceSystemRecord(1, translatorType); + } + + public static SourceSystemRecord givenSourceSystemRecord(int version, TranslatorType translatorType) { return new SourceSystemRecord( HANDLE, version, OBJECT_CREATOR, CREATED, null, - givenSourceSystem() + givenSourceSystem(translatorType) ); } public static SourceSystem givenSourceSystem() { + return givenSourceSystem(TranslatorType.biocase); + } + + public static SourceSystem givenSourceSystem(TranslatorType translatorType) { return new SourceSystem( OBJECT_NAME, SS_ENDPOINT, OBJECT_DESCRIPTION, - TranslatorType.biocase, + translatorType, HANDLE_ALT ); } From b4626f231fb3df7a15c42545689a9d413a5db8ad Mon Sep 17 00:00:00 2001 From: Sam Leeflang Date: Tue, 27 Feb 2024 13:44:20 +0100 Subject: [PATCH 3/4] Generate random weekly cronjob --- .../backend/service/SourceSystemService.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java index d2bd24f..2ce3cdf 100644 --- a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java +++ b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java @@ -331,6 +331,7 @@ private void triggerTranslatorJob(SourceSystemRecord sourceSystemRecord) private Map getTemplateProperties(SourceSystemRecord sourceSystem, boolean isCronJob) { var map = new HashMap(); + var cron = generateCron(); var jobName = generateJobName(sourceSystem, isCronJob); map.put("image", 
jobProperties.getImage()); map.put("sourceSystemId", sourceSystem.id()); @@ -339,11 +340,17 @@ private Map getTemplateProperties(SourceSystemRecord sourceSyste map.put("kafkaHost", jobProperties.getKafkaHost()); map.put("kafkaTopic", jobProperties.getKafkaTopic()); if (isCronJob) { - map.put("cron", "0 0 * * *"); + map.put("cron", cron); } return map; } + private String generateCron() { + String day = String.valueOf((int) (Math.random() * 7)); + String hour = String.valueOf((int) (Math.random() * 24)); + return "0 " + hour + " * * " + day; + } + private StringWriter fillTemplate(Map templateProperties, TranslatorType translatorType, boolean isCron) throws IOException, TemplateException { var writer = new StringWriter(); From 9af290399be81eafd60261c8b5726a2c1cd9a62a Mon Sep 17 00:00:00 2001 From: Sam Leeflang Date: Wed, 6 Mar 2024 09:01:11 +0100 Subject: [PATCH 4/4] Code review --- .../MachineAnnotationServiceController.java | 2 +- .../backend/domain/SourceSystem.java | 2 +- .../backend/service/SourceSystemService.java | 30 +++++++++---------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java b/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java index 25dc41a..087280d 100644 --- a/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java +++ b/src/main/java/eu/dissco/orchestration/backend/controller/MachineAnnotationServiceController.java @@ -44,7 +44,7 @@ public class MachineAnnotationServiceController { public ResponseEntity createMachineAnnotationService( Authentication authentication, @RequestBody JsonApiRequestWrapper requestBody, HttpServletRequest servletRequest) - throws JsonProcessingException, TransformerException, ProcessingFailedException { + throws JsonProcessingException, ProcessingFailedException { var machineAnnotationService = getMachineAnnotation(requestBody); var
userId = authentication.getName(); log.info("Received create request for machine annotation service: {} from user: {}", diff --git a/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java b/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java index ab86e92..33537aa 100644 --- a/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java +++ b/src/main/java/eu/dissco/orchestration/backend/domain/SourceSystem.java @@ -9,7 +9,7 @@ public record SourceSystem( @NotBlank String endpoint, String description, - + @NotNull TranslatorType translatorType, @NotBlank String mappingId diff --git a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java index 2ce3cdf..7355edf 100644 --- a/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java +++ b/src/main/java/eu/dissco/orchestration/backend/service/SourceSystemService.java @@ -65,14 +65,24 @@ private static String getSuffix(String sourceSystemId) { } private static String generateJobName(SourceSystemRecord sourceSystem, boolean isCron) { - var name = sourceSystem.sourceSystem().translatorType().name().toLowerCase() + "-" + getSuffix( - sourceSystem.id()) + "-translator-service"; + var name = + sourceSystem.sourceSystem().translatorType().getLiteral().toLowerCase() + "-" + + getSuffix(sourceSystem.id()) + "-translator-service"; if (!isCron) { name = name + "-" + RandomStringUtils.randomAlphabetic(6).toLowerCase(); } return name; } + private static void logException(SourceSystemRecord sourceSystemRecord, Exception e) { + if (e instanceof IOException || e instanceof TemplateException) { + log.error("Failed to create translator template for: {}", sourceSystemRecord, e); + } else if (e instanceof ApiException apiException) { + log.error("Failed to deploy kubernetes deployment to cluster with code: {} and message: {}", + apiException.getCode(),
apiException.getResponseBody()); + } + } + public JsonApiWrapper createSourceSystem(SourceSystem sourceSystem, String userId, String path) throws NotFoundException, ProcessingFailedException { validateMappingExists(sourceSystem.mappingId()); @@ -95,13 +105,13 @@ private String createHandle(SourceSystem sourceSystem) throws ProcessingFailedEx } } - private void createTranslatorJob(SourceSystemRecord sourceSystemRecord, boolean rollback) + private void createTranslatorJob(SourceSystemRecord sourceSystemRecord, boolean rollbackOnFailure) throws ProcessingFailedException { try { triggerTranslatorJob(sourceSystemRecord); } catch (IOException | TemplateException | ApiException e) { logException(sourceSystemRecord, e); - if (rollback) { + if (rollbackOnFailure) { rollbackSourceSystemCreation(sourceSystemRecord, true); } throw new ProcessingFailedException("Failed to deploy job to cluster", e); @@ -211,15 +221,6 @@ private void updateCronJob(SourceSystemRecord sourceSystemRecord, } } - private static void logException(SourceSystemRecord sourceSystemRecord, Exception e) { - if (e instanceof IOException || e instanceof TemplateException) { - log.error("Failed to create translator template for: {}", sourceSystemRecord, e); - } else if (e instanceof ApiException apiException){ - log.error("Failed to deploy kubernetes deployment to cluster with code: {} and message: {}", - apiException.getCode(), apiException.getResponseBody()); - } - } - private void publishUpdateEvent(SourceSystemRecord newSourceSystemRecord, SourceSystemRecord currentSourceSystemRecord) throws ProcessingFailedException { JsonNode jsonPatch = JsonDiff.asJson(mapper.valueToTree(newSourceSystemRecord.sourceSystem()), @@ -331,7 +332,6 @@ private void triggerTranslatorJob(SourceSystemRecord sourceSystemRecord) private Map getTemplateProperties(SourceSystemRecord sourceSystem, boolean isCronJob) { var map = new HashMap(); - var cron = generateCron(); var jobName = generateJobName(sourceSystem, isCronJob); 
map.put("image", jobProperties.getImage()); map.put("sourceSystemId", sourceSystem.id()); @@ -340,7 +340,7 @@ private Map getTemplateProperties(SourceSystemRecord sourceSyste map.put("kafkaHost", jobProperties.getKafkaHost()); map.put("kafkaTopic", jobProperties.getKafkaTopic()); if (isCronJob) { - map.put("cron", cron); + map.put("cron", generateCron()); } return map; }