From 9f9fdc27e6db7768d1cc773a95c4012f0dbb1320 Mon Sep 17 00:00:00 2001 From: Chris Bono Date: Mon, 11 Dec 2023 17:33:24 -0600 Subject: [PATCH] Use ROW_NUMBER in JdbcAggregateJobQueryDao to improve performance (#5575) * Use ROW_NUMBER in JdbcAggregateJobQueryDao to improve performance See #5524 --- .../core/database/support/DatabaseType.java | 21 ++- .../batch/SimpleJobServiceFactoryBean.java | 16 ++- .../AggregateDataFlowTaskConfiguration.java | 18 +-- .../repository/AggregateJobQueryDao.java | 1 + .../repository/JdbcAggregateJobQueryDao.java | 110 ++++++++------- .../server/service/JobServiceContainer.java | 6 +- ...AbstractJdbcAggregateJobQueryDaoTests.java | 12 +- .../batch/AbstractSimpleJobServiceTests.java | 14 +- ...JobQueryDaoRowNumberOptimizationTests.java | 81 +++++++++++ .../db/migration/AbstractSmokeTest.java | 112 ++++++++++++--- .../db/migration/JobExecutionTestUtils.java | 133 ++++++++++++++++++ .../server/db/migration/MariaDBSmokeTest.java | 2 +- .../server/db/migration/MySQL57SmokeTest.java | 6 + .../db/migration/SqlServer2017SmokeTest.java | 5 + .../db/migration/SqlServer2019SmokeTest.java | 6 + .../db/migration/SqlServer2022SmokeTest.java | 7 + .../server/db/support/DatabaseTypeTests.java | 102 ++++++++++++++ .../src/test/resources/logback-test.xml | 3 + spring-cloud-dataflow-test/pom.xml | 20 +++ .../dataflow/server/db/ContainerSupport.java | 32 +++++ .../server/db/DB2_11_5_ContainerSupport.java | 40 ++++++ .../db/MariaDB_10_6_ContainerSupport.java | 44 ++++++ .../db/MariaDB_11_ContainerSupport.java | 44 ++++++ .../server/db/MySQL_5_7_ContainerSupport.java | 40 ++++++ .../server/db/MySQL_8_ContainerSupport.java | 40 ++++++ .../db/Oracle_XE_18_ContainerSupport.java | 40 ++++++ .../db/Postgres_14_ContainerSupport.java | 40 ++++++ .../db/SqlServer_2017_ContainerSupport.java | 42 ++++++ .../db/SqlServer_2019_ContainerSupport.java | 42 ++++++ .../db/SqlServer_2022_ContainerSupport.java | 42 ++++++ .../db/oracle/OracleContainerSupport.java | 11 +- 31 
files changed, 1027 insertions(+), 105 deletions(-) create mode 100644 spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDaoRowNumberOptimizationTests.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/support/DatabaseTypeTests.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_10_6_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_11_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_5_7_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_8_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Oracle_XE_18_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Postgres_14_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2017_ContainerSupport.java create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2019_ContainerSupport.java create mode 100644 
spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2022_ContainerSupport.java diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java index 2b1aea7b01..f207ac3df5 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.core.database.support; +import java.sql.DatabaseMetaData; import java.util.HashMap; import java.util.Map; @@ -100,6 +101,24 @@ public static DatabaseType fromProductName(String productName) { } } + /** + * Determines if the Database that the datasource refers to supports the {@code ROW_NUMBER()} SQL function. 
+ * @param dataSource the datasource pointing to the DB in question + * @return whether the database supports the SQL {@code ROW_NUMBER()} function + * @throws MetaDataAccessException if error occurs + */ + public static boolean supportsRowNumberFunction(DataSource dataSource) throws MetaDataAccessException { + DatabaseType databaseType = DatabaseType.fromMetaData(dataSource); + if (databaseType == DatabaseType.H2 || databaseType == DatabaseType.HSQL) { + return false; + } + if (databaseType != DatabaseType.MYSQL) { + return true; + } + int majorVersion = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseMajorVersion); + return (majorVersion >= 8); + } + private String getProductName() { return productName; } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java index 15c3dba1df..3d4b9a0752 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java @@ -45,6 +45,8 @@ import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao; import org.springframework.cloud.dataflow.server.repository.JdbcAggregateJobQueryDao; import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.context.EnvironmentAware; +import org.springframework.core.env.Environment; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.support.lob.DefaultLobHandler; @@ -60,7 +62,7 @@ * @author Dave Syer * */ -public class SimpleJobServiceFactoryBean implements FactoryBean, InitializingBean { +public class 
SimpleJobServiceFactoryBean implements FactoryBean, InitializingBean, EnvironmentAware { private static final Logger logger = LoggerFactory.getLogger(SimpleJobServiceFactoryBean.class); @@ -94,6 +96,8 @@ public class SimpleJobServiceFactoryBean implements FactoryBean, Ini private SchemaVersionTarget schemaVersionTarget; + private Environment environment; + public void setTransactionManager(PlatformTransactionManager transactionManager) { this.transactionManager = transactionManager; } @@ -132,6 +136,11 @@ public void setMaxVarCharLength(int maxVarCharLength) { this.maxVarCharLength = maxVarCharLength; } + @Override + public void setEnvironment(Environment environment) { + this.environment = environment; + } + /** * Public setter for the {@link DataSource}. * @param dataSource a {@link DataSource} @@ -301,10 +310,11 @@ private int determineClobTypeToUse(String databaseType) { return Types.CLOB; } } + protected AggregateJobQueryDao createAggregateJobQueryDao() throws Exception { - AggregateJobQueryDao dao = new JdbcAggregateJobQueryDao(this.dataSource, this.schemaService, this.jobServiceContainer); - return dao; + return new JdbcAggregateJobQueryDao(this.dataSource, this.schemaService, this.jobServiceContainer, this.environment); } + /** * Create a {@link SimpleJobService} from the configuration provided. 
* diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java index 15b8042510..3c000b5f8b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java @@ -55,6 +55,7 @@ import org.springframework.cloud.task.repository.support.DatabaseType; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.transaction.PlatformTransactionManager; @@ -129,8 +130,11 @@ public JobExplorerContainer jobExplorerContainer(DataSource dataSource, SchemaSe } @Bean - public JobServiceContainer jobServiceContainer(DataSource dataSource, PlatformTransactionManager platformTransactionManager, SchemaService schemaService, JobRepositoryContainer jobRepositoryContainer, JobExplorerContainer jobExplorerContainer) { - return new JobServiceContainer(dataSource, platformTransactionManager, schemaService, jobRepositoryContainer, jobExplorerContainer); + public JobServiceContainer jobServiceContainer(DataSource dataSource, PlatformTransactionManager platformTransactionManager, + SchemaService schemaService, JobRepositoryContainer jobRepositoryContainer, + JobExplorerContainer jobExplorerContainer, Environment environment) { + return new JobServiceContainer(dataSource, platformTransactionManager, schemaService, jobRepositoryContainer, + jobExplorerContainer, environment); } @Bean @@ -151,17 +155,13 @@ public TaskDeploymentReader 
taskDeploymentReader(TaskDeploymentRepository reposi } @Bean - public AggregateJobQueryDao aggregateJobQueryDao(DataSource dataSource, SchemaService schemaService, JobServiceContainer jobServiceContainer) throws Exception { - return new JdbcAggregateJobQueryDao(dataSource, schemaService, jobServiceContainer); + public AggregateJobQueryDao aggregateJobQueryDao(DataSource dataSource, SchemaService schemaService, + JobServiceContainer jobServiceContainer, Environment environment) throws Exception { + return new JdbcAggregateJobQueryDao(dataSource, schemaService, jobServiceContainer, environment); } @Bean public TaskBatchDaoContainer taskBatchDaoContainer(DataSource dataSource, SchemaService schemaService) { return new TaskBatchDaoContainer(dataSource, schemaService); } - - @PostConstruct - public void setup() { - logger.info("created: org.springframework.cloud.dataflow.server.config.AggregateDataFlowContainerConfiguration"); - } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java index c8f6e9e3e0..81a18384af 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java @@ -33,6 +33,7 @@ * Provides for reading job execution data for Batch 4 and 5 schema versions. 
* * @author Corneil du Plessis + * @since 2.11.0 */ public interface AggregateJobQueryDao { Page listJobInstances(String jobName, Pageable pageable) throws NoSuchJobException; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java index 0a019597c8..3492da0346 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java @@ -26,8 +26,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.TreeMap; import java.util.stream.Collectors; + import javax.sql.DataSource; import org.slf4j.Logger; @@ -43,10 +45,11 @@ import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.batch.core.launch.NoSuchJobInstanceException; import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; import org.springframework.batch.item.database.Order; import org.springframework.batch.item.database.PagingQueryProvider; import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions; import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; @@ -59,6 +62,7 @@ import org.springframework.cloud.dataflow.server.service.impl.OffsetOutOfBoundsException; 
import org.springframework.core.convert.support.ConfigurableConversionService; import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.core.env.Environment; import org.springframework.dao.DataAccessException; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.data.convert.Jsr310Converters; @@ -78,11 +82,12 @@ * but contains Spring Cloud Data Flow specific operations. This functionality might * be migrated to Spring Batch itself eventually. * - * @author Gunnar Hillert * @author Corneil du Plessis + * @since 2.11.0 */ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { - private final static Logger logger = LoggerFactory.getLogger(JdbcAggregateJobQueryDao.class); + + private final static Logger LOG = LoggerFactory.getLogger(JdbcAggregateJobQueryDao.class); private static final String GET_COUNT = "SELECT COUNT(1) from AGGREGATE_JOB_EXECUTION"; @@ -124,12 +129,6 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { private static final String FIELDS_WITH_STEP_COUNT = FIELDS + ", (SELECT COUNT(*) FROM AGGREGATE_STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND S.SCHEMA_TARGET = E.SCHEMA_TARGET) as STEP_COUNT"; - - private static final String GET_RUNNING_EXECUTIONS = "SELECT " + FIELDS + - " from AGGREGATE_JOB_EXECUTION E" + - " join AGGREGATE_JOB_INSTANCE I ON E.JOB_INSTANCE_ID = I.JOB_INSTANCE_ID AND E.SCHEMA_TARGET = I.SCHEMA_TARGET" + - " where and E.END_TIME is NULL"; - private static final String GET_JOB_INSTANCE_BY_ID = "SELECT I.JOB_INSTANCE_ID, I.VERSION, I.JOB_NAME, I.JOB_KEY" + " FROM AGGREGATE_JOB_INSTANCE I" + " WHERE I.JOB_INSTANCE_ID = ? 
AND I.SCHEMA_TARGET = ?"; @@ -147,7 +146,6 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { private static final String TASK_EXECUTION_ID_FILTER = "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.SCHEMA_TARGET = E.SCHEMA_TARGET AND B.TASK_EXECUTION_ID = ? AND E.SCHEMA_TARGET = ?"; - private static final String FROM_CLAUSE_TASK_EXEC_BATCH = "JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" + " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET"; @@ -176,9 +174,6 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { " E.JOB_EXECUTION_ID as JOB_EXECUTION_ID, E.START_TIME as START_TIME, E.END_TIME as END_TIME, E.STATUS as STATUS, E.EXIT_CODE as EXIT_CODE, E.EXIT_MESSAGE as EXIT_MESSAGE, E.CREATE_TIME as CREATE_TIME," + " E.LAST_UPDATED as LAST_UPDATED, E.VERSION as VERSION, T.TASK_EXECUTION_ID as TASK_EXECUTION_ID"; - private static final String FIND_JOBS_FIELDS_WITH_STEP_COUNT = FIND_JOBS_FIELDS + - ", (SELECT COUNT(*) FROM AGGREGATE_STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND S.SCHEMA_TARGET = E.SCHEMA_TARGET) as STEP_COUNT"; - private static final String FIND_JOBS_FROM = "LEFT OUTER JOIN AGGREGATE_TASK_BATCH TT ON E.JOB_EXECUTION_ID = TT.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = TT.SCHEMA_TARGET" + " LEFT OUTER JOIN AGGREGATE_TASK_EXECUTION T ON TT.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND TT.SCHEMA_TARGET = T.SCHEMA_TARGET"; @@ -186,8 +181,9 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { private static final String FIND_BY_ID_SCHEMA = "E.JOB_EXECUTION_ID = ? 
AND E.SCHEMA_TARGET = ?"; - private final PagingQueryProvider allExecutionsPagingQueryProvider; + private static final String ROW_NUMBER_OPTIMIZATION_ENABLED_PROPERTY = DataFlowPropertyKeys.PREFIX + "task.jdbc.row-number-optimization.enabled"; + private final PagingQueryProvider allExecutionsPagingQueryProvider; private final PagingQueryProvider byJobNameAndStatusPagingQueryProvider; @@ -195,7 +191,6 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { private final PagingQueryProvider byJobNameWithStepCountPagingQueryProvider; - private final PagingQueryProvider executionsByDateRangeWithStepCountPagingQueryProvider; private final PagingQueryProvider byJobInstanceIdWithStepCountPagingQueryProvider; @@ -210,7 +205,6 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { private final PagingQueryProvider byJobExecutionIdAndSchemaPagingQueryProvider; - private final DataSource dataSource; private final JdbcTemplate jdbcTemplate; @@ -221,32 +215,40 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { private final ConfigurableConversionService conversionService = new DefaultConversionService(); - private final Map stepExecutionDaoContainer = new HashMap<>(); + private final boolean useRowNumberOptimization; - public JdbcAggregateJobQueryDao(DataSource dataSource, SchemaService schemaService, JobServiceContainer jobServiceContainer) throws Exception { + public JdbcAggregateJobQueryDao( + DataSource dataSource, + SchemaService schemaService, + JobServiceContainer jobServiceContainer, + Environment environment) throws Exception { this.dataSource = dataSource; this.jdbcTemplate = new JdbcTemplate(dataSource); this.schemaService = schemaService; this.jobServiceContainer = jobServiceContainer; + this.useRowNumberOptimization = determineUseRowNumberOptimization(environment); conversionService.addConverter(new DateToStringConverter()); conversionService.addConverter(new StringToDateConverter()); 
Jsr310Converters.getConvertersToRegister().forEach(conversionService::addConverter); allExecutionsPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, null); - executionsByDateRangeWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, DATE_RANGE_FILTER); allExecutionsPagingQueryProviderNoStepCount = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, null); byStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, STATUS_FILTER); byJobNameAndStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_AND_STATUS_FILTER); byJobNamePagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER); byJobNameWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER); - byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, JOB_INSTANCE_ID_FILTER); byTaskExecutionIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, TASK_EXECUTION_ID_FILTER); jobExecutionsPagingQueryProviderByName = getPagingQueryProvider(FIND_JOBS_FIELDS, FIND_JOBS_FROM, FIND_JOBS_WHERE, Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING)); byJobExecutionIdAndSchemaPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, FIND_BY_ID_SCHEMA); + } + private boolean determineUseRowNumberOptimization(Environment environment) { + boolean supportsRowNumberFunction = determineSupportsRowNumberFunction(this.dataSource); + boolean rowNumberOptimizationEnabled = environment.getProperty(ROW_NUMBER_OPTIMIZATION_ENABLED_PROPERTY , Boolean.class, Boolean.TRUE); + return supportsRowNumberFunction && rowNumberOptimizationEnabled; } @Override @@ -262,7 +264,7 @@ public Page listJobInstances(String jobName, 
Pageable pag @Override public JobInstanceExecutions getJobInstanceExecution(String jobName, long instanceId) { - logger.debug("getJobInstanceExecution:{}:{}:{}", jobName, instanceId, FIND_JOB_BY_NAME_INSTANCE_ID); + LOG.debug("getJobInstanceExecution:{}:{}:{}", jobName, instanceId, FIND_JOB_BY_NAME_INSTANCE_ID); List executions = jdbcTemplate.query(FIND_JOB_BY_NAME_INSTANCE_ID, new JobInstanceExecutionsExtractor(true), jobName, instanceId); if (executions == null || executions.isEmpty()) { return null; @@ -374,8 +376,8 @@ public TaskJobExecution getJobExecution(long jobExecutionId, String schemaTarget throw new NoSuchJobExecutionException(String.format("Job id %s for schema target %s not found", jobExecutionId, schemaTarget)); } if (jobExecutions.size() > 1) { - logger.debug("Too many job executions:{}", jobExecutions); - logger.warn("Expected only 1 job for {}: not {}", jobExecutionId, jobExecutions.size()); + LOG.debug("Too many job executions:{}", jobExecutions); + LOG.warn("Expected only 1 job for {}: not {}", jobExecutionId, jobExecutions.size()); } TaskJobExecution taskJobExecution = jobExecutions.get(0); @@ -396,7 +398,7 @@ private List getJobExecutionPage(long jobExecutionId, String s } private int countJobExecutions() { - logger.debug("countJobExecutions:{}", GET_COUNT); + LOG.debug("countJobExecutions:{}", GET_COUNT); Integer count = jdbcTemplate.queryForObject(GET_COUNT, Integer.class); return count != null ? count : 0; } @@ -404,34 +406,34 @@ private int countJobExecutions() { private int countJobExecutionsByDate(Date fromDate, Date toDate) { Assert.notNull(fromDate, "fromDate must not be null"); Assert.notNull(toDate, "toDate must not be null"); - logger.debug("countJobExecutionsByDate:{}:{}:{}", fromDate, toDate, GET_COUNT_BY_DATE); + LOG.debug("countJobExecutionsByDate:{}:{}:{}", fromDate, toDate, GET_COUNT_BY_DATE); Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_DATE, Integer.class, fromDate, toDate); return count != null ? 
count : 0; } private int countJobExecutions(String jobName) { - logger.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME); + LOG.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME); Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME, Integer.class, jobName); return count != null ? count : 0; } private int countJobExecutions(BatchStatus status) { - logger.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS); + LOG.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS); Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_STATUS, Integer.class, status.name()); return count != null ? count : 0; } private int countJobExecutions(String jobName, BatchStatus status) { - logger.debug("countJobExecutions:{}:{}", jobName, status); + LOG.debug("countJobExecutions:{}:{}", jobName, status); Integer count; if (StringUtils.hasText(jobName) && status != null) { - logger.debug("countJobExecutions:{}:{}:{}", jobName, status, GET_COUNT_BY_JOB_NAME_AND_STATUS); + LOG.debug("countJobExecutions:{}:{}:{}", jobName, status, GET_COUNT_BY_JOB_NAME_AND_STATUS); count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME_AND_STATUS, Integer.class, jobName, status.name()); } else if (status != null) { - logger.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS); + LOG.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS); count = jdbcTemplate.queryForObject(GET_COUNT_BY_STATUS, Integer.class, status.name()); } else if (StringUtils.hasText(jobName)) { - logger.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME); + LOG.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME); count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME, Integer.class, jobName); } else { count = jdbcTemplate.queryForObject(GET_COUNT, Integer.class); @@ -443,7 +445,7 @@ private int countJobExecutionsByInstanceId(int jobInstanceId, String schemaTarge if (!StringUtils.hasText(schemaTarget)) { schemaTarget = 
SchemaVersionTarget.defaultTarget().getName(); } - logger.debug("countJobExecutionsByInstanceId:{}:{}:{}", jobInstanceId, schemaTarget, GET_COUNT_BY_JOB_INSTANCE_ID); + LOG.debug("countJobExecutionsByInstanceId:{}:{}:{}", jobInstanceId, schemaTarget, GET_COUNT_BY_JOB_INSTANCE_ID); Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_INSTANCE_ID, Integer.class, jobInstanceId, schemaTarget); return count != null ? count : 0; } @@ -452,7 +454,7 @@ private int countJobExecutionsByTaskExecutionId(int taskExecutionId, String sche if (!StringUtils.hasText(schemaTarget)) { schemaTarget = SchemaVersionTarget.defaultTarget().getName(); } - logger.debug("countJobExecutionsByTaskExecutionId:{}:{}:{}", taskExecutionId, schemaTarget, GET_COUNT_BY_TASK_EXECUTION_ID); + LOG.debug("countJobExecutionsByTaskExecutionId:{}:{}:{}", taskExecutionId, schemaTarget, GET_COUNT_BY_TASK_EXECUTION_ID); Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_TASK_EXECUTION_ID, Integer.class, taskExecutionId, schemaTarget); return count != null ? count : 0; } @@ -541,7 +543,7 @@ protected JobParameters getJobParameters(Long executionId, String schemaTarget) value = new JobParameter(rs.getTimestamp("DATE_VAL"), identifying); break; default: - logger.error("Unknown type:{} for {}", type, keyName); + LOG.error("Unknown type:{} for {}", type, keyName); return; } map.put(keyName, value); @@ -591,23 +593,23 @@ protected JobParameters getJobParameters(Long executionId, String schemaTarget) private > List queryForProvider(P pagingQueryProvider, M mapper, int start, int count, Object... 
arguments) { if (start <= 0) { String sql = pagingQueryProvider.generateFirstPageQuery(count); - if (logger.isDebugEnabled()) { - logger.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); + if (LOG.isDebugEnabled()) { + LOG.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); } return jdbcTemplate.query(sql, mapper, arguments); } else { try { String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count); - if (logger.isDebugEnabled()) { - logger.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); + if (LOG.isDebugEnabled()) { + LOG.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); } Long startValue; startValue = jdbcTemplate.queryForObject(sqlJump, Long.class, arguments); List args = new ArrayList<>(Arrays.asList(arguments)); args.add(startValue); String sql = pagingQueryProvider.generateRemainingPagesQuery(count); - if (logger.isDebugEnabled()) { - logger.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); + if (LOG.isDebugEnabled()) { + LOG.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); } return jdbcTemplate.query(sql, mapper, args.toArray()); } catch (IncorrectResultSizeDataAccessException x) { @@ -619,21 +621,21 @@ private > List query private >> List queryForProvider(P pagingQueryProvider, R extractor, int start, int count, Object... 
arguments) { if (start <= 0) { String sql = pagingQueryProvider.generateFirstPageQuery(count); - if (logger.isDebugEnabled()) { - logger.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); + if (LOG.isDebugEnabled()) { + LOG.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); } return jdbcTemplate.query(sql, extractor, arguments); } else { String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count); - if (logger.isDebugEnabled()) { - logger.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); + if (LOG.isDebugEnabled()) { + LOG.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); } Long startValue = jdbcTemplate.queryForObject(sqlJump, Long.class, arguments); List args = new ArrayList<>(Arrays.asList(arguments)); args.add(startValue); String sql = pagingQueryProvider.generateRemainingPagesQuery(count); - if (logger.isDebugEnabled()) { - logger.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); + if (LOG.isDebugEnabled()) { + LOG.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); } return jdbcTemplate.query(sql, extractor, args.toArray()); } @@ -805,13 +807,27 @@ private PagingQueryProvider getPagingQueryProvider(String fields, String fromCla if (fields == null) { fields = FIELDS; } + if (fields.contains("E.JOB_EXECUTION_ID") && this.useRowNumberOptimization) { + Order order = sortKeys.get("E.JOB_EXECUTION_ID"); + String orderString = Optional.ofNullable(order).map(orderKey -> orderKey == Order.DESCENDING ? 
"DESC" : "ASC").orElse("DESC"); + fields += ", ROW_NUMBER() OVER (PARTITION BY E.JOB_EXECUTION_ID ORDER BY E.JOB_EXECUTION_ID " + orderString + ") as RN"; + } factory.setSelectClause(fields); if (sortKeys.isEmpty()) { sortKeys = Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING); } factory.setSortKeys(sortKeys); factory.setWhereClause(whereClause); - return factory.getObject(); } + + private boolean determineSupportsRowNumberFunction(DataSource dataSource) { + try { + return DatabaseType.supportsRowNumberFunction(dataSource); + } + catch (Exception e) { + LOG.warn("Unable to determine if DB supports ROW_NUMBER() function (reason: " + e.getMessage() + ")", e); + } + return false; + } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java index 7c76a1040a..95af1f6455 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java @@ -14,6 +14,7 @@ import org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean; import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.core.env.Environment; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.util.StringUtils; @@ -26,11 +27,12 @@ public JobServiceContainer( PlatformTransactionManager platformTransactionManager, SchemaService schemaService, JobRepositoryContainer jobRepositoryContainer, - JobExplorerContainer jobExplorerContainer - ) { + JobExplorerContainer jobExplorerContainer, + Environment 
environment) { for(SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean(); + factoryBean.setEnvironment(environment); factoryBean.setDataSource(dataSource); factoryBean.setTransactionManager(platformTransactionManager); factoryBean.setJobServiceContainer(this); diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractJdbcAggregateJobQueryDaoTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractJdbcAggregateJobQueryDaoTests.java index 8336076ecc..32788119d6 100644 --- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractJdbcAggregateJobQueryDaoTests.java +++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractJdbcAggregateJobQueryDaoTests.java @@ -18,22 +18,20 @@ import org.junit.jupiter.api.Test; import org.mockito.Mock; -import org.springframework.batch.core.launch.NoSuchJobInstanceException; -import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.testcontainers.containers.JdbcDatabaseContainer; import org.springframework.batch.core.JobInstance; import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.launch.NoSuchJobInstanceException; import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import 
org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService; import org.springframework.cloud.dataflow.server.repository.JdbcAggregateJobQueryDao; import org.springframework.cloud.dataflow.server.service.JobServiceContainer; -import org.springframework.jdbc.core.JdbcTemplate; - +import org.springframework.mock.env.MockEnvironment; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -55,8 +53,10 @@ abstract class AbstractJdbcAggregateJobQueryDaoTests extends AbstractDaoTests { protected void prepareForTest(JdbcDatabaseContainer dbContainer, String schemaName, DatabaseType databaseType) throws Exception { super.prepareForTest(dbContainer, schemaName); + MockEnvironment environment = new MockEnvironment(); + environment.setProperty("spring.cloud.dataflow.task.jdbc.row-number-optimization.enabled", "true"); this.jdbcAggregateJobQueryDao = new JdbcAggregateJobQueryDao(super.getDataSource(), new DefaultSchemaService(), - this.jobServiceContainer); + this.jobServiceContainer, environment); jdbcSearchableJobInstanceDao = new JdbcSearchableJobInstanceDao(); jdbcSearchableJobInstanceDao.setJdbcTemplate(super.getJdbcTemplate()); incrementerFactory = new MultiSchemaIncrementerFactory(super.getDataSource()); diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java index c81672d185..7385bae134 100644 --- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java +++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java @@ -49,6 +49,7 @@ import org.springframework.cloud.task.repository.support.SimpleTaskRepository; import 
org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.context.annotation.Bean; +import org.springframework.core.env.Environment; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; import org.springframework.transaction.PlatformTransactionManager; @@ -241,6 +242,7 @@ private String getQuery(String base, AppBootSchemaVersion appBootSchemaVersion) } return StringUtils.replace(base, "%PREFIX%", tablePrefix); } + protected static class SimpleJobTestConfiguration { @Bean @@ -271,11 +273,13 @@ public JobExplorerContainer jobExplorerContainer(DataSource dataSource, SchemaSe @Bean public JobServiceContainer jobServiceContainer(DataSource dataSource, - PlatformTransactionManager platformTransactionManager, - SchemaService schemaService, - JobRepositoryContainer jobRepositoryContainer, - JobExplorerContainer jobExplorerContainer) { - return new JobServiceContainer(dataSource, platformTransactionManager, schemaService, jobRepositoryContainer, jobExplorerContainer); + PlatformTransactionManager platformTransactionManager, + SchemaService schemaService, + JobRepositoryContainer jobRepositoryContainer, + JobExplorerContainer jobExplorerContainer, + Environment environment) { + return new JobServiceContainer(dataSource, platformTransactionManager, schemaService, jobRepositoryContainer, + jobExplorerContainer, environment); } } } diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDaoRowNumberOptimizationTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDaoRowNumberOptimizationTests.java new file mode 100644 index 0000000000..5d11c111d0 --- /dev/null +++ 
b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDaoRowNumberOptimizationTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.JdbcDatabaseContainer; +import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.boot.jdbc.DataSourceBuilder; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.mock.env.MockEnvironment; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +/** + * Unit tests for the row number optimization feature of {@link JdbcAggregateJobQueryDao}. 
+ * + * @author Chris Bono + */ +@Testcontainers(disabledWithoutDocker = true) +class JdbcAggregateJobQueryDaoRowNumberOptimizationTests { + + @Container + private static final JdbcDatabaseContainer container = new MariaDBContainer("mariadb:10.9.3"); + + private static DataSource dataSource; + + @BeforeAll + static void startContainer() { + dataSource = DataSourceBuilder.create() + .url(container.getJdbcUrl()) + .username(container.getUsername()) + .password(container.getPassword()) + .driverClassName(container.getDriverClassName()) + .build(); + } + + @Test + void shouldUseOptimizationWhenPropertyNotSpecified() throws Exception { + MockEnvironment mockEnv = new MockEnvironment(); + JdbcAggregateJobQueryDao dao = new JdbcAggregateJobQueryDao(dataSource, mock(SchemaService.class), mock(JobServiceContainer.class), mockEnv); + assertThat(dao).hasFieldOrPropertyWithValue("useRowNumberOptimization", true); + } + + @Test + void shouldUseOptimizationWhenPropertyEnabled() throws Exception { + MockEnvironment mockEnv = new MockEnvironment(); + mockEnv.setProperty("spring.cloud.dataflow.task.jdbc.row-number-optimization.enabled", "true"); + JdbcAggregateJobQueryDao dao = new JdbcAggregateJobQueryDao(dataSource, mock(SchemaService.class), mock(JobServiceContainer.class), mockEnv); + assertThat(dao).hasFieldOrPropertyWithValue("useRowNumberOptimization", true); + } + + @Test + void shouldNotUseOptimizationWhenPropertyDisabled() throws Exception { + MockEnvironment mockEnv = new MockEnvironment(); + mockEnv.setProperty("spring.cloud.dataflow.task.jdbc.row-number-optimization.enabled", "false"); + JdbcAggregateJobQueryDao dao = new JdbcAggregateJobQueryDao(dataSource, mock(SchemaService.class), mock(JobServiceContainer.class), mockEnv); + assertThat(dao).hasFieldOrPropertyWithValue("useRowNumberOptimization", false); + } +} diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java 
b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java index 2498d9b254..883ef805b3 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java @@ -15,32 +15,54 @@ */ package org.springframework.cloud.dataflow.server.db.migration; +import java.time.Duration; +import java.util.HashSet; import java.util.List; import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; +import org.awaitility.Awaitility; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import org.testcontainers.containers.JdbcDatabaseContainer; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.system.CapturedOutput; +import org.springframework.boot.test.system.OutputCaptureExtension; import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; import org.springframework.cloud.dataflow.core.StreamDefinition; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.schema.service.SchemaService; 
+import org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService; +import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionControllerDeleteAction; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.service.TaskDeleteService; +import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.cloud.dataflow.server.single.DataFlowServerApplication; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; +import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.DynamicPropertyRegistry; import org.springframework.test.context.DynamicPropertySource; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.support.TransactionTemplate; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; import static org.assertj.core.api.Assertions.assertThat; @@ -53,10 +75,9 @@ webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, properties = "spring.jpa.hibernate.ddl-auto=none") @DirtiesContext(classMode = DirtiesContext.ClassMode.BEFORE_CLASS) +@ExtendWith(OutputCaptureExtension.class) public abstract class AbstractSmokeTest { - private final static Logger logger = LoggerFactory.getLogger(AbstractSmokeTest.class); - protected static JdbcDatabaseContainer container; @DynamicPropertySource @@ -68,28 +89,47 @@ static void databaseProperties(DynamicPropertyRegistry registry) { } @Autowired - SchemaService schemaService; + private SchemaService 
schemaService; + + @Autowired + private TaskRepositoryContainer taskRepositoryContainer; @Autowired - TaskRepositoryContainer taskRepositoryContainer; + private AggregateTaskExplorer taskExplorer; @Autowired - protected AggregateTaskExplorer taskExplorer; + private StreamDefinitionRepository streamDefinitionRepository; @Autowired - protected StreamDefinitionRepository streamDefinitionRepository; + private PlatformTransactionManager transactionManager; @Autowired - protected PlatformTransactionManager transactionManager; + private TaskDeleteService taskDeleteService; + + private MultiValueMap createdExecutionIdsBySchemaTarget = new LinkedMultiValueMap<>(); + + @Test + void streamCreation() { + TransactionTemplate tx = new TransactionTemplate(transactionManager); + tx.execute(status -> { + StreamDefinition streamDefinition = new StreamDefinition("timelogger", "time | log"); + streamDefinition = streamDefinitionRepository.save(streamDefinition); + Optional loaded = streamDefinitionRepository.findById(streamDefinition.getName()); + assertThat(loaded).isPresent(); + assertThat(loaded.get().getDslText()).isEqualTo("time | log"); + return true; + }); + } @Test - public void testTaskCreation() { - long originalCount = taskExplorer.getTaskExecutionCount(); + void taskCreation() { + long originalCount = this.taskExplorer.getTaskExecutionCount(); TransactionTemplate tx = new TransactionTemplate(transactionManager); tx.execute(status -> { for (SchemaVersionTarget schemaVersionTarget : schemaService.getTargets().getSchemas()) { TaskRepository taskRepository = this.taskRepositoryContainer.get(schemaVersionTarget.getName()); TaskExecution taskExecution = taskRepository.createTaskExecution(schemaVersionTarget.getName() + "_test_task"); + createdExecutionIdsBySchemaTarget.add(schemaVersionTarget, taskExecution.getExecutionId()); assertThat(taskExecution.getExecutionId()).isGreaterThan(0L); } return true; @@ -102,16 +142,44 @@ public void testTaskCreation() { 
.allSatisfy((taskExecution) -> assertThat(taskExecution.getExecutionId()).isNotEqualTo(0L)); } - @Test - public void streamCreation() { - TransactionTemplate tx = new TransactionTemplate(transactionManager); - tx.execute(status -> { - StreamDefinition streamDefinition = new StreamDefinition("timelogger", "time | log"); - streamDefinition = streamDefinitionRepository.save(streamDefinition); - Optional loaded = streamDefinitionRepository.findById(streamDefinition.getName()); - assertThat(loaded).isPresent(); - assertThat(loaded.get().getDslText()).isEqualTo("time | log"); - return true; - }); + @ParameterizedTest + @MethodSource("schemaVersionTargetsProvider") + void shouldListJobExecutionsUsingPerformantRowNumberQuery( + SchemaVersionTarget schemaVersionTarget, + CapturedOutput output, + @Autowired TaskJobService taskJobService, + @Autowired TaskExecutionDaoContainer taskExecutionDaoContainer, + @Autowired TaskBatchDaoContainer taskBatchDaoContainer) throws NoSuchJobExecutionException { + Page jobExecutions = taskJobService.listJobExecutionsWithStepCount(Pageable.ofSize(100)); + int originalCount = jobExecutions.getContent().size(); + JobExecutionTestUtils testUtils = new JobExecutionTestUtils(taskExecutionDaoContainer, taskBatchDaoContainer); + TaskExecution execution1 = testUtils.createSampleJob("job1", 1, BatchStatus.STARTED, new JobParameters(), schemaVersionTarget); + createdExecutionIdsBySchemaTarget.add(schemaVersionTarget, execution1.getExecutionId()); + TaskExecution execution2 = testUtils.createSampleJob("job2", 3, BatchStatus.COMPLETED, new JobParameters(), schemaVersionTarget); + createdExecutionIdsBySchemaTarget.add(schemaVersionTarget, execution2.getExecutionId()); + jobExecutions = taskJobService.listJobExecutionsWithStepCount(Pageable.ofSize(100)); + assertThat(jobExecutions).hasSize(originalCount + 4); + String expectedSqlFragment = (this.supportsRowNumberFunction()) ? 
+ "as STEP_COUNT, ROW_NUMBER() OVER (PARTITION" : + "as STEP_COUNT FROM AGGREGATE_JOB_INSTANCE"; + Awaitility.waitAtMost(Duration.ofSeconds(5)) + .untilAsserted(() -> assertThat(output).contains(expectedSqlFragment)); + } + + static Stream schemaVersionTargetsProvider() { + return new DefaultSchemaService().getTargets().getSchemas().stream(); + } + + @AfterEach + void cleanupAfterTest() { + Set actions = new HashSet<>(); + actions.add(TaskExecutionControllerDeleteAction.CLEANUP); + actions.add(TaskExecutionControllerDeleteAction.REMOVE_DATA); + createdExecutionIdsBySchemaTarget.forEach((schemaTarget, executionIds) -> + this.taskDeleteService.cleanupExecutions(actions, new HashSet<>(executionIds), schemaTarget.getName())); + } + + protected boolean supportsRowNumberFunction() { + return true; } } diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java new file mode 100644 index 0000000000..63b9359603 --- /dev/null +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java @@ -0,0 +1,133 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db.migration; + +import java.sql.Timestamp; +import java.sql.Types; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Date; + +import javax.sql.DataSource; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.repository.dao.JdbcJobInstanceDao; +import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; +import org.springframework.cloud.task.batch.listener.TaskBatchDao; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; +import org.springframework.cloud.task.repository.dao.TaskExecutionDao; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.util.StringUtils; + +/** + * Test utility related to job execution test data setup. 
+ */ +class JobExecutionTestUtils +{ + private final TaskExecutionDaoContainer taskExecutionDaoContainer; + + private final TaskBatchDaoContainer taskBatchDaoContainer; + + JobExecutionTestUtils( + TaskExecutionDaoContainer taskExecutionDaoContainer, + TaskBatchDaoContainer taskBatchDaoContainer + ) { + this.taskExecutionDaoContainer = taskExecutionDaoContainer; + this.taskBatchDaoContainer = taskBatchDaoContainer; + } + + TaskExecution createSampleJob(String jobName, int jobExecutionCount, BatchStatus batchStatus, JobParameters jobParameters, SchemaVersionTarget schemaVersionTarget) { + String schemaVersion = schemaVersionTarget.getName(); + + TaskExecutionDao taskExecutionDao = this.taskExecutionDaoContainer.get(schemaVersion); + DataSource dataSource = (DataSource) ReflectionTestUtils.getField(taskExecutionDao, JdbcTaskExecutionDao.class, "dataSource"); + NamedParameterJdbcTemplate namedParamJdbcTemplate = (NamedParameterJdbcTemplate) ReflectionTestUtils.getField(taskExecutionDao, JdbcTaskExecutionDao.class, "jdbcTemplate"); + JdbcTemplate jdbcTemplate = namedParamJdbcTemplate.getJdbcTemplate(); + DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + DatabaseType incrementerFallbackType = determineIncrementerFallbackType(dataSource); + + JdbcJobInstanceDao jobInstanceDao = new JdbcJobInstanceDao(); + jobInstanceDao.setJdbcTemplate(jdbcTemplate); + jobInstanceDao.setTablePrefix(schemaVersionTarget.getBatchPrefix()); + jobInstanceDao.setJobIncrementer(incrementerFactory.getIncrementer(incrementerFallbackType.name(), schemaVersionTarget.getBatchPrefix() + "JOB_SEQ")); + + // BATCH_JOB_EXECUTION differs and the DAO can not be used for BATCH4/5 inserting + DataFieldMaxValueIncrementer jobExecutionIncrementer = incrementerFactory.getIncrementer(incrementerFallbackType.name(), schemaVersionTarget.getBatchPrefix() + "JOB_EXECUTION_SEQ"); + TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersion); + 
TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, new Date(), new ArrayList<>(), null); + JobInstance jobInstance = jobInstanceDao.createJobInstance(jobName, jobParameters); + for (int i = 0; i < jobExecutionCount; i++) { + JobExecution jobExecution = new JobExecution(jobInstance, new JobParameters()); + jobExecution.setStatus(batchStatus); + jobExecution.setId(jobExecutionIncrementer.nextLongValue()); + jobExecution.setStartTime(new Date()); + saveJobExecution(jobExecution, jdbcTemplate, schemaVersionTarget); + taskBatchDao.saveRelationship(taskExecution, jobExecution); + } + return taskExecution; + } + + private DatabaseType determineIncrementerFallbackType(DataSource dataSource) { + DatabaseType databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + if (databaseType == DatabaseType.MARIADB) { + databaseType = DatabaseType.MYSQL; + } + return databaseType; + } + + private JobExecution saveJobExecution(JobExecution jobExecution, JdbcTemplate jdbcTemplate, SchemaVersionTarget schemaVersionTarget) { + jobExecution.setStartTime(new Date()); + jobExecution.setVersion(1); + Timestamp startTime = timestampFromDate(jobExecution.getStartTime()); + Timestamp endTime = timestampFromDate(jobExecution.getEndTime()); + Timestamp createTime = timestampFromDate(jobExecution.getCreateTime()); + Timestamp lastUpdated = timestampFromDate(jobExecution.getLastUpdated()); + Object[] parameters = new Object[] { jobExecution.getId(), jobExecution.getJobId(), startTime, endTime, + jobExecution.getStatus().toString(), jobExecution.getExitStatus().getExitCode(), + jobExecution.getExitStatus().getExitDescription(), jobExecution.getVersion(), createTime, lastUpdated }; + String sql = "INSERT INTO %PREFIX%JOB_EXECUTION(JOB_EXECUTION_ID, " + + "JOB_INSTANCE_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, VERSION, CREATE_TIME, LAST_UPDATED) " + + 
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; + sql = StringUtils.replace(sql, "%PREFIX%", schemaVersionTarget.getBatchPrefix()); + jdbcTemplate.update(sql, parameters, + new int[] { Types.BIGINT, Types.BIGINT, Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.VARCHAR, + Types.VARCHAR, Types.INTEGER, Types.TIMESTAMP, Types.TIMESTAMP }); + return jobExecution; + } + + private Timestamp timestampFromDate(Date date) { + return (date != null) ? Timestamp.valueOf(date.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime()) : null; + } +} diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MariaDBSmokeTest.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MariaDBSmokeTest.java index b112ab812a..28ed84fa8d 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MariaDBSmokeTest.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MariaDBSmokeTest.java @@ -20,7 +20,6 @@ import org.springframework.test.context.TestPropertySource; - /** * Basic database schema and JPA tests for MariaDB 10.4 or later. 
* @@ -30,6 +29,7 @@ "spring.jpa.database-platform=org.hibernate.dialect.MariaDB106Dialect" }) public class MariaDBSmokeTest extends AbstractSmokeTest { + @BeforeAll static void startContainer() { container = new MariaDBContainer<>("mariadb:10.6"); diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MySQL57SmokeTest.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MySQL57SmokeTest.java index dbf4672397..7bc8975a22 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MySQL57SmokeTest.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MySQL57SmokeTest.java @@ -24,9 +24,15 @@ * @author Corneil du Plessis */ public class MySQL57SmokeTest extends AbstractSmokeTest { + @BeforeAll static void startContainer() { container = new MySQLContainer<>("mysql:5.7"); container.start(); } + + @Override + protected boolean supportsRowNumberFunction() { + return false; + } } diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2017SmokeTest.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2017SmokeTest.java index 6eadaef7bb..7543c32a1c 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2017SmokeTest.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2017SmokeTest.java @@ -19,6 +19,7 @@ import org.testcontainers.containers.MSSQLServerContainer; import org.testcontainers.utility.DockerImageName; +import org.springframework.cloud.dataflow.server.db.ContainerSupport; /** * Basic database schema and JPA tests for MS SQL Server. 
@@ -26,8 +27,12 @@ * @author Corneil du Plessis */ public class SqlServer2017SmokeTest extends AbstractSmokeTest { + @BeforeAll static void startContainer() { + if (ContainerSupport.runningOnMacArm64()) { + throw new RuntimeException("Unable to run SQLServer tests on Mac OS"); + } container = new MSSQLServerContainer<>(DockerImageName.parse( MSSQLServerContainer.IMAGE).withTag("2017-latest") ).acceptLicense(); diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2019SmokeTest.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2019SmokeTest.java index c1d21ffee6..8112e3d511 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2019SmokeTest.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2019SmokeTest.java @@ -19,6 +19,8 @@ import org.testcontainers.containers.MSSQLServerContainer; import org.testcontainers.utility.DockerImageName; +import org.springframework.cloud.dataflow.server.db.ContainerSupport; + /** * Basic database schema and JPA tests for MS SQL Server. 
@@ -26,8 +28,12 @@ * @author Corneil du Plessis */ public class SqlServer2019SmokeTest extends AbstractSmokeTest { + @BeforeAll static void startContainer() { + if (ContainerSupport.runningOnMacArm64()) { + throw new RuntimeException("Unable to run SQLServer tests on Mac OS"); + } container = new MSSQLServerContainer<>( DockerImageName.parse(MSSQLServerContainer.IMAGE).withTag("2019-latest") ).acceptLicense(); diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2022SmokeTest.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2022SmokeTest.java index 9599b78485..016c596459 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2022SmokeTest.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServer2022SmokeTest.java @@ -19,6 +19,8 @@ import org.testcontainers.containers.MSSQLServerContainer; import org.testcontainers.utility.DockerImageName; +import org.springframework.cloud.dataflow.server.db.ContainerSupport; + /** * Basic database schema and JPA tests for MS SQL Server. 
@@ -26,11 +28,16 @@ * @author Corneil du Plessis */ public class SqlServer2022SmokeTest extends AbstractSmokeTest { + @BeforeAll static void startContainer() { + if (ContainerSupport.runningOnMacArm64()) { + throw new RuntimeException("Unable to run SQLServer tests on Mac OS"); + } container = new MSSQLServerContainer<>( DockerImageName.parse(MSSQLServerContainer.IMAGE).withTag("2022-latest") ).acceptLicense(); container.start(); } + } diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/support/DatabaseTypeTests.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/support/DatabaseTypeTests.java new file mode 100644 index 0000000000..1d7e4e93d5 --- /dev/null +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/support/DatabaseTypeTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db.support; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.SpringBootConfiguration; +import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase; +import org.springframework.boot.test.autoconfigure.jdbc.JdbcTest; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; +import org.springframework.cloud.dataflow.server.db.DB2_11_5_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.MariaDB_10_6_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.MariaDB_11_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.MySQL_5_7_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.MySQL_8_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.Oracle_XE_18_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.SqlServer_2017_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.SqlServer_2019_ContainerSupport; +import org.springframework.cloud.dataflow.server.db.SqlServer_2022_ContainerSupport; +import org.springframework.jdbc.support.MetaDataAccessException; + +import static org.assertj.core.api.Assertions.assertThat; + +class DatabaseTypeTests { + + @JdbcTest(properties = "spring.jpa.hibernate.ddl-auto=none") + @AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE) + static abstract class SingleDbDatabaseTypeTests { + + @Test + void shouldSupportRowNumberFunction(@Autowired DataSource dataSource) throws MetaDataAccessException { + assertThat(DatabaseType.supportsRowNumberFunction(dataSource)).isEqualTo(supportsRowNumberFunction()); + } + + protected boolean supportsRowNumberFunction() { + return true; + } + + @SpringBootConfiguration + static class FakeApp { + } + } + + 
@Nested + class MariaDB_10_6_DatabaseTypeTests extends SingleDbDatabaseTypeTests implements MariaDB_10_6_ContainerSupport { + } + + @Nested + class MariaDB_11_DatabaseTypeTests extends SingleDbDatabaseTypeTests implements MariaDB_11_ContainerSupport { + } + + @Nested + class MySql_5_7_DatabaseTypeTests extends SingleDbDatabaseTypeTests implements MySQL_5_7_ContainerSupport { + @Override + protected boolean supportsRowNumberFunction() { + return false; + } + } + + @Nested + class MySql_8_DatabaseTypeTests extends SingleDbDatabaseTypeTests implements MySQL_8_ContainerSupport { + } + + @Nested + class DB2DatabaseTypeTests extends SingleDbDatabaseTypeTests implements DB2_11_5_ContainerSupport { + } + + @Nested + class OracleDatabaseTypeTests extends SingleDbDatabaseTypeTests implements Oracle_XE_18_ContainerSupport { + } + + @Nested + class SqlServer_2017_DatabaseTypeTests extends SingleDbDatabaseTypeTests implements SqlServer_2017_ContainerSupport { + } + + @Nested + class SqlServer_2019_DatabaseTypeTests extends SingleDbDatabaseTypeTests implements SqlServer_2019_ContainerSupport { + } + + @Nested + class SqlServer_2022_DatabaseTypeTests extends SingleDbDatabaseTypeTests implements SqlServer_2022_ContainerSupport { + } + +} diff --git a/spring-cloud-dataflow-server/src/test/resources/logback-test.xml b/spring-cloud-dataflow-server/src/test/resources/logback-test.xml index 5087a8ef5d..37dcacb03a 100644 --- a/spring-cloud-dataflow-server/src/test/resources/logback-test.xml +++ b/spring-cloud-dataflow-server/src/test/resources/logback-test.xml @@ -1,6 +1,9 @@ + + + diff --git a/spring-cloud-dataflow-test/pom.xml b/spring-cloud-dataflow-test/pom.xml index b3fe78e418..66f652d473 100644 --- a/spring-cloud-dataflow-test/pom.xml +++ b/spring-cloud-dataflow-test/pom.xml @@ -38,10 +38,30 @@ junit junit + + org.testcontainers + postgresql + + + org.testcontainers + mysql + + + org.testcontainers + mariadb + + + org.testcontainers + mssqlserver + org.testcontainers oracle-xe + + 
org.testcontainers + db2 + org.springframework.boot spring-boot-starter-test diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/ContainerSupport.java new file mode 100644 index 0000000000..420bddf7ac --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/ContainerSupport.java @@ -0,0 +1,32 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db; + +import java.util.Locale; + +public final class ContainerSupport { + + private ContainerSupport() { + + } + + public static boolean runningOnMacArm64() { + String osName = System.getProperty("os.name").toLowerCase(Locale.ENGLISH); + String osArchitecture = System.getProperty("os.arch").toLowerCase(Locale.ENGLISH); + return osName.contains("mac") && osArchitecture.equals("aarch64"); + } +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java new file mode 100644 index 0000000000..3c85b7e179 --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.Db2Container; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface DB2_11_5_ContainerSupport { + + @Container + Db2Container container = new Db2Container("ibmcom/db2:11.5.0.0a").acceptLicense(); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_10_6_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_10_6_ContainerSupport.java new file mode 100644 index 0000000000..7cfbc7849d --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_10_6_ContainerSupport.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; +import org.springframework.test.context.TestPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +@TestPropertySource(properties = { + "spring.jpa.database-platform=org.hibernate.dialect.MariaDB106Dialect" +}) +public interface MariaDB_10_6_ContainerSupport { + + @Container + MariaDBContainer container = new MariaDBContainer<>("mariadb:10.6"); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_11_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_11_ContainerSupport.java new file mode 100644 index 0000000000..8138c5c7f0 --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MariaDB_11_ContainerSupport.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; +import org.springframework.test.context.TestPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +@TestPropertySource(properties = { + "spring.jpa.database-platform=org.hibernate.dialect.MariaDB106Dialect" +}) +public interface MariaDB_11_ContainerSupport { + + @Container + MariaDBContainer container = new MariaDBContainer<>("mariadb:11"); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_5_7_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_5_7_ContainerSupport.java new file mode 100644 index 0000000000..fac8632ecd --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_5_7_ContainerSupport.java @@ -0,0 +1,40 @@ +/* + 
* Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface MySQL_5_7_ContainerSupport { + + @Container + MySQLContainer container = new MySQLContainer<>("mysql:5.7"); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_8_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_8_ContainerSupport.java new file mode 100644 index 0000000000..5120ae7f56 --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/MySQL_8_ContainerSupport.java @@ -0,0 +1,40 @@ 
+/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface MySQL_8_ContainerSupport { + + @Container + MySQLContainer container = new MySQLContainer<>("mysql:8"); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Oracle_XE_18_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Oracle_XE_18_ContainerSupport.java new file mode 100644 index 0000000000..e30503c3d3 --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Oracle_XE_18_ContainerSupport.java 
@@ -0,0 +1,40 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.OracleContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface Oracle_XE_18_ContainerSupport { + + @Container + OracleContainer container = new OracleContainer("gvenzl/oracle-xe:18-slim-faststart"); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Postgres_14_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Postgres_14_ContainerSupport.java new file mode 100644 index 0000000000..64ee9a4cfe --- /dev/null +++ 
b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/Postgres_14_ContainerSupport.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface Postgres_14_ContainerSupport { + + @Container + PostgreSQLContainer container = new PostgreSQLContainer<>("postgres:14"); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2017_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2017_ContainerSupport.java new file mode 100644 
index 0000000000..598cd39be2 --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2017_ContainerSupport.java @@ -0,0 +1,42 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.MSSQLServerContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface SqlServer_2017_ContainerSupport { + + @Container + MSSQLServerContainer container = new MSSQLServerContainer( + DockerImageName.parse(MSSQLServerContainer.IMAGE).withTag("2017-latest")).acceptLicense(); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git 
a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2019_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2019_ContainerSupport.java new file mode 100644 index 0000000000..c143b82674 --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2019_ContainerSupport.java @@ -0,0 +1,42 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.MSSQLServerContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface SqlServer_2019_ContainerSupport { + + @Container + MSSQLServerContainer container = new MSSQLServerContainer( + DockerImageName.parse(MSSQLServerContainer.IMAGE).withTag("2019-latest")).acceptLicense(); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2022_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2022_ContainerSupport.java new file mode 100644 index 0000000000..67094e4aa4 --- /dev/null +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/SqlServer_2022_ContainerSupport.java @@ -0,0 +1,42 @@ +/* + * Copyright 2023-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db; + +import org.testcontainers.containers.MSSQLServerContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; + +@Testcontainers(disabledWithoutDocker = true) +public interface SqlServer_2022_ContainerSupport { + + @Container + MSSQLServerContainer container = new MSSQLServerContainer( + DockerImageName.parse(MSSQLServerContainer.IMAGE).withTag("2022-latest")).acceptLicense(); + + @DynamicPropertySource + static void databaseProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", container::getJdbcUrl); + registry.add("spring.datasource.username", container::getUsername); + registry.add("spring.datasource.password", container::getPassword); + registry.add("spring.datasource.driver-class-name", container::getDriverClassName); + } + +} diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/oracle/OracleContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/oracle/OracleContainerSupport.java index e912499278..145d12cb9e 100644 --- a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/oracle/OracleContainerSupport.java +++ 
b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/oracle/OracleContainerSupport.java @@ -15,8 +15,6 @@ */ package org.springframework.cloud.dataflow.server.db.oracle; -import java.util.Locale; - import org.junit.jupiter.api.extension.ExtendWith; import org.testcontainers.containers.OracleContainer; import org.testcontainers.utility.DockerImageName; @@ -24,6 +22,7 @@ import uk.org.webcompere.systemstubs.jupiter.SystemStub; import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; +import org.springframework.cloud.dataflow.server.db.ContainerSupport; import org.springframework.core.log.LogAccessor; /** @@ -41,7 +40,7 @@ public interface OracleContainerSupport { EnvironmentVariables ENV_VARS = new EnvironmentVariables(); static OracleContainer startContainer() { - if (runningOnMacArm64()) { + if (ContainerSupport.runningOnMacArm64()) { String wiki = "https://github.com/spring-cloud/spring-cloud-dataflow/wiki/Oracle-on-Mac-ARM64"; LOG.warn(() -> "You are running on Mac ARM64. If this test fails, make sure Colima is running prior " + "to test invocation. See " + wiki + " for details"); @@ -53,10 +52,4 @@ static OracleContainer startContainer() { oracleContainer.start(); return oracleContainer; } - - static boolean runningOnMacArm64() { - String osName = System.getProperty("os.name").toLowerCase(Locale.ENGLISH); - String osArchitecture = System.getProperty("os.arch").toLowerCase(Locale.ENGLISH); - return osName.contains("mac") && osArchitecture.equals("aarch64"); - } }