From 6cb57b34b86ab204c9b4455861dd02c8c4168aa4 Mon Sep 17 00:00:00 2001
From: Chris Bono
Date: Thu, 21 Dec 2023 20:58:43 -0600
Subject: [PATCH] Use custom paging provider for aggregate DAO queries (#5606)

This commit adds a custom paging provider that is used only by the aggregate
DAO. This is required because the standard paging provider that ships with
Spring Batch 4.x does not properly handle sort key aliases when using nested
ROW_NUMBER clauses.

* This also sneaks in Mac ARM64 support for DB2.

Resolves #5531
---
 README.md                                     |  13 ++
 .../repository/JdbcAggregateJobQueryDao.java  | 208 +++++++++++++++++-
 .../SqlPagingQueryProviderFactoryBean.java    |   2 +-
 spring-cloud-dataflow-server/pom.xml          |  10 +
 .../db/migration/AbstractSmokeTest.java       |   8 +
 .../db/migration/JobExecutionTestUtils.java   |  40 ++++
 .../server/db/DB2_11_5_ContainerSupport.java  |  20 +-
 .../db/arm64/Db2Arm64ContainerSupport.java    |  55 +++++
 8 files changed, 347 insertions(+), 9 deletions(-)
 create mode 100644 spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/arm64/Db2Arm64ContainerSupport.java

diff --git a/README.md b/README.md
index 29491961f4..fd40c5b76a 100644
--- a/README.md
+++ b/README.md
@@ -113,6 +113,19 @@ You can follow the steps in the [MSSQL on Mac ARM64](https://github.com/spring-c
 
 ----
 
+## Running Locally w/ IBM DB2
+By default, the Dataflow server jar does not include the DB2 database driver dependency.
+If you want to use DB2 for development/testing when running locally, you can specify the `local-dev-db2` Maven profile when building.
+The following command will include the DB2 driver dependency in the jar:
+```
+$ ./mvnw -s .settings.xml clean package -Plocal-dev-db2
+```
+You can follow the steps in the [DB2 on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/DB2-on-Mac-ARM64#running-dataflow-locally-against-db2) Wiki to run DB2 locally in Docker with Dataflow pointing at it.
+
+> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima.
+
+----
+
 ## Contributing
 
 We welcome contributions! See the [CONTRIBUTING](./CONTRIBUTING.adoc) guide for details.
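The commit message describes the sort-alias problem only in prose. As a minimal standalone sketch (assuming Spring Batch 4.x, spring-jdbc, and the H2 driver on the classpath; the class name, the embedded H2 database used solely to satisfy the provider's `init(DataSource)` call, and the trimmed column list are illustrative assumptions, not code from this commit), the snippet below feeds the aggregate-DAO-style ROW_NUMBER select clause and an alias-qualified sort key into the stock `SqlPagingQueryProviderFactoryBean` so the generated paging queries can be inspected:

```java
import java.util.LinkedHashMap;
import java.util.Map;

import javax.sql.DataSource;

import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.PagingQueryProvider;
import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;

public class SortAliasPagingDemo {

	public static void main(String[] args) throws Exception {
		// The embedded H2 instance only satisfies the provider's init(DataSource) contract;
		// the Oracle flavor is forced explicitly below via setDatabaseType.
		DataSource dataSource = new EmbeddedDatabaseBuilder()
				.setType(EmbeddedDatabaseType.H2)
				.generateUniqueName(true)
				.build();

		SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean();
		factory.setDataSource(dataSource);
		factory.setDatabaseType("ORACLE");
		// Select clause with the nested ROW_NUMBER, mirroring the shape built by the aggregate DAO
		factory.setSelectClause("E.JOB_EXECUTION_ID, E.START_TIME, E.STATUS, "
				+ "ROW_NUMBER() OVER (PARTITION BY E.JOB_EXECUTION_ID ORDER BY E.JOB_EXECUTION_ID DESC) as RN");
		factory.setFromClause("AGGREGATE_JOB_INSTANCE I JOIN AGGREGATE_JOB_EXECUTION E "
				+ "ON I.JOB_INSTANCE_ID = E.JOB_INSTANCE_ID");
		Map<String, Order> sortKeys = new LinkedHashMap<>();
		sortKeys.put("E.JOB_EXECUTION_ID", Order.DESCENDING); // sort key carries the "E." table alias
		factory.setSortKeys(sortKeys);

		PagingQueryProvider provider = factory.getObject();
		System.out.println(provider.generateFirstPageQuery(10));
		System.out.println(provider.generateRemainingPagesQuery(10));
	}
}
```

The remaining-pages query is where the alias-qualified sort key (`E.JOB_EXECUTION_ID`) ends up in the outer condition; the `Safe*PagingQueryProvider` implementations in the DAO below avoid this by switching to the unaliased sort keys for the outer sort.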
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java index abc1659a0d..90eca34ebd 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.server.repository; +import java.lang.reflect.Field; import java.sql.ResultSet; import java.sql.SQLException; import java.time.Instant; @@ -47,7 +48,12 @@ import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; import org.springframework.batch.item.database.Order; import org.springframework.batch.item.database.PagingQueryProvider; +import org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider; +import org.springframework.batch.item.database.support.Db2PagingQueryProvider; +import org.springframework.batch.item.database.support.OraclePagingQueryProvider; import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.batch.item.database.support.SqlPagingQueryUtils; +import org.springframework.batch.item.database.support.SqlServerPagingQueryProvider; import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions; @@ -75,6 +81,7 @@ import org.springframework.jdbc.core.RowMapper; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; +import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; /** @@ -802,7 +809,7 @@ private PagingQueryProvider getPagingQueryProvider(String fields, String fromCla * @throws Exception if page provider is not created. */ private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause, Map sortKeys) throws Exception { - SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); + SqlPagingQueryProviderFactoryBean factory = new SafeSqlPagingQueryProviderFactoryBean(); factory.setDataSource(dataSource); fromClause = "AGGREGATE_JOB_INSTANCE I JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" + (fromClause == null ? "" : " " + fromClause); factory.setFromClause(fromClause); @@ -811,7 +818,7 @@ private PagingQueryProvider getPagingQueryProvider(String fields, String fromCla } if (fields.contains("E.JOB_EXECUTION_ID") && this.useRowNumberOptimization) { Order order = sortKeys.get("E.JOB_EXECUTION_ID"); - String orderString = Optional.ofNullable(order).map(orderKey -> orderKey == Order.DESCENDING ? "DESC" : "ASC").orElse("DESC"); + String orderString = (order == null || order == Order.DESCENDING) ? 
"DESC" : "ASC"; fields += ", ROW_NUMBER() OVER (PARTITION BY E.JOB_EXECUTION_ID ORDER BY E.JOB_EXECUTION_ID " + orderString + ") as RN"; } factory.setSelectClause(fields); @@ -832,4 +839,201 @@ private boolean determineSupportsRowNumberFunction(DataSource dataSource) { } return false; } + + /** + * A {@link SqlPagingQueryProviderFactoryBean} specialization that overrides the {@code Oracle, MSSQL, and DB2} + * paging {@link SafeOraclePagingQueryProvider provider} with an implementation that properly handles sort aliases. + *
<p>
NOTE: nested within the aggregate DAO as this is the only place that needs this specialization. + */ + static class SafeSqlPagingQueryProviderFactoryBean extends SqlPagingQueryProviderFactoryBean { + + private DataSource dataSource; + + @Override + public void setDataSource(DataSource dataSource) { + super.setDataSource(dataSource); + this.dataSource = dataSource; + } + + @Override + public PagingQueryProvider getObject() throws Exception { + PagingQueryProvider provider = super.getObject(); + if (provider instanceof OraclePagingQueryProvider) { + provider = new SafeOraclePagingQueryProvider((AbstractSqlPagingQueryProvider) provider, this.dataSource); + } + else if (provider instanceof SqlServerPagingQueryProvider) { + provider = new SafeSqlServerPagingQueryProvider((SqlServerPagingQueryProvider) provider, this.dataSource); + } + else if (provider instanceof Db2PagingQueryProvider) { + provider = new SafeDb2PagingQueryProvider((Db2PagingQueryProvider) provider, this.dataSource); + } + return provider; + } + + } + + /** + * A {@link AbstractSqlPagingQueryProvider paging provider} for {@code Oracle} that works around the fact that the + * Oracle provider in Spring Batch 4.x does not properly handle sort aliases when using nested {@code ROW_NUMBER} + * clauses. + */ + static class SafeOraclePagingQueryProvider extends AbstractSqlPagingQueryProvider { + + SafeOraclePagingQueryProvider(AbstractSqlPagingQueryProvider delegate, DataSource dataSource) { + // Have to use reflection to retrieve the provider fields + this.setFromClause(extractField(delegate, "fromClause", String.class)); + this.setWhereClause(extractField(delegate, "whereClause", String.class)); + this.setSortKeys(extractField(delegate, "sortKeys", Map.class)); + this.setSelectClause(extractField(delegate, "selectClause", String.class)); + this.setGroupClause(extractField(delegate, "groupClause", String.class)); + try { + this.init(dataSource); + } + catch (Exception e) { + throw new RuntimeException(e); + } + } + + private T extractField(AbstractSqlPagingQueryProvider target, String fieldName, Class fieldType) { + Field field = ReflectionUtils.findField(AbstractSqlPagingQueryProvider.class, fieldName, fieldType); + ReflectionUtils.makeAccessible(field); + return (T) ReflectionUtils.getField(field, target); + } + + @Override + public String generateFirstPageQuery(int pageSize) { + return generateRowNumSqlQuery(false, pageSize); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + return generateRowNumSqlQuery(true, pageSize); + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = (page * pageSize); + offset = (offset == 0) ? 1 : offset; + String sortKeyInnerSelect = this.getSortKeySelect(true); + String sortKeyOuterSelect = this.getSortKeySelect(false); + return SqlPagingQueryUtils.generateRowNumSqlQueryWithNesting(this, sortKeyInnerSelect, sortKeyOuterSelect, + false, "TMP_ROW_NUM = " + offset); + } + + private String getSortKeySelect(boolean withAliases) { + StringBuilder sql = new StringBuilder(); + Map sortKeys = (withAliases) ? this.getSortKeys() : this.getSortKeysWithoutAliases(); + sql.append(sortKeys.keySet().stream().collect(Collectors.joining(","))); + return sql.toString(); + } + + // Taken from SqlPagingQueryUtils.generateRowNumSqlQuery but use sortKeysWithoutAlias + // for outer sort condition. 
+ private String generateRowNumSqlQuery(boolean remainingPageQuery, int pageSize) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT * FROM (SELECT ").append(getSelectClause()); + sql.append(" FROM ").append(this.getFromClause()); + if (StringUtils.hasText(this.getWhereClause())) { + sql.append(" WHERE ").append(this.getWhereClause()); + } + if (StringUtils.hasText(this.getGroupClause())) { + sql.append(" GROUP BY ").append(this.getGroupClause()); + } + // inner sort by + sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); + sql.append(") WHERE ").append("ROWNUM <= " + pageSize); + if (remainingPageQuery) { + sql.append(" AND "); + // For the outer sort we want to use sort keys w/o aliases. However, + // SqlPagingQueryUtils.buildSortConditions does not allow sort keys to be passed in. + // Therefore, we temporarily set the 'sortKeys' for the call to 'buildSortConditions'. + // The alternative is to clone the 'buildSortConditions' method here and allow the sort keys to be + // passed in BUT method is gigantic and this approach is the lesser of the two evils. + Map originalSortKeys = this.getSortKeys(); + this.setSortKeys(this.getSortKeysWithoutAliases()); + try { + SqlPagingQueryUtils.buildSortConditions(this, sql); + } + finally { + this.setSortKeys(originalSortKeys); + } + } + return sql.toString(); + } + } + + /** + * A {@link SqlServerPagingQueryProvider paging provider} for {@code MSSQL} that works around the fact that the + * MSSQL provider in Spring Batch 4.x does not properly handle sort aliases when generating jump to page queries. + */ + static class SafeSqlServerPagingQueryProvider extends SqlServerPagingQueryProvider { + + SafeSqlServerPagingQueryProvider(SqlServerPagingQueryProvider delegate, DataSource dataSource) { + // Have to use reflection to retrieve the provider fields + this.setFromClause(extractField(delegate, "fromClause", String.class)); + this.setWhereClause(extractField(delegate, "whereClause", String.class)); + this.setSortKeys(extractField(delegate, "sortKeys", Map.class)); + this.setSelectClause(extractField(delegate, "selectClause", String.class)); + this.setGroupClause(extractField(delegate, "groupClause", String.class)); + try { + this.init(dataSource); + } + catch (Exception e) { + throw new RuntimeException(e); + } + } + + private T extractField(AbstractSqlPagingQueryProvider target, String fieldName, Class fieldType) { + Field field = ReflectionUtils.findField(AbstractSqlPagingQueryProvider.class, fieldName, fieldType); + ReflectionUtils.makeAccessible(field); + return (T) ReflectionUtils.getField(field, target); + } + + @Override + protected String getOverClause() { + // Overrides the parent impl to use 'getSortKeys' instead of 'getSortKeysWithoutAliases' + StringBuilder sql = new StringBuilder(); + sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this.getSortKeys())); + return sql.toString(); + } + + } + + /** + * A {@link Db2PagingQueryProvider paging provider} for {@code DB2} that works around the fact that the + * DB2 provider in Spring Batch 4.x does not properly handle sort aliases when generating jump to page queries. 
+ */ + static class SafeDb2PagingQueryProvider extends Db2PagingQueryProvider { + + SafeDb2PagingQueryProvider(Db2PagingQueryProvider delegate, DataSource dataSource) { + // Have to use reflection to retrieve the provider fields + this.setFromClause(extractField(delegate, "fromClause", String.class)); + this.setWhereClause(extractField(delegate, "whereClause", String.class)); + this.setSortKeys(extractField(delegate, "sortKeys", Map.class)); + this.setSelectClause(extractField(delegate, "selectClause", String.class)); + this.setGroupClause(extractField(delegate, "groupClause", String.class)); + try { + this.init(dataSource); + } + catch (Exception e) { + throw new RuntimeException(e); + } + } + + private T extractField(AbstractSqlPagingQueryProvider target, String fieldName, Class fieldType) { + Field field = ReflectionUtils.findField(AbstractSqlPagingQueryProvider.class, fieldName, fieldType); + ReflectionUtils.makeAccessible(field); + return (T) ReflectionUtils.getField(field, target); + } + + @Override + protected String getOverClause() { + // Overrides the parent impl to use 'getSortKeys' instead of 'getSortKeysWithoutAliases' + StringBuilder sql = new StringBuilder(); + sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this.getSortKeys())); + return sql.toString(); + } + + } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java index 545946f57e..4801836c73 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java @@ -52,7 +52,7 @@ public class SqlPagingQueryProviderFactoryBean implements FactoryBean providers; static { - Map providerMap = new HashMap(); + Map providerMap = new HashMap(); providerMap.put(DatabaseType.HSQL, new HsqlPagingQueryProvider()); providerMap.put(DatabaseType.H2, new H2PagingQueryProvider()); providerMap.put(DatabaseType.MYSQL, new MySqlPagingQueryProvider()); diff --git a/spring-cloud-dataflow-server/pom.xml b/spring-cloud-dataflow-server/pom.xml index c34576e8ca..32b2b2aedb 100644 --- a/spring-cloud-dataflow-server/pom.xml +++ b/spring-cloud-dataflow-server/pom.xml @@ -408,5 +408,15 @@ + + local-dev-db2 + + + com.ibm.db2 + jcc + 11.5.8.0 + + + diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java index f5d31793e0..81897dba68 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java @@ -144,6 +144,8 @@ void shouldListJobExecutionsUsingPerformantRowNumberQuery( createdExecutionIdsBySchemaTarget.add(schemaVersionTarget, execution1.getExecutionId()); TaskExecution execution2 = testUtils.createSampleJob("job2", 3, BatchStatus.COMPLETED, new JobParameters(), schemaVersionTarget); createdExecutionIdsBySchemaTarget.add(schemaVersionTarget, 
execution2.getExecutionId()); + + // Get all executions and ensure the count and that the row number function was (or not) used jobExecutions = taskJobService.listJobExecutionsWithStepCount(Pageable.ofSize(100)); assertThat(jobExecutions).hasSize(originalCount + 4); String expectedSqlFragment = (this.supportsRowNumberFunction()) ? @@ -151,6 +153,12 @@ void shouldListJobExecutionsUsingPerformantRowNumberQuery( "as STEP_COUNT FROM AGGREGATE_JOB_INSTANCE"; Awaitility.waitAtMost(Duration.ofSeconds(5)) .untilAsserted(() -> assertThat(output).contains(expectedSqlFragment)); + + // Verify that paging works as well + jobExecutions = taskJobService.listJobExecutionsWithStepCount(Pageable.ofSize(2).withPage(0)); + assertThat(jobExecutions).hasSize(2); + jobExecutions = taskJobService.listJobExecutionsWithStepCount(Pageable.ofSize(2).withPage(1)); + assertThat(jobExecutions).hasSize(2); } static Stream schemaVersionTargetsProvider() { diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java index 63b9359603..8991b09ee6 100644 --- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java +++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java @@ -24,15 +24,22 @@ import javax.sql.DataSource; +import com.zaxxer.hikari.HikariDataSource; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobInstance; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.repository.dao.JdbcJobInstanceDao; import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService; import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; import org.springframework.cloud.task.batch.listener.TaskBatchDao; @@ -130,4 +137,37 @@ private JobExecution saveJobExecution(JobExecution jobExecution, JdbcTemplate jd private Timestamp timestampFromDate(Date date) { return (date != null) ? Timestamp.valueOf(date.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime()) : null; } + + + /** + * Test utility that generates hundreds of job executions which can be useful when debugging paging issues. + *
<p>
To run, adjust the datasource properties accordingly and then execute the test manually in your editor. + */ + @Disabled + static class JobExecutionTestDataGenerator { + + @Test + void generateJobExecutions() { + // Adjust these properties as necessary to point to your env + DataSourceProperties dataSourceProperties = new DataSourceProperties(); + dataSourceProperties.setUrl("jdbc:oracle:thin:@localhost:1521/dataflow"); + dataSourceProperties.setUsername("spring"); + dataSourceProperties.setPassword("spring"); + dataSourceProperties.setDriverClassName("oracle.jdbc.OracleDriver"); + + DataSource dataSource = dataSourceProperties.initializeDataSourceBuilder().type(HikariDataSource.class).build(); + SchemaService schemaService = new DefaultSchemaService(); + TaskExecutionDaoContainer taskExecutionDaoContainer = new TaskExecutionDaoContainer(dataSource, schemaService); + TaskBatchDaoContainer taskBatchDaoContainer = new TaskBatchDaoContainer(dataSource, schemaService); + JobExecutionTestUtils generator = new JobExecutionTestUtils(taskExecutionDaoContainer, taskBatchDaoContainer); + generator.createSampleJob(jobName("boot2"), 200, BatchStatus.COMPLETED, new JobParameters(), + schemaService.getTarget("boot2")); + generator.createSampleJob(jobName("boot3"), 200, BatchStatus.COMPLETED, new JobParameters(), + schemaService.getTarget("boot3")); + } + + private String jobName(String schemaTarget) { + return schemaTarget + "-job-" + System.currentTimeMillis(); + } + } } diff --git a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java index 12b2c67943..d1db98c78b 100644 --- a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java +++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/DB2_11_5_ContainerSupport.java @@ -16,10 +16,12 @@ package org.springframework.cloud.dataflow.server.db; +import java.util.concurrent.atomic.AtomicReference; + +import org.junit.jupiter.api.BeforeAll; import org.testcontainers.containers.Db2Container; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; +import org.springframework.cloud.dataflow.server.db.arm64.Db2Arm64ContainerSupport; import org.springframework.test.context.DynamicPropertyRegistry; import org.springframework.test.context.DynamicPropertySource; @@ -28,14 +30,20 @@ * * @author Chris Bono */ -@Testcontainers(disabledWithoutDocker = true) -public interface DB2_11_5_ContainerSupport { +public interface DB2_11_5_ContainerSupport extends Db2Arm64ContainerSupport { + + AtomicReference containerReference = new AtomicReference<>(null); - @Container - Db2Container container = new Db2Container("ibmcom/db2:11.5.0.0a").acceptLicense(); + @BeforeAll + static void startContainer() { + Db2Container container = Db2Arm64ContainerSupport.startContainer(() -> + new Db2Container("ibmcom/db2:11.5.0.0a").acceptLicense()); + containerReference.set(container); + } @DynamicPropertySource static void databaseProperties(DynamicPropertyRegistry registry) { + Db2Container container = containerReference.get(); registry.add("spring.datasource.url", container::getJdbcUrl); registry.add("spring.datasource.username", container::getUsername); registry.add("spring.datasource.password", container::getPassword); diff --git 
a/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/arm64/Db2Arm64ContainerSupport.java b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/arm64/Db2Arm64ContainerSupport.java
new file mode 100644
index 0000000000..07c39fb6b9
--- /dev/null
+++ b/spring-cloud-dataflow-test/src/main/java/org/springframework/cloud/dataflow/server/db/arm64/Db2Arm64ContainerSupport.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2023-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.arm64;
+
+import java.util.function.Supplier;
+
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.testcontainers.containers.Db2Container;
+import org.testcontainers.containers.OracleContainer;
+import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
+import uk.org.webcompere.systemstubs.jupiter.SystemStub;
+import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension;
+
+import org.springframework.cloud.dataflow.server.db.ContainerSupport;
+import org.springframework.core.log.LogAccessor;
+
+/**
+ * Provides support for testing against an {@link Db2Container DB2 testcontainer} on Mac ARM64.
+ *
+ * @author Chris Bono
+ */
+@ExtendWith(SystemStubsExtension.class)
+public interface Db2Arm64ContainerSupport {
+
+	LogAccessor LOG = new LogAccessor(Db2Arm64ContainerSupport.class);
+
+	@SystemStub
+	EnvironmentVariables ENV_VARS = new EnvironmentVariables();
+
+	static Db2Container startContainer(Supplier<Db2Container> db2ContainerSupplier) {
+		if (ContainerSupport.runningOnMacArm64()) {
+			String wiki = "https://github.com/spring-cloud/spring-cloud-dataflow/wiki/DB2-on-Mac-ARM64";
+			LOG.warn(() -> "You are running on Mac ARM64. If this test fails, make sure Colima is running prior " +
+					"to test invocation. See " + wiki + " for details");
+			ENV_VARS.set("TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE", "/var/run/docker.sock");
+			ENV_VARS.set("DOCKER_HOST", String.format("unix://%s/.colima/docker.sock", System.getProperty("user.home")));
+		}
+		Db2Container container = db2ContainerSupplier.get();
+		container.start();
+		return container;
+	}
+}
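As a usage sketch (assuming JUnit 5, AssertJ, and a reachable Docker daemon; the test class name and the assertion are illustrative and not part of this change), a test only needs to implement `DB2_11_5_ContainerSupport`: its `@BeforeAll` starts the DB2 container through `Db2Arm64ContainerSupport.startContainer(..)`, applying the Colima environment overrides first when running on Mac ARM64, and Spring-based tests additionally pick up the `spring.datasource.*` properties through the interface's `@DynamicPropertySource`:

```java
import java.sql.Connection;
import java.sql.DatabaseMetaData;

import org.junit.jupiter.api.Test;
import org.testcontainers.containers.Db2Container;

import org.springframework.cloud.dataflow.server.db.DB2_11_5_ContainerSupport;

import static org.assertj.core.api.Assertions.assertThat;

// Illustrative only: implementing the support interface is enough for the DB2 container to be
// started once per test class, with the ARM64/Colima handling applied before it is created.
class Db2ContainerSupportExampleTest implements DB2_11_5_ContainerSupport {

	@Test
	void db2ContainerIsRunningAndReachable() throws Exception {
		Db2Container db2 = DB2_11_5_ContainerSupport.containerReference.get();
		assertThat(db2.isRunning()).isTrue();
		try (Connection connection = db2.createConnection("")) {
			DatabaseMetaData metaData = connection.getMetaData();
			assertThat(metaData.getDatabaseProductName()).containsIgnoringCase("DB2");
		}
	}
}
```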