Update JdbcSearchableJobExecutionDao to support Batch 4 and 5 schemas (#5613)

* Ensure that boot2 and boot3 job executions are queried.
* Restore the default constructor for JdbcSearchableJobExecutionDao.
* Remove final from local variables.
* Update so the test cases from AbstractJdbcJobSearchableExecutionDaoTests can be moved to AbstractSimpleJobServiceTests.
* Moved tests from AbstractJdbcJobSearchableExecutionDaoTests to AbstractSimpleJobServiceTests.
* Fix JobParameter loading on JdbcSearchableJobExecutionDao.
* Added DirtiesContext for each method to prevent connection errors.
* Fixed formatting.
* Fix DataSource handling in SimpleJobServiceMariadbTests and SimpleJobServicePostgresTests.

Fixes #5609
Corneil du Plessis authored Dec 22, 2023
1 parent 6cb57b3 commit 5073813
Showing 13 changed files with 782 additions and 588 deletions.
@@ -54,7 +54,6 @@ public JobParameter deserialize(JsonParser jsonParser, DeserializationContext de

if (!type.isEmpty() && !type.equalsIgnoreCase("STRING")) {
if ("DATE".equalsIgnoreCase(type)) {
- // TODO: when upgraded to Java8 use java DateTime
jobParameter = new JobParameter(DateTime.parse(value).toDate(), identifying);
}
else if ("DOUBLE".equalsIgnoreCase(type)) {
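The TODO removed above pointed at replacing Joda-Time with java.time. That migration is not part of this commit; purely as an illustrative sketch (the class and method names below are hypothetical), a java.time equivalent of DateTime.parse(value).toDate() for ISO-8601 values that carry a zone offset could look like this:

import java.time.OffsetDateTime;
import java.util.Date;

// Hypothetical helper, not from this commit: converts an ISO-8601 date-time
// string with a zone offset (e.g. "2023-12-22T10:15:30+02:00") into the
// java.util.Date that JobParameter expects.
final class IsoDateParsing {
	static Date toDate(String value) {
		return Date.from(OffsetDateTime.parse(value).toInstant());
	}
}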
@@ -334,7 +334,7 @@ public static Map<String, String> extractAndQualifyDeployerProperties(Map<String
.filter(kv -> kv.getKey().startsWith(wildcardPrefix) || kv.getKey().startsWith(appPrefix))
.collect(Collectors.toMap(kv -> kv.getKey().startsWith(wildcardPrefix)
? "spring.cloud.deployer." + kv.getKey().substring(wildcardLength)
: "spring.cloud.deployer." + kv.getKey().substring(appLength), kv -> kv.getValue(),
: "spring.cloud.deployer." + kv.getKey().substring(appLength), Entry::getValue,
(fromWildcard, fromApp) -> fromApp));
logger.debug("extractAndQualifyDeployerProperties:{}", result);
return result;
@@ -361,12 +361,12 @@ public static Map<String, String> qualifyDeployerProperties(Map<String, String>
.filter(kv -> kv.getKey().startsWith(wildcardPrefix) || kv.getKey().startsWith(appPrefix))
.collect(Collectors.toMap(kv -> kv.getKey().startsWith(wildcardPrefix)
? "spring.cloud.deployer." + kv.getKey().substring(wildcardLength)
: "spring.cloud.deployer." + kv.getKey().substring(appLength), kv -> kv.getValue(),
: "spring.cloud.deployer." + kv.getKey().substring(appLength), Entry::getValue,
(fromWildcard, fromApp) -> fromApp));

Map<String, String> resultApp = new TreeMap<>(input).entrySet().stream()
.filter(kv -> !kv.getKey().startsWith(wildcardPrefix) && !kv.getKey().startsWith(appPrefix))
- .collect(Collectors.toMap(kv -> kv.getKey(), kv -> kv.getValue(),
+ .collect(Collectors.toMap(Entry::getKey, Entry::getValue,
(fromWildcard, fromApp) -> fromApp));

resultDeployer.putAll(resultApp);
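The two hunks above only swap kv -> kv.getValue() lambdas for Entry::getValue method references; behaviour is unchanged. Purely for illustration, here is a standalone sketch of the same Collectors.toMap pattern with the wildcard-versus-app merge preference (the property keys and values are made up):

import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.stream.Collectors;

// Illustrative only: both keys collapse to the same qualified key; the merge
// function then keeps the app-scoped value, as in the code above.
class ToMapSketch {
	public static void main(String[] args) {
		Map<String, String> input = Map.of(
				"deployer.*.memory", "512m",
				"deployer.myapp.memory", "1024m");
		Map<String, String> result = new TreeMap<>(input).entrySet().stream()
				.collect(Collectors.toMap(
						e -> "spring.cloud.deployer." + e.getKey().substring(e.getKey().lastIndexOf('.') + 1),
						Entry::getValue,
						(fromWildcard, fromApp) -> fromApp));
		System.out.println(result); // {spring.cloud.deployer.memory=1024m}
	}
}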
@@ -441,8 +441,8 @@ public static List<String> removeQuoting(List<String> params) {
}
start = regexMatcher.start();
}
- if (param != null && param.length() > 0) {
- String p = removeQuoting(param.substring(start, param.length()).trim());
+ if (param != null && !param.isEmpty()) {
+ String p = removeQuoting(param.substring(start).trim());
if (StringUtils.hasText(p)) {
paramsToUse.add(p);
}
@@ -0,0 +1,37 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.server.batch;

import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;
import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
import org.springframework.util.Assert;

/**
 * Provides enumeration of Batch Schema versions needed to be supported.
 * @author Corneil du Plessis
 */
public enum BatchVersion {
	BATCH_4,
	BATCH_5;
	public static BatchVersion from(AppBootSchemaVersion bootSchemaVersion) {
		Assert.notNull(bootSchemaVersion, "bootSchemaVersion required");
		return AppBootSchemaVersion.BOOT3.equals(bootSchemaVersion) ? BATCH_5 : BATCH_4;
	}
	public static BatchVersion from(SchemaVersionTarget versionTarget) {
		Assert.notNull(versionTarget, "versionTarget required");
		return from(versionTarget.getSchemaVersion());
	}
}
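As the hunks further down show, BatchVersion is resolved from a SchemaVersionTarget (or directly from an AppBootSchemaVersion) and passed to the JdbcSearchableJobExecutionDao constructor. A minimal usage sketch, assuming the BOOT2/BOOT3 constants from the schema module:

import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;
import org.springframework.cloud.dataflow.server.batch.BatchVersion;

// Illustrative only: boot3-targeted apps map to the Spring Batch 5 schema,
// everything else falls back to the Batch 4 layout.
class BatchVersionMappingExample {
	public static void main(String[] args) {
		System.out.println(BatchVersion.from(AppBootSchemaVersion.BOOT3)); // BATCH_5
		System.out.println(BatchVersion.from(AppBootSchemaVersion.BOOT2)); // BATCH_4
	}
}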

Large diffs are not rendered by default.
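The large diff hidden here is presumably the JdbcSearchableJobExecutionDao rewrite named in the commit title. As a rough sketch of the kind of branching the Batch 4/5 split requires, and not the actual implementation: Spring Batch 5 replaced the typed columns of BATCH_JOB_EXECUTION_PARAMS (TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL) with PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE and IDENTIFYING, so a version-aware DAO has to choose its SQL per BatchVersion. Everything below apart from those column names is hypothetical:

import org.springframework.cloud.dataflow.server.batch.BatchVersion;

// Hypothetical sketch, not code from this commit: pick the parameter query
// that matches the schema the job execution was written with.
class ParameterSqlSketch {
	static String parameterColumns(BatchVersion version) {
		return version == BatchVersion.BATCH_5
				// Batch 5 stores each parameter as a typed name/value row
				? "PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE, IDENTIFYING"
				// Batch 4 keeps one column per supported parameter type
				: "KEY_NAME, TYPE_CD, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING";
	}

	static String parameterQuery(BatchVersion version, String tablePrefix) {
		return "SELECT " + parameterColumns(version) + " FROM " + tablePrefix
				+ "JOB_EXECUTION_PARAMS WHERE JOB_EXECUTION_ID = ?";
	}
}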

@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2019 the original author or authors.
+ * Copyright 2009-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,7 +16,6 @@
package org.springframework.cloud.dataflow.server.batch;

import java.sql.Types;

import javax.sql.DataSource;

import org.slf4j.Logger;
@@ -60,6 +59,7 @@
* ingredients as convenient as possible.
*
* @author Dave Syer
+ * @author Corneil du Plessis
*
*/
public class SimpleJobServiceFactoryBean implements FactoryBean<JobService>, InitializingBean, EnvironmentAware {
@@ -264,7 +264,8 @@ protected SearchableJobInstanceDao createJobInstanceDao() throws Exception {
}

protected SearchableJobExecutionDao createJobExecutionDao() throws Exception {
- JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao();
+ BatchVersion batchVersion = BatchVersion.from(this.schemaVersionTarget);
+ JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao(batchVersion);
dao.setDataSource(dataSource);
dao.setJobExecutionIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix
+ "JOB_EXECUTION_SEQ"));
@@ -16,20 +16,16 @@

package org.springframework.cloud.dataflow.server.repository;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;

import org.springframework.batch.core.repository.dao.JobExecutionDao;
import org.springframework.cloud.dataflow.core.database.support.DatabaseType;
import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory;
import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
import org.springframework.cloud.dataflow.schema.service.SchemaService;
import org.springframework.cloud.dataflow.server.batch.BatchVersion;
import org.springframework.cloud.dataflow.server.batch.JdbcSearchableJobExecutionDao;
import org.springframework.cloud.dataflow.server.batch.SearchableJobExecutionDao;
import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException;
import org.springframework.cloud.dataflow.server.repository.support.JdbcParameterUtils;
import org.springframework.jdbc.support.JdbcUtils;
import org.springframework.util.StringUtils;

/**
@@ -41,7 +37,8 @@ public class JobExecutionDaoContainer {

public JobExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) {
for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) {
- JdbcSearchableJobExecutionDao jdbcSearchableJobExecutionDao = new JdbcSearchableJobExecutionDao();
+ BatchVersion batchVersion = BatchVersion.from(target);
+ JdbcSearchableJobExecutionDao jdbcSearchableJobExecutionDao = new JdbcSearchableJobExecutionDao(batchVersion);
jdbcSearchableJobExecutionDao.setDataSource(dataSource);
jdbcSearchableJobExecutionDao.setTablePrefix(target.getBatchPrefix());
try {

This file was deleted.

