diff --git a/pom.xml b/pom.xml
index 25b5a855b4..2b6eabf464 100644
--- a/pom.xml
+++ b/pom.xml
@@ -77,9 +77,10 @@
 <module>spring-cloud-starter-dataflow-server</module>
 <module>spring-cloud-starter-dataflow-ui</module>
 <module>spring-cloud-dataflow-server</module>
- <module>spring-cloud-dataflow-tasklauncher</module>
- <module>spring-cloud-dataflow-single-step-batch-job</module>
- <module>spring-cloud-dataflow-composed-task-runner</module>
+ <!-- <module>spring-cloud-dataflow-tasklauncher</module> -->
+ <!-- <module>spring-cloud-dataflow-single-step-batch-job</module> -->
+ <!-- <module>spring-cloud-dataflow-composed-task-runner</module> -->
+
 <module>spring-cloud-dataflow-test</module>
 <module>spring-cloud-dataflow-dependencies</module>
 <module>spring-cloud-dataflow-classic-docs</module>
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java
index 85ed9b6457..6a0e465799 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java
@@ -32,7 +32,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
@@ -83,7 +83,7 @@ public void registeringAnApplicationVersion() throws Exception {
parameterWithName("name").description("The name of the application to register"),
parameterWithName("version").description("The version of the application to register")
),
- requestParameters(
+ queryParameters(
parameterWithName("uri").description("URI where the application bits reside"),
parameterWithName("metadata-uri").optional()
.description("URI where the application metadata jar can be found"),
@@ -108,7 +108,7 @@ public void bulkRegisteringApps() throws Exception {
.andExpect(status().isCreated())
.andDo(
this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."),
parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."),
parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
@@ -133,7 +133,7 @@ public void getApplicationsFiltered() throws Exception {
)
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("search").description("The search string performed on the name (optional)"),
parameterWithName("type")
.description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())),
@@ -167,7 +167,7 @@ public void getSingleApplication() throws Exception {
parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())),
parameterWithName("name").description("The name of the application to query")
),
- requestParameters(
+ queryParameters(
parameterWithName("exhaustive").optional()
.description("Return all application properties, including common Spring Boot properties")
),
@@ -205,7 +205,7 @@ public void registeringAnApplication() throws Exception {
parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())),
parameterWithName("name").description("The name of the application to register")
),
- requestParameters(
+ queryParameters(
parameterWithName("uri").description("URI where the application bits reside"),
parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"),
parameterWithName("bootVersion").optional().description("The Spring Boot version of the application.Default is 2"),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java
index 40d361e15c..622833c4f7 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java
@@ -28,7 +28,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -80,7 +80,7 @@ public void listAllAuditRecords() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page").description("The zero-based page number (optional)"),
parameterWithName("size").description("The requested page size (optional)"),
parameterWithName("operations").description("Comma-separated list of Audit Operations (optional)"),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java
index 7a8c1bdc0d..b2cf24ada2 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java
@@ -17,6 +17,7 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
import javax.sql.DataSource;
+import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -32,14 +33,7 @@
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.dataflow.core.Launcher;
import org.springframework.cloud.dataflow.core.TaskPlatform;
-import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory;
-import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
-import org.springframework.cloud.dataflow.schema.service.SchemaService;
import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController;
-import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao;
-import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer;
-import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao;
-import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities;
import org.springframework.cloud.dataflow.server.service.SchedulerService;
import org.springframework.cloud.dataflow.server.single.LocalDataflowResource;
import org.springframework.cloud.deployer.spi.app.ActuatorOperations;
@@ -205,28 +199,6 @@ void destroyStream(String name) throws Exception {
);
}
- protected DataflowTaskExecutionMetadataDaoContainer createDataFlowTaskExecutionMetadataDaoContainer(SchemaService schemaService) {
- DataflowTaskExecutionMetadataDaoContainer result = new DataflowTaskExecutionMetadataDaoContainer();
- MultiSchemaIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource);
- String databaseType;
- try {
- databaseType = DatabaseType.fromMetaData(dataSource).name();
- } catch (MetaDataAccessException e) {
- throw new IllegalStateException(e);
- }
- for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) {
- DataflowTaskExecutionMetadataDao dao = new JdbcDataflowTaskExecutionMetadataDao(
- dataSource,
- incrementerFactory.getIncrementer(databaseType,
- SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA_SEQ", target.getTaskPrefix())
- ),
- target.getTaskPrefix()
- );
- result.add(target.getName(), dao);
- }
- return result;
- }
-
/**
* A {@link ResultHandler} that can be turned off and on.
*
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java
index d7811f3cca..5b8886ae3b 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java
@@ -16,6 +16,7 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
+import java.time.LocalDateTime;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
@@ -29,7 +30,10 @@
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
@@ -58,7 +62,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -128,7 +132,7 @@ public void listJobExecutions() throws Exception {
.param("size", "10"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -149,7 +153,7 @@ public void listThinJobExecutions() throws Exception {
.param("size", "10"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -171,7 +175,7 @@ public void listThinJobExecutionsByJobInstanceId() throws Exception {
.param("jobInstanceId", "1"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -195,7 +199,7 @@ public void listThinJobExecutionsByTaskExecutionId() throws Exception {
.param("taskExecutionId", "1"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -220,7 +224,7 @@ public void listThinJobExecutionsByDate() throws Exception {
.param("toDate", "2050-09-24T18:00:45,000"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -246,7 +250,7 @@ public void listJobExecutionsByName() throws Exception {
.param("size", "10"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -270,7 +274,7 @@ public void listThinJobExecutionsByName() throws Exception {
.param("size", "10"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -297,7 +301,7 @@ public void jobDisplayDetail() throws Exception {
pathParameters(
parameterWithName("id").description("The id of an existing job execution (required)")
),
- requestParameters(
+ queryParameters(
parameterWithName("schemaTarget").description("Schema Target to the Job.").optional()
),
responseFields(
@@ -337,7 +341,7 @@ public void jobStop() throws Exception {
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("id")
.description("The id of an existing job execution (required)"))
- , requestParameters(
+ , queryParameters(
parameterWithName("schemaTarget").description("The schema target of the job execution").optional(),
parameterWithName("stop")
.description("Sends signal to stop the job if set to true"))));
@@ -354,7 +358,7 @@ public void jobRestart() throws Exception {
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("id")
.description("The id of an existing job execution (required)"))
- , requestParameters(
+ , queryParameters(
parameterWithName("schemaTarget").description("The schema target of the job execution").optional(),
parameterWithName("restart")
.description("Sends signal to restart the job if set to true")
@@ -373,18 +377,18 @@ private void initialize() {
}
- private void createJobExecution(String name, BatchStatus status) {
+ private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader);
TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName());
- TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null);
- Map<String, JobParameter> jobParameterMap = new HashMap<>();
+ TaskExecution taskExecution = dao.createTaskExecution(name, LocalDateTime.now(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null);
+ Map<String, JobParameter<?>> jobParameterMap = new HashMap<>();
JobParameters jobParameters = new JobParameters(jobParameterMap);
JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName());
- JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null);
+ JobExecution jobExecution = jobRepository.createJobExecution(name, jobParameters);
TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName());
taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
jobRepository.update(jobExecution);
final TaskManifest manifest = new TaskManifest();
manifest.setPlatformName("default");
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java
index 8b553dcfeb..afbd9d82f7 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java
@@ -16,8 +16,8 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
import org.junit.Before;
import org.junit.Test;
@@ -26,7 +26,10 @@
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
@@ -49,7 +52,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -93,7 +96,7 @@ public void listJobInstances() throws Exception {
.param("size", "10"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -117,7 +120,7 @@ public void jobDisplayDetail() throws Exception {
pathParameters(
parameterWithName("id").description("The id of an existing job instance (required)")
),
- requestParameters(
+ queryParameters(
parameterWithName("schemaTarget").description("Schema target").optional()
),
responseFields(
@@ -138,16 +141,16 @@ private void initialize() {
this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class);
}
- private void createJobExecution(String name, BatchStatus status) {
+ private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader);
TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName());
- TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), new ArrayList<>(), null);
+ TaskExecution taskExecution = dao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null);
JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName());
- JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters());
TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName());
taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
jobRepository.update(jobExecution);
}
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java
index 07ba4cd4fe..3f9ad263fd 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java
@@ -16,8 +16,8 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
import org.junit.Before;
import org.junit.Test;
@@ -27,7 +27,10 @@
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
@@ -50,7 +53,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
@@ -104,7 +107,7 @@ public void listStepExecutionsForJob() throws Exception {
.param("page", "0")
.param("size", "10"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -128,7 +131,7 @@ public void stepDetail() throws Exception {
parameterWithName("stepid")
.description("The id of an existing step execution for a specific job execution (required)")
),
- requestParameters(
+ queryParameters(
parameterWithName("schemaTarget").description("Schema target").optional()
),
responseFields(
@@ -174,19 +177,20 @@ private void initialize() {
this.taskDefinitionReader = context.getBean(TaskDefinitionReader.class);
}
- private void createJobExecution(String name, BatchStatus status) {
+ private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException,
+ JobExecutionAlreadyRunningException, JobRestartException {
SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader);
TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName());
- TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), new ArrayList<>(), null);
+ TaskExecution taskExecution = dao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null);
JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName());
- JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters());
StepExecution stepExecution = new StepExecution(name + "_STEP", jobExecution, jobExecution.getId());
stepExecution.setId(null);
jobRepository.add(stepExecution);
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(schemaVersionTarget.getName());
taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
jobRepository.update(jobExecution);
}
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java
index 08385bab4b..ba9b5c39d2 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java
@@ -33,7 +33,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -77,7 +77,7 @@ public void createDefinition() throws Exception {
.param("deploy", "false"))
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("name").description("The name for the created task definitions"),
parameterWithName("definition").description("The definition for the stream, using Data Flow DSL"),
parameterWithName("description").description("The description of the stream definition"),
@@ -107,7 +107,7 @@ public void listAllStreamDefinitions() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page").description("The zero-based page number (optional)"),
parameterWithName("search").description("The search string performed on the name (optional)"),
parameterWithName("sort").description("The sort on the list (optional)"),
@@ -179,7 +179,7 @@ public void listRelatedStreamDefinitions() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("nested")
.description("Should we recursively findByTaskNameContains for related stream definitions (optional)"),
parameterWithName("page").description("The zero-based page number (optional)"),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java
index 2e00431002..88f067ec3b 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java
@@ -17,7 +17,6 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -42,7 +41,7 @@
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
@@ -136,7 +135,7 @@ public void info() throws Exception {
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("timelog")
.description("The name of an existing stream definition (required)")),
- requestParameters(parameterWithName("reuse-deployment-properties")
+ queryParameters(parameterWithName("reuse-deployment-properties")
.description(parameterWithName("The name of the flag to reuse the deployment properties")))
));
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java
index 1fd36e6d7b..421dedcfda 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java
@@ -32,7 +32,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -65,7 +65,7 @@ public void createDefinition() throws Exception {
.param("description", "Demo task definition for testing"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("name").description("The name for the created task definition"),
parameterWithName("definition").description("The definition for the task, using Data Flow DSL"),
parameterWithName("description").description("The description of the task definition")
@@ -97,7 +97,7 @@ public void listAllTaskDefinitions() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page").description("The zero-based page number (optional)"),
parameterWithName("size").description("The requested page size (optional)"),
parameterWithName("search").description("The search string performed on the name (optional)"),
@@ -122,7 +122,7 @@ public void displayDetail() throws Exception {
pathParameters(
parameterWithName("my-task").description("The name of an existing task definition (required)")
),
- requestParameters(
+ queryParameters(
parameterWithName("manifest").description("The flag to include the task manifest into the latest task execution (optional)")
),
responseFields(
@@ -149,7 +149,7 @@ public void taskDefinitionDelete() throws Exception {
.andDo(this.documentationHandler.document(
pathParameters(
parameterWithName("my-task").description("The name of an existing task definition (required)")),
- requestParameters(
+ queryParameters(
parameterWithName("cleanup").description("The flag to indicate if the associated task executions needed to be cleaned up")
)
));
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java
index 5d0c23f961..63abcf8dd8 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java
@@ -37,7 +37,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -83,7 +83,7 @@ public void launchTaskBoot3() throws Exception {
)
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("name").description("The name of the task definition to launch"),
parameterWithName("properties")
.description("Application and Deployer properties to use while launching. (optional)"),
@@ -109,7 +109,7 @@ public void launchTask() throws Exception {
)
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("name").description("The name of the task definition to launch"),
parameterWithName("properties")
.description("Application and Deployer properties to use while launching. (optional)"),
@@ -148,7 +148,7 @@ public void getTaskDisplayDetail() throws Exception {
pathParameters(
parameterWithName("id").description("The id of an existing task execution (required)")
),
- requestParameters(
+ queryParameters(
parameterWithName("schemaTarget").description("The schemaTarget provided in Task execution detail")
),
responseFields(
@@ -203,7 +203,7 @@ public void getTaskDisplayDetailByExternalId() throws Exception {
pathParameters(
parameterWithName("externalExecutionId").description("The external ExecutionId of an existing task execution (required)")
),
- requestParameters(
+ queryParameters(
parameterWithName("platform").description("The name of the platform.")
),
responseFields(
@@ -246,7 +246,7 @@ public void listTaskExecutions() throws Exception {
.param("size", "2"))
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -273,7 +273,7 @@ public void listTaskExecutionsByName() throws Exception {
)
.andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -306,7 +306,7 @@ public void stopTask() throws Exception {
pathParameters(
parameterWithName("id").description("The ids of an existing task execution (required)")
),
- requestParameters(
+ queryParameters(
parameterWithName("schemaTarget").description("The schemaTarget provided in Task execution detail. (optional)"))
)
);
@@ -327,7 +327,7 @@ public void taskExecutionRemove() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(parameterWithName("action").description("Optional. Defaults to: CLEANUP.")),
+ queryParameters(parameterWithName("action").description("Optional. Defaults to: CLEANUP.")),
pathParameters(parameterWithName("ids")
.description("The id of an existing task execution (required). Multiple comma separated values are accepted."))
));
@@ -340,7 +340,7 @@ public void taskExecutionRemoveAndTaskDataRemove() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("action").description("Using both actions CLEANUP and REMOVE_DATA simultaneously."),
parameterWithName("schemaTarget").description("Schema target for task. (optional)")
),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java
index 8b09a8fae2..07b8fec3fe 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java
@@ -33,7 +33,7 @@
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -76,7 +76,7 @@ public void getLogsByTaskId() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("platformName").description("The name of the platform the task is launched."))
));
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java
index 2c6dc1c2d4..054f88ea82 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java
@@ -24,7 +24,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields;
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -46,7 +46,7 @@ public void listTaskPlatforms() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java
index 5ece6715cb..0ea482694b 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java
@@ -31,7 +31,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -67,7 +67,7 @@ public void createSchedule() throws Exception {
.param("arguments", "--foo=bar"))
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("scheduleName").description("The name for the created schedule"),
parameterWithName("platform").description("The name of the platform the task is launched"),
parameterWithName("taskDefinitionName")
@@ -99,7 +99,7 @@ public void listFilteredSchedules() throws Exception {
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("task-definition-name")
.description("Filter schedules based on the specified task definition (required)")),
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
@@ -120,7 +120,7 @@ public void listAllSchedules() throws Exception {
.andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("page")
.description("The zero-based page number (optional)"),
parameterWithName("size")
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java
index d2fc6d8bb2..ab90898c60 100644
--- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java
+++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java
@@ -19,7 +19,6 @@
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
-import java.util.Date;
import org.springframework.batch.core.StepExecution;
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java
index f4e99885f4..9f2dbcfce0 100644
--- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java
+++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java
@@ -20,7 +20,6 @@
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
-import java.util.Date;
import java.util.Properties;
import java.util.TimeZone;
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java
index 8e351a21be..55042f7f2b 100644
--- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java
+++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java
@@ -20,12 +20,9 @@
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
-import java.util.Date;
import java.util.Properties;
import java.util.TimeZone;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
diff --git a/spring-cloud-dataflow-server-core/pom.xml b/spring-cloud-dataflow-server-core/pom.xml
index c5a818865a..48808e0265 100644
--- a/spring-cloud-dataflow-server-core/pom.xml
+++ b/spring-cloud-dataflow-server-core/pom.xml
@@ -253,6 +253,11 @@
 <artifactId>postgresql</artifactId>
 <scope>test</scope>
 </dependency>
+ <dependency>
+ <groupId>org.hibernate.orm</groupId>
+ <artifactId>hibernate-ant</artifactId>
+ <scope>test</scope>
+ </dependency>
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowPagingQueryProvider.java
new file mode 100644
index 0000000000..c806043425
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowPagingQueryProvider.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.batch;
+
+//TODO: Boot3x followup
+public interface DataflowPagingQueryProvider {
+
+	/**
+	 * Generate the jump-to-item query. The supplied itemIndex may fall in the
+	 * middle of a page; together with the page size it is used to calculate the
+	 * last index of the preceding page, so that the sort key for that row can be
+	 * retrieved.
+	 *
+	 * @param itemIndex the index of the next item to be read
+	 * @param pageSize the number of rows to read for each page
+	 * @return the generated query
+	 */
+ String generateJumpToItemQuery(int itemIndex, int pageSize);
+}
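
To make the TODO above concrete: a hypothetical implementation of this interface for databases with LIMIT/OFFSET support might look like the sketch below. It mirrors what the removed Spring Batch 4 paging providers generated; the table, sort key, and dialect handling are simplifying assumptions, not the planned SCDF implementation.

```java
package org.springframework.cloud.dataflow.server.batch;

public class LimitOffsetPagingQueryProvider implements DataflowPagingQueryProvider {

	@Override
	public String generateJumpToItemQuery(int itemIndex, int pageSize) {
		// Jump to the last row of the page preceding the one that contains
		// itemIndex, so its sort-key value can seed the "remaining pages" query.
		int page = itemIndex / pageSize;
		int offset = Math.max(page * pageSize - 1, 0);
		return "SELECT JOB_EXECUTION_ID FROM BATCH_JOB_EXECUTION "
				+ "ORDER BY JOB_EXECUTION_ID DESC LIMIT 1 OFFSET " + offset;
	}
}
```
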
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowSqlPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowSqlPagingQueryProvider.java
new file mode 100644
index 0000000000..14f1ac316c
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowSqlPagingQueryProvider.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.batch;
+
+//TODO: Boot3x followup
+
+/**
+ * This class provides implementations for methods that were removed from Spring Batch
+ * but are still needed by SCDF. This comment will need to be updated prior to release
+ * to describe the extra features it provides beyond {@code SqlPagingQueryProviderFactoryBean}.
+ */
+public abstract class DataflowSqlPagingQueryProvider implements DataflowPagingQueryProvider {
+	public String generateJumpToItemQuery(int itemIndex, int pageSize) {
+		throw new UnsupportedOperationException("This method is not yet supported by SCDF.");
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java
index 58f5a4b6d4..05d41a384f 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java
@@ -121,22 +121,40 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement
private PagingQueryProvider allExecutionsPagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowAllExecutionsPagingQueryProvider;
+
private PagingQueryProvider byJobNamePagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowByJobNamePagingQueryProvider;
+
private PagingQueryProvider byStatusPagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowByStatusPagingQueryProvider;
+
private PagingQueryProvider byJobNameAndStatusPagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowByJobNameAndStatusPagingQueryProvider;
+
private PagingQueryProvider byJobNameWithStepCountPagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowByJobNameWithStepCountPagingQueryProvider;
+
private PagingQueryProvider executionsWithStepCountPagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowExecutionsWithStepCountPagingQueryProvider;
+
private PagingQueryProvider byDateRangeWithStepCountPagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowByDateRangeWithStepCountPagingQueryProvider;
+
private PagingQueryProvider byJobInstanceIdWithStepCountPagingQueryProvider;
+ private DataflowPagingQueryProvider dataflowByJobInstanceIdWithStepCountPagingQueryProvider;
+
private PagingQueryProvider byTaskExecutionIdWithStepCountPagingQueryProvider;
+ private DataflowPagingQueryProvider dataFlowByTaskExecutionIdWithStepCountPagingQueryProvider;
+
private final ConfigurableConversionService conversionService;
private DataSource dataSource;
@@ -180,17 +198,42 @@ protected long getNextKey() {
});
allExecutionsPagingQueryProvider = getPagingQueryProvider();
+ dataflowAllExecutionsPagingQueryProvider = getDataflowPagingQueryProvider();
+
+
executionsWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, null);
+
+ dataflowExecutionsWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, null);
+
+
byJobNamePagingQueryProvider = getPagingQueryProvider(NAME_FILTER);
+ dataflowByJobNamePagingQueryProvider = getDataflowPagingQueryProvider(NAME_FILTER);
+
byStatusPagingQueryProvider = getPagingQueryProvider(STATUS_FILTER);
+ dataflowByStatusPagingQueryProvider = getDataflowPagingQueryProvider(STATUS_FILTER);
+
byJobNameAndStatusPagingQueryProvider = getPagingQueryProvider(NAME_AND_STATUS_FILTER);
+ dataflowByJobNameAndStatusPagingQueryProvider = getDataflowPagingQueryProvider(NAME_AND_STATUS_FILTER);
+
byJobNameWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, NAME_FILTER);
+
+ dataflowByJobNameWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, NAME_FILTER);
+
+
byDateRangeWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null,
DATE_RANGE_FILTER);
+ dataflowByDateRangeWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null,
+ DATE_RANGE_FILTER);
+
byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null,
JOB_INSTANCE_ID_FILTER);
+ dataflowByJobInstanceIdWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null,
+ JOB_INSTANCE_ID_FILTER);
+
byTaskExecutionIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT,
FROM_CLAUSE_TASK_TASK_BATCH, TASK_EXECUTION_ID_FILTER);
+ dataFlowByTaskExecutionIdWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT,
+ FROM_CLAUSE_TASK_TASK_BATCH, TASK_EXECUTION_ID_FILTER);
super.afterPropertiesSet();
@@ -254,7 +297,15 @@ private PagingQueryProvider getPagingQueryProvider() throws Exception {
}
/**
- * @return a {@link PagingQueryProvider} for all job executions with the provided
+ * @return a {@link DataflowPagingQueryProvider} for all job executions
+ * @throws Exception if page provider is not created.
+ */
+ private DataflowPagingQueryProvider getDataflowPagingQueryProvider() throws Exception {
+ return getDataflowPagingQueryProvider(null);
+ }
+
+ /**
+ * @return a {@link PagingQueryProvider} for all job executions with the provided
* where clause
* @throws Exception if page provider is not created.
*/
@@ -262,6 +313,17 @@ private PagingQueryProvider getPagingQueryProvider(String whereClause) throws Ex
return getPagingQueryProvider(null, whereClause);
}
+ //TODO: Boot3x followup - implement a DataflowSqlPagingQueryProvider so that generateJumpToItemQuery can be supported.
+ /**
+ * @return a {@link DataflowPagingQueryProvider} for all job executions with the provided
+ * where clause
+ * @throws UnsupportedOperationException until the Boot 3.x paging query provider is implemented.
+ */
+ private DataflowPagingQueryProvider getDataflowPagingQueryProvider(String whereClause) {
+ throw new UnsupportedOperationException("Need to create DataflowSqlPagingQueryProvider so that dataflow can call " +
+ "generateJumpToItemQuery");
+ }
+
/**
* @return a {@link PagingQueryProvider} with a where clause to narrow the query
* @throws Exception if page provider is not created.
@@ -293,6 +355,16 @@ private PagingQueryProvider getPagingQueryProvider(String fields, String fromCla
return factory.getObject();
}
+ //TODO: Boot3x followup - implement a DataflowSqlPagingQueryProvider so that generateJumpToItemQuery can be supported.
+ /**
+ * @return a {@link DataflowPagingQueryProvider} with a where clause to narrow the query
+ * @throws UnsupportedOperationException until the Boot 3.x paging query provider is implemented.
+ */
+ private DataflowPagingQueryProvider getDataflowPagingQueryProvider(String fields, String fromClause, String whereClause) {
+ throw new UnsupportedOperationException("Need to create DataflowSqlPagingQueryProvider so that dataflow can call " +
+ "generateJumpToItemQuery");
+ }
+
/**
* @see SearchableJobExecutionDao#countJobExecutions()
*/
@@ -339,7 +411,7 @@ public List<JobExecutionWithStepCount> getJobExecutionsWithStepCount(Date fromDa
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- byDateRangeWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
+ dataflowByDateRangeWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
fromDate, toDate);
return getJdbcTemplate().query(
byDateRangeWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count),
@@ -360,7 +432,7 @@ public List<JobExecutionWithStepCount> getJobExecutionsWithStepCountFilteredByJo
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- byJobInstanceIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
+ dataflowByJobInstanceIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
jobInstanceId);
return getJdbcTemplate().query(
byJobInstanceIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count),
@@ -381,7 +453,7 @@ public List<JobExecutionWithStepCount> getJobExecutionsWithStepCountFilteredByTa
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- byTaskExecutionIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
+ dataFlowByTaskExecutionIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
taskExecutionId);
return getJdbcTemplate().query(
byTaskExecutionIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count),
@@ -411,7 +483,7 @@ public List getJobExecutions(String jobName, BatchStatus status, i
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- byJobNameAndStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName,
+ dataflowByJobNameAndStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName,
status.name());
return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateRemainingPagesQuery(count),
new SearchableJobExecutionRowMapper(), jobName, status.name(), startAfterValue);
@@ -432,7 +504,7 @@ public List getJobExecutions(String jobName, int start, int count)
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- byJobNamePagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName);
+ dataflowByJobNamePagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName);
return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateRemainingPagesQuery(count),
new SearchableJobExecutionRowMapper(), jobName, startAfterValue);
}
@@ -449,7 +521,7 @@ public List getJobExecutions(BatchStatus status, int start, int co
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- byStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, status.name());
+ dataflowByStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, status.name());
return getJdbcTemplate().query(byStatusPagingQueryProvider.generateRemainingPagesQuery(count),
new SearchableJobExecutionRowMapper(), status.name(), startAfterValue);
}
@@ -469,7 +541,7 @@ public List getJobExecutionsWithStepCount(String jobN
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- byJobNameWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
+ dataflowByJobNameWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class,
jobName);
return getJdbcTemplate().query(byJobNameWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count),
new JobExecutionStepCountRowMapper(), jobName, startAfterValue);
@@ -490,7 +562,7 @@ public List getJobExecutions(int start, int count) {
}
try {
Long startAfterValue = getJdbcTemplate()
- .queryForObject(allExecutionsPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class);
+ .queryForObject(dataflowAllExecutionsPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class);
return getJdbcTemplate().query(allExecutionsPagingQueryProvider.generateRemainingPagesQuery(count),
new SearchableJobExecutionRowMapper(), startAfterValue);
}
@@ -507,7 +579,7 @@ public List getJobExecutionsWithStepCount(int start,
}
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- executionsWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class);
+ dataflowExecutionsWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class);
return getJdbcTemplate().query(
executionsWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count),
new JobExecutionStepCountRowMapper(), startAfterValue);
@@ -570,8 +642,9 @@ public JobExecutionWithStepCount mapRow(ResultSet rs, int rowNum) throws SQLExce
}
+ //TODO: Boot3x followup - need to handle LocalDateTime and possibly Integer
protected JobParameters getJobParametersBatch5(Long executionId) {
- Map<String, JobParameter> map = new HashMap<>();
+ Map<String, JobParameter<?>> map = new HashMap<>();
RowCallbackHandler handler = rs -> {
String parameterName = rs.getString("PARAMETER_NAME");
@@ -588,29 +661,29 @@ protected JobParameters getJobParametersBatch5(Long executionId) {
boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y");
if (typedValue instanceof String) {
- map.put(parameterName, new JobParameter((String) typedValue, identifying));
+ map.put(parameterName, new JobParameter(typedValue, String.class, identifying));
}
else if (typedValue instanceof Integer) {
- map.put(parameterName, new JobParameter(((Integer) typedValue).longValue(), identifying));
+ map.put(parameterName, new JobParameter(((Integer) typedValue).longValue(), Integer.class, identifying));
}
else if (typedValue instanceof Long) {
- map.put(parameterName, new JobParameter((Long) typedValue, identifying));
+ map.put(parameterName, new JobParameter(typedValue, Long.class, identifying));
}
else if (typedValue instanceof Float) {
- map.put(parameterName, new JobParameter(((Float) typedValue).doubleValue(), identifying));
+ map.put(parameterName, new JobParameter(((Float) typedValue).doubleValue(), Float.class, identifying));
}
else if (typedValue instanceof Double) {
- map.put(parameterName, new JobParameter((Double) typedValue, identifying));
+ map.put(parameterName, new JobParameter(typedValue, Double.class, identifying));
}
else if (typedValue instanceof Timestamp) {
- map.put(parameterName, new JobParameter(new Date(((Timestamp) typedValue).getTime()), identifying));
+ map.put(parameterName, new JobParameter(new Date(((Timestamp) typedValue).getTime()), Timestamp.class, identifying));
}
else if (typedValue instanceof Date) {
- map.put(parameterName, new JobParameter((Date) typedValue, identifying));
+ map.put(parameterName, new JobParameter(typedValue, Date.class, identifying));
}
else {
map.put(parameterName,
- new JobParameter(typedValue != null ? typedValue.toString() : "null", identifying));
+ new JobParameter(typedValue != null ? typedValue.toString() : "null", String.class, identifying));
}
};
@@ -639,12 +712,12 @@ JobExecution createJobExecutionFromResultSet(ResultSet rs, int rowNum) throws SQ
jobExecution = new JobExecution(jobInstance, jobParameters);
jobExecution.setId(id);
- jobExecution.setStartTime(rs.getTimestamp(2));
- jobExecution.setEndTime(rs.getTimestamp(3));
+ jobExecution.setStartTime(rs.getTimestamp(2).toLocalDateTime());
+ jobExecution.setEndTime(rs.getTimestamp(3).toLocalDateTime());
jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4)));
jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6)));
- jobExecution.setCreateTime(rs.getTimestamp(7));
- jobExecution.setLastUpdated(rs.getTimestamp(8));
+ jobExecution.setCreateTime(rs.getTimestamp(7).toLocalDateTime());
+ jobExecution.setLastUpdated(rs.getTimestamp(8).toLocalDateTime());
jobExecution.setVersion(rs.getInt(9));
return jobExecution;
}
@@ -669,20 +742,19 @@ public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException {
Long id = rs.getLong(1);
JobParameters jobParameters = getJobParameters(id);
JobExecution jobExecution;
- String jobConfigurationLocation = batchVersion.equals(BatchVersion.BATCH_4) ? rs.getString(10) : null;
if (jobInstance == null) {
- jobExecution = new JobExecution(id, jobParameters, jobConfigurationLocation);
+ jobExecution = new JobExecution(id, jobParameters);
}
else {
- jobExecution = new JobExecution(jobInstance, id, jobParameters, jobConfigurationLocation);
+ jobExecution = new JobExecution(jobInstance, id, jobParameters);
}
- jobExecution.setStartTime(rs.getTimestamp(2));
- jobExecution.setEndTime(rs.getTimestamp(3));
+ jobExecution.setStartTime(rs.getTimestamp(2).toLocalDateTime());
+ jobExecution.setEndTime(rs.getTimestamp(3).toLocalDateTime());
jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4)));
jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6)));
- jobExecution.setCreateTime(rs.getTimestamp(7));
- jobExecution.setLastUpdated(rs.getTimestamp(8));
+ jobExecution.setCreateTime(rs.getTimestamp(7).toLocalDateTime());
+ jobExecution.setLastUpdated(rs.getTimestamp(8).toLocalDateTime());
jobExecution.setVersion(rs.getInt(9));
return jobExecution;
}
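
A note on the Timestamp-to-LocalDateTime conversions introduced above: ResultSet.getTimestamp() returns null for columns such as END_TIME while an execution is still running, so chaining .toLocalDateTime() directly will throw a NullPointerException for in-flight executions. A null-safe helper along these lines (a sketch, not part of this change set) would preserve the old null-tolerant behavior of the setters:

    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Timestamp;
    import java.time.LocalDateTime;

    final class Timestamps {

        private Timestamps() {
        }

        // Returns null instead of throwing when the column is SQL NULL,
        // e.g. the END_TIME of a still-running job execution.
        static LocalDateTime toLocalDateTime(ResultSet rs, int column) throws SQLException {
            Timestamp timestamp = rs.getTimestamp(column);
            return timestamp != null ? timestamp.toLocalDateTime() : null;
        }
    }

With such a helper the mapper lines would read jobExecution.setEndTime(Timestamps.toLocalDateTime(rs, 3)) and so on.
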
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java
index b0638524c0..9a4a13ab91 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java
@@ -138,6 +138,7 @@ public Collection findStepExecutions(String jobName, String stepN
}
PagingQueryProvider queryProvider = getPagingQueryProvider(whereClause);
+ DataflowPagingQueryProvider dataflowQueryProvider = getDataflowPagingQueryProvider(whereClause);
List<StepExecution> stepExecutions;
if (start <= 0) {
@@ -147,7 +148,7 @@ public Collection findStepExecutions(String jobName, String stepN
else {
try {
Long startAfterValue = getJdbcTemplate().queryForObject(
- queryProvider.generateJumpToItemQuery(start, count), Long.class, jobName, stepName);
+ dataflowQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName, stepName);
stepExecutions = getJdbcTemplate().query(queryProvider.generateRemainingPagesQuery(count),
new StepExecutionRowMapper(), jobName, stepName, startAfterValue);
}
@@ -198,13 +199,24 @@ private PagingQueryProvider getPagingQueryProvider(String whereClause) {
}
}
+ //TODO: Boot3x followup: a {@link DataflowPagingQueryProvider} implementation is needed so that Data Flow can call generateJumpToItemQuery.
+ /**
+ * @return a {@link DataflowPagingQueryProvider} with a where clause to narrow the
+ * query
+ * @throws UnsupportedOperationException until the Boot 3 provider exists.
+ */
+ private DataflowPagingQueryProvider getDataflowPagingQueryProvider(String whereClause) {
+ throw new UnsupportedOperationException("Need to create DataflowSqlPagingQueryProvider so that dataflow can call " +
+ "generateJumpToItemQuery");
+ }
+
private static class StepExecutionRowMapper implements RowMapper<StepExecution> {
public StepExecution mapRow(ResultSet rs, int rowNum) throws SQLException {
StepExecution stepExecution = new StepExecution(rs.getString(2), null);
stepExecution.setId(rs.getLong(1));
- stepExecution.setStartTime(rs.getTimestamp(3));
- stepExecution.setEndTime(rs.getTimestamp(4));
+ stepExecution.setStartTime(rs.getTimestamp(3).toLocalDateTime());
+ stepExecution.setEndTime(rs.getTimestamp(4).toLocalDateTime());
stepExecution.setStatus(BatchStatus.valueOf(rs.getString(5)));
stepExecution.setCommitCount(rs.getInt(6));
stepExecution.setReadCount(rs.getInt(7));
@@ -215,7 +227,7 @@ public StepExecution mapRow(ResultSet rs, int rowNum) throws SQLException {
stepExecution.setWriteSkipCount(rs.getInt(13));
stepExecution.setProcessSkipCount(rs.getInt(14));
stepExecution.setRollbackCount(rs.getInt(15));
- stepExecution.setLastUpdated(rs.getTimestamp(16));
+ stepExecution.setLastUpdated(rs.getTimestamp(16).toLocalDateTime());
stepExecution.setVersion(rs.getInt(17));
return stepExecution;
}
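
The DataflowPagingQueryProvider type referenced throughout these DAOs is not itself part of this diff. Judging from the call sites, its minimal shape would be a single jump-to-item method; a sketch under that assumption (the real interface may well extend Spring Batch's PagingQueryProvider instead):

    package org.springframework.cloud.dataflow.server.batch;

    // Hypothetical shape inferred from the call sites in this diff.
    public interface DataflowPagingQueryProvider {

        // SQL that seeks directly to the item at itemIndex, mirroring the
        // generateJumpToItemQuery that Spring Batch 5 dropped from PagingQueryProvider.
        String generateJumpToItemQuery(int itemIndex, int pageSize);
    }

Note also that in findStepExecutions above the stubbed provider is obtained before the start <= 0 branch, so even first-page queries will hit the UnsupportedOperationException until the provider is implemented.
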
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobRestartRuntimeException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobRestartRuntimeException.java
new file mode 100644
index 0000000000..3752abdfe7
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobRestartRuntimeException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.batch;
+
+//TODO: Boot3x followup
+public class JobRestartRuntimeException extends RuntimeException {
+
+ public JobRestartRuntimeException(Long jobExecutionId, Exception cause) {
+ super(String.format("JobExecutionId '%d' was not restarted.", jobExecutionId), cause);
+ }
+
+ public JobRestartRuntimeException(Long jobExecutionId) {
+ super(String.format("JobExecutionId '%d' was not restarted.", jobExecutionId));
+ }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStartRuntimeException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStartRuntimeException.java
new file mode 100644
index 0000000000..27038fb2b2
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStartRuntimeException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.batch;
+
+//TODO: Boot3x followup
+public class JobStartRuntimeException extends RuntimeException {
+
+ public JobStartRuntimeException(String jobName, Exception cause) {
+ super(String.format("JobExecutionId '%s' was not started.", jobName), cause);
+ }
+
+ public JobStartRuntimeException(Long jobExecutionId) {
+ super(String.format("JobExecutionId '%s' was not started.", jobExecutionId));
+ }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStopException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStopException.java
new file mode 100644
index 0000000000..f06f732065
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStopException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.batch;
+
+//TODO: Boot3x followup
+public class JobStopException extends RuntimeException {
+
+ public JobStopException(Long jobExecutionId, Exception cause) {
+ super(String.format("JobExecutionId '%d' was not stopped.", jobExecutionId), cause);
+ }
+
+ public JobStopException(Long jobExecutionId) {
+ super(String.format("JobExecutionId '%d' was not stopped.", jobExecutionId));
+ }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java
index 9f36cff283..1ab36b883e 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java
@@ -15,8 +15,8 @@
*/
package org.springframework.cloud.dataflow.server.batch;
-import javax.batch.operations.JobOperator;
import java.io.IOException;
+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -36,13 +36,18 @@
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.launch.JobExecutionNotRunningException;
+import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException;
+import org.springframework.batch.core.launch.JobOperator;
import org.springframework.batch.core.launch.NoSuchJobException;
import org.springframework.batch.core.launch.NoSuchJobExecutionException;
import org.springframework.batch.core.launch.NoSuchJobInstanceException;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.batch.core.repository.dao.ExecutionContextDao;
import org.springframework.batch.core.step.NoSuchStepException;
import org.springframework.beans.factory.DisposableBean;
@@ -83,7 +88,7 @@ public class SimpleJobService implements JobService, DisposableBean {
private Collection<JobExecution> activeExecutions = Collections.synchronizedList(new ArrayList<>());
- private JobOperator jsrJobOperator;
+ private JobOperator jobOperator;
private final AggregateJobQueryDao aggregateJobQueryDao;
@@ -93,7 +98,7 @@ public class SimpleJobService implements JobService, DisposableBean {
public SimpleJobService(SearchableJobInstanceDao jobInstanceDao, SearchableJobExecutionDao jobExecutionDao,
SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository,
- ExecutionContextDao executionContextDao, JobOperator jsrJobOperator, AggregateJobQueryDao aggregateJobQueryDao,
+ ExecutionContextDao executionContextDao, JobOperator jobOperator, AggregateJobQueryDao aggregateJobQueryDao,
SchemaVersionTarget schemaVersionTarget) {
super();
this.jobInstanceDao = jobInstanceDao;
@@ -103,12 +108,8 @@ public SimpleJobService(SearchableJobInstanceDao jobInstanceDao, SearchableJobEx
this.executionContextDao = executionContextDao;
this.aggregateJobQueryDao = aggregateJobQueryDao;
this.schemaVersionTarget = schemaVersionTarget;
-
- if (jsrJobOperator == null) {
- logger.warn("No JobOperator compatible with JSR-352 was provided.");
- } else {
- this.jsrJobOperator = jsrJobOperator;
- }
+ Objects.requireNonNull(jobOperator, "jobOperator must not be null");
+ this.jobOperator = jobOperator;
}
/**
@@ -162,29 +163,29 @@ public JobExecution restart(Long jobExecutionId, JobParameters params) throws No
JobExecution jobExecution;
- if (jsrJobOperator != null) {
- if (params != null) {
- jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, params.toProperties()));
- } else {
- jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, new Properties()));
- }
- } else {
- throw new NoSuchJobException(String.format("Can't find job associated with job execution id %s to restart",
- String.valueOf(jobExecutionId)));
- }
+ try {
+ jobExecution = new JobExecution(jobOperator.restart(jobExecutionId.longValue()));
+ }
+ catch (Exception e) {
+ throw new JobRestartRuntimeException(jobExecutionId, e);
+ }
- return jobExecution;
+ return jobExecution;
}
@Override
public JobExecution launch(String jobName, JobParameters jobParameters) throws NoSuchJobException {
JobExecution jobExecution;
- if (jsrJobOperator != null) {
- jobExecution = new JobExecution(jsrJobOperator.start(jobName, jobParameters.toProperties()));
+ if (jobOperator != null) {
+ try {
+ jobExecution = new JobExecution(jobOperator.start(jobName, jobParameters.toProperties()));
+ } catch (JobInstanceAlreadyExistsException | JobParametersInvalidException e) {
+ throw new JobStartRuntimeException(jobName, e);
+ }
} else {
throw new NoSuchJobException(String.format("Unable to find job %s to launch",
- String.valueOf(jobName)));
+ jobName));
}
return jobExecution;
@@ -229,7 +230,7 @@ public Collection listJobs(int start, int count) {
return new ArrayList<>(jobNames).subList(start, start + count);
}
- private Collection getJsrJobNames() {
+ private Collection getJobNames() {
Set<String> jsr352JobNames = new HashSet<>();
try {
@@ -256,14 +257,18 @@ public int countJobs() {
@Override
public int stopAll() {
Collection<JobExecution> result = jobExecutionDao.getRunningJobExecutions();
- Collection<String> jsrJobNames = getJsrJobNames();
+ Collection<String> jobNames = getJobNames();
for (JobExecution jobExecution : result) {
- if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) {
- jsrJobOperator.stop(jobExecution.getId());
- } else {
- jobExecution.stop();
- jobRepository.update(jobExecution);
+ try {
+ if (jobNames.contains(jobExecution.getJobInstance().getJobName())) {
+ jobOperator.stop(jobExecution.getId());
+
+ } else {
+ throw new JobStopException(jobExecution.getId());
+ }
+ } catch (Exception e) {
+ throw new IllegalArgumentException("The following JobExecutionId was not found: " + jobExecution.getId(), e);
}
}
@@ -279,14 +284,13 @@ public JobExecution stop(Long jobExecutionId) throws NoSuchJobExecutionException
logger.info("Stopping job execution: " + jobExecution);
- Collection<String> jsrJobNames = getJsrJobNames();
+ Collection<String> jobNames = getJobNames();
- if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) {
- jsrJobOperator.stop(jobExecutionId);
+ if (jobNames.contains(jobExecution.getJobInstance().getJobName())) {
+ jobOperator.stop(jobExecutionId);
jobExecution = getJobExecution(jobExecutionId);
} else {
- jobExecution.stop();
- jobRepository.update(jobExecution);
+ throw new JobStopException(jobExecution.getId());
}
return jobExecution;
@@ -304,15 +308,15 @@ public JobExecution abandon(Long jobExecutionId) throws NoSuchJobExecutionExcept
logger.info("Aborting job execution: " + jobExecution);
- Collection<String> jsrJobNames = getJsrJobNames();
+ Collection<String> jobNames = getJobNames();
JobInstance jobInstance = jobExecution.getJobInstance();
- if (jsrJobOperator != null && jsrJobNames.contains(jobInstance.getJobName())) {
- jsrJobOperator.abandon(jobExecutionId);
+ if (jobOperator != null && jobNames.contains(jobInstance.getJobName())) {
+ jobOperator.abandon(jobExecutionId);
jobExecution = getJobExecution(jobExecutionId);
} else {
jobExecution.upgradeStatus(BatchStatus.ABANDONED);
- jobExecution.setEndTime(new Date());
+ jobExecution.setEndTime(LocalDateTime.now());
jobRepository.update(jobExecution);
}
@@ -483,7 +487,7 @@ private List getJobExecutions(String jobName, BatchStatus status,
}
private void checkJobExists(String jobName) throws NoSuchJobException {
- if (getJsrJobNames().stream().anyMatch(e -> e.contains(jobName)) ||
+ if (getJobNames().stream().anyMatch(e -> e.contains(jobName)) ||
jobInstanceDao.countJobInstances(jobName) > 0) {
return;
}
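
One caveat in the restart() rewrite above: JobOperator.restart(long) returns only the new execution id, so new JobExecution(jobOperator.restart(...)) yields a shell object carrying the id and nothing else. Callers that need a populated execution would have to re-read it; a sketch, assuming a JobExplorer is available alongside the JobOperator:

    // Sketch only: jobExplorer is an assumed collaborator, not a field shown in this diff.
    JobExecution restartFully(JobOperator jobOperator, JobExplorer jobExplorer, Long jobExecutionId) throws Exception {
        Long restartedId = jobOperator.restart(jobExecutionId);
        JobExecution restarted = jobExplorer.getJobExecution(restartedId);
        if (restarted == null) {
            throw new JobRestartRuntimeException(jobExecutionId);
        }
        return restarted;
    }
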
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java
index fcf885cdac..5cd2d704e3 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java
@@ -21,10 +21,10 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.configuration.support.MapJobRegistry;
import org.springframework.batch.core.explore.JobExplorer;
-import org.springframework.batch.core.jsr.JsrJobParametersConverter;
-import org.springframework.batch.core.jsr.launch.JsrJobOperator;
import org.springframework.batch.core.launch.JobLauncher;
+import org.springframework.batch.core.launch.support.SimpleJobOperator;
import org.springframework.batch.core.repository.ExecutionContextSerializer;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao;
@@ -323,13 +323,14 @@ protected AggregateJobQueryDao createAggregateJobQueryDao() throws Exception {
*/
@Override
public JobService getObject() throws Exception {
- JsrJobParametersConverter jobParametersConverter = new JsrJobParametersConverter(dataSource);
- jobParametersConverter.afterPropertiesSet();
- JsrJobOperator jsrJobOperator = new JsrJobOperator(jobExplorer, jobRepository, jobParametersConverter,
- transactionManager);
- jsrJobOperator.afterPropertiesSet();
+
+ SimpleJobOperator jobOperator = new SimpleJobOperator();
+ jobOperator.setJobExplorer(this.jobExplorer);
+ jobOperator.setJobLauncher(this.jobLauncher);
+ jobOperator.setJobRepository(this.jobRepository);
+ jobOperator.setJobRegistry(new MapJobRegistry());
return new SimpleJobService(createJobInstanceDao(), createJobExecutionDao(), createStepExecutionDao(),
- jobRepository, createExecutionContextDao(), jsrJobOperator, createAggregateJobQueryDao(), schemaVersionTarget);
+ jobRepository, createExecutionContextDao(), jobOperator, createAggregateJobQueryDao(), schemaVersionTarget);
}
/**
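
Note that the SimpleJobOperator above is wired with a brand-new, empty MapJobRegistry, so operations that resolve a Job by name (start and restart in particular) should fail with NoSuchJobException until jobs are registered; stop still marks the execution STOPPING but cannot signal stoppable tasklets. A sketch of registering a job, assuming a Job instance is at hand:

    import org.springframework.batch.core.Job;
    import org.springframework.batch.core.configuration.DuplicateJobException;
    import org.springframework.batch.core.configuration.support.MapJobRegistry;
    import org.springframework.batch.core.configuration.support.ReferenceJobFactory;

    final class JobRegistrations {

        // SimpleJobOperator.start/restart look jobs up by name in this registry.
        static void makeOperable(MapJobRegistry registry, Job job) throws DuplicateJobException {
            registry.register(new ReferenceJobFactory(job));
        }
    }
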
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java
index 97670ea45f..78033b243a 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java
@@ -58,6 +58,8 @@
import org.springframework.jdbc.support.MetaDataAccessException;
import org.springframework.transaction.PlatformTransactionManager;
+import java.sql.SQLException;
+
/**
* Configuration for DAO Containers use for multiple schema targets.
*
@@ -78,7 +80,8 @@ public DataflowJobExecutionDaoContainer dataflowJobExecutionDao(DataSource dataS
}
@Bean
- public DataflowTaskExecutionDaoContainer dataflowTaskExecutionDao(DataSource dataSource, SchemaService schemaService, TaskProperties taskProperties) {
+ public DataflowTaskExecutionDaoContainer dataflowTaskExecutionDao(DataSource dataSource, SchemaService schemaService,
+ TaskProperties taskProperties) {
DataflowTaskExecutionDaoContainer result = new DataflowTaskExecutionDaoContainer();
for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) {
TaskProperties properties = new TaskProperties();
@@ -91,7 +94,9 @@ public DataflowTaskExecutionDaoContainer dataflowTaskExecutionDao(DataSource dat
}
@Bean
- public DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDao(DataSource dataSource, SchemaService schemaService) {
+ public DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDao(DataSource dataSource,
+ SchemaService schemaService)
+ throws SQLException {
DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource);
String databaseType;
try {
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java
index 8ecd12990b..a27c36e492 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java
@@ -21,6 +21,7 @@
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configurers.HttpBasicConfigurer;
/**
* Setup Spring Security OAuth for the Rest Endpoints of Spring Cloud Data Flow.
@@ -34,8 +35,8 @@
public class DataflowOAuthSecurityConfiguration extends OAuthSecurityConfiguration {
@Override
- protected void configure(HttpSecurity http) throws Exception {
- super.configure(http);
+ protected HttpBasicConfigurer<HttpSecurity> configure(HttpSecurity http) throws Exception {
+ return super.configure(http);
}
}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java
index 8238604de1..e53a7f5e7e 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java
@@ -79,7 +79,7 @@ void init() {
@Bean
@ConditionalOnMissingBean
public WebSecurityCustomizer springDocWebSecurityCustomizer() {
- return (webSecurity -> webSecurity.ignoring().antMatchers(
+ return (webSecurity -> webSecurity.ignoring().requestMatchers(
"/swagger-ui/**",
getApiDocsPathContext() + "/**",
swaggerUiConfigProperties.getPath(),
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java
index bd25aa0fcd..efb333b3e7 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java
@@ -18,9 +18,16 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+
+import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
+import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
+import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
+import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory;
+import org.apache.hc.core5.http.config.Lookup;
+import org.apache.hc.core5.http.config.RegistryBuilder;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.ObjectProvider;
@@ -281,8 +288,11 @@ private String getChecksum(String defaultValue, String url,
String version) {
String result = defaultValue;
if (result == null && StringUtils.hasText(url)) {
+ Lookup<ConnectionSocketFactory> connSocketFactoryLookup = RegistryBuilder.<ConnectionSocketFactory>create()
+ .register("http", new PlainConnectionSocketFactory())
+ .build();
CloseableHttpClient httpClient = HttpClients.custom()
- .setSSLHostnameVerifier(new NoopHostnameVerifier())
+ .setConnectionManager(new BasicHttpClientConnectionManager(connSocketFactoryLookup))
.build();
HttpComponentsClientHttpRequestFactory requestFactory
= new HttpComponentsClientHttpRequestFactory();
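
The HttpClient 5 migration above registers only a plain "http" socket factory, which drops both TLS support and the old relaxed hostname verification for https checksum URLs. If the previous behavior is still wanted, an equivalent registry might look like the following sketch (using the client5 NoopHostnameVerifier and core5 SSLContexts, neither of which is imported by this diff):

    import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
    import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
    import org.apache.hc.client5.http.ssl.NoopHostnameVerifier;
    import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory;
    import org.apache.hc.core5.http.config.Lookup;
    import org.apache.hc.core5.http.config.RegistryBuilder;
    import org.apache.hc.core5.ssl.SSLContexts;

    final class ChecksumHttp {

        static Lookup<ConnectionSocketFactory> noVerifyLookup() {
            return RegistryBuilder.<ConnectionSocketFactory>create()
                    .register("http", PlainConnectionSocketFactory.getSocketFactory())
                    // Mirrors the removed setSSLHostnameVerifier(new NoopHostnameVerifier()) behavior.
                    .register("https", new SSLConnectionSocketFactory(
                            SSLContexts.createDefault(), NoopHostnameVerifier.INSTANCE))
                    .build();
        }
    }
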
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java
index a2bfa6c217..2ea291d9c4 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java
@@ -16,8 +16,10 @@
package org.springframework.cloud.dataflow.server.job.support;
+import java.time.Duration;
+import java.time.LocalDateTime;
+import java.time.temporal.ChronoUnit;
import java.util.Arrays;
-import java.util.Date;
import org.springframework.batch.core.StepExecution;
import org.springframework.cloud.dataflow.rest.job.CumulativeHistory;
@@ -51,18 +53,18 @@ public class StepExecutionProgressInfo {
public StepExecutionProgressInfo(StepExecution stepExecution, StepExecutionHistory stepExecutionHistory) {
this.stepExecution = stepExecution;
this.stepExecutionHistory = stepExecutionHistory;
- Date startTime = stepExecution.getStartTime();
- Date endTime = stepExecution.getEndTime();
+ LocalDateTime startTime = stepExecution.getStartTime();
+ LocalDateTime endTime = stepExecution.getEndTime();
if (endTime == null) {
- endTime = new Date();
+ endTime = LocalDateTime.now();
}
else {
isFinished = true;
}
if (startTime == null) {
- startTime = new Date();
+ startTime = LocalDateTime.now();
}
- duration = endTime.getTime() - startTime.getTime();
+ duration = Duration.between(startTime, endTime).toMillis();
percentageComplete = calculatePercentageComplete();
}
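
On the duration line above: Duration.get(TemporalUnit) supports only SECONDS and NANOS and throws UnsupportedTemporalTypeException for ChronoUnit.MILLIS, so toMillis() is the supported way to read milliseconds. A minimal illustration:

    import java.time.Duration;
    import java.time.LocalDateTime;

    LocalDateTime start = LocalDateTime.now();
    LocalDateTime end = start.plusSeconds(90);
    long millis = Duration.between(start, end).toMillis(); // 90000
    // Duration.between(start, end).get(ChronoUnit.MILLIS) would throw
    // UnsupportedTemporalTypeException at runtime.
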
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java
index 81a18384af..30e8e6d703 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java
@@ -17,7 +17,6 @@
import java.util.Date;
-import java.util.List;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobInstance;
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowSqlPagingQueryUtils.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowSqlPagingQueryUtils.java
new file mode 100644
index 0000000000..8abe396414
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowSqlPagingQueryUtils.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.repository;
+
+import org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider;
+
+
+//TODO: Boot3x followup
+public class DataflowSqlPagingQueryUtils {
+
+ public static String generateRowNumSqlQueryWithNesting(AbstractSqlPagingQueryProvider provider,
+ String innerSelectClause, String outerSelectClause,
+ boolean remainingPageQuery, String rowNumClause) {
+ throw new UnsupportedOperationException("Need to create DataflowSqlPagingQueryUtils so that dataflow can call " +
+ "generateRowNumSqlQueryWithNesting");
+ }
+}
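
For context, the Boot 2 era SqlPagingQueryUtils.generateRowNumSqlQueryWithNesting nested the paged select inside a ROW_NUMBER() window so a jump query could seek straight to an offset. The replacement utility will presumably produce something of this rough shape (an illustrative sketch only, not the exact SQL):

    static String jumpToItemSql(String selectClause, String fromClause, String sortKey, int offset) {
        // Number the rows by the sort key, then pick the single row at the offset.
        return "SELECT " + selectClause + " FROM ("
                + "SELECT " + selectClause + ", ROW_NUMBER() OVER (ORDER BY " + sortKey + " DESC) AS TMP_ROW_NUM"
                + " FROM " + fromClause
                + ") WHERE TMP_ROW_NUM = " + offset;
    }
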
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java
index 90eca34ebd..4ea57ab9c3 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java
@@ -27,7 +27,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Optional;
import java.util.TreeMap;
import java.util.stream.Collectors;
@@ -61,6 +60,7 @@
import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;
import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
import org.springframework.cloud.dataflow.schema.service.SchemaService;
+import org.springframework.cloud.dataflow.server.batch.DataflowPagingQueryProvider;
import org.springframework.cloud.dataflow.server.batch.JobService;
import org.springframework.cloud.dataflow.server.converter.DateToStringConverter;
import org.springframework.cloud.dataflow.server.converter.StringToDateConverter;
@@ -192,26 +192,49 @@ public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao {
private final PagingQueryProvider allExecutionsPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowAllExecutionsPagingQueryProvider;
+
private final PagingQueryProvider byJobNameAndStatusPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowByJobNameAndStatusPagingQueryProvider;
+
private final PagingQueryProvider byStatusPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowByStatusPagingQueryProvider;
+
private final PagingQueryProvider byJobNameWithStepCountPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowByJobNameWithStepCountPagingQueryProvider;
+
private final PagingQueryProvider executionsByDateRangeWithStepCountPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowExecutionsByDateRangeWithStepCountPagingQueryProvider;
+
private final PagingQueryProvider byJobInstanceIdWithStepCountPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowByJobInstanceIdWithStepCountPagingQueryProvider;
+
private final PagingQueryProvider byTaskExecutionIdWithStepCountPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowByTaskExecutionIdWithStepCountPagingQueryProvider;
+
+
private final PagingQueryProvider jobExecutionsPagingQueryProviderByName;
+ private final DataflowPagingQueryProvider dataflowJobExecutionsPagingQueryProviderByName;
+
private final PagingQueryProvider allExecutionsPagingQueryProviderNoStepCount;
+ private final DataflowPagingQueryProvider dataflowAllExecutionsPagingQueryProviderNoStepCount;
+
private final PagingQueryProvider byJobNamePagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowByJobNamePagingQueryProvider;
+
private final PagingQueryProvider byJobExecutionIdAndSchemaPagingQueryProvider;
+ private final DataflowPagingQueryProvider dataflowByJobExecutionIdAndSchemaPagingQueryProvider;
+
private final DataSource dataSource;
private final JdbcTemplate jdbcTemplate;
@@ -240,16 +263,38 @@ public JdbcAggregateJobQueryDao(
Jsr310Converters.getConvertersToRegister().forEach(conversionService::addConverter);
allExecutionsPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, null);
+ dataflowAllExecutionsPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, null);
+
+ dataflowExecutionsByDateRangeWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, DATE_RANGE_FILTER);
executionsByDateRangeWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, DATE_RANGE_FILTER);
+
allExecutionsPagingQueryProviderNoStepCount = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, null);
+ dataflowAllExecutionsPagingQueryProviderNoStepCount = getDataflowPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, null);
+
byStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, STATUS_FILTER);
+ dataflowByStatusPagingQueryProvider = getDataflowPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, STATUS_FILTER);
+
byJobNameAndStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_AND_STATUS_FILTER);
+ dataflowByJobNameAndStatusPagingQueryProvider = getDataflowPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_AND_STATUS_FILTER);
+
byJobNamePagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER);
+ dataflowByJobNamePagingQueryProvider = getDataflowPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER);
+
byJobNameWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER);
+ dataflowByJobNameWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER);
+
byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, JOB_INSTANCE_ID_FILTER);
+ dataflowByJobInstanceIdWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, JOB_INSTANCE_ID_FILTER);
+
byTaskExecutionIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, TASK_EXECUTION_ID_FILTER);
+ dataflowByTaskExecutionIdWithStepCountPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, TASK_EXECUTION_ID_FILTER);
+
jobExecutionsPagingQueryProviderByName = getPagingQueryProvider(FIND_JOBS_FIELDS, FIND_JOBS_FROM, FIND_JOBS_WHERE, Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING));
+ dataflowJobExecutionsPagingQueryProviderByName = getDataflowPagingQueryProvider(FIND_JOBS_FIELDS, FIND_JOBS_FROM, FIND_JOBS_WHERE, Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING));
+
byJobExecutionIdAndSchemaPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, FIND_BY_ID_SCHEMA);
+ dataflowByJobExecutionIdAndSchemaPagingQueryProvider = getDataflowPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, FIND_BY_ID_SCHEMA);
+
}
private boolean determineUseRowNumberOptimization(Environment environment) {
@@ -266,7 +311,6 @@ public Page listJobInstances(String jobName, Pageable pag
}
List<JobInstanceExecutions> taskJobInstancesForJobName = getTaskJobInstancesForJobName(jobName, pageable);
return new PageImpl<>(taskJobInstancesForJobName, pageable, total);
-
}
@Override
@@ -395,6 +439,7 @@ public TaskJobExecution getJobExecution(long jobExecutionId, String schemaTarget
private List<TaskJobExecution> getJobExecutionPage(long jobExecutionId, String schemaTarget) {
return queryForProvider(
+ dataflowByJobExecutionIdAndSchemaPagingQueryProvider,
byJobExecutionIdAndSchemaPagingQueryProvider,
new JobExecutionRowMapper(true),
0,
@@ -476,6 +521,7 @@ private List getJobExecutionsWithStepCountFilteredByJobInstanc
schemaTarget = SchemaVersionTarget.defaultTarget().getName();
}
return queryForProvider(
+ dataflowByJobInstanceIdWithStepCountPagingQueryProvider,
byJobInstanceIdWithStepCountPagingQueryProvider,
new JobExecutionRowMapper(true),
start,
@@ -495,6 +541,7 @@ private List getJobExecutionsWithStepCountFilteredByTaskExecut
schemaTarget = SchemaVersionTarget.defaultTarget().getName();
}
return queryForProvider(
+ dataflowByTaskExecutionIdWithStepCountPagingQueryProvider,
byTaskExecutionIdWithStepCountPagingQueryProvider,
new JobExecutionRowMapper(true),
start,
@@ -506,91 +553,67 @@ private List getJobExecutionsWithStepCountFilteredByTaskExecut
private List<TaskJobExecution> getJobExecutions(String jobName, BatchStatus status, int start, int count) throws NoSuchJobExecutionException {
if (StringUtils.hasText(jobName) && status != null) {
- return queryForProvider(byJobNameAndStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName, status.name());
+ return queryForProvider(dataflowByJobNameAndStatusPagingQueryProvider, byJobNameAndStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName, status.name());
} else if (status != null) {
- return queryForProvider(byStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, status.name());
+ return queryForProvider(dataflowByStatusPagingQueryProvider, byStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, status.name());
} else if (StringUtils.hasText(jobName)) {
- return queryForProvider(byJobNamePagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName);
+ return queryForProvider(dataflowByJobNamePagingQueryProvider, byJobNamePagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName);
}
- return queryForProvider(allExecutionsPagingQueryProviderNoStepCount, new JobExecutionRowMapper(false), start, count);
+ return queryForProvider(dataflowAllExecutionsPagingQueryProviderNoStepCount,
+ allExecutionsPagingQueryProviderNoStepCount, new JobExecutionRowMapper(false), start, count);
}
private List<TaskJobExecution> getJobExecutionsWithStepCount(String jobName, int start, int count) {
- return queryForProvider(byJobNameWithStepCountPagingQueryProvider, new JobExecutionRowMapper(true), start, count, jobName);
+ return queryForProvider(dataflowByJobNameWithStepCountPagingQueryProvider, byJobNameWithStepCountPagingQueryProvider, new JobExecutionRowMapper(true), start, count, jobName);
}
public List<TaskJobExecution> getJobExecutionsWithStepCount(int start, int count) {
- return queryForProvider(allExecutionsPagingQueryProvider, new JobExecutionRowMapper(true), start, count);
+ return queryForProvider(dataflowAllExecutionsPagingQueryProvider, allExecutionsPagingQueryProvider, new JobExecutionRowMapper(true), start, count);
}
+ //TODO: Boot3x followup: this was a brute-force conversion that removed the boot2 components.
protected JobParameters getJobParameters(Long executionId, String schemaTarget) {
- final Map<String, JobParameter> map = new HashMap<>();
+ final Map<String, JobParameter<?>> map = new HashMap<>();
final SchemaVersionTarget schemaVersionTarget = schemaService.getTarget(schemaTarget);
boolean boot2 = AppBootSchemaVersion.BOOT2 == schemaVersionTarget.getSchemaVersion();
RowCallbackHandler handler;
- if (boot2) {
- handler = rs -> {
- String keyName = rs.getString("KEY_NAME");
- JobParameter.ParameterType type = JobParameter.ParameterType.valueOf(rs.getString("TYPE_CD"));
- boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y");
- JobParameter value;
- switch (type) {
- case STRING:
- value = new JobParameter(rs.getString("STRING_VAL"), identifying);
- break;
- case LONG:
- long longValue = rs.getLong("LONG_VAL");
- value = new JobParameter(rs.wasNull() ? null : longValue, identifying);
- break;
- case DOUBLE:
- double doubleValue = rs.getDouble("DOUBLE_VAL");
- value = new JobParameter(rs.wasNull() ? null : doubleValue, identifying);
- break;
- case DATE:
- value = new JobParameter(rs.getTimestamp("DATE_VAL"), identifying);
- break;
- default:
- LOG.error("Unknown type:{} for {}", type, keyName);
- return;
- }
- map.put(keyName, value);
- };
- } else {
- handler = rs -> {
- String parameterName = rs.getString("PARAMETER_NAME");
- Class<?> parameterType = null;
- try {
- parameterType = Class.forName(rs.getString("PARAMETER_TYPE"));
- } catch (ClassNotFoundException e) {
- throw new RuntimeException(e);
- }
- String stringValue = rs.getString("PARAMETER_VALUE");
- boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y");
- Object typedValue = conversionService.convert(stringValue, parameterType);
- JobParameter value;
- if (typedValue instanceof String) {
- value = new JobParameter((String) typedValue, identifying);
- } else if (typedValue instanceof Date) {
- value = new JobParameter((Date) typedValue, identifying);
- } else if (typedValue instanceof Double) {
- value = new JobParameter((Double) typedValue, identifying);
- } else if (typedValue instanceof Long) {
- value = new JobParameter((Long) typedValue, identifying);
- } else if (typedValue instanceof Number) {
- value = new JobParameter(((Number) typedValue).doubleValue(), identifying);
- } else if (typedValue instanceof Instant) {
- value = new JobParameter(new Date(((Instant) typedValue).toEpochMilli()), identifying);
- } else {
-
- value = new JobParameter(typedValue != null ? typedValue.toString() : null, identifying);
- }
- map.put(parameterName, value);
- };
+ if (boot2) {
+ throw new UnsupportedOperationException("BOOT2 applications are no longer supported");
}
+ handler = rs -> {
+ String parameterName = rs.getString("PARAMETER_NAME");
+ Class<?> parameterType = null;
+ try {
+ parameterType = Class.forName(rs.getString("PARAMETER_TYPE"));
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ String stringValue = rs.getString("PARAMETER_VALUE");
+ boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y");
+ Object typedValue = conversionService.convert(stringValue, parameterType);
+ JobParameter value;
+ if (typedValue instanceof String) {
+ value = new JobParameter(typedValue, String.class, identifying);
+ } else if (typedValue instanceof Date) {
+ value = new JobParameter(typedValue, Date.class, identifying);
+ } else if (typedValue instanceof Double) {
+ value = new JobParameter(typedValue, Double.class, identifying);
+ } else if (typedValue instanceof Long) {
+ value = new JobParameter(typedValue, Long.class, identifying);
+ } else if (typedValue instanceof Number) {
+ value = new JobParameter(((Number) typedValue).doubleValue(), Number.class, identifying);
+ } else if (typedValue instanceof Instant) {
+ value = new JobParameter(new Date(((Instant) typedValue).toEpochMilli()), Instant.class, identifying);
+ } else {
+
+ value = new JobParameter(typedValue != null ? typedValue.toString() : null, String.class, identifying);
+ }
+ map.put(parameterName, value);
+ };
jdbcTemplate.query(
getQuery(
- boot2 ? FIND_PARAMS_FROM_ID2 : FIND_PARAMS_FROM_ID3,
+ FIND_PARAMS_FROM_ID3,
schemaVersionTarget.getBatchPrefix()
),
handler,
@@ -599,7 +622,7 @@ protected JobParameters getJobParameters(Long executionId, String schemaTarget)
return new JobParameters(map);
}
- private <T, P extends PagingQueryProvider, M extends RowMapper<T>> List<T> queryForProvider(P pagingQueryProvider, M mapper, int start, int count, Object... arguments) {
+ private <T, D extends DataflowPagingQueryProvider, P extends PagingQueryProvider, M extends RowMapper<T>> List<T> queryForProvider(D dataflowPagingQueryProvider, P pagingQueryProvider, M mapper, int start, int count, Object... arguments) {
if (start <= 0) {
String sql = pagingQueryProvider.generateFirstPageQuery(count);
if (LOG.isDebugEnabled()) {
@@ -608,7 +631,7 @@ private > List query
return jdbcTemplate.query(sql, mapper, arguments);
} else {
try {
- String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count);
+ String sqlJump = dataflowPagingQueryProvider.generateJumpToItemQuery(start, count);
if (LOG.isDebugEnabled()) {
LOG.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments));
}
@@ -627,7 +650,7 @@ private > List query
}
}
- private <T, P extends PagingQueryProvider, R extends ResultSetExtractor<List<T>>> List<T> queryForProvider(P pagingQueryProvider, R extractor, int start, int count, Object... arguments) {
+ private <T, P extends DataflowPagingQueryProvider, B extends PagingQueryProvider, R extends ResultSetExtractor<List<T>>> List<T> queryForProvider(P dataFlowPagingQueryProvider, B pagingQueryProvider, R extractor, int start, int count, Object... arguments) {
if (start <= 0) {
String sql = pagingQueryProvider.generateFirstPageQuery(count);
if (LOG.isDebugEnabled()) {
@@ -635,7 +658,7 @@ private >
}
return jdbcTemplate.query(sql, extractor, arguments);
} else {
- String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count);
+ String sqlJump = dataFlowPagingQueryProvider.generateJumpToItemQuery(start, count);
if (LOG.isDebugEnabled()) {
LOG.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments));
}
@@ -655,7 +678,7 @@ private List getTaskJobInstancesForJobName(String jobName
Assert.notNull(jobName, "jobName must not be null");
int start = getPageOffset(pageable);
int count = pageable.getPageSize();
- return queryForProvider(jobExecutionsPagingQueryProviderByName, new JobInstanceExecutionsExtractor(false), start, count, jobName);
+ return queryForProvider(dataflowJobExecutionsPagingQueryProviderByName, jobExecutionsPagingQueryProviderByName, new JobInstanceExecutionsExtractor(false), start, count, jobName);
}
private TaskJobExecution createJobExecutionFromResultSet(ResultSet rs, int row, boolean readStepCount) throws SQLException {
@@ -669,12 +692,12 @@ private TaskJobExecution createJobExecutionFromResultSet(ResultSet rs, int row,
jobExecution = new JobExecution(jobInstance, jobParameters);
jobExecution.setId(jobExecutionId);
- jobExecution.setStartTime(rs.getTimestamp("START_TIME"));
- jobExecution.setEndTime(rs.getTimestamp("END_TIME"));
+ jobExecution.setStartTime(rs.getTimestamp("START_TIME").toLocalDateTime());
+ jobExecution.setEndTime(rs.getTimestamp("END_TIME").toLocalDateTime());
jobExecution.setStatus(BatchStatus.valueOf(rs.getString("STATUS")));
jobExecution.setExitStatus(new ExitStatus(rs.getString("EXIT_CODE"), rs.getString("EXIT_MESSAGE")));
- jobExecution.setCreateTime(rs.getTimestamp("CREATE_TIME"));
- jobExecution.setLastUpdated(rs.getTimestamp("LAST_UPDATED"));
+ jobExecution.setCreateTime(rs.getTimestamp("CREATE_TIME").toLocalDateTime());
+ jobExecution.setLastUpdated(rs.getTimestamp("LAST_UPDATED").toLocalDateTime());
jobExecution.setVersion(rs.getInt("VERSION"));
return readStepCount ?
@@ -684,6 +707,7 @@ private TaskJobExecution createJobExecutionFromResultSet(ResultSet rs, int row,
private List<TaskJobExecution> getTaskJobExecutionsByDate(Date startDate, Date endDate, int start, int count) {
return queryForProvider(
+ dataflowExecutionsByDateRangeWithStepCountPagingQueryProvider,
executionsByDateRangeWithStepCountPagingQueryProvider,
new JobExecutionRowMapper(true),
start,
@@ -733,7 +757,7 @@ public List extractData(ResultSet rs) throws SQLException
List<TaskJobExecution> executions = taskJobExecutions.computeIfAbsent(id, k -> new ArrayList<>());
long jobExecutionId = rs.getLong("JOB_EXECUTION_ID");
JobParameters jobParameters = getJobParameters(jobExecutionId, schemaTarget);
- JobExecution jobExecution = new JobExecution(jobInstance, jobExecutionId, jobParameters, null);
+ JobExecution jobExecution = new JobExecution(jobInstance, jobExecutionId, jobParameters);
int stepCount = readStepCount ? rs.getInt("STEP_COUNT") : 0;
TaskJobExecution execution = new TaskJobExecution(taskId, jobExecution, true, stepCount, schemaTarget);
@@ -799,6 +823,15 @@ private PagingQueryProvider getPagingQueryProvider(String fromClause, String whe
return getPagingQueryProvider(null, fromClause, whereClause, Collections.emptyMap());
}
+ /**
+ * @return a {@link DataflowPagingQueryProvider} with a where clause to narrow the
+ * query
+ * @throws Exception if page provider is not created.
+ */
+ private DataflowPagingQueryProvider getDataflowPagingQueryProvider(String fromClause, String whereClause) throws Exception {
+ return getDataflowPagingQueryProvider(null, fromClause, whereClause, Collections.emptyMap());
+ }
+
private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause) throws Exception {
return getPagingQueryProvider(fields, fromClause, whereClause, Collections.emptyMap());
}
@@ -830,6 +863,15 @@ private PagingQueryProvider getPagingQueryProvider(String fields, String fromCla
return factory.getObject();
}
+ private DataflowPagingQueryProvider getDataflowPagingQueryProvider(String fields, String fromClause, String whereClause, Map<String, Order> sortKeys) throws Exception {
+ throw new UnsupportedOperationException("Need to create DataflowPagingQueryProvider so that dataflow can call " +
+ "generateRowNumSqlQueryWithNesting");
+ }
+
+ private DataflowPagingQueryProvider getDataflowPagingQueryProvider(String fields, String fromClause, String whereClause) throws Exception {
+ return getDataflowPagingQueryProvider(fields, fromClause, whereClause, Collections.emptyMap());
+ }
+
private boolean determineSupportsRowNumberFunction(DataSource dataSource) {
try {
return DatabaseType.supportsRowNumberFunction(dataSource);
@@ -910,14 +952,13 @@ public String generateRemainingPagesQuery(int pageSize) {
return generateRowNumSqlQuery(true, pageSize);
}
- @Override
public String generateJumpToItemQuery(int itemIndex, int pageSize) {
int page = itemIndex / pageSize;
int offset = (page * pageSize);
offset = (offset == 0) ? 1 : offset;
String sortKeyInnerSelect = this.getSortKeySelect(true);
String sortKeyOuterSelect = this.getSortKeySelect(false);
- return SqlPagingQueryUtils.generateRowNumSqlQueryWithNesting(this, sortKeyInnerSelect, sortKeyOuterSelect,
+ return DataflowSqlPagingQueryUtils.generateRowNumSqlQueryWithNesting(this, sortKeyInnerSelect, sortKeyOuterSelect,
false, "TMP_ROW_NUM = " + offset);
}
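For reference, a quick worked example of the jump-to-item arithmetic above:

```java
// itemIndex = 25, pageSize = 10:
int page = 25 / 10;                   // 2
int offset = page * 10;               // 20
offset = (offset == 0) ? 1 : offset;  // ROWNUM-style numbering is 1-based
// => the nested query filters on "TMP_ROW_NUM = 20"
```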
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobService.java
index 3b78df9637..92d4158971 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobService.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobService.java
@@ -243,6 +243,7 @@ public void restartJobExecution(long jobExecutionId, String schemaTarget) throws
}
+ //TODO: Boot3x followup: remove the boot2 check in this method once boot2 support code has been removed.
/**
* Apply identifying job parameters to arguments. There are cases (incrementers)
* that add parameters to a job and thus must be added for each restart so that the
@@ -254,10 +255,12 @@ public void restartJobExecution(long jobExecutionId, String schemaTarget) throws
* identifying job parameters not in the original task execution arguments.
*/
private List<String> restartExecutionArgs(List<String> taskExecutionArgs, JobParameters jobParameters, String schemaTarget) {
+ if (schemaTarget.equals(SchemaVersionTarget.createDefault(AppBootSchemaVersion.BOOT2).getName())) {
+ throw new UnsupportedOperationException("Boot 2 operations are not supported");
+ }
List result = new ArrayList<>(taskExecutionArgs);
- String boot3Version = SchemaVersionTarget.createDefault(AppBootSchemaVersion.BOOT3).getName();
String type;
- Map jobParametersMap = jobParameters.getParameters();
+ Map<String, JobParameter<?>> jobParametersMap = jobParameters.getParameters();
for (String key : jobParametersMap.keySet()) {
if (!key.startsWith("-")) {
boolean existsFlag = false;
@@ -268,27 +271,8 @@ private List<String> restartExecutionArgs(List<String> taskExecutionArgs, JobPar
}
}
if (!existsFlag) {
- String param;
- if (boot3Version.equals(schemaTarget)) {
- if (JobParameter.ParameterType.LONG.equals(jobParametersMap.get(key).getType())) {
- type = Long.class.getCanonicalName();
- } else if (JobParameter.ParameterType.DATE.equals(jobParametersMap.get(key).getType())) {
- type = Date.class.getCanonicalName();
- } else if (JobParameter.ParameterType.DOUBLE.equals(jobParametersMap.get(key).getType())) {
- type = Double.class.getCanonicalName();
- } else if (JobParameter.ParameterType.STRING.equals(jobParametersMap.get(key).getType())) {
- type = String.class.getCanonicalName();
- } else {
- throw new IllegalArgumentException("Unable to convert " +
- jobParametersMap.get(key).getType() + " to known type of JobParameters");
- }
- param = String.format("%s=%s,%s", key, jobParametersMap.get(key).getValue(), type);
- } else {
- param = String.format("%s(%s)=%s", key,
- jobParametersMap.get(key).getType().toString().toLowerCase(),
- jobParameters.getString(key));
- }
- result.add(param);
+ type = jobParametersMap.get(key).getType().getCanonicalName();
+ result.add(String.format("%s=%s,%s", key, jobParametersMap.get(key).getValue(), type));
}
}
}
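With Batch 5's class-typed JobParameter, the surviving branch always renders a restart argument as name=value,canonical-type. A small sketch of what an incrementer-added parameter ends up looking like:

```java
// Sketch: how a restart argument is rendered after this change.
JobParameters params = new JobParametersBuilder()
		.addLong("run.id", 2L)
		.toJobParameters();
JobParameter<?> p = params.getParameters().get("run.id");
String arg = String.format("%s=%s,%s", "run.id", p.getValue(), p.getType().getCanonicalName());
// arg -> "run.id=2,java.lang.Long"
```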
@@ -325,14 +309,14 @@ private List<TaskJobExecution> getTaskJobExecutionsWithStepCountForList(Collecti
return taskJobExecutions;
}
+ //TODO: Boot3x followup: brute-force replacement of the schema target check. Executions need to look only for boot3.
private TaskJobExecution getTaskJobExecutionWithStepCount(JobExecutionWithStepCount jobExecutionWithStepCount) {
- SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(jobExecutionWithStepCount.getJobConfigurationName(), taskDefinitionReader);
return new TaskJobExecution(
- getTaskExecutionId(jobExecutionWithStepCount, schemaVersionTarget.getName()),
+ getTaskExecutionId(jobExecutionWithStepCount, "boot3"),
jobExecutionWithStepCount,
isTaskDefined(jobExecutionWithStepCount),
jobExecutionWithStepCount.getStepCount(),
- schemaVersionTarget.getName()
+ "boot3"
);
}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java
index 2cf1ca1beb..717bbde4f1 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java
@@ -21,9 +21,17 @@
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
+import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
+import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
+import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
+import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory;
+import org.apache.hc.core5.http.config.Lookup;
+import org.apache.hc.core5.http.config.RegistryBuilder;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
+
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -111,19 +119,25 @@ public boolean isImagePresent() {
private RestTemplate configureRestTemplate() {
CloseableHttpClient httpClient
- = HttpClients.custom()
- .setSSLHostnameVerifier(new NoopHostnameVerifier())
+ = httpClientBuilder()
.build();
HttpComponentsClientHttpRequestFactory requestFactory
= new HttpComponentsClientHttpRequestFactory();
requestFactory.setHttpClient(httpClient);
requestFactory.setConnectTimeout(dockerValidatiorProperties.getConnectTimeoutInMillis());
- requestFactory.setReadTimeout(dockerValidatiorProperties.getReadTimeoutInMillis());
-
RestTemplate restTemplate = new RestTemplate(requestFactory);
return restTemplate;
}
+
+ private HttpClientBuilder httpClientBuilder() {
+ // Register http/s connection factories
+ Lookup<ConnectionSocketFactory> connSocketFactoryLookup = RegistryBuilder.<ConnectionSocketFactory>create()
+ .register("http", new PlainConnectionSocketFactory())
+ .build();
+ return HttpClients.custom()
+ .setConnectionManager(new BasicHttpClientConnectionManager(connSocketFactoryLookup));
+ }
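Note that httpClientBuilder() registers only the plain-HTTP socket factory, while the imports above retain SSLConnectionSocketFactory and NoopHostnameVerifier. If the intent is to preserve the no-op hostname verification that the removed setSSLHostnameVerifier call provided, the registry would presumably also need an https entry, roughly:

```java
// Assumption: https should keep the previous no-op hostname verification.
// SSLContexts here is org.apache.hc.core5.ssl.SSLContexts; httpclient5 also
// ships its own NoopHostnameVerifier in org.apache.hc.client5.http.ssl.
Lookup<ConnectionSocketFactory> connSocketFactoryLookup = RegistryBuilder.<ConnectionSocketFactory>create()
		.register("http", new PlainConnectionSocketFactory())
		.register("https", new SSLConnectionSocketFactory(
				SSLContexts.createDefault(), NoopHostnameVerifier.INSTANCE))
		.build();
```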
private DockerAuth getDockerAuth() {
DockerAuth result = null;
String userName = dockerValidatiorProperties.getUserName();
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java
index ed59c51111..96e28707da 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/batch/AbstractSimpleJobServiceTests.java
@@ -18,6 +18,8 @@
import java.sql.Timestamp;
import java.sql.Types;
+import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collection;
@@ -388,22 +390,22 @@ private List<JobExecution> createJobExecutions(String name, BatchStatus batchSta
DataFieldMaxValueIncrementer jobExecutionIncrementer = incrementerFactory.getIncrementer(databaseType.name(),
prefix + "JOB_EXECUTION_SEQ");
for (int i = 0; i < numberOfJobs; i++) {
- JobExecution jobExecution = new JobExecution(jobInstance, null, name);
+ JobExecution jobExecution = new JobExecution(jobInstance, new JobParameters());
result.add(jobExecution);
jobExecution.setId(jobExecutionIncrementer.nextLongValue());
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
if (!isRunning) {
- jobExecution.setEndTime(new Date());
+ jobExecution.setEndTime(LocalDateTime.now());
}
jobExecution.setVersion(3);
Timestamp startTime = jobExecution.getStartTime() == null ? null : Timestamp
- .valueOf(jobExecution.getStartTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
+ .valueOf(jobExecution.getStartTime().toInstant(OffsetDateTime.now().getOffset()).atZone(ZoneId.systemDefault()).toLocalDateTime());
Timestamp endTime = jobExecution.getEndTime() == null ? null : Timestamp
- .valueOf(jobExecution.getEndTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
+ .valueOf(jobExecution.getEndTime().toInstant(OffsetDateTime.now().getOffset()).atZone(ZoneId.systemDefault()).toLocalDateTime());
Timestamp createTime = jobExecution.getCreateTime() == null ? null : Timestamp
- .valueOf(jobExecution.getCreateTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
+ .valueOf(jobExecution.getCreateTime().toInstant(OffsetDateTime.now().getOffset()).atZone(ZoneId.systemDefault()).toLocalDateTime());
Timestamp lastUpdated = jobExecution.getLastUpdated() == null ? null : Timestamp
- .valueOf(jobExecution.getLastUpdated().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
+ .valueOf(jobExecution.getLastUpdated().toInstant(OffsetDateTime.now().getOffset()).atZone(ZoneId.systemDefault()).toLocalDateTime());
Object[] parameters = new Object[] { jobExecution.getId(), jobExecution.getJobId(), startTime, endTime,
batchStatus, jobExecution.getExitStatus().getExitCode(),
jobExecution.getExitStatus().getExitDescription(), jobExecution.getVersion(), createTime,
@@ -432,7 +434,7 @@ private void saveStepExecution(SchemaVersionTarget schemaVersionTarget, StepExec
stepExecution.setId(stepExecutionIncrementer.nextLongValue());
}
if (stepExecution.getStartTime() == null) {
- stepExecution.setStartTime(new Date());
+ stepExecution.setStartTime(LocalDateTime.now());
}
boolean isBatch4 = schemaVersionTarget.getSchemaVersion().equals(AppBootSchemaVersion.BOOT2);
Object[] parameters = isBatch4
@@ -441,7 +443,7 @@ private void saveStepExecution(SchemaVersionTarget schemaVersionTarget, StepExec
stepExecution.getStatus().toString(), stepExecution.getLastUpdated() }
: new Object[] { stepExecution.getId(), stepExecution.getStepName(), stepExecution.getJobExecutionId(),
stepExecution.getStartTime(), stepExecution.getEndTime(), stepExecution.getVersion(),
- stepExecution.getStatus().toString(), stepExecution.getLastUpdated(), new Date() };
+ stepExecution.getStatus().toString(), stepExecution.getLastUpdated(), LocalDateTime.now() };
String sql = getQuery(isBatch4 ? SAVE_STEP_EXECUTION_4 : SAVE_STEP_EXECUTION_5, schemaVersionTarget);
int[] argTypes4 = { Types.BIGINT, Types.VARCHAR, Types.BIGINT, Types.TIMESTAMP, Types.TIMESTAMP, Types.INTEGER,
Types.VARCHAR, Types.TIMESTAMP };
@@ -462,7 +464,7 @@ private TaskExecution createTaskExecution(AppBootSchemaVersion appBootSchemaVers
TaskRepository taskRepository = taskRepositoryContainer.get(appBootSchemaVersion);
TaskExecution taskExecution = new TaskExecution();
- taskExecution.setStartTime(new Date());
+ taskExecution.setStartTime(LocalDateTime.now());
taskExecution = taskRepository.createTaskExecution(taskExecution);
getJdbcTemplate().execute(
String.format(INSERT_TASK_BATCH, taskPrefix, taskExecution.getExecutionId(), jobExecution.getJobId()));
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfigurationTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfigurationTests.java
index 32db20661d..3943161849 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfigurationTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfigurationTests.java
@@ -126,7 +126,7 @@ private void verifyCustomizerHasIgnorePatterns(AssertableWebApplicationContext c
WebSecurity webSecurity = mock(WebSecurity.class, Answers.RETURNS_DEEP_STUBS);
customizer.customize(webSecurity);
ArgumentCaptor<String> antMatchersCaptor = ArgumentCaptor.forClass(String.class);
- verify(webSecurity.ignoring()).antMatchers(antMatchersCaptor.capture());
+ verify(webSecurity.ignoring()).requestMatchers(antMatchersCaptor.capture());
assertThat(antMatchersCaptor.getAllValues()).containsExactly(expected);
}
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/configuration/JobDependencies.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/configuration/JobDependencies.java
index 8f344f9738..896883caa0 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/configuration/JobDependencies.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/configuration/JobDependencies.java
@@ -25,7 +25,7 @@
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
-import org.springframework.boot.autoconfigure.batch.BatchDataSourceInitializer;
+import org.springframework.boot.autoconfigure.batch.BatchDataSourceScriptDatabaseInitializer;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.domain.EntityScan;
@@ -111,7 +111,6 @@
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.DefaultResourceLoader;
-import org.springframework.core.io.ResourceLoader;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
@@ -412,9 +411,9 @@ public PlatformTransactionManager transactionManager(EntityManagerFactory entity
@Bean
- public BatchDataSourceInitializer batchRepositoryInitializerForDefaultDBForServer(DataSource dataSource,
- ResourceLoader resourceLoader, BatchProperties properties) {
- return new BatchDataSourceInitializer(dataSource, resourceLoader, properties);
+ public BatchDataSourceScriptDatabaseInitializer batchRepositoryInitializerForDefaultDBForServer(DataSource dataSource,
+ BatchProperties properties) {
+ return new BatchDataSourceScriptDatabaseInitializer(dataSource, properties.getJdbc());
}
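BatchDataSourceScriptDatabaseInitializer takes the narrower BatchProperties.Jdbc instead of a ResourceLoader and resolves the schema scripts itself. A minimal standalone sketch, assuming a configured DataSource is in scope:

```java
// Sketch: programmatic use of the Boot 3 batch schema initializer.
BatchProperties properties = new BatchProperties();
properties.getJdbc().setInitializeSchema(DatabaseInitializationMode.ALWAYS);
BatchDataSourceScriptDatabaseInitializer initializer =
		new BatchDataSourceScriptDatabaseInitializer(dataSource, properties.getJdbc());
initializer.initializeDatabase();
```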
@Bean
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionControllerTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionControllerTests.java
index 71d3eee15b..3dd24e3005 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionControllerTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionControllerTests.java
@@ -16,7 +16,7 @@
package org.springframework.cloud.dataflow.server.controller;
-import java.util.Date;
+import java.time.LocalDateTime;
import org.hamcrest.Matchers;
import org.junit.Before;
@@ -27,7 +27,10 @@
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
@@ -99,7 +102,7 @@ public class JobExecutionControllerTests {
TaskDefinitionReader taskDefinitionReader;
@Before
- public void setupMockMVC() {
+ public void setupMockMVC() throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
this.mockMvc = JobExecutionUtils.createBaseJobExecutionMockMvc(
jobRepositoryContainer,
taskBatchDaoContainer,
@@ -342,14 +345,12 @@ public void testWildcardMatchSingleResult() throws Exception {
);
}
- private void createDirtyJob() {
+ private void createDirtyJob() throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
JobRepository jobRepository = jobRepositoryContainer.get(SchemaVersionTarget.defaultTarget().getName());
- JobInstance instance = jobRepository.createJobInstance(JobExecutionUtils.BASE_JOB_NAME + "_NO_TASK",
- new JobParameters());
JobExecution jobExecution = jobRepository.createJobExecution(
- instance, new JobParameters(), null);
+ JobExecutionUtils.BASE_JOB_NAME + "_NO_TASK", new JobParameters());
jobExecution.setStatus(BatchStatus.STOPPED);
- jobExecution.setEndTime(new Date());
+ jobExecution.setEndTime(LocalDateTime.now());
jobRepository.update(jobExecution);
}
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinControllerTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinControllerTests.java
index 2e2ee2fe03..3b62eafac8 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinControllerTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinControllerTests.java
@@ -24,6 +24,9 @@
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
@@ -89,7 +92,7 @@ public class JobExecutionThinControllerTests {
TaskDefinitionReader taskDefinitionReader;
@Before
- public void setupMockMVC() {
+ public void setupMockMVC() throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
this.mockMvc = JobExecutionUtils.createBaseJobExecutionMockMvc(
jobRepositoryContainer,
taskBatchDaoContainer,
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionUtils.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionUtils.java
index c1b018983c..1d24fae268 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionUtils.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobExecutionUtils.java
@@ -18,10 +18,10 @@
import java.text.ParseException;
import java.text.SimpleDateFormat;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
-import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.springframework.batch.core.BatchStatus;
@@ -31,7 +31,10 @@
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader;
import org.springframework.cloud.dataflow.rest.support.jackson.ISO8601DateFormatWithMilliSeconds;
@@ -90,7 +93,8 @@ static MockMvc createBaseJobExecutionMockMvc(
AggregateExecutionSupport aggregateExecutionSupport,
TaskDefinitionReader taskDefinitionReader,
WebApplicationContext wac,
- RequestMappingHandlerAdapter adapter) {
+ RequestMappingHandlerAdapter adapter)
+ throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
MockMvc mockMvc = MockMvcBuilders.webAppContextSetup(wac)
.defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
JobExecutionUtils.createSampleJob(jobRepositoryContainer, taskBatchDaoContainer, taskExecutionDaoContainer, aggregateExecutionSupport, JOB_NAME_ORIG, 1, taskDefinitionReader);
@@ -102,18 +106,14 @@ static MockMvc createBaseJobExecutionMockMvc(
JobExecutionUtils.createSampleJob(jobRepositoryContainer, taskBatchDaoContainer, taskExecutionDaoContainer, aggregateExecutionSupport, JOB_NAME_FAILED1, 1, BatchStatus.FAILED, taskDefinitionReader);
JobExecutionUtils.createSampleJob(jobRepositoryContainer, taskBatchDaoContainer, taskExecutionDaoContainer, aggregateExecutionSupport, JOB_NAME_FAILED2, 1, BatchStatus.FAILED, taskDefinitionReader);
- Map<String, JobParameter> jobParameterMap = new HashMap<>();
+ Map<String, JobParameter<?>> jobParameterMap = new HashMap<>();
String dateInString = "7-Jun-2023";
- SimpleDateFormat formatter = new SimpleDateFormat("dd-MMM-yyyy", Locale.ENGLISH);
- Date date = null;
- try {
- date = formatter.parse(dateInString);
- } catch (ParseException e) {
- throw new RuntimeException(e);
- }
- jobParameterMap.put("javaUtilDate", new JobParameter(date));
- JobExecutionUtils.createSampleJob(jobRepositoryContainer, taskBatchDaoContainer, taskExecutionDaoContainer, aggregateExecutionSupport, JOB_NAME_ORIG_WITH_PARAM, 1, BatchStatus.UNKNOWN, taskDefinitionReader, new JobParameters(jobParameterMap));
-
+ DateTimeFormatter formatter = DateTimeFormatter.ofPattern("d-MMM-yyyy", Locale.ENGLISH);
+ LocalDateTime date = LocalDate.parse(dateInString, formatter).atStartOfDay();
+ jobParameterMap.put("javaUtilDate", new JobParameter<>(date, LocalDateTime.class, false));
+ JobExecutionUtils.createSampleJob(jobRepositoryContainer, taskBatchDaoContainer, taskExecutionDaoContainer,
+ aggregateExecutionSupport, JOB_NAME_ORIG_WITH_PARAM, 1, BatchStatus.UNKNOWN, taskDefinitionReader,
+ new JobParameters(jobParameterMap));
for (HttpMessageConverter<?> converter : adapter.getMessageConverters()) {
if (converter instanceof MappingJackson2HttpMessageConverter) {
@@ -133,7 +133,7 @@ private static void createSampleJob(
String jobName,
int jobExecutionCount,
TaskDefinitionReader taskDefinitionReader
- ) {
+ ) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
createSampleJob(
jobRepositoryContainer,
taskBatchDaoContainer,
@@ -156,7 +156,7 @@ private static void createSampleJob(
int jobExecutionCount,
BatchStatus status,
TaskDefinitionReader taskDefinitionReader
- ) {
+ ) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
createSampleJob(
jobRepositoryContainer,
taskBatchDaoContainer,
@@ -180,24 +180,23 @@ private static void createSampleJob(
BatchStatus status,
TaskDefinitionReader taskDefinitionReader,
JobParameters jobParameters
- ) {
+ ) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(jobName, taskDefinitionReader);
JobRepository jobRepository = jobRepositoryContainer.get(schemaVersionTarget.getName());
- JobInstance instance = jobRepository.createJobInstance(jobName, jobParameters);
TaskExecutionDao taskExecutionDao = taskExecutionDaoContainer.get(schemaVersionTarget.getName());
- TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, new Date(), new ArrayList<>(), null);
+ TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, LocalDateTime.now(), new ArrayList<>(), null);
JobExecution jobExecution;
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(schemaVersionTarget.getName());
for (int i = 0; i < jobExecutionCount; i++) {
- jobExecution = jobRepository.createJobExecution(instance, jobParameters, null);
+ jobExecution = jobRepository.createJobExecution(jobName, jobParameters);
StepExecution stepExecution = new StepExecution("foo", jobExecution, 1L);
stepExecution.setId(null);
jobRepository.add(stepExecution);
taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
if (BatchStatus.STOPPED.equals(status)) {
- jobExecution.setEndTime(new Date());
+ jobExecution.setEndTime(LocalDateTime.now());
}
jobRepository.update(jobExecution);
}
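The same two changes repeat across the remaining test fixtures: JobRepository.createJobExecution(String, JobParameters) replaces the manual createJobInstance/createJobExecution(instance, params, configuration) pair, since Batch 5 creates the JobInstance on demand, and its three checked exceptions are what all the setup methods now declare. Condensed into one sketch:

```java
// Condensed form of the pattern used throughout these fixtures. Throws
// JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException
// and JobRestartException, hence the new throws clauses on the @Before methods.
JobExecution jobExecution = jobRepository.createJobExecution("sampleJob", new JobParameters());
jobExecution.setStatus(BatchStatus.STOPPED);
jobExecution.setEndTime(LocalDateTime.now());
jobRepository.update(jobExecution);
```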
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobInstanceControllerTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobInstanceControllerTests.java
index 97b7b0a90c..133a57e29a 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobInstanceControllerTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobInstanceControllerTests.java
@@ -16,8 +16,8 @@
package org.springframework.cloud.dataflow.server.controller;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
import org.junit.Before;
import org.junit.Test;
@@ -27,7 +27,10 @@
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
@@ -105,7 +108,7 @@ public class JobInstanceControllerTests {
TaskDefinitionReader taskDefinitionReader;
@Before
- public void setupMockMVC() {
+ public void setupMockMVC() throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
this.mockMvc = MockMvcBuilders.webAppContextSetup(wac)
.defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
if (!initialized) {
@@ -161,17 +164,17 @@ public void testGetInstanceByNameNotFound() throws Exception {
.andExpect(content().string(containsString("NoSuchJobException")));
}
- private void createSampleJob(String jobName, int jobExecutionCount) {
+ private void createSampleJob(String jobName, int jobExecutionCount)
+ throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
String defaultSchemaTarget = SchemaVersionTarget.defaultTarget().getName();
JobRepository jobRepository = jobRepositoryContainer.get(defaultSchemaTarget);
- JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
TaskExecutionDao dao = daoContainer.get(defaultSchemaTarget);
- TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<String>(), null);
+ TaskExecution taskExecution = dao.createTaskExecution(jobName, LocalDateTime.now(), new ArrayList<String>(), null);
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(defaultSchemaTarget);
for (int i = 0; i < jobExecutionCount; i++) {
- JobExecution jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(jobName, new JobParameters());
StepExecution stepExecution = new StepExecution("foo", jobExecution, 1L);
stepExecution.setId(null);
jobRepository.add(stepExecution);
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionControllerTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionControllerTests.java
index 31670d3de5..ae9225b055 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionControllerTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionControllerTests.java
@@ -16,10 +16,9 @@
package org.springframework.cloud.dataflow.server.controller;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
-import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -28,7 +27,10 @@
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
@@ -126,7 +128,7 @@ public class JobStepExecutionControllerTests {
TaskJobService taskJobService;
@Before
- public void setupMockMVC() {
+ public void setupMockMVC() throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
this.mockMvc = MockMvcBuilders.webAppContextSetup(wac)
.defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
if (!initialized) {
@@ -193,11 +195,11 @@ public void testSingleGetStepExecutionProgress() throws Exception {
.andExpect(jsonPath("$.stepExecutionHistory.commitCount.count", is(0)));
}
- private void createStepExecution(String jobName, String... stepNames) {
+ private void createStepExecution(String jobName, String... stepNames)
+ throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(jobName, taskDefinitionReader);
JobRepository jobRepository = jobRepositoryContainer.get(schemaVersionTarget.getName());
- JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
- JobExecution jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(jobName, new JobParameters());
for (String stepName : stepNames) {
StepExecution stepExecution = new StepExecution(stepName, jobExecution, 1L);
stepExecution.setId(null);
@@ -207,7 +209,7 @@ private void createStepExecution(String jobName, String... stepNames) {
jobRepository.add(stepExecution);
}
TaskExecutionDao dao = daoContainer.get(schemaVersionTarget.getName());
- TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<String>(), null);
+ TaskExecution taskExecution = dao.createTaskExecution(jobName, LocalDateTime.now(), new ArrayList<String>(), null);
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(schemaVersionTarget.getName());
taskBatchDao.saveRelationship(taskExecution, jobExecution);
}
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskControllerTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskControllerTests.java
index e31994837d..68c87b9c31 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskControllerTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskControllerTests.java
@@ -17,9 +17,9 @@
package org.springframework.cloud.dataflow.server.controller;
import java.net.URI;
+import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.Collections;
-import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -196,14 +196,14 @@ public void setupMockMVC() {
TaskExecution taskExecutionRunning = this.taskExecutionCreationService.createTaskExecution("myTask", null);
assertThat(taskExecutionRunning.getExecutionId()).isGreaterThan(0L);
- taskExecutionRunning.setStartTime(new Date());
+ taskExecutionRunning.setStartTime(LocalDateTime.now());
taskExecutionRunning.setArguments(SAMPLE_ARGUMENT_LIST);
SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget("myTask", taskDefinitionReader);
TaskExecutionDao taskExecutionDao = this.taskExecutionDaoContainer.get(schemaVersionTarget.getName());
taskExecutionDao.startTaskExecution(taskExecutionRunning.getExecutionId(),
taskExecutionRunning.getTaskName(),
- new Date(),
+ LocalDateTime.now(),
SAMPLE_ARGUMENT_LIST,
Long.toString(taskExecutionRunning.getExecutionId()));
taskExecutionRunning = taskExecutionDao.getTaskExecution(taskExecutionRunning.getExecutionId());
@@ -216,10 +216,10 @@ public void setupMockMVC() {
taskExecutionDao = this.taskExecutionDaoContainer.get(schemaVersionTarget2.getName());
taskExecutionDao.startTaskExecution(taskExecutionComplete.getExecutionId(),
taskExecutionComplete.getTaskName(),
- new Date(),
+ LocalDateTime.now(),
SAMPLE_ARGUMENT_LIST,
Long.toString(taskExecutionComplete.getExecutionId()));
- taskExecutionDao.completeTaskExecution(taskExecutionComplete.getExecutionId(), 0, new Date(), null);
+ taskExecutionDao.completeTaskExecution(taskExecutionComplete.getExecutionId(), 0, LocalDateTime.now(), null);
taskExecutionComplete = taskExecutionDao.getTaskExecution(taskExecutionComplete.getExecutionId());
dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget2.getName());
dataflowTaskExecutionMetadataDao.save(taskExecutionComplete, taskManifest);
@@ -754,8 +754,8 @@ public void testTaskLaunchNoManifest() throws Exception {
final TaskExecution taskExecutionComplete = this.taskExecutionCreationService.createTaskExecution("myTask3", null);
assertThat(taskExecutionComplete.getExecutionId()).isGreaterThan(0L);
taskExecutionComplete.setTaskName("myTask3");
- taskExecutionComplete.setStartTime(new Date());
- taskExecutionComplete.setEndTime(new Date());
+ taskExecutionComplete.setStartTime(LocalDateTime.now());
+ taskExecutionComplete.setEndTime(LocalDateTime.now());
taskExecutionComplete.setExitCode(0);
repository.save(new TaskDefinition("myTask3", "foo"));
this.registry.save("foo", ApplicationType.task,
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerCleanupAsyncTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerCleanupAsyncTests.java
index c43547d766..b70a56f895 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerCleanupAsyncTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerCleanupAsyncTests.java
@@ -18,9 +18,9 @@
import java.time.Duration;
import java.time.Instant;
+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
-import java.util.Date;
import java.util.List;
import org.awaitility.Awaitility;
@@ -141,8 +141,8 @@ private void setupTaskExecutions(String taskName, String taskExecutionId) {
List<String> taskArgs = new ArrayList<>();
taskArgs.add("foo=bar");
- TaskExecution taskExecution1 = taskExecutionDao.createTaskExecution(taskName, new Date(), taskArgs, taskExecutionId);
- taskExecutionDao.createTaskExecution(taskName, new Date(), taskArgs, taskExecutionId, taskExecution1.getExecutionId());
+ TaskExecution taskExecution1 = taskExecutionDao.createTaskExecution(taskName, LocalDateTime.now(), taskArgs, taskExecutionId);
+ taskExecutionDao.createTaskExecution(taskName, LocalDateTime.now(), taskArgs, taskExecutionId, taskExecution1.getExecutionId());
TaskDeployment taskDeployment = new TaskDeployment();
taskDeployment.setTaskDefinitionName(taskName);
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerTests.java
index 93e672b701..262546ff31 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionControllerTests.java
@@ -18,8 +18,8 @@
import java.net.URI;
import java.time.Instant;
+import java.time.LocalDateTime;
import java.util.Collections;
-import java.util.Date;
import java.util.LinkedList;
import java.util.List;
@@ -34,7 +34,10 @@
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
@@ -189,7 +192,7 @@ public class TaskExecutionControllerTests {
@BeforeEach
- public void setupMockMVC() {
+ public void setupMockMVC() throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
assertThat(this.launcherRepository.findByName("default")).isNull();
Launcher launcher = new Launcher("default", "local", taskLauncher);
launcherRepository.save(launcher);
@@ -221,16 +224,15 @@ public void setupMockMVC() {
SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(TASK_NAME_ORIG, taskDefinitionReader);
TaskExecutionDao dao = daoContainer.get(schemaVersionTarget.getName());
TaskExecution taskExecution1 =
- dao.createTaskExecution(TASK_NAME_ORIG, new Date(), SAMPLE_ARGUMENT_LIST, "foobar");
+ dao.createTaskExecution(TASK_NAME_ORIG, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST, "foobar");
- dao.createTaskExecution(TASK_NAME_ORIG, new Date(), SAMPLE_ARGUMENT_LIST, "foobar", taskExecution1.getExecutionId());
- dao.createTaskExecution(TASK_NAME_FOO, new Date(), SAMPLE_ARGUMENT_LIST, null);
- TaskExecution taskExecution = dao.createTaskExecution(TASK_NAME_FOOBAR, new Date(), SAMPLE_ARGUMENT_LIST,
+ dao.createTaskExecution(TASK_NAME_ORIG, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST, "foobar", taskExecution1.getExecutionId());
+ dao.createTaskExecution(TASK_NAME_FOO, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST, null);
+ TaskExecution taskExecution = dao.createTaskExecution(TASK_NAME_FOOBAR, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST,
null);
SchemaVersionTarget fooBarTarget = aggregateExecutionSupport.findSchemaVersionTarget(TASK_NAME_FOOBAR, taskDefinitionReader);
JobRepository jobRepository = jobRepositoryContainer.get(fooBarTarget.getName());
- JobInstance instance = jobRepository.createJobInstance(TASK_NAME_FOOBAR, new JobParameters());
- JobExecution jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(TASK_NAME_FOOBAR, new JobParameters());
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(fooBarTarget.getName());
taskBatchDao.saveRelationship(taskExecution, jobExecution);
TaskDeployment taskDeployment = new TaskDeployment();
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TasksInfoControllerTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TasksInfoControllerTests.java
index 7b08540458..9a00741a09 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TasksInfoControllerTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/TasksInfoControllerTests.java
@@ -17,8 +17,8 @@
package org.springframework.cloud.dataflow.server.controller;
import java.time.Instant;
+import java.time.LocalDateTime;
import java.util.Collections;
-import java.util.Date;
import java.util.LinkedList;
import java.util.List;
@@ -29,7 +29,10 @@
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
@@ -131,7 +134,7 @@ public class TasksInfoControllerTests {
TaskDefinitionReader taskDefinitionReader;
@Before
- public void setupMockMVC() {
+ public void setupMockMVC() throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
assertThat(this.launcherRepository.findByName("default")).isNull();
Launcher launcher = new Launcher("default", "local", taskLauncher);
launcherRepository.save(launcher);
@@ -165,15 +168,14 @@ public void setupMockMVC() {
TaskExecutionDao dao = daoContainer.get(target.getName());
TaskExecution taskExecution1 =
- dao.createTaskExecution(TASK_NAME_ORIG, new Date(), SAMPLE_ARGUMENT_LIST, "foobar");
+ dao.createTaskExecution(TASK_NAME_ORIG, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST, "foobar");
assertThat(taskExecution1.getExecutionId()).isGreaterThan(0L);
- dao.createTaskExecution(TASK_NAME_ORIG, new Date(), SAMPLE_ARGUMENT_LIST, "foobar", taskExecution1.getExecutionId());
- dao.createTaskExecution(TASK_NAME_FOO, new Date(), SAMPLE_ARGUMENT_LIST, null);
- TaskExecution taskExecution = dao.createTaskExecution(TASK_NAME_FOOBAR, new Date(), SAMPLE_ARGUMENT_LIST,
+ dao.createTaskExecution(TASK_NAME_ORIG, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST, "foobar", taskExecution1.getExecutionId());
+ dao.createTaskExecution(TASK_NAME_FOO, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST, null);
+ TaskExecution taskExecution = dao.createTaskExecution(TASK_NAME_FOOBAR, LocalDateTime.now(), SAMPLE_ARGUMENT_LIST,
null);
JobRepository jobRepository = jobRepositoryContainer.get(target.getName());
- JobInstance instance = jobRepository.createJobInstance(TASK_NAME_FOOBAR, new JobParameters());
- JobExecution jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(TASK_NAME_FOOBAR, new JobParameters());
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(target.getName());
taskBatchDao.saveRelationship(taskExecution, jobExecution);
TaskDeployment taskDeployment = new TaskDeployment();
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/SchemaGenerationTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/SchemaGenerationTests.java
index 1b68a1aaaa..ef8d2d7a03 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/SchemaGenerationTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/repository/SchemaGenerationTests.java
@@ -25,7 +25,9 @@
import jakarta.persistence.spi.PersistenceUnitInfo;
import org.hibernate.HibernateException;
+
import org.hibernate.boot.MetadataSources;
+import org.hibernate.boot.model.naming.CamelCaseToUnderscoresNamingStrategy;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;
@@ -39,7 +41,6 @@
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.boot.orm.jpa.hibernate.SpringImplicitNamingStrategy;
-import org.springframework.boot.orm.jpa.hibernate.SpringPhysicalNamingStrategy;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
@@ -102,7 +103,7 @@ private void generateDdlFiles(String dialect, File tempDir, PersistenceUnitInfo
final MetadataSources metadata = new MetadataSources(
new StandardServiceRegistryBuilder()
.applySetting("hibernate.dialect", "org.hibernate.dialect." + dialect + "Dialect")
- .applySetting("hibernate.physical_naming_strategy", SpringPhysicalNamingStrategy.class.getName())
+ .applySetting("hibernate.physical_naming_strategy", CamelCaseToUnderscoresNamingStrategy.class.getName())
.applySetting("hibernate.implicit_naming_strategy", SpringImplicitNamingStrategy.class.getName())
.build());
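SpringPhysicalNamingStrategy was removed in Boot 3; Hibernate 6's built-in CamelCaseToUnderscoresNamingStrategy provides the same camelCase-to-snake_case physical naming, which is why it can be swapped in here. For comparison, the equivalent settings expressed as plain JPA properties (a sketch):

```java
// Equivalent Hibernate settings expressed as JPA properties; Boot 3 applies
// the camel-case-to-underscores strategy by default at runtime.
Map<String, String> jpaProperties = Map.of(
		"hibernate.physical_naming_strategy", CamelCaseToUnderscoresNamingStrategy.class.getName(),
		"hibernate.implicit_naming_strategy", SpringImplicitNamingStrategy.class.getName());
```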
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteServiceTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteServiceTests.java
index 0de8f872f4..f8cbc0d8d2 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteServiceTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteServiceTests.java
@@ -17,10 +17,10 @@
package org.springframework.cloud.dataflow.server.service.impl;
import javax.sql.DataSource;
+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.Date;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
@@ -176,9 +176,9 @@ private void createTaskExecutions(int numberOfExecutions) throws Exception{
TaskRepository taskRepository = this.taskRepositoryContainer.get(schemaVersionTarget.getName());
for (int i = 1; i <= numberOfExecutions; i++) {
TaskExecution taskExecution = taskRepository.createTaskExecution(new TaskExecution(i, 0, TASK_NAME_ORIG,
- new Date(), new Date(), "", args, "", null,
+ LocalDateTime.now(), LocalDateTime.now(), "", args, "", null,
null));
- taskRepository.completeTaskExecution(taskExecution.getExecutionId(), 0, new Date(), "complete");
+ taskRepository.completeTaskExecution(taskExecution.getExecutionId(), 0, LocalDateTime.now(), "complete");
JobExecution jobExecution = this.jobLauncherTestUtils.launchJob();
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(SchemaVersionTarget.defaultTarget().getName());
taskBatchDao.saveRelationship(taskExecution, jobExecution);
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionServiceTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionServiceTests.java
index f8218ca5ee..e2b970f399 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionServiceTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionServiceTests.java
@@ -19,9 +19,9 @@
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
-import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
@@ -250,7 +250,7 @@ public void testFailedFirstLaunch() throws Exception {
this.launcherRepository.save(new Launcher(TaskPlatformFactory.CLOUDFOUNDRY_PLATFORM_TYPE, TaskPlatformFactory.CLOUDFOUNDRY_PLATFORM_TYPE, taskLauncher));
initializeSuccessfulRegistry(appRegistry);
SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(TASK_NAME_ORIG, taskDefinitionReader);
- TaskExecution taskExecution = new TaskExecution(1, 0, TASK_NAME_ORIG, new Date(), new Date(), "", Collections.emptyList(), "", null, null);
+ TaskExecution taskExecution = new TaskExecution(1, 0, TASK_NAME_ORIG, LocalDateTime.now(), LocalDateTime.now(), "", Collections.emptyList(), "", null, null);
TaskRepository taskRepository = taskRepositoryContainer.get(schemaVersionTarget.getName());
taskRepository.createTaskExecution(taskExecution);
TaskManifest taskManifest = new TaskManifest();
@@ -395,8 +395,8 @@ private void setupUpgradeDueToResourceChange() throws IOException {
manifest.setTaskDeploymentRequest(request);
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
- taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, new Date(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
+ taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, LocalDateTime.now(), null);
when(taskLauncher.launch(any())).thenReturn("0");
@@ -420,7 +420,7 @@ public void testRestoreAppPropertiesV2() throws IOException {
LaunchResponse launchResponse = this.taskExecutionService.executeTask(TASK_NAME_ORIG, properties, new LinkedList<>());
long firstTaskExecutionId = launchResponse.getExecutionId();
TaskRepository taskRepository = this.taskRepositoryContainer.get(launchResponse.getSchemaTarget());
- taskRepository.completeTaskExecution(firstTaskExecutionId, 0, new Date(), "all done");
+ taskRepository.completeTaskExecution(firstTaskExecutionId, 0, LocalDateTime.now(), "all done");
this.taskExecutionService.executeTask(TASK_NAME_ORIG, Collections.emptyMap(), new LinkedList<>());
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(launchResponse.getSchemaTarget());
TaskManifest lastManifest = dataflowTaskExecutionMetadataDao.getLatestManifest(TASK_NAME_ORIG);
@@ -446,7 +446,7 @@ public void testSavesRequestedVersionNoLabel() throws IOException {
LaunchResponse launchResponse = this.taskExecutionService.executeTask("t1", properties, new LinkedList<>());
long firstTaskExecutionId = launchResponse.getExecutionId();
TaskRepository taskRepository = this.taskRepositoryContainer.get(launchResponse.getSchemaTarget());
- taskRepository.completeTaskExecution(firstTaskExecutionId, 0, new Date(), "all done");
+ taskRepository.completeTaskExecution(firstTaskExecutionId, 0, LocalDateTime.now(), "all done");
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(launchResponse.getSchemaTarget());
TaskManifest lastManifest = dataflowTaskExecutionMetadataDao.getLatestManifest("t1");
@@ -471,7 +471,7 @@ public void testRestoresNonDefaultVersion() throws IOException {
LaunchResponse launchResponse = this.taskExecutionService.executeTask("t1", properties, new LinkedList<>());
long firstTaskExecutionId = launchResponse.getExecutionId();
TaskRepository taskRepository = this.taskRepositoryContainer.get(launchResponse.getSchemaTarget());
- taskRepository.completeTaskExecution(firstTaskExecutionId, 0, new Date(), "all done");
+ taskRepository.completeTaskExecution(firstTaskExecutionId, 0, LocalDateTime.now(), "all done");
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(launchResponse.getSchemaTarget());
TaskManifest lastManifest = dataflowTaskExecutionMetadataDao.getLatestManifest("t1");
@@ -485,7 +485,7 @@ public void testRestoresNonDefaultVersion() throws IOException {
long secondTaskExecutionId = launchResponse2.getExecutionId();
taskRepository = taskRepositoryContainer.get(launchResponse2.getSchemaTarget());
- taskRepository.completeTaskExecution(secondTaskExecutionId, 0, new Date(), "all done");
+ taskRepository.completeTaskExecution(secondTaskExecutionId, 0, LocalDateTime.now(), "all done");
dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(launchResponse2.getSchemaTarget());
lastManifest = dataflowTaskExecutionMetadataDao.getLatestManifest("t1");
// without passing version, we should not get back to default app, in this case foo-task100
@@ -511,7 +511,7 @@ public void testSavesRequestedVersionLabel() throws IOException {
long firstTaskExecutionId = launchResponse.getExecutionId();
SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget("t2", taskDefinitionReader);
TaskRepository taskRepository = this.taskRepositoryContainer.get(schemaVersionTarget.getName());
- taskRepository.completeTaskExecution(firstTaskExecutionId, 0, new Date(), "all done");
+ taskRepository.completeTaskExecution(firstTaskExecutionId, 0, LocalDateTime.now(), "all done");
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
TaskManifest lastManifest = dataflowTaskExecutionMetadataDao.getLatestManifest("t2");
@@ -536,7 +536,7 @@ public void testRestoreDeployerPropertiesV2() throws IOException {
LaunchResponse launchResponse = this.taskExecutionService.executeTask(TASK_NAME_ORIG, properties, new LinkedList<>());
long firstTaskExecutionId = launchResponse.getExecutionId();
TaskRepository taskRepository = this.taskRepositoryContainer.get(launchResponse.getSchemaTarget());
- taskRepository.completeTaskExecution(firstTaskExecutionId, 0, new Date(), "all done");
+ taskRepository.completeTaskExecution(firstTaskExecutionId, 0, LocalDateTime.now(), "all done");
this.taskExecutionService.executeTask(TASK_NAME_ORIG, Collections.emptyMap(), new LinkedList<>());
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(launchResponse.getSchemaTarget());
TaskManifest lastManifest = dataflowTaskExecutionMetadataDao.getLatestManifest(TASK_NAME_ORIG);
@@ -575,7 +575,7 @@ public void testUpgradeDueToDeploymentPropsChangeForCloudFoundryFailsWhenAlready
manifest.setTaskDeploymentRequest(request);
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
taskRepository.updateExternalExecutionId(myTask.getExecutionId(), "abc");
when(this.taskLauncher.launch(any())).thenReturn("abc");
when(this.taskLauncher.status("abc")).thenReturn(new TaskStatus("abc", LaunchState.running, new HashMap<>()));
@@ -602,7 +602,7 @@ public void testUpgradeDueToDeploymentPropsChangeForCloudFoundrySucceedsIfNotRea
manifest.setTaskDeploymentRequest(request);
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
taskRepository.updateExternalExecutionId(myTask.getExecutionId(), "abc");
when(this.taskLauncher.launch(any())).thenReturn("abc");
when(this.taskLauncher.status("abc")).thenReturn(new TaskStatus("abc", LaunchState.failed, new HashMap<>()));
@@ -627,8 +627,8 @@ private void setupUpgradeDueToDeploymentPropsChangeForCloudFoundry() throws IOEx
manifest.setTaskDeploymentRequest(request);
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
- taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, new Date(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
+ taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, LocalDateTime.now(), null);
taskRepository.updateExternalExecutionId(myTask.getExecutionId(), "0");
initializeSuccessfulRegistry(appRegistry);
@@ -688,8 +688,8 @@ private void setupUpgradeForCommandLineArgsChange() throws IOException {
manifest.setTaskDeploymentRequest(request);
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
- taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, new Date(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
+ taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, LocalDateTime.now(), null);
initializeSuccessfulRegistry(appRegistry);
@@ -725,8 +725,8 @@ private void setupCommandLineArgAppPrefixes() throws IOException {
manifest.setTaskDeploymentRequest(request);
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
- taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, new Date(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
+ taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, LocalDateTime.now(), null);
initializeSuccessfulRegistry(appRegistry);
@@ -757,8 +757,8 @@ private void setupUpgradeForAppPropsChange() throws IOException {
manifest.setTaskDeploymentRequest(request);
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
- taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, new Date(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
+ taskRepository.completeTaskExecution(myTask.getExecutionId(), 0, LocalDateTime.now(), null);
initializeSuccessfulRegistry(appRegistry);
@@ -796,7 +796,7 @@ public void testUpgradeFailureTaskCurrentlyRunning() throws MalformedURLExceptio
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = this.dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName());
dataflowTaskExecutionMetadataDao.save(myTask, manifest);
- taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, new Date(), new ArrayList<>(), null);
+ taskRepository.startTaskExecution(myTask.getExecutionId(), TASK_NAME_ORIG, LocalDateTime.now(), new ArrayList<>(), null);
taskRepository.updateExternalExecutionId(myTask.getExecutionId(), "abc");
when(this.taskLauncher.launch(any())).thenReturn("abc");
when(this.taskLauncher.status("abc")).thenReturn(new TaskStatus("abc", LaunchState.running, new HashMap<>()));
@@ -928,7 +928,7 @@ public void executeStopTaskTestForChildApp(CapturedOutput outputCapture) {
TaskRepository taskRepository = this.taskRepositoryContainer.get(schemaVersionTarget.getName());
LaunchResponse launchResponse = this.taskExecutionService.executeTask(TASK_NAME_ORIG, new HashMap<>(), new LinkedList<>());
assertThat(launchResponse.getExecutionId()).isEqualTo(1L);
- TaskExecution taskExecution = new TaskExecution(2L, 0, "childTask", new Date(), new Date(), "", Collections.emptyList(), "", "1234A", 1L);
+ TaskExecution taskExecution = new TaskExecution(2L, 0, "childTask", LocalDateTime.now(), LocalDateTime.now(), "", Collections.emptyList(), "", "1234A", 1L);
taskRepository.createTaskExecution(taskExecution);
Set<Long> executionIds = new HashSet<>(1);
executionIds.add(2L);
@@ -945,7 +945,7 @@ public void executeStopTaskTestAppNoPlatform() {
LaunchResponse launchResponse = this.taskExecutionService.executeTask(TASK_NAME_ORIG, new HashMap<>(), new LinkedList<>());
assertThat(launchResponse.getExecutionId()).isEqualTo(1L);
- TaskExecution taskExecution = new TaskExecution(2L, 0, "childTask", new Date(), new Date(), "", Collections.emptyList(), "", "1234A", null);
+ TaskExecution taskExecution = new TaskExecution(2L, 0, "childTask", LocalDateTime.now(), LocalDateTime.now(), "", Collections.emptyList(), "", "1234A", null);
TaskRepository taskRepository = taskRepositoryContainer.get(launchResponse.getSchemaTarget());
taskRepository.createTaskExecution(taskExecution);
Set<Long> executionIds = new HashSet<>(1);
@@ -981,7 +981,7 @@ public void executeStopTaskWithNoChildExternalIdTest() {
TaskRepository taskRepository = this.taskRepositoryContainer.get(launchResponse.getSchemaTarget());
TaskExecution taskExecution = taskRepository.createTaskExecution();
- taskRepository.startTaskExecution(taskExecution.getExecutionId(), "invalidChildTaskExecution", new Date(), Collections.emptyList(), null, 1L);
+ taskRepository.startTaskExecution(taskExecution.getExecutionId(), "invalidChildTaskExecution", LocalDateTime.now(), Collections.emptyList(), null, 1L);
validateFailedTaskStop(2, launchResponse.getSchemaTarget());
}
@@ -1086,7 +1086,7 @@ public void getCFTaskLogByTaskIdOtherThanLatest() {
taskDeployment.setTaskDefinitionName(taskName);
this.taskDeploymentRepository.save(taskDeployment);
TaskExecution taskExecution = new TaskExecution();
- taskExecution.setStartTime(new Date());
+ taskExecution.setStartTime(LocalDateTime.now());
taskExecution.setTaskName(taskName);
taskExecution.setExternalExecutionId("12346");
SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader);
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobServiceTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobServiceTests.java
index f2731c5b1e..88e6248d76 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobServiceTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskJobServiceTests.java
@@ -20,6 +20,7 @@
import java.net.MalformedURLException;
import java.net.URI;
import java.sql.Types;
+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
@@ -39,7 +40,10 @@
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.batch.BatchProperties;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
@@ -149,8 +153,8 @@ public class DefaultTaskJobServiceTests {
@Before
public void setup() {
- Map<String, JobParameter> jobParameterMap = new HashMap<>();
- jobParameterMap.put("identifying.param", new JobParameter("testparam"));
+ Map<String, JobParameter<?>> jobParameterMap = new HashMap<>();
+ jobParameterMap.put("identifying.param", new JobParameter("testparam", String.class));
this.jobParameters = new JobParameters(jobParameterMap);
this.jdbcTemplate = new JdbcTemplate(this.dataSource);
@@ -201,7 +205,8 @@ public void testRestartBoot3() throws Exception {
}
@Test
- public void testRestartNoPlatform() {
+ public void testRestartNoPlatform()
+ throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
createBaseLaunchers();
initializeJobs(false);
Exception exception = assertThrows(IllegalStateException.class, () -> {
@@ -222,12 +227,14 @@ public void testRestartOnePlatform() throws Exception {
assertTrue(appDeploymentRequest.getCommandlineArguments().contains("identifying.param(string)=testparam"));
}
- private void initializeJobs(boolean insertTaskExecutionMetadata) {
+ private void initializeJobs(boolean insertTaskExecutionMetadata)
+ throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
initializeJobs(insertTaskExecutionMetadata,
new SchemaVersionTarget("boot2", AppBootSchemaVersion.BOOT2, "TASK_",
"BATCH_", "H2"));
}
- private void initializeJobs(boolean insertTaskExecutionMetadata, SchemaVersionTarget schemaVersionTarget) {
+ private void initializeJobs(boolean insertTaskExecutionMetadata, SchemaVersionTarget schemaVersionTarget)
+ throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
String definitionName = (AppBootSchemaVersion.BOOT3.equals(schemaVersionTarget.getSchemaVersion())) ?
"some-name-boot3" : "some-name";
this.taskDefinitionRepository.save(new TaskDefinition(JOB_NAME_ORIG + jobInstanceCount, definitionName ));
@@ -260,10 +267,10 @@ private void createSampleJob(
BatchStatus status,
boolean insertTaskExecutionMetadata,
SchemaVersionTarget schemaVersionTarget
- ) {
+ ) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
- TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, new Date(), Collections.emptyList(), null);
+ TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, LocalDateTime.now(), Collections.emptyList(), null);
JobExecution jobExecution;
JdbcTemplate template = new JdbcTemplate(this.dataSource);
@@ -271,12 +278,12 @@ private void createSampleJob(
template.execute(String.format("INSERT INTO " + schemaVersionTarget.getTaskPrefix() + "EXECUTION_METADATA (ID, TASK_EXECUTION_ID, TASK_EXECUTION_MANIFEST) VALUES (%s, %s, '{\"taskDeploymentRequest\":{\"definition\":{\"name\":\"bd0917a\",\"properties\":{\"spring.datasource.username\":\"root\",\"spring.cloud.task.name\":\"bd0917a\",\"spring.datasource.url\":\"jdbc:mariadb://localhost:3306/task\",\"spring.datasource.driverClassName\":\"org.mariadb.jdbc.Driver\",\"spring.datasource.password\":\"password\"}},\"resource\":\"file:/Users/glennrenfro/tmp/batchdemo-0.0.1-SNAPSHOT.jar\",\"deploymentProperties\":{},\"commandlineArguments\":[\"run.id_long=1\",\"--spring.cloud.task.executionid=201\"]},\"platformName\":\"demo\"}')", taskExecution.getExecutionId(), taskExecution.getExecutionId()));
}
if(AppBootSchemaVersion.BOOT3.equals(schemaVersionTarget.getSchemaVersion())) {
- jobExecution = new JobExecution(instance, 1L, this.jobParameters, "foo");
- jobExecution.setCreateTime(new Date());
+ jobExecution = new JobExecution(instance, 1L, this.jobParameters);
+ jobExecution.setCreateTime(LocalDateTime.now());
jobExecution.setVersion(1);
- Object[] jobExecutionParameters = new Object[] { 1, 1, new Date(), new Date(),
+ Object[] jobExecutionParameters = new Object[] { 1, 1, LocalDateTime.now(), LocalDateTime.now(),
BatchStatus.COMPLETED, ExitStatus.COMPLETED,
- ExitStatus.COMPLETED.getExitDescription(), 1, new Date(), new Date() };
+ ExitStatus.COMPLETED.getExitDescription(), 1, LocalDateTime.now(), LocalDateTime.now() };
Object[] jobExecutionParmParameters = new Object[] { 1, "identifying.param", "java.lang.String", "testparm", "Y"};
this.jdbcTemplate.update(SAVE_JOB_EXECUTION, jobExecutionParameters,
new int[] { Types.BIGINT, Types.BIGINT, Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.VARCHAR,
@@ -284,15 +291,15 @@ private void createSampleJob(
this.jdbcTemplate.update(SAVE_JOB_EXECUTION_PARAM, jobExecutionParmParameters, new int[] { Types.BIGINT,
Types.VARCHAR, Types.VARCHAR, Types.VARCHAR, Types.CHAR});
} else {
- jobExecution = jobRepository.createJobExecution(instance,
- this.jobParameters, null);
+ jobExecution = jobRepository.createJobExecution(jobName,
+ this.jobParameters);
StepExecution stepExecution = new StepExecution("foo", jobExecution, 1L);
stepExecution.setId(null);
jobRepository.add(stepExecution);
}
taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
jobRepository.update(jobExecution);
}
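Note on the typed parameter API used in the hunks above: Spring Batch 5 makes JobParameter generic, so parameter maps become Map<String, JobParameter<?>> and every value carries an explicit type. A minimal standalone sketch under that assumption (class and parameter names are illustrative, not part of this patch):

```java
import java.util.HashMap;
import java.util.Map;

import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;

class TypedJobParametersSketch {

    static JobParameters identifyingStringParam() {
        Map<String, JobParameter<?>> params = new HashMap<>();
        // Batch 5: (value, type) replaces the untyped single-arg constructor;
        // the two-arg form creates an identifying parameter by default.
        params.put("identifying.param", new JobParameter<>("testparam", String.class));
        return new JobParameters(params);
    }
}
```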
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultAppValidationServiceTests.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultAppValidationServiceTests.java
index 90c8670e60..09602f66df 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultAppValidationServiceTests.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultAppValidationServiceTests.java
@@ -18,9 +18,15 @@
import java.net.URI;
-import org.apache.http.conn.ssl.NoopHostnameVerifier;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
+import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
+import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
+import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
+import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
+import org.apache.hc.client5.http.ssl.NoopHostnameVerifier;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
+import org.apache.hc.core5.http.config.Lookup;
+import org.apache.hc.core5.http.config.RegistryBuilder;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -142,14 +148,12 @@ private static boolean dockerCheck() {
boolean result = true;
try {
CloseableHttpClient httpClient
- = HttpClients.custom()
- .setSSLHostnameVerifier(new NoopHostnameVerifier())
+ = httpClientBuilder()
.build();
HttpComponentsClientHttpRequestFactory requestFactory
= new HttpComponentsClientHttpRequestFactory();
requestFactory.setHttpClient(httpClient);
requestFactory.setConnectTimeout(10000);
- requestFactory.setReadTimeout(10000);
RestTemplate restTemplate = new RestTemplate(requestFactory);
System.out.println("Testing access to " + DockerValidatorProperties.DOCKER_REGISTRY_URL
@@ -163,5 +167,13 @@ private static boolean dockerCheck() {
}
return result;
}
+ private static HttpClientBuilder httpClientBuilder() {
+ // Register http/s connection factories
+ Lookup<ConnectionSocketFactory> connSocketFactoryLookup = RegistryBuilder.<ConnectionSocketFactory>create()
+ .register("http", new PlainConnectionSocketFactory())
+ .build();
+ return HttpClients.custom()
+ .setConnectionManager(new BasicHttpClientConnectionManager(connSocketFactoryLookup));
+ }
}
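The httpClientBuilder() helper above only registers a plain "http" socket factory, while the HttpClient 4 code it replaces also disabled TLS hostname verification. A hedged sketch of how an "https" registration could be added under HttpClient 5 for test use; the helper name and trust strategy are assumptions, not part of this patch:

```java
import javax.net.ssl.SSLContext;

import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
import org.apache.hc.client5.http.ssl.NoopHostnameVerifier;
import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory;
import org.apache.hc.client5.http.ssl.TrustAllStrategy;
import org.apache.hc.core5.http.config.Lookup;
import org.apache.hc.core5.http.config.RegistryBuilder;
import org.apache.hc.core5.ssl.SSLContextBuilder;

class InsecureHttpClientSketch {

    // Test-only: trusts all certificates and skips hostname checks.
    static HttpClientBuilder insecureHttpClientBuilder() throws Exception {
        SSLContext sslContext = SSLContextBuilder.create()
                .loadTrustMaterial(TrustAllStrategy.INSTANCE)
                .build();
        Lookup<ConnectionSocketFactory> lookup = RegistryBuilder.<ConnectionSocketFactory>create()
                .register("http", new PlainConnectionSocketFactory())
                .register("https", new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE))
                .build();
        return HttpClients.custom()
                .setConnectionManager(new BasicHttpClientConnectionManager(lookup));
    }
}
```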
diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/support/ArgumentSanitizerTest.java b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/support/ArgumentSanitizerTest.java
index 5e0fe54511..24bf97e4f9 100644
--- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/support/ArgumentSanitizerTest.java
+++ b/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/support/ArgumentSanitizerTest.java
@@ -66,20 +66,20 @@ public void testSanitizeProperties() {
public void testSanitizeJobParameters() {
String[] JOB_PARAM_KEYS = {"username", "password", "name", "C", "D", "E"};
Date testDate = new Date();
- JobParameter[] PARAMETERS = {new JobParameter("foo", true),
- new JobParameter("bar", true),
- new JobParameter("baz", true),
- new JobParameter(1L, true),
- new JobParameter(1D, true),
- new JobParameter(testDate, false)};
-
- Map<String, JobParameter> jobParamMap = new LinkedHashMap<>();
+ JobParameter[] PARAMETERS = {new JobParameter("foo", String.class, true),
+ new JobParameter("bar", String.class, true),
+ new JobParameter("baz", String.class, true),
+ new JobParameter(1L, Long.class, true),
+ new JobParameter(1D, Double.class, true),
+ new JobParameter(testDate, Date.class, false)};
+
+ Map<String, JobParameter<?>> jobParamMap = new LinkedHashMap<>();
for (int paramCount = 0; paramCount < JOB_PARAM_KEYS.length; paramCount++) {
jobParamMap.put(JOB_PARAM_KEYS[paramCount], PARAMETERS[paramCount]);
}
JobParameters jobParameters = new JobParameters(jobParamMap);
JobParameters sanitizedJobParameters = this.sanitizer.sanitizeJobParameters(jobParameters);
- for(Map.Entry<String, JobParameter> entry : sanitizedJobParameters.getParameters().entrySet()) {
+ for(Map.Entry<String, JobParameter<?>> entry : sanitizedJobParameters.getParameters().entrySet()) {
if (entry.getKey().equals("username") || entry.getKey().equals("password")) {
Assert.assertEquals("******", entry.getValue().getValue());
}
diff --git a/spring-cloud-dataflow-server/src/main/java/org/springframework/cloud/dataflow/server/single/DataFlowServerApplication.java b/spring-cloud-dataflow-server/src/main/java/org/springframework/cloud/dataflow/server/single/DataFlowServerApplication.java
index 3b72569b06..5f2b6b1ce4 100644
--- a/spring-cloud-dataflow-server/src/main/java/org/springframework/cloud/dataflow/server/single/DataFlowServerApplication.java
+++ b/spring-cloud-dataflow-server/src/main/java/org/springframework/cloud/dataflow/server/single/DataFlowServerApplication.java
@@ -26,7 +26,7 @@
import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration;
import org.springframework.cloud.deployer.spi.kubernetes.KubernetesAutoConfiguration;
import org.springframework.cloud.deployer.spi.local.LocalDeployerAutoConfiguration;
-import org.springframework.cloud.task.configuration.MetricsAutoConfiguration;
+import org.springframework.cloud.task.configuration.observation.ObservationTaskAutoConfiguration;
import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration;
/**
@@ -37,7 +37,7 @@
* @author Janne Valkealahti
*/
@SpringBootApplication(exclude = {
- MetricsAutoConfiguration.class,
+ ObservationTaskAutoConfiguration.class,
SessionAutoConfiguration.class,
SimpleTaskAutoConfiguration.class,
ManagementWebSecurityAutoConfiguration.class,
diff --git a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java
index 8991b09ee6..165de22054 100644
--- a/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java
+++ b/spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/JobExecutionTestUtils.java
@@ -18,9 +18,8 @@
import java.sql.Timestamp;
import java.sql.Types;
-import java.time.ZoneId;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
import javax.sql.DataSource;
@@ -88,13 +87,13 @@ TaskExecution createSampleJob(String jobName, int jobExecutionCount, BatchStatus
// BATCH_JOB_EXECUTION differs and the DAO can not be used for BATCH4/5 inserting
DataFieldMaxValueIncrementer jobExecutionIncrementer = incrementerFactory.getIncrementer(incrementerFallbackType.name(), schemaVersionTarget.getBatchPrefix() + "JOB_EXECUTION_SEQ");
TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersion);
- TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, new Date(), new ArrayList<>(), null);
+ TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, LocalDateTime.now(), new ArrayList<>(), null);
JobInstance jobInstance = jobInstanceDao.createJobInstance(jobName, jobParameters);
for (int i = 0; i < jobExecutionCount; i++) {
JobExecution jobExecution = new JobExecution(jobInstance, new JobParameters());
jobExecution.setStatus(batchStatus);
jobExecution.setId(jobExecutionIncrementer.nextLongValue());
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
saveJobExecution(jobExecution, jdbcTemplate, schemaVersionTarget);
taskBatchDao.saveRelationship(taskExecution, jobExecution);
}
@@ -115,7 +114,7 @@ private DatabaseType determineIncrementerFallbackType(DataSource dataSource) {
}
private JobExecution saveJobExecution(JobExecution jobExecution, JdbcTemplate jdbcTemplate, SchemaVersionTarget schemaVersionTarget) {
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
jobExecution.setVersion(1);
Timestamp startTime = timestampFromDate(jobExecution.getStartTime());
Timestamp endTime = timestampFromDate(jobExecution.getEndTime());
@@ -134,8 +133,8 @@ private JobExecution saveJobExecution(JobExecution jobExecution, JdbcTemplate jd
return jobExecution;
}
- private Timestamp timestampFromDate(Date date) {
- return (date != null) ? Timestamp.valueOf(date.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime()) : null;
+ private Timestamp timestampFromDate(LocalDateTime date) {
+ return (date != null) ? Timestamp.valueOf(date) : null;
}
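The simplified timestampFromDate above works because java.sql.Timestamp converts to and from LocalDateTime directly, so the old Date -> Instant -> ZoneId hop is gone. A small round-trip sketch (method and class names are illustrative):

```java
import java.sql.Timestamp;
import java.time.LocalDateTime;

class TimestampConversionSketch {

    // LocalDateTime -> Timestamp: direct factory method, no zone math.
    static Timestamp toTimestamp(LocalDateTime dateTime) {
        return (dateTime != null) ? Timestamp.valueOf(dateTime) : null;
    }

    // Timestamp -> LocalDateTime for the reverse direction.
    static LocalDateTime fromTimestamp(Timestamp timestamp) {
        return (timestamp != null) ? timestamp.toLocalDateTime() : null;
    }
}
```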
diff --git a/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/HttpCommands.java b/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/HttpCommands.java
index 59764ef867..66c4a39080 100644
--- a/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/HttpCommands.java
+++ b/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/HttpCommands.java
@@ -157,14 +157,14 @@ private RestTemplate createRestTemplate(final StringBuilder buffer) {
restTemplate.setErrorHandler(new ResponseErrorHandler() {
@Override
public boolean hasError(ClientHttpResponse response) throws IOException {
- HttpStatus status = response.getStatusCode();
+ HttpStatus status = (HttpStatus) response.getStatusCode();
return (status == HttpStatus.BAD_GATEWAY || status == HttpStatus.GATEWAY_TIMEOUT
|| status == HttpStatus.INTERNAL_SERVER_ERROR);
}
@Override
public void handleError(ClientHttpResponse response) throws IOException {
- outputError(response.getStatusCode(), buffer);
+ outputError((HttpStatus)response.getStatusCode(), buffer);
}
});
@@ -181,7 +181,7 @@ private void outputRequest(String method, URI requestUri, MediaType mediaType, S
}
private void outputResponse(ResponseEntity response, StringBuilder buffer) {
- buffer.append("> ").append(response.getStatusCode().value()).append(" ").append(response.getStatusCode().name())
+ buffer.append("> ").append(response.getStatusCode().value()).append(" ").append(((HttpStatus)response.getStatusCode()).name())
.append(System.lineSeparator());
String maybeJson = response.getBody();
if (maybeJson != null) {
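The casts above assume ClientHttpResponse.getStatusCode() hands back an HttpStatus instance; in Spring Framework 6 it is declared as HttpStatusCode, and for a non-standard code the value may not be an HttpStatus, so the cast can throw ClassCastException. A defensive alternative, sketched with an illustrative method name, resolves the raw value instead:

```java
import java.io.IOException;

import org.springframework.http.HttpStatus;
import org.springframework.http.client.ClientHttpResponse;

class StatusCodeSketch {

    static boolean isServerGatewayError(ClientHttpResponse response) throws IOException {
        // HttpStatus.resolve returns null for unknown codes instead of throwing.
        HttpStatus status = HttpStatus.resolve(response.getStatusCode().value());
        return status == HttpStatus.BAD_GATEWAY
                || status == HttpStatus.GATEWAY_TIMEOUT
                || status == HttpStatus.INTERNAL_SERVER_ERROR;
    }
}
```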
diff --git a/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/JobCommands.java b/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/JobCommands.java
index 667c14db02..9aadab1b26 100644
--- a/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/JobCommands.java
+++ b/spring-cloud-dataflow-shell-core/src/main/java/org/springframework/cloud/dataflow/shell/command/JobCommands.java
@@ -150,13 +150,13 @@ public Table executionDisplay(
.addValue(jobExecutionResource.isDefined() ? "Created" : "Destroyed");
modelBuilder.addRow().addValue("Schema Target").addValue(jobExecutionResource.getSchemaTarget());
modelBuilder.addRow().addValue("Job Parameters ").addValue("");
- for (Map.Entry<String, JobParameter> jobParameterEntry : jobExecutionResource.getJobExecution()
+ for (Map.Entry<String, JobParameter<?>> jobParameterEntry : jobExecutionResource.getJobExecution()
.getJobParameters().getParameters().entrySet()) {
String key = org.springframework.util.StringUtils.trimLeadingCharacter(jobParameterEntry.getKey(), '-');
if (!jobParameterEntry.getValue().isIdentifying()) {
key = "-" + key;
}
- String updatedKey = String.format("%s(%s) ", key, jobParameterEntry.getValue().getType().name());
+ String updatedKey = String.format("%s(%s) ", key, jobParameterEntry.getValue().getType().getName());
modelBuilder.addRow().addValue(updatedKey).addValue(new ArgumentSanitizer().sanitize(key, String.valueOf(jobParameterEntry.getValue())));
}
diff --git a/spring-cloud-dataflow-shell-core/src/test/java/org/springframework/cloud/dataflow/shell/command/JobCommandTests.java b/spring-cloud-dataflow-shell-core/src/test/java/org/springframework/cloud/dataflow/shell/command/JobCommandTests.java
index 28ea43a66e..b9e5267f08 100644
--- a/spring-cloud-dataflow-shell-core/src/test/java/org/springframework/cloud/dataflow/shell/command/JobCommandTests.java
+++ b/spring-cloud-dataflow-shell-core/src/test/java/org/springframework/cloud/dataflow/shell/command/JobCommandTests.java
@@ -17,8 +17,8 @@
package org.springframework.cloud.dataflow.shell.command;
import javax.sql.DataSource;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -34,7 +34,10 @@
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader;
import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
@@ -113,21 +116,22 @@ public static void tearDown() {
}
}
- private static long createSampleJob(String jobName, int jobExecutionCount) {
+ private static long createSampleJob(String jobName, int jobExecutionCount)
+ throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(jobName, taskDefinitionReader);
JobRepository jobRepository = jobRepositoryContainer.get(schemaVersionTarget.getName());
JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
jobInstances.add(instance);
TaskExecutionDao dao = daoContainer.get(schemaVersionTarget.getName());
- TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<>(), null);
- Map<String, JobParameter> jobParameterMap = new HashMap<>();
- jobParameterMap.put("foo", new JobParameter("FOO", true));
- jobParameterMap.put("bar", new JobParameter("BAR", false));
+ TaskExecution taskExecution = dao.createTaskExecution(jobName, LocalDateTime.now(), new ArrayList<>(), null);
+ Map<String, JobParameter<?>> jobParameterMap = new HashMap<>();
+ jobParameterMap.put("foo", new JobParameter("FOO", String.class, true));
+ jobParameterMap.put("bar", new JobParameter("BAR", String.class, false));
JobParameters jobParameters = new JobParameters(jobParameterMap);
JobExecution jobExecution;
TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(schemaVersionTarget.getName());
for (int i = 0; i < jobExecutionCount; i++) {
- jobExecution = jobRepository.createJobExecution(instance, jobParameters, null);
+ jobExecution = jobRepository.createJobExecution(jobName, jobParameters);
taskBatchDao.saveRelationship(taskExecution, jobExecution);
StepExecution stepExecution = new StepExecution("foobar", jobExecution);
jobRepository.add(stepExecution);
diff --git a/spring-cloud-skipper/spring-cloud-skipper-client/src/main/java/org/springframework/cloud/skipper/client/util/HttpClientConfigurer.java b/spring-cloud-skipper/spring-cloud-skipper-client/src/main/java/org/springframework/cloud/skipper/client/util/HttpClientConfigurer.java
index baf73086e6..2472443e37 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-client/src/main/java/org/springframework/cloud/skipper/client/util/HttpClientConfigurer.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-client/src/main/java/org/springframework/cloud/skipper/client/util/HttpClientConfigurer.java
@@ -17,14 +17,11 @@
import java.net.URI;
-import javax.net.ssl.SSLContext;
-
import org.apache.hc.client5.http.auth.AuthScope;
import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
-import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
@@ -102,7 +99,6 @@ public HttpClientConfigurer skipTlsCertificateVerification(boolean skipTlsCertif
public HttpClientConfigurer targetHost(URI targetHost) {
this.targetHost = new HttpHost(targetHost.getScheme(), targetHost.getHost(), targetHost.getPort());
-
return this;
}
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/pom.xml b/spring-cloud-skipper/spring-cloud-skipper-server-core/pom.xml
index 6f767877f9..f29cd8c772 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/pom.xml
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/pom.xml
@@ -49,7 +49,10 @@
<dependency>
<groupId>org.hibernate.orm</groupId>
<artifactId>hibernate-micrometer</artifactId>
- <version>6.1.7.Final</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate.orm</groupId>
+ <artifactId>hibernate-ant</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
@@ -144,6 +147,10 @@
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-yaml</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents.client5</groupId>
+ <artifactId>httpclient5</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/config/security/SkipperOAuthSecurityConfiguration.java b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/config/security/SkipperOAuthSecurityConfiguration.java
index 1aadf32bb9..9c7bc2658f 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/config/security/SkipperOAuthSecurityConfiguration.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/config/security/SkipperOAuthSecurityConfiguration.java
@@ -26,6 +26,7 @@
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer;
+import org.springframework.security.config.annotation.web.configurers.HttpBasicConfigurer;
import org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint;
import org.springframework.security.web.authentication.www.BasicAuthenticationFilter;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
@@ -48,7 +49,7 @@ public class SkipperOAuthSecurityConfiguration extends OAuthSecurityConfiguratio
private AuthorizationProperties authorizationProperties;
@Override
- protected void configure(HttpSecurity http) throws Exception {
+ protected HttpBasicConfigurer<HttpSecurity> configure(HttpSecurity http) throws Exception {
final BasicAuthenticationEntryPoint basicAuthenticationEntryPoint = new BasicAuthenticationEntryPoint();
basicAuthenticationEntryPoint.setRealmName(SecurityConfigUtils.BASIC_AUTH_REALM_NAME);
@@ -69,10 +70,10 @@ protected void configure(HttpSecurity http) throws Exception {
ExpressionUrlAuthorizationConfigurer.ExpressionInterceptUrlRegistry security =
http.authorizeRequests()
- .antMatchers(getAuthorizationProperties().getPermitAllPaths()
+ .requestMatchers(getAuthorizationProperties().getPermitAllPaths()
.toArray(new String[0]))
.permitAll()
- .antMatchers(getAuthorizationProperties().getAuthenticatedPaths()
+ .requestMatchers(getAuthorizationProperties().getAuthenticatedPaths()
.toArray(new String[0]))
.authenticated();
@@ -99,5 +100,6 @@ else if (getoAuth2ResourceServerProperties().getJwt().getJwkSetUri() != null) {
}
getSecurityStateBean().setAuthenticationEnabled(true);
+ return http.getConfigurer(HttpBasicConfigurer.class);
}
}
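The antMatchers-to-requestMatchers rename above follows Spring Security 6, where the antMatchers variants were removed and requestMatchers accepts the same ant-style patterns. A minimal sketch mirroring the registry shape used in the configuration above; the paths are placeholders, not values from this patch:

```java
import org.springframework.security.config.annotation.web.builders.HttpSecurity;

class RequestMatchersSketch {

    static void configureAuthorization(HttpSecurity http) throws Exception {
        // Permit-all paths first, then paths that require authentication.
        http.authorizeRequests()
                .requestMatchers("/about", "/api/release/**").permitAll()
                .requestMatchers("/api/**").authenticated();
    }
}
```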
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/controller/AboutController.java b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/controller/AboutController.java
index 28320f0351..3dccfb3bf1 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/controller/AboutController.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/controller/AboutController.java
@@ -15,9 +15,14 @@
*/
package org.springframework.cloud.skipper.server.controller;
-import org.apache.http.conn.ssl.NoopHostnameVerifier;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
+import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
+import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
+import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
+import org.apache.hc.core5.http.config.Lookup;
+import org.apache.hc.core5.http.config.RegistryBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -93,8 +98,7 @@ private String getChecksum(String defaultValue, String url,
String version) {
String result = defaultValue;
if (result == null && StringUtils.hasText(url)) {
- CloseableHttpClient httpClient = HttpClients.custom()
- .setSSLHostnameVerifier(new NoopHostnameVerifier())
+ CloseableHttpClient httpClient = httpClientBuilder()
.build();
HttpComponentsClientHttpRequestFactory requestFactory
= new HttpComponentsClientHttpRequestFactory();
@@ -115,7 +119,14 @@ private String getChecksum(String defaultValue, String url,
}
return result;
}
-
+ private HttpClientBuilder httpClientBuilder() {
+ // Register http/s connection factories
+ Lookup<ConnectionSocketFactory> connSocketFactoryLookup = RegistryBuilder.<ConnectionSocketFactory>create()
+ .register("http", new PlainConnectionSocketFactory())
+ .build();
+ return HttpClients.custom()
+ .setConnectionManager(new BasicHttpClientConnectionManager(connSocketFactoryLookup));
+ }
private void updateDependency(Dependency dependency, VersionInfoProperties.DependencyAboutInfo dependencyAboutInfo) {
dependency.setName(dependencyAboutInfo.getName());
if (dependencyAboutInfo.getUrl() != null) {
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/db/migration/SkipperFlywayMigrationStrategy.java b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/db/migration/SkipperFlywayMigrationStrategy.java
index 34279583e4..8d9ccf73e1 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/db/migration/SkipperFlywayMigrationStrategy.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/db/migration/SkipperFlywayMigrationStrategy.java
@@ -16,8 +16,8 @@
package org.springframework.cloud.skipper.server.db.migration;
import org.flywaydb.core.Flyway;
+import org.flywaydb.core.api.CoreMigrationType;
import org.flywaydb.core.api.MigrationInfo;
-import org.flywaydb.core.api.MigrationType;
import org.flywaydb.core.api.MigrationVersion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -38,7 +38,7 @@ public class SkipperFlywayMigrationStrategy implements FlywayMigrationStrategy {
@Override
public void migrate(Flyway flyway) {
MigrationInfo current = flyway.info().current();
- if (current != null && current.getVersion().equals(INITIAL) && current.getType() == MigrationType.SQL) {
+ if (current != null && current.getVersion().equals(INITIAL) && current.getType() == CoreMigrationType.SQL) {
logger.info("Detected initial version based on SQL scripts, doing repair to switch to Java based migrations.");
flyway.repair();
}
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/domain/AppDeployerData.java b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/domain/AppDeployerData.java
index 07d3bc53e7..09906a9601 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/domain/AppDeployerData.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/main/java/org/springframework/cloud/skipper/server/domain/AppDeployerData.java
@@ -15,21 +15,21 @@
*/
package org.springframework.cloud.skipper.server.domain;
+import java.sql.Types;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import jakarta.persistence.Entity;
-import jakarta.persistence.Lob;
-import jakarta.persistence.Table;
-
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.hibernate.annotations.Type;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Lob;
+import jakarta.persistence.Table;
+import org.hibernate.annotations.JdbcTypeCode;
import org.springframework.cloud.skipper.SkipperException;
import org.springframework.cloud.skipper.domain.AbstractEntity;
@@ -53,7 +53,7 @@ public class AppDeployerData extends AbstractEntity {
// Store deployment Ids associated with the given release.
@Lob
- @Type(type = "org.springframework.cloud.dataflow.common.persistence.type.DatabaseAwareLobType")
+ @JdbcTypeCode(Types.LONGVARCHAR)
private String deploymentData;
public AppDeployerData() {
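The @Type-to-@JdbcTypeCode switch above reflects Hibernate 6, where the string-based custom-type lookup is gone and a JDBC type code from java.sql.Types is pinned on the column instead. A minimal sketch under that assumption (entity and field names are illustrative):

```java
import java.sql.Types;

import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Lob;
import org.hibernate.annotations.JdbcTypeCode;

@Entity
class LobSketchEntity {

    @Id
    private Long id;

    // Mapped through the LONGVARCHAR JDBC type; the dialect picks
    // the concrete database column type.
    @Lob
    @JdbcTypeCode(Types.LONGVARCHAR)
    private String payload;
}
```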
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/ReleaseControllerTests.java b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/ReleaseControllerTests.java
index a57aff9d77..6eb0efb2b3 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/ReleaseControllerTests.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/ReleaseControllerTests.java
@@ -21,7 +21,6 @@
import jakarta.servlet.DispatcherType;
import jakarta.servlet.ServletContext;
-
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/docs/ApiDocumentation.java b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/docs/ApiDocumentation.java
index 31044d48f4..bfb60015a8 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/docs/ApiDocumentation.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/controller/docs/ApiDocumentation.java
@@ -17,7 +17,6 @@
package org.springframework.cloud.skipper.server.controller.docs;
import jakarta.servlet.RequestDispatcher;
-
import org.junit.Test;
import org.springframework.test.context.ActiveProfiles;
diff --git a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/repository/SchemaGenerationTests.java b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/repository/SchemaGenerationTests.java
index 3907b9492b..948d83a396 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/repository/SchemaGenerationTests.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-server-core/src/test/java/org/springframework/cloud/skipper/server/repository/SchemaGenerationTests.java
@@ -23,7 +23,6 @@
import java.util.stream.Collectors;
import jakarta.persistence.spi.PersistenceUnitInfo;
-
import org.hibernate.HibernateException;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
@@ -35,9 +34,9 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.orm.jpa.hibernate.SpringImplicitNamingStrategy;
-import org.springframework.boot.orm.jpa.hibernate.SpringPhysicalNamingStrategy;
import org.springframework.cloud.skipper.server.AbstractIntegrationTest;
import org.springframework.cloud.skipper.server.config.SkipperServerConfiguration;
+import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.transaction.annotation.Transactional;
@@ -86,7 +85,7 @@ private void generateDdlFiles(String dialect, File tempDir, PersistenceUnitInfo
final MetadataSources metadata = new MetadataSources(
new StandardServiceRegistryBuilder()
.applySetting("hibernate.dialect", "org.hibernate.dialect." + dialect + "Dialect")
- .applySetting("hibernate.physical_naming_strategy", SpringPhysicalNamingStrategy.class.getName())
+ .applySetting("hibernate.physical_naming_strategy", CamelCaseAbbreviatingFieldNamingStrategy.class.getName())
.applySetting("hibernate.implicit_naming_strategy", SpringImplicitNamingStrategy.class.getName())
.build());
diff --git a/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ManifestCommands.java b/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ManifestCommands.java
index 1f2fc18c56..024353e009 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ManifestCommands.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ManifestCommands.java
@@ -16,7 +16,6 @@
package org.springframework.cloud.skipper.shell.command;
import jakarta.validation.constraints.NotNull;
-
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
diff --git a/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ReleaseCommands.java b/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ReleaseCommands.java
index cdd78ab275..f5ac8fa4d4 100644
--- a/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ReleaseCommands.java
+++ b/spring-cloud-skipper/spring-cloud-skipper-shell-commands/src/main/java/org/springframework/cloud/skipper/shell/command/ReleaseCommands.java
@@ -24,7 +24,6 @@
import java.util.List;
import jakarta.validation.constraints.NotNull;
-
import org.apache.commons.io.FilenameUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/local/security/LdapServerResource.java b/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/local/security/LdapServerResource.java
index 932cdc550b..10f8a07ed3 100644
--- a/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/local/security/LdapServerResource.java
+++ b/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/local/security/LdapServerResource.java
@@ -27,7 +27,6 @@
import org.springframework.test.util.TestSocketUtils;
import org.springframework.util.Assert;
import org.springframework.util.FileCopyUtils;
-import org.springframework.util.SocketUtils;
/**
* @author Marius Bogoevici
diff --git a/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalConfigurationTests.java b/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalConfigurationTests.java
index 712518fda6..4c40a7c4e2 100644
--- a/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalConfigurationTests.java
+++ b/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalConfigurationTests.java
@@ -38,7 +38,6 @@
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.io.ResourceLoader;
import org.springframework.test.util.TestSocketUtils;
-import org.springframework.util.SocketUtils;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
diff --git a/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalDataflowResource.java b/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalDataflowResource.java
index 0e693fed05..23177687ae 100644
--- a/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalDataflowResource.java
+++ b/spring-cloud-starter-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/single/LocalDataflowResource.java
@@ -57,11 +57,9 @@
import org.springframework.test.util.TestSocketUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-import org.springframework.util.SocketUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.WebApplicationContext;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;