Skip to content
This repository was archived by the owner on May 14, 2025. It is now read-only.

Commit 089dadb

Browse files
committed
Added dataflow specific PageQueryProvider for missing functions
Batch removed some methods that are still required by Dataflow. Created Dataflow versions of those classes so that we can implement those methods. Added Hibernate version. Checkpoint for server core and server. Updated Skipper components. Updated classic docs to be REST Docs 3.0 compliant.
1 parent 15712ec commit 089dadb

File tree

74 files changed

+865
-514
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

74 files changed

+865
-514
lines changed

pom.xml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -77,9 +77,9 @@
7777
<module>spring-cloud-starter-dataflow-server</module>
7878
<module>spring-cloud-starter-dataflow-ui</module>
7979
<module>spring-cloud-dataflow-server</module>
80-
<module>spring-cloud-dataflow-tasklauncher</module>
81-
<module>spring-cloud-dataflow-single-step-batch-job</module>
82-
<module>spring-cloud-dataflow-composed-task-runner</module>
80+
<!-- <module>spring-cloud-dataflow-tasklauncher</module>-->
81+
<!-- <module>spring-cloud-dataflow-single-step-batch-job</module>-->
82+
<!-- <module>spring-cloud-dataflow-composed-task-runner</module>-->
8383
<module>spring-cloud-dataflow-test</module>
8484
<module>spring-cloud-dataflow-dependencies</module>
8585
<module>spring-cloud-dataflow-classic-docs</module>

spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
3333
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
3434
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
35-
import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
35+
import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
3636
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
3737

3838
/**
@@ -83,7 +83,7 @@ public void registeringAnApplicationVersion() throws Exception {
8383
parameterWithName("name").description("The name of the application to register"),
8484
parameterWithName("version").description("The version of the application to register")
8585
),
86-
requestParameters(
86+
queryParameters(
8787
parameterWithName("uri").description("URI where the application bits reside"),
8888
parameterWithName("metadata-uri").optional()
8989
.description("URI where the application metadata jar can be found"),
@@ -108,7 +108,7 @@ public void bulkRegisteringApps() throws Exception {
108108
.andExpect(status().isCreated())
109109
.andDo(
110110
this.documentationHandler.document(
111-
requestParameters(
111+
queryParameters(
112112
parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."),
113113
parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."),
114114
parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
@@ -133,7 +133,7 @@ public void getApplicationsFiltered() throws Exception {
133133
)
134134
.andExpect(status().isOk())
135135
.andDo(this.documentationHandler.document(
136-
requestParameters(
136+
queryParameters(
137137
parameterWithName("search").description("The search string performed on the name (optional)"),
138138
parameterWithName("type")
139139
.description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())),
@@ -167,7 +167,7 @@ public void getSingleApplication() throws Exception {
167167
parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())),
168168
parameterWithName("name").description("The name of the application to query")
169169
),
170-
requestParameters(
170+
queryParameters(
171171
parameterWithName("exhaustive").optional()
172172
.description("Return all application properties, including common Spring Boot properties")
173173
),
@@ -205,7 +205,7 @@ public void registeringAnApplication() throws Exception {
205205
parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())),
206206
parameterWithName("name").description("The name of the application to register")
207207
),
208-
requestParameters(
208+
queryParameters(
209209
parameterWithName("uri").description("URI where the application bits reside"),
210210
parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"),
211211
parameterWithName("bootVersion").optional().description("The Spring Boot version of the application.Default is 2"),

spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
2929
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
3030
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
31-
import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
31+
import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
3232
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
3333
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
3434

@@ -80,7 +80,7 @@ public void listAllAuditRecords() throws Exception {
8080
.andDo(print())
8181
.andExpect(status().isOk())
8282
.andDo(this.documentationHandler.document(
83-
requestParameters(
83+
queryParameters(
8484
parameterWithName("page").description("The zero-based page number (optional)"),
8585
parameterWithName("size").description("The requested page size (optional)"),
8686
parameterWithName("operations").description("Comma-separated list of Audit Operations (optional)"),

spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java

Lines changed: 1 addition & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
package org.springframework.cloud.dataflow.server.rest.documentation;
1818

1919
import javax.sql.DataSource;
20+
import java.sql.SQLException;
2021
import java.util.ArrayList;
2122
import java.util.Arrays;
2223
import java.util.HashMap;
@@ -32,14 +33,7 @@
3233
import org.springframework.cloud.dataflow.core.ApplicationType;
3334
import org.springframework.cloud.dataflow.core.Launcher;
3435
import org.springframework.cloud.dataflow.core.TaskPlatform;
35-
import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory;
36-
import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
37-
import org.springframework.cloud.dataflow.schema.service.SchemaService;
3836
import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController;
39-
import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao;
40-
import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer;
41-
import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao;
42-
import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities;
4337
import org.springframework.cloud.dataflow.server.service.SchedulerService;
4438
import org.springframework.cloud.dataflow.server.single.LocalDataflowResource;
4539
import org.springframework.cloud.deployer.spi.app.ActuatorOperations;
@@ -205,28 +199,6 @@ void destroyStream(String name) throws Exception {
205199
);
206200
}
207201

208-
protected DataflowTaskExecutionMetadataDaoContainer createDataFlowTaskExecutionMetadataDaoContainer(SchemaService schemaService) {
209-
DataflowTaskExecutionMetadataDaoContainer result = new DataflowTaskExecutionMetadataDaoContainer();
210-
MultiSchemaIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource);
211-
String databaseType;
212-
try {
213-
databaseType = DatabaseType.fromMetaData(dataSource).name();
214-
} catch (MetaDataAccessException e) {
215-
throw new IllegalStateException(e);
216-
}
217-
for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) {
218-
DataflowTaskExecutionMetadataDao dao = new JdbcDataflowTaskExecutionMetadataDao(
219-
dataSource,
220-
incrementerFactory.getIncrementer(databaseType,
221-
SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA_SEQ", target.getTaskPrefix())
222-
),
223-
target.getTaskPrefix()
224-
);
225-
result.add(target.getName(), dao);
226-
}
227-
return result;
228-
}
229-
230202
/**
231203
* A {@link ResultHandler} that can be turned off and on.
232204
*

spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java

Lines changed: 20 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616

1717
package org.springframework.cloud.dataflow.server.rest.documentation;
1818

19+
import java.time.LocalDateTime;
1920
import java.util.Collections;
2021
import java.util.Date;
2122
import java.util.HashMap;
@@ -29,7 +30,10 @@
2930
import org.springframework.batch.core.JobExecution;
3031
import org.springframework.batch.core.JobParameter;
3132
import org.springframework.batch.core.JobParameters;
33+
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
34+
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
3235
import org.springframework.batch.core.repository.JobRepository;
36+
import org.springframework.batch.core.repository.JobRestartException;
3337
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
3438
import org.springframework.boot.test.context.SpringBootTest;
3539
import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
@@ -58,7 +62,7 @@
5862
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
5963
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
6064
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
61-
import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
65+
import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
6266
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
6367
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
6468

@@ -128,7 +132,7 @@ public void listJobExecutions() throws Exception {
128132
.param("size", "10"))
129133
.andDo(print())
130134
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
131-
requestParameters(
135+
queryParameters(
132136
parameterWithName("page")
133137
.description("The zero-based page number (optional)"),
134138
parameterWithName("size")
@@ -149,7 +153,7 @@ public void listThinJobExecutions() throws Exception {
149153
.param("size", "10"))
150154
.andDo(print())
151155
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
152-
requestParameters(
156+
queryParameters(
153157
parameterWithName("page")
154158
.description("The zero-based page number (optional)"),
155159
parameterWithName("size")
@@ -171,7 +175,7 @@ public void listThinJobExecutionsByJobInstanceId() throws Exception {
171175
.param("jobInstanceId", "1"))
172176
.andDo(print())
173177
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
174-
requestParameters(
178+
queryParameters(
175179
parameterWithName("page")
176180
.description("The zero-based page number (optional)"),
177181
parameterWithName("size")
@@ -195,7 +199,7 @@ public void listThinJobExecutionsByTaskExecutionId() throws Exception {
195199
.param("taskExecutionId", "1"))
196200
.andDo(print())
197201
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
198-
requestParameters(
202+
queryParameters(
199203
parameterWithName("page")
200204
.description("The zero-based page number (optional)"),
201205
parameterWithName("size")
@@ -220,7 +224,7 @@ public void listThinJobExecutionsByDate() throws Exception {
220224
.param("toDate", "2050-09-24T18:00:45,000"))
221225
.andDo(print())
222226
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
223-
requestParameters(
227+
queryParameters(
224228
parameterWithName("page")
225229
.description("The zero-based page number (optional)"),
226230
parameterWithName("size")
@@ -246,7 +250,7 @@ public void listJobExecutionsByName() throws Exception {
246250
.param("size", "10"))
247251
.andDo(print())
248252
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
249-
requestParameters(
253+
queryParameters(
250254
parameterWithName("page")
251255
.description("The zero-based page number (optional)"),
252256
parameterWithName("size")
@@ -270,7 +274,7 @@ public void listThinJobExecutionsByName() throws Exception {
270274
.param("size", "10"))
271275
.andDo(print())
272276
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
273-
requestParameters(
277+
queryParameters(
274278
parameterWithName("page")
275279
.description("The zero-based page number (optional)"),
276280
parameterWithName("size")
@@ -297,7 +301,7 @@ public void jobDisplayDetail() throws Exception {
297301
pathParameters(
298302
parameterWithName("id").description("The id of an existing job execution (required)")
299303
),
300-
requestParameters(
304+
queryParameters(
301305
parameterWithName("schemaTarget").description("Schema Target to the Job.").optional()
302306
),
303307
responseFields(
@@ -337,7 +341,7 @@ public void jobStop() throws Exception {
337341
.andDo(this.documentationHandler.document(
338342
pathParameters(parameterWithName("id")
339343
.description("The id of an existing job execution (required)"))
340-
, requestParameters(
344+
, queryParameters(
341345
parameterWithName("schemaTarget").description("The schema target of the job execution").optional(),
342346
parameterWithName("stop")
343347
.description("Sends signal to stop the job if set to true"))));
@@ -354,7 +358,7 @@ public void jobRestart() throws Exception {
354358
.andDo(this.documentationHandler.document(
355359
pathParameters(parameterWithName("id")
356360
.description("The id of an existing job execution (required)"))
357-
, requestParameters(
361+
, queryParameters(
358362
parameterWithName("schemaTarget").description("The schema target of the job execution").optional(),
359363
parameterWithName("restart")
360364
.description("Sends signal to restart the job if set to true")
@@ -373,18 +377,18 @@ private void initialize() {
373377

374378
}
375379

376-
private void createJobExecution(String name, BatchStatus status) {
380+
private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
377381
SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader);
378382
TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName());
379-
TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null);
380-
Map<String, JobParameter> jobParameterMap = new HashMap<>();
383+
TaskExecution taskExecution = dao.createTaskExecution(name, LocalDateTime.now(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null);
384+
Map<String, JobParameter<?>> jobParameterMap = new HashMap<>();
381385
JobParameters jobParameters = new JobParameters(jobParameterMap);
382386
JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName());
383-
JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null);
387+
JobExecution jobExecution = jobRepository.createJobExecution(name, jobParameters);
384388
TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName());
385389
taskBatchDao.saveRelationship(taskExecution, jobExecution);
386390
jobExecution.setStatus(status);
387-
jobExecution.setStartTime(new Date());
391+
jobExecution.setStartTime(LocalDateTime.now());
388392
jobRepository.update(jobExecution);
389393
final TaskManifest manifest = new TaskManifest();
390394
manifest.setPlatformName("default");

spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@
1616

1717
package org.springframework.cloud.dataflow.server.rest.documentation;
1818

19+
import java.time.LocalDateTime;
1920
import java.util.ArrayList;
20-
import java.util.Date;
2121

2222
import org.junit.Before;
2323
import org.junit.Test;
@@ -26,7 +26,10 @@
2626
import org.springframework.batch.core.BatchStatus;
2727
import org.springframework.batch.core.JobExecution;
2828
import org.springframework.batch.core.JobParameters;
29+
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
30+
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
2931
import org.springframework.batch.core.repository.JobRepository;
32+
import org.springframework.batch.core.repository.JobRestartException;
3033
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
3134
import org.springframework.boot.test.context.SpringBootTest;
3235
import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
@@ -49,7 +52,7 @@
4952
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
5053
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
5154
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
52-
import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
55+
import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
5356
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
5457
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
5558

@@ -93,7 +96,7 @@ public void listJobInstances() throws Exception {
9396
.param("size", "10"))
9497
.andDo(print())
9598
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
96-
requestParameters(
99+
queryParameters(
97100
parameterWithName("page")
98101
.description("The zero-based page number (optional)"),
99102
parameterWithName("size")
@@ -117,7 +120,7 @@ public void jobDisplayDetail() throws Exception {
117120
pathParameters(
118121
parameterWithName("id").description("The id of an existing job instance (required)")
119122
),
120-
requestParameters(
123+
queryParameters(
121124
parameterWithName("schemaTarget").description("Schema target").optional()
122125
),
123126
responseFields(
@@ -138,16 +141,16 @@ private void initialize() {
138141
this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class);
139142
}
140143

141-
private void createJobExecution(String name, BatchStatus status) {
144+
private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
142145
SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader);
143146
TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName());
144-
TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), new ArrayList<>(), null);
147+
TaskExecution taskExecution = dao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null);
145148
JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName());
146-
JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null);
149+
JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters());
147150
TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName());
148151
taskBatchDao.saveRelationship(taskExecution, jobExecution);
149152
jobExecution.setStatus(status);
150-
jobExecution.setStartTime(new Date());
153+
jobExecution.setStartTime(LocalDateTime.now());
151154
jobRepository.update(jobExecution);
152155
}
153156
}

0 commit comments

Comments
 (0)