Java 类org.springframework.batch.core.job.SimpleJob 实例源码
项目:aws-elastic-beanstalk-worker-spring-boot-spring-batch-template
文件:SampleBatchApplication.java
@Bean(name="job2")
public Job job2() {
    // Builds the "myJob2" batch job with a unique run id per launch and a
    // single tasklet step that prints a marker line.
    //
    // BUG FIX: the original created a throwaway `new SimpleJob()` and called
    // setRestartable(false) on it, then returned an unrelated builder-built
    // job — the non-restartable intent was silently lost. The builder's
    // preventRestart() applies that intent to the job actually returned.
    return jobs
            .get("myJob2")
            .preventRestart() // restartable = false, as the dead code intended
            .incrementer(new RunIdIncrementer())
            .start(steps.get("step3").tasklet((stepContribution, chunkContext) -> {
                System.out.println("job2 step");
                return RepeatStatus.FINISHED;
            }).build())
            .build();
}
项目:spring-cloud-task
文件:TaskBatchExecutionListenerTests.java
@Test
public void testBatchExecutionListenerBeanPostProcessorWithJobNames() {
    // Verifies that when the post-processor is configured with explicit job
    // names, a bean whose name matches is returned from
    // postProcessBeforeInitialization as the same (equal) instance.
    //
    // FIX: the original declared the raw type `List jobNames` even though the
    // right-hand side was a generic ArrayList<String>; parameterize it.
    List<String> jobNames = new ArrayList<>(3);
    jobNames.add("job1");
    jobNames.add("job2");
    jobNames.add("TESTOBJECT");
    TaskBatchExecutionListenerBeanPostProcessor beanPostProcessor =
            beanPostProcessor(jobNames);
    SimpleJob testObject = new SimpleJob();
    SimpleJob bean = (SimpleJob) beanPostProcessor
            .postProcessBeforeInitialization(testObject, "TESTOBJECT");
    assertEquals(testObject, bean);
}
项目:spring-cloud-task
文件:TaskBatchExecutionListenerTests.java
@Test
public void testBatchExecutionListenerBeanPostProcessorWithEmptyJobNames() {
    // With an empty job-name list, postProcessBeforeInitialization must hand
    // back a bean equal to the one passed in.
    TaskBatchExecutionListenerBeanPostProcessor postProcessor =
            beanPostProcessor(Collections.<String>emptyList());
    SimpleJob original = new SimpleJob();
    Object processed = postProcessor.postProcessBeforeInitialization(original, "TESTOBJECT");
    assertEquals(original, (SimpleJob) processed);
}
项目:TechnologyReadinessTool
文件:BaseJobBean.java
protected SimpleJob createSimpleJob(String name, Step initialStep) {
    // Assembles a minimal Spring Batch job: a named SimpleJob wired to the
    // shared repository and seeded with a single step.
    SimpleJob simpleJob = new SimpleJob(name);
    simpleJob.setJobRepository(jobRepository);
    simpleJob.addStep(initialStep);
    return simpleJob;
}
项目:TechnologyReadinessTool
文件:SnapshotBatchJob.java
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // Wrap the snapshot tasklet in a one-step batch job and run it.
    SimpleJob job = createSimpleJob("snapshotBatchJob",
            createTaskletStep("snapshotRun", snapshotBatchJobTasklet));
    // Spring Batch considers a job with identical parameters already run;
    // passing the current time as a parameter makes every trigger unique.
    Map<String, JobParameter> params = new HashMap<>();
    params.put("executeTime", new JobParameter(System.currentTimeMillis()));
    runJob(job, context, params);
}
项目:TechnologyReadinessTool
文件:NightlyReportBatchJob.java
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // Wrap the nightly-report tasklet in a one-step batch job and run it.
    SimpleJob job = createSimpleJob("nightlyReportBatchJob",
            createTaskletStep("nightlyRun", nightlyReportBatchJobTasklet));
    // Spring Batch considers a job with identical parameters already run;
    // passing the current time as a parameter makes every trigger unique.
    Map<String, JobParameter> params = new HashMap<>();
    params.put("executeTime", new JobParameter(System.currentTimeMillis()));
    runJob(job, context, params);
}
项目:TechnologyReadinessTool
文件:HourlyReportBatchJob.java
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // Wrap the hourly-report tasklet in a one-step batch job and run it.
    // NOTE(review): the field `HourlyReportBatchJobTasklet` breaks the
    // lowerCamelCase convention used by the sibling jobs; it is declared
    // outside this snippet, so the reference is left as-is.
    SimpleJob job = createSimpleJob("HourlyReportBatchJob",
            createTaskletStep("HourlyRun", HourlyReportBatchJobTasklet));
    // Spring Batch considers a job with identical parameters already run;
    // passing the current time as a parameter makes every trigger unique.
    Map<String, JobParameter> params = new HashMap<>();
    params.put("executeTime", new JobParameter(System.currentTimeMillis()));
    runJob(job, context, params);
}
项目:TechnologyReadinessTool
文件:DeleteSnapshotBatchJob.java
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // Wrap the snapshot-deletion tasklet in a one-step batch job and run it.
    SimpleJob job = createSimpleJob("deleteSnapshotWindowBatchJob",
            createTaskletStep("deleteSnapshotWindow", deleteSnapshotBatchJobTasklet));
    // Spring Batch considers a job with identical parameters already run;
    // passing the current time as a parameter makes every trigger unique.
    Map<String, JobParameter> params = new HashMap<>();
    params.put("executeTime", new JobParameter(System.currentTimeMillis()));
    // Forward the snapshot window id carried in the Quartz job data map.
    params.put("snapshotWindowId",
            new JobParameter(context.getJobDetail().getJobDataMap().getLong("snapshotWindowId")));
    runJob(job, context, params);
}