spring - Spring Batch In-memory (MapJobRepositoryFactoryBean) 清除旧作业而不是正在运行的作业
问题描述
我正在使用 spring-batch 来安排批处理作业,根据项目特定的要求采用内存方式(即不用于生产,仅用于测试环境),下面是我的配置类:
// Batch Scheduler class
package org.learning.scheduler;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.explore.support.SimpleJobExplorer;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean;
import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
 * In-memory Spring Batch infrastructure configuration.
 *
 * <p>Wires a map-backed {@link JobRepository} (no database) together with the
 * matching {@link JobExplorer} and an asynchronous {@link SimpleJobLauncher}.
 * Intended for test environments only, per the project requirement.</p>
 */
@EnableScheduling
@Configuration
public class InmemoryJobConfig {

    /** No-op transaction manager; the map-backed repository needs no real transactions. */
    @Bean
    public ResourcelessTransactionManager transactionManager() {
        return new ResourcelessTransactionManager();
    }

    /**
     * Factory for the map-based (in-memory) job repository.
     *
     * <p>{@code afterPropertiesSet()} is invoked eagerly so the underlying DAOs
     * exist before the explorer/repository beans below are built from this factory.</p>
     */
    @Bean
    public MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean(ResourcelessTransactionManager resourcelessTransactionManager) throws Exception {
        final MapJobRepositoryFactoryBean repositoryFactory =
                new MapJobRepositoryFactoryBean(resourcelessTransactionManager);
        repositoryFactory.afterPropertiesSet();
        return repositoryFactory;
    }

    /** The {@link JobRepository} backed by the factory's in-memory maps. */
    @Bean
    public JobRepository jobRepository(MapJobRepositoryFactoryBean factoryBean) throws Exception {
        return (JobRepository) factoryBean.getObject();
    }

    /** Read-only explorer over the same in-memory DAOs the repository uses. */
    @Bean
    public JobExplorer jobExplorer(MapJobRepositoryFactoryBean repositoryFactory) {
        return new SimpleJobExplorer(
                repositoryFactory.getJobInstanceDao(),
                repositoryFactory.getJobExecutionDao(),
                repositoryFactory.getStepExecutionDao(),
                repositoryFactory.getExecutionContextDao());
    }

    /** Launcher that runs jobs asynchronously on a {@link SimpleAsyncTaskExecutor}. */
    @Bean
    public SimpleJobLauncher jobLauncher(JobRepository jobRepository) throws Exception {
        final SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(jobRepository);
        launcher.setTaskExecutor(new SimpleAsyncTaskExecutor());
        return launcher;
    }
}
// Job Configuration Class
/**
* Batch Entry Point for Scheduler for all Jobs
*
*
*/
@Import({InmemoryJobConfig.class})
@EnableBatchProcessing
@Configuration
@Slf4j
public class BatchScheduler {
@Autowired
private JobBuilderFactory jobs;
@Autowired
private StepBuilderFactory steps;
@Autowired
private SimpleJobLauncher jobLauncher;
@Autowired
private JobExplorer jobExplorer;
@Autowired
private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean;
@Bean
public ItemReader<UserDTO> userReader() {
return new UserReader();
}
@Bean
public ItemWriter<User> userWriter() {
return new UserWriter();
}
@Bean
public ItemReader<OrderDTO> orderReader() {
return new OrderReader();
}
@Bean
public ItemWriter<Order> orderWriter() {
return new OrderWriter();
}
@Bean
public Step userStep(ItemReader<UserDTO> reader, ItemWriter<User> writer) {
return steps.get("userStep")
.<UserDTO, User>chunk(20)
.reader(userReader())
.processor(new UserProcessor())
.writer(userWriter())
.build();
}
@Bean
public Step orderStep(ItemReader<OrderDTO> reader, ItemWriter<Order> writer) {
return steps.get("orderStep")
.<OrderDTO, Order>chunk(20)
.reader(orderReader())
.processor(new OrderProcessor())
.writer(orderWriter())
.build();
}
@Bean
public Job userJob() {
return jobs.get("userJob").incrementer(new RunIdIncrementer()).start(userStep(userReader(), userWriter())).build();
}
@Bean
public Job orderJob() {
return jobs.get("orderJob").incrementer(new RunIdIncrementer()).start(orderStep(orderReader(), orderWriter())).build();
}
@Scheduled(cron = "0 0/15 * * * ?")
public void scheduleUserJob() throws JobExecutionException {
Set<JobExecution> runningJob = jobExplorer.findRunningJobExecutions("userJob");
if (!runningJob.isEmpty()) {
throw new JobExecutionException(" User Job is already in Start State ");
}
JobParameters userParam =
new JobParametersBuilder().addLong("date", System.currentTimeMillis())
.toJobParameters();
jobLauncher.run(userJob(), userParam);
}
@Scheduled(cron = "0 0/15 * * * ?")
public void scheduleOrderJob() throws JobExecutionException {
Set<JobExecution> runningJob = jobExplorer.findRunningJobExecutions("orderJob");
if (!runningJob.isEmpty()) {
throw new JobExecutionException(" Order Job is already in Start State ");
}
JobParameters orderParam =
new JobParametersBuilder().addLong("date", System.currentTimeMillis())
.toJobParameters();
jobLauncher.run(orderJob(), orderParam);
}
@Scheduled(cron = "0 0/30 * * * ?")
public void scheduleCleanupMemoryJob() throws BatchException {
Set<JobExecution> orderRunningJob = jobExplorer.findRunningJobExecutions("orderJob");
Set<JobExecution> userRunningJob = jobExplorer.findRunningJobExecutions("userJob");
if (!orderRunningJob.isEmpty() || !userRunningJob.isEmpty()) {
throw new BatchException(" Order/user Job is running state , cleanup job is aborted ");
}
mapJobRepositoryFactoryBean.clear();
}
}
我安排了两个每 15 分钟执行一次的作业,它们会执行一些业务逻辑;另外我还安排了一个内存清理作业,仅当这两个作业都不在运行时,才从“mapJobRepositoryFactoryBean”bean 中清理内存中的作业数据状态。
我想请教删除已执行完毕的旧作业的最佳方法;按照上述方式,只要有任何作业处于运行状态,就不会删除旧作业的详细信息。
或者,一旦作业被执行,spring-batch 中是否有任何 API 可以从内存中清除特定的作业详细信息。?即通过 JobId 清除内存
注意:我只想使用MapJobRepositoryFactoryBean而不是持久数据库或任何嵌入式数据库(H2)
解决方案
MapJobRepository 提供了一个 clear() 方法,可以清除基于 Map 的作业存储库中的所有数据,但我没有看到任何明显的方法可以只删除特定作业的元数据。
我只想使用 MapJobRepositoryFactoryBean 而不是持久数据库或任何嵌入式数据库(H2)
我强烈建议使用基于 JDBC 的作业存储库搭配内存数据库。这种方法更好,因为它允许您对内存数据库运行查询并删除特定作业的数据。
推荐阅读
- phpunit - 无法设置夹具(带有 dbunit 的 PHPunit)
- heroku - Heroku Connect:延长错误日志持续时间和 UNABLE_TO_LOCK_ROW 错误
- javascript - 将图像添加到 ajax 中的上传
- jquery - 无法识别的表达式:输入[名称
- uml - 是否允许我们只为 1 个参与者(用户)使用用例图?
- java - 当负载均衡器位于 MQ 和应用程序之间时,负载均衡器会消耗消息吗?
- html - 自定义 HTML 预处理器/视图引擎
- c# - 如何在 C# 上向 datagridview 插入多行?
- graphql - Apollo Server 2 + Express:后处理程序中缺少 req.body
- angular - 动态表列和行 [Angular 6 + Bootstrap 3]