java - How to partition a step in spring-batch?


I am learning Spring Batch and wrote a simple application to try it out. Per my requirements, I read from a single CSV file, apply some transformation, and insert the records into a database.

I have the following configuration:

@Bean
public Step step1(JdbcBatchItemWriter<Person> writer) {
    return stepBuilderFactory.get("step1")
            .<Person, Person>chunk(10)
            .reader(reader())
            .processor(processor())
            .writer(writer)
            .build();
}

@Bean
public Job importUserJob(JobCompletionNotificationListener listener, Step step1, Step step2) {
    return jobBuilderFactory.get("importUserJob")
            .incrementer(new RunIdIncrementer())
            .listener(listener)
            .listener(new JobExecutionListener() {
                @Override
                public void beforeJob(JobExecution jobExecution) {
                    System.out.println("!!!!!!!!!!!!!SECOND_LISTENER_BEFORE!!!!!!!!!!!!!!!!");
                }

                @Override
                public void afterJob(JobExecution jobExecution) {
                    System.out.println("!!!!!!!!!!!!!SECOND_LISTENER_AFTER!!!!!!!!!!!!!!!!");
                }
            })
            .flow(step1)
            .next(step2)
            .end()
            .build();
}

public FlatFileItemReader<Person> reader() {
    return new FlatFileItemReaderBuilder<Person>()
            .name("csvPersonReader")
            .resource(csvResource)
            .delimited()
            .names(new String[]{"firstName", "lastName"})
            .fieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
                setTargetType(Person.class);
            }})
            .build();
}

Now I want to execute this step using 10 threads. As far as I understand, I need the partitioning feature for this. I found several examples of it, but they all use XML configuration; I would prefer Java configuration.

How can I achieve this?

P.S.

I tried the following approach:

@Bean
public Step step1(JdbcBatchItemWriter<Person> writer) {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setCorePoolSize(1);
    TaskletStep step1 = stepBuilderFactory.get("step1")
            .<Person, Person>chunk(10)
            .reader(reader())
            .processor(processor())
            .writer(writer)
            .taskExecutor(taskExecutor)
            .build();

    return step1;
}

But my application hangs. Besides, this is not partitioning and would only work on a single machine.
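For reference, a multi-threaded (non-partitioned) step along the lines of this attempt would at least need the pool sized for the desired concurrency and a thread-safe reader, since FlatFileItemReader is not thread-safe. The following is only a sketch of that variant, assuming Spring Batch 4.x (where SynchronizedItemStreamReader and throttleLimit are available); it is not the accepted answer below.

@Bean
public Step step1(JdbcBatchItemWriter<Person> writer) {
    // Pool sized for 10 concurrent worker threads.
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setCorePoolSize(10);
    taskExecutor.setMaxPoolSize(10);
    taskExecutor.afterPropertiesSet();

    // Wrap the CSV reader so concurrent read() calls are serialized.
    SynchronizedItemStreamReader<Person> synchronizedReader = new SynchronizedItemStreamReader<>();
    synchronizedReader.setDelegate(reader());

    return stepBuilderFactory.get("step1")
            .<Person, Person>chunk(10)
            .reader(synchronizedReader)
            .processor(processor())
            .writer(writer)
            .taskExecutor(taskExecutor)
            .throttleLimit(10) // at most 10 chunks in flight at once
            .build();
}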

Best answer

You can use the code below to implement batch partitioning.

@Configuration
public class DemoJobBatchConfiguration {

    private static final Logger LOGGER = LoggerFactory.getLogger(DemoJobBatchConfiguration.class);

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    @Qualifier("applicaionDS")
    public DataSource dataSource;

    @Autowired
    UserWritter userWriter;

    @Bean("demoJob")
    public Job partitionJob(JobNotificationListener listener, JobBuilderFactory jobBuilderFactory,
            @Qualifier("demoPartitionStep") Step demoPartitionStep) {
        return jobBuilderFactory.get("demoJob").incrementer(new RunIdIncrementer()).listener(listener)
                .start(demoPartitionStep).build();
    }

    @Bean(name = "demoPartitionStep")
    public Step demoPartitionStep(Step demoSlaveStep, StepBuilderFactory stepBuilderFactory) {
        return stepBuilderFactory.get("demoPartitionStep").partitioner("demoPartitionStep", demoPartitioner())
                .gridSize(21).step(demoSlaveStep).taskExecutor(jobTaskExecutor()).build();
    }

    @Bean(name = "demoPartitioner", destroyMethod = "")
    public Partitioner demoPartitioner() {
        DemoPartitioner partitioner = new DemoPartitioner();
        // partitioner.partition(20);
        return partitioner;
    }

    @Bean
    public Step demoSlaveStep(ItemReader<User> demoReader, ItemProcessor<User, User> demoJobProcessor) {
        return stepBuilderFactory.get("demoSlaveStep").<User, User>chunk(3).reader(demoReader)
                .processor(demoJobProcessor).writer(userWriter).build();
    }

    @Bean(name = "demoReader")
    @StepScope
    public JdbcCursorItemReader<User> demoReader(@Value("#{stepExecutionContext[SQL]}") String SQL,
            @Value("#{jobParameters[JOB_PARM]}") String jobParm,
            @Value("#{jobExecutionContext[jobExecutionParameter]}") String jobExecutionParameter) {
        LOGGER.info("---------------------- demoReader ------------------------------- " + SQL);
        LOGGER.info(" jobParm : " + jobParm);
        LOGGER.info(" jobExecutionParameter : " + jobExecutionParameter);

        JdbcCursorItemReader<User> reader = new JdbcCursorItemReader<>();
        reader.setDataSource(this.dataSource);
        reader.setFetchSize(200);
        reader.setRowMapper(new BeanPropertyRowMapper<>(User.class));
        reader.setSql(SQL);
        return reader;
    }

    @Bean(name = "demoJobProcessor")
    @StepScope
    public ItemProcessor<User, User> demoJobProcessor() throws Exception {
        LOGGER.info(" DemoJobBatchConfiguration: demoJobProcessor ");
        return new UserProcessor();
    }

    /*
     * @Bean public ItemWriter<User> demoWriter() { return users -> { for (User user
     * : users) { if (LOGGER.isInfoEnabled()) { LOGGER.info("user read is :: " +
     * user.toString()); } } if (LOGGER.isInfoEnabled()) {
     * LOGGER.info("%%%%%%%%%%%%%%%%%%%%% demoWriter %%%%%%%%%%%%%%%%%%%%% "); } };
     * }
     */

    @Bean
    public TaskExecutor jobTaskExecutor() {
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        // there are 21 sites currently hence we have 21 threads
        taskExecutor.setMaxPoolSize(30);
        taskExecutor.setCorePoolSize(25);
        taskExecutor.afterPropertiesSet();
        return taskExecutor;
    }

}
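Note how the pieces fit together: the partitioner below puts a "SQL" key into each partition's ExecutionContext, and the step-scoped demoReader above picks it up via #{stepExecutionContext[SQL]}, so every worker thread queries a different ID range in parallel.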

public class DemoPartitioner implements Partitioner {

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {

        Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();

        int range = 3;
        int fromId = 1;
        int toId = range;

        for (int i = fromId; i <= gridSize;) {
            ExecutionContext executionContext = new ExecutionContext();
            String SQL = "SELECT * FROM CUSTOMER WHERE ID BETWEEN " + fromId + " AND " + toId;
            System.out.println("SQL : " + SQL);
            executionContext.putInt("fromId", fromId);
            executionContext.putInt("toId", toId);
            executionContext.putString("SQL", SQL);
            executionContext.putString("name", "Thread" + i);
            result.put("partition" + i, executionContext);
            fromId = toId + 1;
            i = fromId;
            toId += range;
        }
        return result;
    }

}
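The accepted answer partitions a database table by ID range. Since the original question reads a single CSV file, the same local-partitioning idea could instead hand each worker step a line range of that file. The following is only a sketch of that adaptation, reusing the question's Person class and csvResource; the bean names, the totalLines value, and the fromLine/toLine keys are illustrative assumptions, not part of the answer.

// Sketch only: a partitioner that splits a single CSV file into line ranges.
@Bean
public Partitioner csvRangePartitioner() {
    return gridSize -> {
        int totalLines = 1000; // assumption: number of data lines, counted or configured elsewhere
        int range = totalLines / gridSize;
        Map<String, ExecutionContext> partitions = new HashMap<>();
        for (int i = 0; i < gridSize; i++) {
            ExecutionContext context = new ExecutionContext();
            context.putInt("fromLine", i * range + 1);
            context.putInt("toLine", i == gridSize - 1 ? totalLines : (i + 1) * range);
            partitions.put("partition" + i, context);
        }
        return partitions;
    };
}

// Sketch only: a step-scoped reader that reads just its partition's line range.
@Bean
@StepScope
public FlatFileItemReader<Person> partitionedCsvReader(
        @Value("#{stepExecutionContext['fromLine']}") int fromLine,
        @Value("#{stepExecutionContext['toLine']}") int toLine) {
    FlatFileItemReader<Person> reader = new FlatFileItemReaderBuilder<Person>()
            .name("partitionedCsvReader")
            .resource(csvResource)
            .delimited()
            .names(new String[]{"firstName", "lastName"})
            .fieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
                setTargetType(Person.class);
            }})
            .build();
    // Each partition skips the items before its range and stops after the last one.
    reader.setCurrentItemCount(fromLine - 1);
    reader.setMaxItemCount(toLine);
    return reader;
}

The worker step and the partitioned step would then be wired exactly like demoSlaveStep and demoPartitionStep above, with gridSize controlling how many line ranges are created.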

Regarding "java - How to partition a step in spring-batch?", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/57329369/
