diff --git a/pom.xml b/pom.xml index 38d49b1..a810811 100644 --- a/pom.xml +++ b/pom.xml @@ -21,22 +21,58 @@ org.springframework.boot spring-boot-starter-batch + + org.springframework.boot + spring-boot-starter-data-jpa + + + org.springframework.boot + spring-boot-starter-web + + + org.modelmapper + modelmapper + 2.4.4 + + + org.springframework.boot + spring-boot-starter-quartz + + + org.apache.httpcomponents + httpclient + 4.5.13 + + + org.springframework.batch + spring-batch-integration + mysql mysql-connector-java runtime - org.springframework.boot - spring-boot-configuration-processor - true + com.h2database + h2 + runtime org.projectlombok lombok true + + org.springframework + spring-oxm + 5.3.7 + + + com.thoughtworks.xstream + xstream + 1.4.16 + org.springframework.boot spring-boot-starter-test diff --git a/src/main/batch.http b/src/main/batch.http new file mode 100644 index 0000000..caac293 --- /dev/null +++ b/src/main/batch.http @@ -0,0 +1,19 @@ +### Send POST request with json body +POST http://localhost:8080/batch/start +Content-Type: application/json + +{ + "id": "leaven" +} + +### Send POST request with json body +POST http://localhost:8080/batch/stop +Content-Type: application/json + +### Send POST request with json body +POST http://localhost:8080/batch/restart +Content-Type: application/json + +### Send POST request with json body +POST http://localhost:8081/api/product/1 +Content-Type: application/json \ No newline at end of file diff --git a/src/main/java/io/springbatch/springbatchlecture/SpringBatchLectureApplication.java b/src/main/java/io/springbatch/springbatchlecture/SpringBatchLectureApplication.java index 0d8b2d4..8930a69 100644 --- a/src/main/java/io/springbatch/springbatchlecture/SpringBatchLectureApplication.java +++ b/src/main/java/io/springbatch/springbatchlecture/SpringBatchLectureApplication.java @@ -1,9 +1,11 @@ package io.springbatch.springbatchlecture; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication +@EnableBatchProcessing public class SpringBatchLectureApplication { public static void main(String[] args) { diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor1.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor1.java new file mode 100644 index 0000000..fa978ce --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor1.java @@ -0,0 +1,19 @@ +package io.springbatch.springbatchlecture.batch.chunk.processor; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.stereotype.Component; + +@Component +public class ApiItemProcessor1 implements ItemProcessor { + + @Override + public ApiRequestVO process(ProductVO productVO) throws Exception { + + return ApiRequestVO.builder() + .id(productVO.getId()) + .productVO(productVO) + .build(); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor2.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor2.java new file mode 100644 index 0000000..7158013 --- /dev/null +++ 
b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor2.java @@ -0,0 +1,19 @@ +package io.springbatch.springbatchlecture.batch.chunk.processor; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.stereotype.Component; + +@Component +public class ApiItemProcessor2 implements ItemProcessor { + + @Override + public ApiRequestVO process(ProductVO productVO) throws Exception { + + return ApiRequestVO.builder() + .id(productVO.getId()) + .productVO(productVO) + .build(); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor3.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor3.java new file mode 100644 index 0000000..d10fe65 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ApiItemProcessor3.java @@ -0,0 +1,19 @@ +package io.springbatch.springbatchlecture.batch.chunk.processor; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.stereotype.Component; + +@Component +public class ApiItemProcessor3 implements ItemProcessor { + + @Override + public ApiRequestVO process(ProductVO productVO) throws Exception { + + return ApiRequestVO.builder() + .id(productVO.getId()) + .productVO(productVO) + .build(); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/FileItemProcessor.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/FileItemProcessor.java new file mode 100644 index 0000000..1c11686 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/FileItemProcessor.java @@ -0,0 +1,19 @@ +package io.springbatch.springbatchlecture.batch.chunk.processor; + +import io.springbatch.springbatchlecture.batch.domain.Product; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import org.modelmapper.ModelMapper; +import org.springframework.batch.item.ItemProcessor; + +public class FileItemProcessor implements ItemProcessor { + + @Override + public Product process(ProductVO item) throws Exception { + + ModelMapper modelMapper = new ModelMapper(); + Product product = modelMapper.map(item, Product.class); + + + return product; + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ProcessorClassifier.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ProcessorClassifier.java new file mode 100644 index 0000000..6ba75a0 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/processor/ProcessorClassifier.java @@ -0,0 +1,23 @@ +package io.springbatch.springbatchlecture.batch.chunk.processor; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.classify.Classifier; + +import java.util.HashMap; +import java.util.Map; + +public class ProcessorClassifier implements Classifier { + + private Map> processorMap = new HashMap<>(); + + @Override + public T classify(C classifiable) { + return (T)processorMap.get(((ProductVO)classifiable).getType()); + } + + public void 
setProcessorMap(Map> processorMap) { + this.processorMap = processorMap; + } +} \ No newline at end of file diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter1.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter1.java new file mode 100644 index 0000000..fca20e5 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter1.java @@ -0,0 +1,43 @@ +package io.springbatch.springbatchlecture.batch.chunk.writer; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO; +import io.springbatch.springbatchlecture.service.AbstractApiService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.item.file.FlatFileItemWriter; +import org.springframework.batch.item.file.transform.DelimitedLineAggregator; +import org.springframework.core.io.FileSystemResource; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +@Slf4j +public class ApiItemWriter1 extends FlatFileItemWriter { + + private final AbstractApiService apiService; + + public ApiItemWriter1(AbstractApiService apiService) { + this.apiService = apiService; + } + + @Override + public void write(List items) throws Exception { + + System.out.println("----------------------------------"); + items.forEach(item -> System.out.println("items = " + item)); + System.out.println("----------------------------------"); + + ApiResponseVO response = apiService.service(items); + System.out.println("response = " + response); + + items.forEach(item -> item.setApiResponseVO(response)); + + super.setResource(new FileSystemResource("C:\\jsw\\inflearn\\spring-batch-lecture\\src\\main\\resources\\product1.txt")); + super.open(new ExecutionContext()); + super.setLineAggregator(new DelimitedLineAggregator<>()); + super.setAppendAllowed(true); + super.write(items); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter2.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter2.java new file mode 100644 index 0000000..88ce534 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter2.java @@ -0,0 +1,42 @@ +package io.springbatch.springbatchlecture.batch.chunk.writer; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO; +import io.springbatch.springbatchlecture.service.AbstractApiService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.item.file.FlatFileItemWriter; +import org.springframework.batch.item.file.transform.DelimitedLineAggregator; +import org.springframework.core.io.FileSystemResource; + +import java.util.List; + +@Slf4j +public class ApiItemWriter2 extends FlatFileItemWriter { + + private AbstractApiService apiService; + + public ApiItemWriter2(AbstractApiService apiService) { + this.apiService = apiService; + } + + @Override + public void write(List items) throws Exception { + + System.out.println("----------------------------------"); + items.forEach(item -> System.out.println("items = " + item)); + System.out.println("----------------------------------"); + + ApiResponseVO response = apiService.service(items); + 
System.out.println("response = " + response); + + items.forEach(item -> item.setApiResponseVO(response)); + + super.setResource(new FileSystemResource("C:\\jsw\\inflearn\\spring-batch-lecture\\src\\main\\resources\\product2.txt")); + super.open(new ExecutionContext()); + super.setLineAggregator(new DelimitedLineAggregator<>()); + super.setAppendAllowed(true); + super.write(items); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter3.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter3.java new file mode 100644 index 0000000..bbe6f86 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/ApiItemWriter3.java @@ -0,0 +1,42 @@ +package io.springbatch.springbatchlecture.batch.chunk.writer; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO; +import io.springbatch.springbatchlecture.service.AbstractApiService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.item.file.FlatFileItemWriter; +import org.springframework.batch.item.file.transform.DelimitedLineAggregator; +import org.springframework.core.io.FileSystemResource; + +import java.util.List; + +@Slf4j +public class ApiItemWriter3 extends FlatFileItemWriter { + + private AbstractApiService apiService; + + public ApiItemWriter3(AbstractApiService apiService) { + this.apiService = apiService; + } + + @Override + public void write(List items) throws Exception { + + System.out.println("----------------------------------"); + items.forEach(item -> System.out.println("items = " + item)); + System.out.println("----------------------------------"); + + ApiResponseVO response = apiService.service(items); + System.out.println("response = " + response); + + items.forEach(item -> item.setApiResponseVO(response)); + + super.setResource(new FileSystemResource("C:\\jsw\\inflearn\\spring-batch-lecture\\src\\main\\resources\\product3.txt")); + super.open(new ExecutionContext()); + super.setLineAggregator(new DelimitedLineAggregator<>()); + super.setAppendAllowed(true); + super.write(items); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/WriterClassifier.java b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/WriterClassifier.java new file mode 100644 index 0000000..318ca73 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/chunk/writer/WriterClassifier.java @@ -0,0 +1,24 @@ +package io.springbatch.springbatchlecture.batch.chunk.writer; + +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemWriter; +import org.springframework.classify.Classifier; + +import java.util.HashMap; +import java.util.Map; + +public class WriterClassifier implements Classifier { + + private Map> writerMap = new HashMap<>(); + + @Override + public T classify(C classifiable) { + return (T)writerMap.get(((ApiRequestVO)classifiable).getProductVO().getType()); + } + + public void setWriterMap(Map> writerMap) { + this.writerMap = writerMap; + } +} \ No newline at end of file diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiInfo.java 
b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiInfo.java new file mode 100644 index 0000000..96e5044 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiInfo.java @@ -0,0 +1,14 @@ +package io.springbatch.springbatchlecture.batch.domain; + +import lombok.Builder; +import lombok.Data; + +import java.util.List; + +@Data +@Builder +public class ApiInfo { + + private String url; + private List apiRequestList; +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiRequestVO.java b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiRequestVO.java new file mode 100644 index 0000000..e303b76 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiRequestVO.java @@ -0,0 +1,14 @@ +package io.springbatch.springbatchlecture.batch.domain; + +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class ApiRequestVO{ + + private long id; + private ProductVO productVO; + private ApiResponseVO apiResponseVO; + +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiResponseVO.java b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiResponseVO.java new file mode 100644 index 0000000..97d0a16 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ApiResponseVO.java @@ -0,0 +1,17 @@ +package io.springbatch.springbatchlecture.batch.domain; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.List; + +@Data +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class ApiResponseVO{ + private String status; + private String msg; +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/domain/Product.java b/src/main/java/io/springbatch/springbatchlecture/batch/domain/Product.java new file mode 100644 index 0000000..f084b0e --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/domain/Product.java @@ -0,0 +1,17 @@ +package io.springbatch.springbatchlecture.batch.domain; + +import lombok.Data; + +import javax.persistence.Entity; +import javax.persistence.Id; + +@Data +@Entity +public class Product { + + @Id + private Long id; + private String name; + private int price; + private String type; +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/domain/ProductVO.java b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ProductVO.java new file mode 100644 index 0000000..96466de --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/domain/ProductVO.java @@ -0,0 +1,18 @@ +package io.springbatch.springbatchlecture.batch.domain; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ProductVO { + + private Long id; + private String name; + private int price; + private String type; +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/job/api/ApiStepConfiguration.java b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/ApiStepConfiguration.java new file mode 100644 index 0000000..41aafdc --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/ApiStepConfiguration.java @@ -0,0 +1,153 @@ +package io.springbatch.springbatchlecture.batch.job.api; + +import io.springbatch.springbatchlecture.batch.chunk.processor.ApiItemProcessor1; +import 
io.springbatch.springbatchlecture.batch.chunk.processor.ApiItemProcessor2; +import io.springbatch.springbatchlecture.batch.chunk.processor.ApiItemProcessor3; +import io.springbatch.springbatchlecture.batch.chunk.processor.ProcessorClassifier; +import io.springbatch.springbatchlecture.batch.chunk.writer.ApiItemWriter1; +import io.springbatch.springbatchlecture.batch.chunk.writer.ApiItemWriter2; +import io.springbatch.springbatchlecture.batch.chunk.writer.ApiItemWriter3; +import io.springbatch.springbatchlecture.batch.chunk.writer.WriterClassifier; +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import io.springbatch.springbatchlecture.batch.partition.ProductPartitioner; +import io.springbatch.springbatchlecture.service.ApiService1; +import io.springbatch.springbatchlecture.service.ApiService2; +import io.springbatch.springbatchlecture.service.ApiService3; +import lombok.RequiredArgsConstructor; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.item.database.JdbcPagingItemReader; +import org.springframework.batch.item.database.Order; +import org.springframework.batch.item.database.support.MySqlPagingQueryProvider; +import org.springframework.batch.item.support.ClassifierCompositeItemProcessor; +import org.springframework.batch.item.support.ClassifierCompositeItemWriter; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.task.TaskExecutor; +import org.springframework.jdbc.core.BeanPropertyRowMapper; +import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +@Configuration +@RequiredArgsConstructor +public class ApiStepConfiguration { + + private final StepBuilderFactory stepBuilderFactory; + private final DataSource dataSource; + + private int chunkSize = 10; + + @Bean + public Step apiMasterStep() throws Exception { + + ProductVO[] productList = QueryGenerator.getProductList(dataSource); + + return stepBuilderFactory.get("apiMasterStep") + .partitioner(apiSlaveStep().getName(), partitioner()) + .step(apiSlaveStep()) + .gridSize(productList.length) + .taskExecutor(taskExecutor()) + .build(); + } + + @Bean + public TaskExecutor taskExecutor(){ + ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); + taskExecutor.setCorePoolSize(3); + taskExecutor.setMaxPoolSize(6); + taskExecutor.setThreadNamePrefix("api-thread-"); + + return taskExecutor; + } + + @Bean + public Step apiSlaveStep() throws Exception { + + return stepBuilderFactory.get("apiSlaveStep") + .chunk(chunkSize) + .reader(itemReader(null)) + .processor(itemProcessor()) + .writer(itemWriter()) + .build(); + } + + @Bean + public ProductPartitioner partitioner() { + ProductPartitioner productPartitioner = new ProductPartitioner(); + productPartitioner.setDataSource(dataSource); + return productPartitioner; + } + + @Bean + @StepScope + public ItemReader itemReader(@Value("#{stepExecutionContext['product']}") ProductVO productVO) throws Exception { + 
+ JdbcPagingItemReader reader = new JdbcPagingItemReader<>(); + + reader.setDataSource(dataSource); + reader.setPageSize(chunkSize); + reader.setRowMapper(new BeanPropertyRowMapper(ProductVO.class)); + + MySqlPagingQueryProvider queryProvider = new MySqlPagingQueryProvider(); + queryProvider.setSelectClause("id, name, price, type"); + queryProvider.setFromClause("from product"); + queryProvider.setWhereClause("where type = :type"); + + Map sortKeys = new HashMap<>(1); + sortKeys.put("id", Order.DESCENDING); + queryProvider.setSortKeys(sortKeys); + + reader.setParameterValues(QueryGenerator.getParameterForQuery("type", productVO.getType())); + reader.setQueryProvider(queryProvider); + reader.afterPropertiesSet(); + + return reader; + } + + @Bean + public ItemProcessor itemProcessor() { + + ClassifierCompositeItemProcessor processor = new ClassifierCompositeItemProcessor<>(); + + ProcessorClassifier> classifier = new ProcessorClassifier(); + + Map> processorMap = new HashMap<>(); + processorMap.put("1", new ApiItemProcessor1()); + processorMap.put("2", new ApiItemProcessor2()); + processorMap.put("3", new ApiItemProcessor3()); + + classifier.setProcessorMap(processorMap); + + processor.setClassifier(classifier); + + return processor; + } + + @Bean + public ItemWriter itemWriter() { + + ClassifierCompositeItemWriter writer = new ClassifierCompositeItemWriter<>(); + + WriterClassifier> classifier = new WriterClassifier(); + + Map> writerMap = new HashMap<>(); + writerMap.put("1", new ApiItemWriter1(new ApiService1())); + writerMap.put("2", new ApiItemWriter2(new ApiService2())); + writerMap.put("3", new ApiItemWriter3(new ApiService3())); + + classifier.setWriterMap(writerMap); + + writer.setClassifier(classifier); + + return writer; + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/job/api/QueryGenerator.java b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/QueryGenerator.java new file mode 100644 index 0000000..c8779e4 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/QueryGenerator.java @@ -0,0 +1,35 @@ +package io.springbatch.springbatchlecture.batch.job.api; + +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import io.springbatch.springbatchlecture.batch.rowmapper.ProductRowMapper; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class QueryGenerator { + + public static ProductVO[] getProductList(DataSource dataSource) { + + JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); + List productList = jdbcTemplate.query("select type as type from product group by type", new ProductRowMapper() { + @Override + public ProductVO mapRow(ResultSet rs, int i) throws SQLException { + return ProductVO.builder().type(rs.getString("type")).build(); + } + }); + + return productList.toArray(new ProductVO[]{}); + } + + public static Map getParameterForQuery(String parameter, String value) { + + HashMap parameters = new HashMap<>(); + parameters.put(parameter, value); + return parameters; + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/job/api/SendChildJobConfiguration.java b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/SendChildJobConfiguration.java new file mode 100644 index 0000000..2969cad --- /dev/null +++ 
b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/SendChildJobConfiguration.java @@ -0,0 +1,35 @@ +package io.springbatch.springbatchlecture.batch.job.api; + +import lombok.RequiredArgsConstructor; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +@RequiredArgsConstructor +public class SendChildJobConfiguration { + + private final JobBuilderFactory jobBuilderFactory; + private final StepBuilderFactory stepBuilderFactory; + private final Step apiMasterStep; + private final JobLauncher jobLauncher; + + @Bean + public Step jobStep() throws Exception { + return stepBuilderFactory.get("jobStep") + .job(childJob()) + .launcher(jobLauncher) + .build(); + } + + @Bean + public Job childJob() throws Exception { + return jobBuilderFactory.get("childJob") + .start(apiMasterStep) + .build(); + } +} \ No newline at end of file diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/job/api/SendJobConfiguration.java b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/SendJobConfiguration.java new file mode 100644 index 0000000..5c0af05 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/job/api/SendJobConfiguration.java @@ -0,0 +1,50 @@ +package io.springbatch.springbatchlecture.batch.job.api; + +import io.springbatch.springbatchlecture.batch.listener.JobListener; +import io.springbatch.springbatchlecture.batch.tasklet.ApiEndTasklet; +import io.springbatch.springbatchlecture.batch.tasklet.ApiStartTasklet; +import lombok.RequiredArgsConstructor; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.launch.support.RunIdIncrementer; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +@RequiredArgsConstructor +public class SendJobConfiguration { + + private final JobBuilderFactory jobBuilderFactory; + private final StepBuilderFactory stepBuilderFactory; + private final ApiStartTasklet apiStartTasklet; + private final ApiEndTasklet apiEndTasklet; + private final Step jobStep; + + @Bean + public Job apiJob() throws Exception { + + return jobBuilderFactory.get("apiJob") + .incrementer(new RunIdIncrementer()) + .listener(new JobListener()) + .start(apiStep1()) + .next(jobStep) + .next(apiStep2()) + .build(); + } + + @Bean + public Step apiStep1() throws Exception { + return stepBuilderFactory.get("apiStep") + .tasklet(apiStartTasklet) + .build(); + } + + @Bean + public Step apiStep2() throws Exception { + return stepBuilderFactory.get("apiStep2") + .tasklet(apiEndTasklet) + .build(); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/job/file/FileJobConfiguration.java b/src/main/java/io/springbatch/springbatchlecture/batch/job/file/FileJobConfiguration.java new file mode 100644 index 0000000..86efaea --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/job/file/FileJobConfiguration.java @@ -0,0 +1,76 @@ 
+package io.springbatch.springbatchlecture.batch.job.file; + +import io.springbatch.springbatchlecture.batch.chunk.processor.*; +import io.springbatch.springbatchlecture.batch.domain.Product; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import lombok.RequiredArgsConstructor; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.database.JpaItemWriter; +import org.springframework.batch.item.database.builder.JpaItemWriterBuilder; +import org.springframework.batch.item.file.FlatFileItemReader; +import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder; +import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; + +import javax.persistence.EntityManagerFactory; + +@Configuration +@RequiredArgsConstructor +public class FileJobConfiguration { + + private final JobBuilderFactory jobBuilderFactory; + private final StepBuilderFactory stepBuilderFactory; + private final EntityManagerFactory entityManagerFactory; + + @Bean + public Job fileJob() { + return jobBuilderFactory.get("fileJob") + .start(fileStep1()) + .build(); + } + + @Bean + public Step fileStep1() { + return stepBuilderFactory.get("fileStep1") + .chunk(10) + .reader(fileItemReader(null)) + .processor(fileItemProcessor()) + .writer(fileItemWriter()) + .build(); + } + + @Bean + @StepScope + public FlatFileItemReader fileItemReader(@Value("#{jobParameters['requestDate']}") String requestDate) { + return new FlatFileItemReaderBuilder() + .name("flatFile") + .resource(new ClassPathResource("product_" + requestDate +".csv")) + .fieldSetMapper(new BeanWrapperFieldSetMapper<>()) + .targetType(ProductVO.class) + .linesToSkip(1) + .delimited().delimiter(",") + .names("id","name","price","type") + .build(); + } + + @Bean + public ItemProcessor fileItemProcessor() { + return new FileItemProcessor(); + } + + @Bean + public JpaItemWriter fileItemWriter() { + return new JpaItemWriterBuilder() + .entityManagerFactory(entityManagerFactory) + .usePersist(true) + .build(); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/listener/JobListener.java b/src/main/java/io/springbatch/springbatchlecture/batch/listener/JobListener.java new file mode 100644 index 0000000..794aaed --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/listener/JobListener.java @@ -0,0 +1,30 @@ +package io.springbatch.springbatchlecture.batch.listener; + +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobExecutionListener; + +/** + *
+ * io.anymobi.core.batch.listener.job
+ *   ㄴ DataSendJobListener.java
+ *
+ * JobExecutionListener invoked when the batch Job runs
+ *
+ * @author : soowon.jung
+ * @version : 1.0.0
+ * @date : 2021-07-22 1:36 PM
+ * @see :
+ **/
+
+public class JobListener implements JobExecutionListener {
+
+    @Override
+    public void beforeJob(JobExecution jobExecution) {
+    }
+
+    @Override
+    public void afterJob(JobExecution jobExecution) {
+        long time = jobExecution.getEndTime().getTime() - jobExecution.getStartTime().getTime();
+        System.out.println("Total elapsed time : " + time);
+    }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/partition/ProductPartitioner.java b/src/main/java/io/springbatch/springbatchlecture/batch/partition/ProductPartitioner.java
new file mode 100644
index 0000000..93e39d0
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/partition/ProductPartitioner.java
@@ -0,0 +1,39 @@
+
+package io.springbatch.springbatchlecture.batch.partition;
+
+import io.springbatch.springbatchlecture.batch.domain.ProductVO;
+import io.springbatch.springbatchlecture.batch.job.api.QueryGenerator;
+import org.springframework.batch.core.partition.support.Partitioner;
+import org.springframework.batch.item.ExecutionContext;
+
+import javax.sql.DataSource;
+import java.util.HashMap;
+import java.util.Map;
+
+public class ProductPartitioner implements Partitioner {
+
+    private DataSource dataSource;
+
+    public void setDataSource(DataSource dataSource) {
+        this.dataSource = dataSource;
+    }
+
+    @Override
+    public Map<String, ExecutionContext> partition(int gridSize) {
+
+        ProductVO[] productList = QueryGenerator.getProductList(dataSource);
+        Map<String, ExecutionContext> result = new HashMap<>();
+        int number = 0;
+
+        for (int i = 0; i < productList.length; i++) {
+
+            ExecutionContext value = new ExecutionContext();
+
+            result.put("partition" + number, value);
+            value.put("product", productList[i]);
+
+            number++;
+        }
+
+        return result;
+    }}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/rowmapper/ProductRowMapper.java b/src/main/java/io/springbatch/springbatchlecture/batch/rowmapper/ProductRowMapper.java
new file mode 100644
index 0000000..d9db81b
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/rowmapper/ProductRowMapper.java
@@ -0,0 +1,19 @@
+package io.springbatch.springbatchlecture.batch.rowmapper;
+
+import io.springbatch.springbatchlecture.batch.domain.ProductVO;
+import org.springframework.jdbc.core.RowMapper;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class ProductRowMapper implements RowMapper<ProductVO> {
+    @Override
+    public ProductVO mapRow(ResultSet rs, int i) throws SQLException {
+        return ProductVO.builder()
+                .id(rs.getLong("id"))
+                .name(rs.getString("name"))
+                .price(rs.getInt("price"))
+                .type(rs.getString("type"))
+                .build();
+    }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiEndTasklet.java b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiEndTasklet.java
new file mode 100644
index 0000000..5f03d8c
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiEndTasklet.java
@@ -0,0 +1,25 @@
+package io.springbatch.springbatchlecture.batch.tasklet;
+
+import org.springframework.batch.core.StepContribution;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ApiEndTasklet implements Tasklet {
+
+    @Override
+    public RepeatStatus execute(StepContribution
contribution, ChunkContext chunkContext) throws Exception { + + System.out.println(""); + System.out.println(">> ApiEndTasklet is started"); + System.out.println(""); + System.out.println("******************************************************************************************************************************************************"); + System.out.println("* Spring Batch is completed *"); + System.out.println("******************************************************************************************************************************************************"); + System.out.println(""); + + return RepeatStatus.FINISHED; + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiStartTasklet.java b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiStartTasklet.java new file mode 100644 index 0000000..19913f7 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiStartTasklet.java @@ -0,0 +1,21 @@ +package io.springbatch.springbatchlecture.batch.tasklet; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.stereotype.Component; + +@Component +public class ApiStartTasklet implements Tasklet { + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + + System.out.println(""); + System.out.println(">> ApiStartTasklet is started"); + System.out.println(""); + + return RepeatStatus.FINISHED; + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiJobRunner.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiJobRunner.java new file mode 100644 index 0000000..5ba3e42 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiJobRunner.java @@ -0,0 +1,32 @@ +package io.springbatch.springbatchlecture.scheduler; + +import org.quartz.JobDetail; +import org.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.quartz.Trigger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.ApplicationArguments; +import org.springframework.stereotype.Component; + +import java.util.HashMap; + +@Component +public class ApiJobRunner extends JobRunner { + + @Autowired + private Scheduler scheduler; + + @Override + protected void doRun(ApplicationArguments args) { + + JobDetail jobDetail = buildJobDetail(ApiSchJob.class, "apiJob", "batch", new HashMap()); + Trigger trigger = buildJobTrigger("0/30 * * * * ?"); + + try { + scheduler.scheduleJob(jobDetail, trigger); + } catch (SchedulerException e) { + e.printStackTrace(); + } + } + +} diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiSchJob.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiSchJob.java new file mode 100644 index 0000000..2f7409f --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiSchJob.java @@ -0,0 +1,37 @@ +package io.springbatch.springbatchlecture.scheduler; + +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import org.springframework.batch.core.*; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.launch.JobLauncher; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.quartz.QuartzJobBean; +import org.springframework.stereotype.Component; + +import java.util.Date; +import java.util.List; +import java.util.stream.Collectors; + +@Component +@Slf4j +public class ApiSchJob extends QuartzJobBean{ + + @Autowired + private Job apiJob; + + @Autowired + private JobLauncher jobLauncher; + + @SneakyThrows + @Override + protected void executeInternal(JobExecutionContext context) throws JobExecutionException { + + JobParameters jobParameters = new JobParametersBuilder() + .addLong("id", new Date().getTime()) + .toJobParameters(); + jobLauncher.run(apiJob, jobParameters); + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/FileJobRunner.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileJobRunner.java new file mode 100644 index 0000000..5129fc2 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileJobRunner.java @@ -0,0 +1,33 @@ +package io.springbatch.springbatchlecture.scheduler; + +import org.quartz.*; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.ApplicationArguments; +import org.springframework.stereotype.Component; + +import java.util.HashMap; + +import static org.quartz.JobBuilder.newJob; + +@Component +public class FileJobRunner extends JobRunner { + + @Autowired + private Scheduler scheduler; + + @Override + protected void doRun(ApplicationArguments args) { + + String[] sourceArgs = args.getSourceArgs(); + JobDetail jobDetail = buildJobDetail(FileSchJob.class, "fileJob", "batch", new HashMap()); + Trigger trigger = buildJobTrigger("0/50 * * * * ?"); + jobDetail.getJobDataMap().put("requestDate", sourceArgs[0]); + + try { + scheduler.scheduleJob(jobDetail, trigger); + } catch (SchedulerException e) { + e.printStackTrace(); + } + } + +} diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/FileSchJob.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileSchJob.java new file mode 100644 index 0000000..b37377a --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileSchJob.java @@ -0,0 +1,61 @@ +package io.springbatch.springbatchlecture.scheduler; + +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import org.springframework.batch.core.*; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.quartz.QuartzJobBean; +import org.springframework.stereotype.Component; + +import java.util.Date; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; + +@Component +@Slf4j +public class FileSchJob extends QuartzJobBean{ + + @Autowired + private Job fileJob; + + @Autowired + private JobLauncher jobLauncher; + + @Autowired + private JobExplorer jobExplorer; + + @SneakyThrows + @Override + protected void executeInternal(JobExecutionContext context) throws JobExecutionException { + + String requestDate = (String)context.getJobDetail().getJobDataMap().get("requestDate"); + + JobParameters jobParameters = new JobParametersBuilder() + .addLong("id", new Date().getTime()) + .addString("requestDate", requestDate) + .toJobParameters(); + + int jobInstanceCount = 
jobExplorer.getJobInstanceCount(fileJob.getName());
+        List<JobInstance> jobInstances = jobExplorer.getJobInstances(fileJob.getName(), 0, jobInstanceCount);
+
+        if(jobInstances.size() > 0) {
+            for(JobInstance jobInstance : jobInstances){
+                List<JobExecution> jobExecutions = jobExplorer.getJobExecutions(jobInstance);
+                List<JobExecution> jobExecutionList = jobExecutions.stream().filter(jobExecution ->
+                        jobExecution.getJobParameters().getString("requestDate").equals(requestDate))
+                        .collect(Collectors.toList());
+                if (jobExecutionList.size() > 0) {
+                    throw new JobExecutionException(requestDate + " already exists");
+                }
+            }
+        }
+
+        jobLauncher.run(fileJob, jobParameters);
+    }
+
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/JobRunner.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/JobRunner.java
new file mode 100644
index 0000000..c0425d0
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/JobRunner.java
@@ -0,0 +1,33 @@
+package io.springbatch.springbatchlecture.scheduler;
+
+import org.quartz.*;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+
+import java.util.Map;
+
+import static org.quartz.JobBuilder.newJob;
+
+public abstract class JobRunner implements ApplicationRunner {
+
+    @Override
+    public void run(ApplicationArguments args) throws Exception {
+        doRun(args);
+    }
+
+    protected abstract void doRun(ApplicationArguments args);
+
+    public Trigger buildJobTrigger(String scheduleExp) {
+        return TriggerBuilder.newTrigger()
+                .withSchedule(CronScheduleBuilder.cronSchedule(scheduleExp)).build();
+    }
+
+    public JobDetail buildJobDetail(Class job, String name, String group, Map params) {
+        JobDataMap jobDataMap = new JobDataMap();
+        jobDataMap.putAll(params);
+
+        return newJob(job).withIdentity(name, group)
+                .usingJobData(jobDataMap)
+                .build();
+    }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/service/AbstractApiService.java b/src/main/java/io/springbatch/springbatchlecture/service/AbstractApiService.java
new file mode 100644
index 0000000..5cd4354
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/service/AbstractApiService.java
@@ -0,0 +1,52 @@
+package io.springbatch.springbatchlecture.service;
+
+import io.springbatch.springbatchlecture.batch.domain.ApiInfo;
+import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO;
+import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO;
+import io.springbatch.springbatchlecture.batch.domain.ProductVO;
+import org.springframework.boot.web.client.RestTemplateBuilder;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.stereotype.Service;
+import org.springframework.web.client.ResponseErrorHandler;
+import org.springframework.web.client.RestTemplate;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+@Service
+public abstract class AbstractApiService {
+
+    public ApiResponseVO service(List<? extends ApiRequestVO> apiRequest) {
+
+        // API integration with the intermediary provider
+        RestTemplateBuilder restTemplateBuilder = new RestTemplateBuilder();
+        RestTemplate restTemplate = restTemplateBuilder.errorHandler(new ResponseErrorHandler() {
+            @Override
+            public boolean hasError(ClientHttpResponse clientHttpResponse) throws IOException {
+                return false;
+            }
+
+            @Override
+
public void handleError(ClientHttpResponse clientHttpResponse) throws IOException { + + } + }).build(); + + restTemplate.setRequestFactory(new HttpComponentsClientHttpRequestFactory()); + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + ApiInfo apiInfo = ApiInfo.builder().apiRequestList(apiRequest).build(); + HttpEntity reqEntity = new HttpEntity<>(apiInfo, headers); + + return doApiService(restTemplate, apiInfo); + + } + + protected abstract ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo); +} diff --git a/src/main/java/io/springbatch/springbatchlecture/service/ApiService1.java b/src/main/java/io/springbatch/springbatchlecture/service/ApiService1.java new file mode 100644 index 0000000..31fe7bb --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/service/ApiService1.java @@ -0,0 +1,29 @@ +package io.springbatch.springbatchlecture.service; + +import io.springbatch.springbatchlecture.batch.domain.ApiInfo; +import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO; +import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO; +import io.springbatch.springbatchlecture.batch.domain.ProductVO; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpMessage; +import org.springframework.http.HttpMethod; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Service; +import org.springframework.web.client.RestTemplate; + +import java.util.List; + +@Service +public class ApiService1 extends AbstractApiService{ + + @Override + public ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo){ + + ResponseEntity response = restTemplate.postForEntity("http://localhost:8081/api/product/1", apiInfo, String.class); + + int statusCodeValue = response.getStatusCodeValue(); + ApiResponseVO apiResponseVO = new ApiResponseVO(statusCodeValue + "", response.getBody()); + + return apiResponseVO; + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/service/ApiService2.java b/src/main/java/io/springbatch/springbatchlecture/service/ApiService2.java new file mode 100644 index 0000000..e9ec7d5 --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/service/ApiService2.java @@ -0,0 +1,22 @@ +package io.springbatch.springbatchlecture.service; + +import io.springbatch.springbatchlecture.batch.domain.ApiInfo; +import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Service; +import org.springframework.web.client.RestTemplate; + +@Service +public class ApiService2 extends AbstractApiService{ + + @Override + public ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo){ + + ResponseEntity response = restTemplate.postForEntity("http://localhost:8081/api/product/2", apiInfo, String.class); + + int statusCodeValue = response.getStatusCodeValue(); + ApiResponseVO apiResponseVO = new ApiResponseVO(statusCodeValue + "", response.getBody()); + + return apiResponseVO; + } +} diff --git a/src/main/java/io/springbatch/springbatchlecture/service/ApiService3.java b/src/main/java/io/springbatch/springbatchlecture/service/ApiService3.java new file mode 100644 index 0000000..0a79f3f --- /dev/null +++ b/src/main/java/io/springbatch/springbatchlecture/service/ApiService3.java @@ -0,0 +1,22 @@ +package io.springbatch.springbatchlecture.service; + +import io.springbatch.springbatchlecture.batch.domain.ApiInfo; +import 
io.springbatch.springbatchlecture.batch.domain.ApiResponseVO; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Service; +import org.springframework.web.client.RestTemplate; + +@Service +public class ApiService3 extends AbstractApiService{ + + @Override + public ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo){ + + ResponseEntity response = restTemplate.postForEntity("http://localhost:8081/api/product/3", apiInfo, String.class); + + int statusCodeValue = response.getStatusCodeValue(); + ApiResponseVO apiResponseVO = new ApiResponseVO(statusCodeValue + "", response.getBody()); + + return apiResponseVO; + } +} diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties deleted file mode 100644 index 8b13789..0000000 --- a/src/main/resources/application.properties +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml new file mode 100644 index 0000000..5a7bc66 --- /dev/null +++ b/src/main/resources/application.yml @@ -0,0 +1,42 @@ +spring: + batch: + + profiles: + active: mysql + jpa: + hibernate: + ddl-auto: update + database-platform: org.hibernate.dialect.MySQL5InnoDBDialect + show-sql: true + properties: + hibernate.format_sql: true + +--- +spring: + config: + activate: + on-profile: local + datasource: + hikari: + jdbc-url: jdbc:h2:mem:testdb;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE + username: sa + password: + driver-class-name: org.h2.Driver + +--- +spring: + config: + activate: + on-profile: mysql + datasource: + hikari: + jdbc-url: jdbc:mysql://localhost:3306/springbatch?useUnicode=true&characterEncoding=utf8 + username: root + password: pass + driver-class-name: com.mysql.jdbc.Driver + batch: + job: + names: ${job.name:NONE} + enabled: false + jdbc: + initialize-schema: always \ No newline at end of file diff --git a/src/main/resources/data-mysql.sql b/src/main/resources/data-mysql.sql new file mode 100644 index 0000000..a3e3f0d --- /dev/null +++ b/src/main/resources/data-mysql.sql @@ -0,0 +1,32 @@ +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (1,"user1",1000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (2,"user2",2000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (3,"user3",3000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (4,"user4",4000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (5,"user5",5000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (6,"user6",6000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (7,"user7",7000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (8,"user8",8000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (9,"user9",9000,"1"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (10,"user10",10000,"1"); + +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (11,"user11",11000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (12,"user12",12000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (13,"user13",13000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (14,"user14",14000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (15,"user15",15000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (16,"user16",16000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (17,"user17",17000,"2"); +INSERT INTO `customer` 
(`id`,`name`,`price`,`type`) VALUES (18,"user18",18000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (19,"user19",19000,"2"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (20,"user20",20000,"2"); + +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (21,"user21",21000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (22,"user22",22000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (23,"user23",23000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (24,"user24",24000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (25,"user25",25000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (26,"user26",26000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (27,"user27",27000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (28,"user28",28000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (29,"user29",29000,"3"); +INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (30,"user30",30000,"3"); \ No newline at end of file diff --git a/src/main/resources/product_20210101.csv b/src/main/resources/product_20210101.csv new file mode 100644 index 0000000..4b40c4d --- /dev/null +++ b/src/main/resources/product_20210101.csv @@ -0,0 +1,31 @@ +id,name,price,type +1,user1,1000,1 +2,user2,2000,1 +3,user3,3000,1 +4,user4,4000,1 +5,user5,5000,1 +6,user6,6000,1 +7,user7,7000,1 +8,user8,8000,1 +9,user9,9000,1 +10,user10,10000,1 +11,user11,11000,2 +12,user12,12000,2 +13,user13,13000,2 +14,user14,14000,2 +15,user15,15000,2 +16,user16,16000,2 +17,user17,17000,2 +18,user18,18000,2 +19,user19,19000,2 +20,user20,20000,2 +21,user21,21000,3 +22,user22,22000,3 +23,user23,23000,3 +24,user24,24000,3 +25,user25,25000,3 +26,user26,26000,3 +27,user27,27000,3 +28,user28,28000,3 +29,user29,29000,3 +30,user30,30000,3 \ No newline at end of file diff --git a/src/main/resources/product_20210102.csv b/src/main/resources/product_20210102.csv new file mode 100644 index 0000000..3568d84 --- /dev/null +++ b/src/main/resources/product_20210102.csv @@ -0,0 +1,31 @@ +id,name,price,type +31,user31,1000,1 +32,user32,2000,1 +33,user33,3000,1 +34,user34,4000,1 +35,user35,5000,1 +36,user36,6000,1 +37,user37,7000,1 +38,user38,8000,1 +39,user39,9000,1 +40,user40,10000,1 +41,user41,11000,2 +42,user42,12000,2 +43,user43,13000,2 +44,user44,14000,2 +45,user45,15000,2 +46,user46,16000,2 +47,user47,17000,2 +48,user48,18000,2 +49,user49,19000,2 +50,user50,20000,2 +51,user51,21000,3 +52,user52,22000,3 +53,user53,23000,3 +54,user54,24000,3 +55,user55,25000,3 +56,user56,26000,3 +57,user57,27000,3 +58,user58,28000,3 +59,user59,29000,3 +60,user50,30000,3 \ No newline at end of file diff --git a/src/test/java/io/springbatch/springbatchlecture/SpringBatchLectureApplicationTests.java b/src/test/java/io/springbatch/springbatchlecture/SpringBatchLectureApplicationTests.java deleted file mode 100644 index 30f8787..0000000 --- a/src/test/java/io/springbatch/springbatchlecture/SpringBatchLectureApplicationTests.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.springbatch.springbatchlecture; - -import org.junit.jupiter.api.Test; -import org.springframework.boot.test.context.SpringBootTest; - -@SpringBootTest -class SpringBatchLectureApplicationTests { - - @Test - void contextLoads() { - } - -}
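Note: the batch.http file added above posts to /batch/start, /batch/stop and /batch/restart on port 8080, but no controller for those endpoints is part of this diff. The sketch below is only an illustration of how such an endpoint could launch the apiJob defined in SendJobConfiguration; the class name BatchController, its package, and the request-body handling are assumptions for illustration, not code from this change.

package io.springbatch.springbatchlecture.controller;

import lombok.RequiredArgsConstructor;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

import java.util.Map;

// Hypothetical controller backing the /batch/start request in batch.http.
@RestController
@RequiredArgsConstructor
public class BatchController {

    private final JobLauncher jobLauncher;
    private final Job apiJob; // the apiJob bean defined in SendJobConfiguration

    // Launches apiJob with the "id" value posted by batch.http plus a timestamp,
    // so every request creates a fresh JobInstance.
    @PostMapping("/batch/start")
    public String start(@RequestBody Map<String, String> body) throws Exception {
        JobParameters jobParameters = new JobParametersBuilder()
                .addString("id", body.get("id"))
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();

        JobExecution execution = jobLauncher.run(apiJob, jobParameters);
        return execution.getStatus().toString();
    }
}

The /batch/stop and /batch/restart requests would need a JobOperator (stop(executionId) and restart(executionId)); how they are wired depends on code not shown in this diff.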