()
+ .entityManagerFactory(entityManagerFactory)
+ .usePersist(true)
+ .build();
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/listener/JobListener.java b/src/main/java/io/springbatch/springbatchlecture/batch/listener/JobListener.java
new file mode 100644
index 0000000..794aaed
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/listener/JobListener.java
@@ -0,0 +1,30 @@
+package io.springbatch.springbatchlecture.batch.listener;
+
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobExecutionListener;
+
+/**
+ *
+ * io.anymobi.core.batch.listener.job
+ * ㄴ DataSendJobListener.java
+ *
+ * 배치 Job 이 실행되면 호출되는 JobExecutionListener
+ *
+ * @author : soowon.jung
+ * @version : 1.0.0
+ * @date : 2021-07-22 오후 1:36
+ * @see :
+ **/
+
+public class JobListener implements JobExecutionListener {
+
+ @Override
+ public void beforeJob(JobExecution jobExecution) {
+ }
+
+ @Override
+ public void afterJob(JobExecution jobExecution) {
+ long time = jobExecution.getEndTime().getTime() - jobExecution.getStartTime().getTime();
+ System.out.println("총 소요시간 : " + time);
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/partition/ProductPartitioner.java b/src/main/java/io/springbatch/springbatchlecture/batch/partition/ProductPartitioner.java
new file mode 100644
index 0000000..93e39d0
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/partition/ProductPartitioner.java
@@ -0,0 +1,39 @@
+
+package io.springbatch.springbatchlecture.batch.partition;
+
+import io.springbatch.springbatchlecture.batch.domain.ProductVO;
+import io.springbatch.springbatchlecture.batch.job.api.QueryGenerator;
+import org.springframework.batch.core.partition.support.Partitioner;
+import org.springframework.batch.item.ExecutionContext;
+
+import javax.sql.DataSource;
+import java.util.HashMap;
+import java.util.Map;
+
+public class ProductPartitioner implements Partitioner {
+
+ private DataSource dataSource;
+
+ public void setDataSource(DataSource dataSource) {
+ this.dataSource = dataSource;
+ }
+
+ @Override
+    public Map<String, ExecutionContext> partition(int gridSize) {
+
+ ProductVO[] productList = QueryGenerator.getProductList(dataSource);
+        Map<String, ExecutionContext> result = new HashMap<>();
+ int number = 0;
+
+ for (int i = 0; i < productList.length; i++) {
+
+ ExecutionContext value = new ExecutionContext();
+
+ result.put("partition" + number, value);
+ value.put("product", productList[i]);
+
+ number++;
+ }
+
+ return result;
+ }}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/rowmapper/ProductRowMapper.java b/src/main/java/io/springbatch/springbatchlecture/batch/rowmapper/ProductRowMapper.java
new file mode 100644
index 0000000..d9db81b
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/rowmapper/ProductRowMapper.java
@@ -0,0 +1,19 @@
+package io.springbatch.springbatchlecture.batch.rowmapper;
+
+import io.springbatch.springbatchlecture.batch.domain.ProductVO;
+import org.springframework.jdbc.core.RowMapper;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class ProductRowMapper implements RowMapper<ProductVO> {
+ @Override
+ public ProductVO mapRow(ResultSet rs, int i) throws SQLException {
+ return ProductVO.builder()
+ .id(rs.getLong("id"))
+ .name(rs.getString("name"))
+ .price(rs.getInt("price"))
+ .type(rs.getString("type"))
+ .build();
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiEndTasklet.java b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiEndTasklet.java
new file mode 100644
index 0000000..5f03d8c
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiEndTasklet.java
@@ -0,0 +1,25 @@
+package io.springbatch.springbatchlecture.batch.tasklet;
+
+import org.springframework.batch.core.StepContribution;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ApiEndTasklet implements Tasklet {
+
+ @Override
+ public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+ System.out.println("");
+ System.out.println(">> ApiEndTasklet is started");
+ System.out.println("");
+ System.out.println("******************************************************************************************************************************************************");
+ System.out.println("* Spring Batch is completed *");
+ System.out.println("******************************************************************************************************************************************************");
+ System.out.println("");
+
+ return RepeatStatus.FINISHED;
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiStartTasklet.java b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiStartTasklet.java
new file mode 100644
index 0000000..19913f7
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/batch/tasklet/ApiStartTasklet.java
@@ -0,0 +1,21 @@
+package io.springbatch.springbatchlecture.batch.tasklet;
+
+import org.springframework.batch.core.StepContribution;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ApiStartTasklet implements Tasklet {
+
+ @Override
+ public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+
+ System.out.println("");
+ System.out.println(">> ApiStartTasklet is started");
+ System.out.println("");
+
+ return RepeatStatus.FINISHED;
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiJobRunner.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiJobRunner.java
new file mode 100644
index 0000000..5ba3e42
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiJobRunner.java
@@ -0,0 +1,32 @@
+package io.springbatch.springbatchlecture.scheduler;
+
+import org.quartz.JobDetail;
+import org.quartz.Scheduler;
+import org.quartz.SchedulerException;
+import org.quartz.Trigger;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.stereotype.Component;
+
+import java.util.HashMap;
+
+@Component
+public class ApiJobRunner extends JobRunner {
+
+ @Autowired
+ private Scheduler scheduler;
+
+ @Override
+ protected void doRun(ApplicationArguments args) {
+
+ JobDetail jobDetail = buildJobDetail(ApiSchJob.class, "apiJob", "batch", new HashMap());
+ Trigger trigger = buildJobTrigger("0/30 * * * * ?");
+
+ try {
+ scheduler.scheduleJob(jobDetail, trigger);
+ } catch (SchedulerException e) {
+ e.printStackTrace();
+ }
+ }
+
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiSchJob.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiSchJob.java
new file mode 100644
index 0000000..2f7409f
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/ApiSchJob.java
@@ -0,0 +1,37 @@
+package io.springbatch.springbatchlecture.scheduler;
+
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.quartz.JobExecutionContext;
+import org.quartz.JobExecutionException;
+import org.springframework.batch.core.*;
+import org.springframework.batch.core.explore.JobExplorer;
+import org.springframework.batch.core.launch.JobLauncher;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.scheduling.quartz.QuartzJobBean;
+import org.springframework.stereotype.Component;
+
+import java.util.Date;
+import java.util.List;
+import java.util.stream.Collectors;
+
+@Component
+@Slf4j
+public class ApiSchJob extends QuartzJobBean{
+
+ @Autowired
+ private Job apiJob;
+
+ @Autowired
+ private JobLauncher jobLauncher;
+
+ @SneakyThrows
+ @Override
+ protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
+
+ JobParameters jobParameters = new JobParametersBuilder()
+ .addLong("id", new Date().getTime())
+ .toJobParameters();
+ jobLauncher.run(apiJob, jobParameters);
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/FileJobRunner.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileJobRunner.java
new file mode 100644
index 0000000..5129fc2
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileJobRunner.java
@@ -0,0 +1,33 @@
+package io.springbatch.springbatchlecture.scheduler;
+
+import org.quartz.*;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.stereotype.Component;
+
+import java.util.HashMap;
+
+import static org.quartz.JobBuilder.newJob;
+
+@Component
+public class FileJobRunner extends JobRunner {
+
+ @Autowired
+ private Scheduler scheduler;
+
+ @Override
+ protected void doRun(ApplicationArguments args) {
+
+ String[] sourceArgs = args.getSourceArgs();
+ JobDetail jobDetail = buildJobDetail(FileSchJob.class, "fileJob", "batch", new HashMap());
+ Trigger trigger = buildJobTrigger("0/50 * * * * ?");
+ jobDetail.getJobDataMap().put("requestDate", sourceArgs[0]);
+
+ try {
+ scheduler.scheduleJob(jobDetail, trigger);
+ } catch (SchedulerException e) {
+ e.printStackTrace();
+ }
+ }
+
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/FileSchJob.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileSchJob.java
new file mode 100644
index 0000000..b37377a
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/FileSchJob.java
@@ -0,0 +1,61 @@
+package io.springbatch.springbatchlecture.scheduler;
+
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.quartz.JobExecutionContext;
+import org.quartz.JobExecutionException;
+import org.springframework.batch.core.*;
+import org.springframework.batch.core.explore.JobExplorer;
+import org.springframework.batch.core.launch.JobLauncher;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.scheduling.quartz.QuartzJobBean;
+import org.springframework.stereotype.Component;
+
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
+
+@Component
+@Slf4j
+public class FileSchJob extends QuartzJobBean{
+
+ @Autowired
+ private Job fileJob;
+
+ @Autowired
+ private JobLauncher jobLauncher;
+
+ @Autowired
+ private JobExplorer jobExplorer;
+
+ @SneakyThrows
+ @Override
+ protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
+
+ String requestDate = (String)context.getJobDetail().getJobDataMap().get("requestDate");
+
+ JobParameters jobParameters = new JobParametersBuilder()
+ .addLong("id", new Date().getTime())
+ .addString("requestDate", requestDate)
+ .toJobParameters();
+
+ int jobInstanceCount = jobExplorer.getJobInstanceCount(fileJob.getName());
+        List<JobInstance> jobInstances = jobExplorer.getJobInstances(fileJob.getName(), 0, jobInstanceCount);
+
+ if(jobInstances.size() > 0) {
+ for(JobInstance jobInstance : jobInstances){
+                List<JobExecution> jobExecutions = jobExplorer.getJobExecutions(jobInstance);
+                List<JobExecution> jobExecutionList = jobExecutions.stream().filter(jobExecution ->
+ jobExecution.getJobParameters().getString("requestDate").equals(requestDate))
+ .collect(Collectors.toList());
+ if (jobExecutionList.size() > 0) {
+ throw new JobExecutionException(requestDate + " already exists");
+ }
+ }
+ }
+
+ jobLauncher.run(fileJob, jobParameters);
+ }
+
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/scheduler/JobRunner.java b/src/main/java/io/springbatch/springbatchlecture/scheduler/JobRunner.java
new file mode 100644
index 0000000..c0425d0
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/scheduler/JobRunner.java
@@ -0,0 +1,33 @@
+package io.springbatch.springbatchlecture.scheduler;
+
+import org.quartz.*;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+
+import java.util.Map;
+
+import static org.quartz.JobBuilder.newJob;
+
+public abstract class JobRunner implements ApplicationRunner {
+
+ @Override
+ public void run(ApplicationArguments args) throws Exception {
+ doRun(args);
+ }
+
+ protected abstract void doRun(ApplicationArguments args);
+
+ public Trigger buildJobTrigger(String scheduleExp) {
+ return TriggerBuilder.newTrigger()
+ .withSchedule(CronScheduleBuilder.cronSchedule(scheduleExp)).build();
+ }
+
+    public JobDetail buildJobDetail(Class<? extends Job> job, String name, String group, Map<String, Object> params) {
+ JobDataMap jobDataMap = new JobDataMap();
+ jobDataMap.putAll(params);
+
+ return newJob(job).withIdentity(name, group)
+ .usingJobData(jobDataMap)
+ .build();
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/service/AbstractApiService.java b/src/main/java/io/springbatch/springbatchlecture/service/AbstractApiService.java
new file mode 100644
index 0000000..5cd4354
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/service/AbstractApiService.java
@@ -0,0 +1,52 @@
+package io.springbatch.springbatchlecture.service;
+
+import io.springbatch.springbatchlecture.batch.domain.ApiInfo;
+import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO;
+import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO;
+import io.springbatch.springbatchlecture.batch.domain.ProductVO;
+import org.springframework.boot.web.client.RestTemplateBuilder;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.stereotype.Service;
+import org.springframework.web.client.ResponseErrorHandler;
+import org.springframework.web.client.RestTemplate;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+@Service
+public abstract class AbstractApiService {
+
+    public ApiResponseVO service(List<? extends ApiRequestVO> apiRequest) {
+
+ // 중계사업자와 API 연동 작업
+ RestTemplateBuilder restTemplateBuilder = new RestTemplateBuilder();
+ RestTemplate restTemplate = restTemplateBuilder.errorHandler(new ResponseErrorHandler() {
+ @Override
+ public boolean hasError(ClientHttpResponse clientHttpResponse) throws IOException {
+ return false;
+ }
+
+ @Override
+ public void handleError(ClientHttpResponse clientHttpResponse) throws IOException {
+
+ }
+ }).build();
+
+ restTemplate.setRequestFactory(new HttpComponentsClientHttpRequestFactory());
+ HttpHeaders headers = new HttpHeaders();
+ headers.setContentType(MediaType.APPLICATION_JSON);
+
+ ApiInfo apiInfo = ApiInfo.builder().apiRequestList(apiRequest).build();
+        HttpEntity<ApiInfo> reqEntity = new HttpEntity<>(apiInfo, headers);
+
+ return doApiService(restTemplate, apiInfo);
+
+ }
+
+ protected abstract ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo);
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/service/ApiService1.java b/src/main/java/io/springbatch/springbatchlecture/service/ApiService1.java
new file mode 100644
index 0000000..31fe7bb
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/service/ApiService1.java
@@ -0,0 +1,29 @@
+package io.springbatch.springbatchlecture.service;
+
+import io.springbatch.springbatchlecture.batch.domain.ApiInfo;
+import io.springbatch.springbatchlecture.batch.domain.ApiRequestVO;
+import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO;
+import io.springbatch.springbatchlecture.batch.domain.ProductVO;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpMessage;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+import org.springframework.web.client.RestTemplate;
+
+import java.util.List;
+
+@Service
+public class ApiService1 extends AbstractApiService{
+
+ @Override
+ public ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo){
+
+        ResponseEntity<String> response = restTemplate.postForEntity("http://localhost:8081/api/product/1", apiInfo, String.class);
+
+ int statusCodeValue = response.getStatusCodeValue();
+ ApiResponseVO apiResponseVO = new ApiResponseVO(statusCodeValue + "", response.getBody());
+
+ return apiResponseVO;
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/service/ApiService2.java b/src/main/java/io/springbatch/springbatchlecture/service/ApiService2.java
new file mode 100644
index 0000000..e9ec7d5
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/service/ApiService2.java
@@ -0,0 +1,22 @@
+package io.springbatch.springbatchlecture.service;
+
+import io.springbatch.springbatchlecture.batch.domain.ApiInfo;
+import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+import org.springframework.web.client.RestTemplate;
+
+@Service
+public class ApiService2 extends AbstractApiService{
+
+ @Override
+ public ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo){
+
+        ResponseEntity<String> response = restTemplate.postForEntity("http://localhost:8081/api/product/2", apiInfo, String.class);
+
+ int statusCodeValue = response.getStatusCodeValue();
+ ApiResponseVO apiResponseVO = new ApiResponseVO(statusCodeValue + "", response.getBody());
+
+ return apiResponseVO;
+ }
+}
diff --git a/src/main/java/io/springbatch/springbatchlecture/service/ApiService3.java b/src/main/java/io/springbatch/springbatchlecture/service/ApiService3.java
new file mode 100644
index 0000000..0a79f3f
--- /dev/null
+++ b/src/main/java/io/springbatch/springbatchlecture/service/ApiService3.java
@@ -0,0 +1,22 @@
+package io.springbatch.springbatchlecture.service;
+
+import io.springbatch.springbatchlecture.batch.domain.ApiInfo;
+import io.springbatch.springbatchlecture.batch.domain.ApiResponseVO;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+import org.springframework.web.client.RestTemplate;
+
+@Service
+public class ApiService3 extends AbstractApiService{
+
+ @Override
+ public ApiResponseVO doApiService(RestTemplate restTemplate, ApiInfo apiInfo){
+
+        ResponseEntity<String> response = restTemplate.postForEntity("http://localhost:8081/api/product/3", apiInfo, String.class);
+
+ int statusCodeValue = response.getStatusCodeValue();
+ ApiResponseVO apiResponseVO = new ApiResponseVO(statusCodeValue + "", response.getBody());
+
+ return apiResponseVO;
+ }
+}
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
deleted file mode 100644
index 8b13789..0000000
--- a/src/main/resources/application.properties
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml
new file mode 100644
index 0000000..5a7bc66
--- /dev/null
+++ b/src/main/resources/application.yml
@@ -0,0 +1,42 @@
+spring:
+ batch:
+
+ profiles:
+ active: mysql
+ jpa:
+ hibernate:
+ ddl-auto: update
+ database-platform: org.hibernate.dialect.MySQL5InnoDBDialect
+ show-sql: true
+ properties:
+ hibernate.format_sql: true
+
+---
+spring:
+ config:
+ activate:
+ on-profile: local
+ datasource:
+ hikari:
+ jdbc-url: jdbc:h2:mem:testdb;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE
+ username: sa
+ password:
+ driver-class-name: org.h2.Driver
+
+---
+spring:
+ config:
+ activate:
+ on-profile: mysql
+ datasource:
+ hikari:
+ jdbc-url: jdbc:mysql://localhost:3306/springbatch?useUnicode=true&characterEncoding=utf8
+ username: root
+ password: pass
+ driver-class-name: com.mysql.jdbc.Driver
+ batch:
+ job:
+ names: ${job.name:NONE}
+ enabled: false
+ jdbc:
+ initialize-schema: always
\ No newline at end of file
diff --git a/src/main/resources/data-mysql.sql b/src/main/resources/data-mysql.sql
new file mode 100644
index 0000000..a3e3f0d
--- /dev/null
+++ b/src/main/resources/data-mysql.sql
@@ -0,0 +1,32 @@
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (1,"user1",1000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (2,"user2",2000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (3,"user3",3000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (4,"user4",4000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (5,"user5",5000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (6,"user6",6000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (7,"user7",7000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (8,"user8",8000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (9,"user9",9000,"1");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (10,"user10",10000,"1");
+
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (11,"user11",11000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (12,"user12",12000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (13,"user13",13000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (14,"user14",14000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (15,"user15",15000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (16,"user16",16000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (17,"user17",17000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (18,"user18",18000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (19,"user19",19000,"2");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (20,"user20",20000,"2");
+
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (21,"user21",21000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (22,"user22",22000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (23,"user23",23000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (24,"user24",24000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (25,"user25",25000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (26,"user26",26000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (27,"user27",27000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (28,"user28",28000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (29,"user29",29000,"3");
+INSERT INTO `customer` (`id`,`name`,`price`,`type`) VALUES (30,"user30",30000,"3");
\ No newline at end of file
diff --git a/src/main/resources/product_20210101.csv b/src/main/resources/product_20210101.csv
new file mode 100644
index 0000000..4b40c4d
--- /dev/null
+++ b/src/main/resources/product_20210101.csv
@@ -0,0 +1,31 @@
+id,name,price,type
+1,user1,1000,1
+2,user2,2000,1
+3,user3,3000,1
+4,user4,4000,1
+5,user5,5000,1
+6,user6,6000,1
+7,user7,7000,1
+8,user8,8000,1
+9,user9,9000,1
+10,user10,10000,1
+11,user11,11000,2
+12,user12,12000,2
+13,user13,13000,2
+14,user14,14000,2
+15,user15,15000,2
+16,user16,16000,2
+17,user17,17000,2
+18,user18,18000,2
+19,user19,19000,2
+20,user20,20000,2
+21,user21,21000,3
+22,user22,22000,3
+23,user23,23000,3
+24,user24,24000,3
+25,user25,25000,3
+26,user26,26000,3
+27,user27,27000,3
+28,user28,28000,3
+29,user29,29000,3
+30,user30,30000,3
\ No newline at end of file
diff --git a/src/main/resources/product_20210102.csv b/src/main/resources/product_20210102.csv
new file mode 100644
index 0000000..3568d84
--- /dev/null
+++ b/src/main/resources/product_20210102.csv
@@ -0,0 +1,31 @@
+id,name,price,type
+31,user31,1000,1
+32,user32,2000,1
+33,user33,3000,1
+34,user34,4000,1
+35,user35,5000,1
+36,user36,6000,1
+37,user37,7000,1
+38,user38,8000,1
+39,user39,9000,1
+40,user40,10000,1
+41,user41,11000,2
+42,user42,12000,2
+43,user43,13000,2
+44,user44,14000,2
+45,user45,15000,2
+46,user46,16000,2
+47,user47,17000,2
+48,user48,18000,2
+49,user49,19000,2
+50,user50,20000,2
+51,user51,21000,3
+52,user52,22000,3
+53,user53,23000,3
+54,user54,24000,3
+55,user55,25000,3
+56,user56,26000,3
+57,user57,27000,3
+58,user58,28000,3
+59,user59,29000,3
+60,user50,30000,3
\ No newline at end of file
diff --git a/src/test/java/io/springbatch/springbatchlecture/SpringBatchLectureApplicationTests.java b/src/test/java/io/springbatch/springbatchlecture/SpringBatchLectureApplicationTests.java
deleted file mode 100644
index 30f8787..0000000
--- a/src/test/java/io/springbatch/springbatchlecture/SpringBatchLectureApplicationTests.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package io.springbatch.springbatchlecture;
-
-import org.junit.jupiter.api.Test;
-import org.springframework.boot.test.context.SpringBootTest;
-
-@SpringBootTest
-class SpringBatchLectureApplicationTests {
-
- @Test
- void contextLoads() {
- }
-
-}