Merge pull request '스프링 배치 6/18_1' (#97) from feature/cha into dev

Reviewed-on: #97
This commit is contained in:
ysCha 2025-06-18 13:41:15 +09:00
commit d5a43e2ae6
6 changed files with 166 additions and 18 deletions

View File

@ -6,6 +6,7 @@ import javax.sql.DataSource;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
@ -30,6 +31,8 @@ public class BatchConfiguration {
JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
factory.setDataSource(dataSource);
factory.setTransactionManager(transactionManager);
factory.setIncrementerFactory(new CustomDefaultDataFieldMaxValueIncrementerFactory(dataSource));
// Use TABLE_PREFIX to map to Spring Batch tables
factory.setTablePrefix("BATCH_");
@ -38,6 +41,7 @@ public class BatchConfiguration {
factory.setIsolationLevelForCreate("ISOLATION_SERIALIZABLE");
// Set to "SQLSERVER" for Spring Batch compatibility
factory.setDatabaseType("SQLSERVER");
// Finalize factory setup
@ -45,6 +49,13 @@ public class BatchConfiguration {
return factory.getObject();
}
/**
 * Exposes the SQL Server table-based incrementer factory as a Spring bean.
 *
 * <p>NOTE(review): the {@code jobRepository} factory above already constructs its own
 * {@code CustomDefaultDataFieldMaxValueIncrementerFactory} instance directly; this bean
 * appears to exist for other injection points — confirm both are needed.
 *
 * @param dataSource the batch metadata DataSource backing the sequence tables
 * @return incrementer factory that always produces table-based incrementers
 */
@Bean
public DataFieldMaxValueIncrementerFactory customIncrementerFactory(DataSource dataSource) {
    DataFieldMaxValueIncrementerFactory incrementerFactory =
            new CustomDefaultDataFieldMaxValueIncrementerFactory(dataSource);
    return incrementerFactory;
}
/**
* TaskExecutor for batch processing (multithreading support).
*/

View File

@ -0,0 +1,22 @@
package com.interplug.qcast.config.batch;
import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory;
import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer;
import org.springframework.jdbc.support.incrementer.SqlServerSequenceMaxValueIncrementer;
import javax.sql.DataSource;
/**
 * Incrementer factory that always hands out {@link CustomIncrementer} instances,
 * so Spring Batch key generation goes through the table-based incrementer rather
 * than the default per-database implementations.
 */
public class CustomDefaultDataFieldMaxValueIncrementerFactory
        extends DefaultDataFieldMaxValueIncrementerFactory {

    // Kept locally because the superclass does not expose its DataSource to subclasses.
    private final DataSource dataSource;

    public CustomDefaultDataFieldMaxValueIncrementerFactory(DataSource dataSource) {
        super(dataSource);
        this.dataSource = dataSource;
    }

    /**
     * Returns a {@link CustomIncrementer} backed by the sequence table named
     * {@code incrementerName}.
     *
     * @param databaseType ignored — the same incrementer is used for every database type
     * @param incrementerName name of the sequence table (e.g. {@code BATCH_JOB_SEQ})
     */
    @Override
    public DataFieldMaxValueIncrementer getIncrementer(String databaseType, String incrementerName) {
        return new CustomIncrementer(this.dataSource, incrementerName);
    }
}

View File

@ -0,0 +1,39 @@
package com.interplug.qcast.config.batch;
import org.springframework.jdbc.support.incrementer.AbstractColumnMaxValueIncrementer;
import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Table-based key incrementer for SQL Server.
 *
 * <p>Each sequence table (e.g. {@code BATCH_JOB_SEQ}) holds a single row whose
 * {@code ID} column is the last key handed out. {@link #getNextKey()} reads that
 * value under {@code UPDLOCK, HOLDLOCK}, writes back {@code value + 1}, and returns
 * the incremented value.
 */
public class CustomIncrementer extends AbstractColumnMaxValueIncrementer {

    public CustomIncrementer(DataSource dataSource, String incrementerName) {
        super(dataSource, incrementerName, "ID");
    }

    /**
     * Atomically reserves and returns the next key from the sequence table.
     *
     * <p>The original implementation ran both statements in autocommit mode, so the
     * {@code UPDLOCK, HOLDLOCK} hints only covered the SELECT's own implicit
     * transaction — two threads could read the same value and hand out duplicate
     * keys. The SELECT and UPDATE must share one explicit transaction for the lock
     * to serialize callers.
     *
     * @return the newly reserved key (previous stored value + 1)
     * @throws RuntimeException if the sequence table cannot be read or updated,
     *         or if it contains no row to increment
     */
    @Override
    protected long getNextKey() {
        try (Connection conn = this.getDataSource().getConnection()) {
            boolean restoreAutoCommit = conn.getAutoCommit();
            // Locks acquired by the SELECT must survive until after the UPDATE.
            conn.setAutoCommit(false);
            try {
                long currentKey = 0;
                // UPDLOCK + HOLDLOCK: block concurrent readers until this transaction ends.
                try (PreparedStatement psSelect = conn.prepareStatement(
                                "SELECT ID FROM " + getIncrementerName() + " WITH (UPDLOCK, HOLDLOCK)");
                        ResultSet rs = psSelect.executeQuery()) {
                    if (rs.next()) {
                        currentKey = rs.getLong(1);
                    }
                }
                try (PreparedStatement psUpdate = conn.prepareStatement(
                        "UPDATE " + getIncrementerName() + " SET ID = ?")) {
                    psUpdate.setLong(1, currentKey + 1);
                    if (psUpdate.executeUpdate() == 0) {
                        // Previously this was silently ignored and the same key was
                        // returned forever; the seed row must exist.
                        throw new SQLException(
                                "Sequence table " + getIncrementerName() + " contains no row to increment");
                    }
                }
                conn.commit();
                return currentKey + 1;
            } catch (SQLException ex) {
                conn.rollback();
                throw ex;
            } finally {
                conn.setAutoCommit(restoreAutoCommit);
            }
        } catch (SQLException e) {
            throw new RuntimeException("Could not get next key for " + getIncrementerName(), e);
        }
    }
}

View File

@ -0,0 +1,94 @@
-- Spring Batch metadata schema (SQL Server dialect). Created manually because
-- spring.batch.jdbc.initialize-schema is set to "never" in the application config.

-- One row per logical job instance (unique per job name + identifying-parameter key).
CREATE TABLE BATCH_JOB_INSTANCE (
JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT NULL,
JOB_NAME VARCHAR(100) NOT NULL,
JOB_KEY VARCHAR(32) NOT NULL,
constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)
) ;
-- One row per run of a job instance.
CREATE TABLE BATCH_JOB_EXECUTION (
JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT NULL,
JOB_INSTANCE_ID BIGINT NOT NULL,
CREATE_TIME DATETIME NOT NULL,
START_TIME DATETIME DEFAULT NULL ,
END_TIME DATETIME DEFAULT NULL ,
STATUS VARCHAR(10) NULL,
EXIT_CODE VARCHAR(2500) NULL,
EXIT_MESSAGE VARCHAR(2500) NULL,
LAST_UPDATED DATETIME NULL,
constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)
references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)
) ;
-- Job parameters for each execution; IDENTIFYING marks params that form the job key.
CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (
JOB_EXECUTION_ID BIGINT NOT NULL ,
PARAMETER_NAME VARCHAR(100) NOT NULL ,
PARAMETER_TYPE VARCHAR(100) NOT NULL ,
PARAMETER_VALUE VARCHAR(2500) ,
IDENTIFYING CHAR(1) NOT NULL ,
constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
-- One row per step run within a job execution, with read/write/skip counters.
CREATE TABLE BATCH_STEP_EXECUTION (
STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
VERSION BIGINT NOT NULL,
STEP_NAME VARCHAR(100) NOT NULL,
JOB_EXECUTION_ID BIGINT NOT NULL,
CREATE_TIME DATETIME NOT NULL,
START_TIME DATETIME DEFAULT NULL ,
END_TIME DATETIME DEFAULT NULL ,
STATUS VARCHAR(10) NULL,
COMMIT_COUNT BIGINT NULL,
READ_COUNT BIGINT NULL,
FILTER_COUNT BIGINT NULL,
WRITE_COUNT BIGINT NULL,
READ_SKIP_COUNT BIGINT NULL,
WRITE_SKIP_COUNT BIGINT NULL,
PROCESS_SKIP_COUNT BIGINT NULL,
ROLLBACK_COUNT BIGINT NULL,
EXIT_CODE VARCHAR(2500) NULL,
EXIT_MESSAGE VARCHAR(2500) NULL,
LAST_UPDATED DATETIME NULL,
constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
-- Serialized ExecutionContext for each step execution (overflow in SERIALIZED_CONTEXT).
CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT (
STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR(2500) NOT NULL,
SERIALIZED_CONTEXT VARCHAR(MAX) NULL,
constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)
references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)
) ;
-- Serialized ExecutionContext for each job execution.
CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (
JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR(2500) NOT NULL,
SERIALIZED_CONTEXT VARCHAR(MAX) NULL,
constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
-- Key-generation tables read by CustomIncrementer, which SELECTs ID under
-- UPDLOCK/HOLDLOCK, writes ID = ID + 1, and returns the incremented value.
-- The incrementer only ever touches the ID column, so the previous extra
-- NEXT_VAL column on BATCH_JOB_SEQ was dead weight, and the previous mixed
-- seeds (1, 1, 0) made first generated keys inconsistent (2, 2, 1). All three
-- tables now share one shape and are seeded at 0 so every sequence starts at 1.
CREATE TABLE BATCH_JOB_SEQ (
ID BIGINT NOT NULL PRIMARY KEY
);
-- Seed the single sequence row (first generated key will be 1)
INSERT INTO BATCH_JOB_SEQ (ID) VALUES (0);
CREATE TABLE BATCH_JOB_EXECUTION_SEQ (
ID BIGINT NOT NULL PRIMARY KEY
);
INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID) VALUES (0);
CREATE TABLE BATCH_STEP_EXECUTION_SEQ (
ID BIGINT NOT NULL PRIMARY KEY
);
INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID) VALUES (0);

View File

@ -20,8 +20,6 @@ spring:
batch:
jdbc:
initialize-schema: never
table-prefix: BATCH_
schema:
job:
names: ${job.name:NONE}
enabled: false

View File

@ -15,21 +15,6 @@ spring:
username: pvDBuser
maximum-pool-size: 4
pool-name: Master-HikariPool
# datasource:
# master:
# driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
# jdbc-url: jdbc:log4jdbc:sqlserver://1.248.227.176:1433;databaseName=NEWPVCAD;encrypt=true;trustServerCertificate=true
# username: pvDBuser
# password: ENC(W7owprYnvf7vqwO6Piw4dHfVBCSxE4Ck)
# maximum-pool-size: 4
# pool-name: Master-HikariPool
# read:
# driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
# jdbc-url: jdbc:log4jdbc:sqlserver://1.248.227.176:1433;databaseName=NEWPVCAD;encrypt=true;trustServerCertificate=true
# username: pvDBuser
# password: ENC(W7owprYnvf7vqwO6Piw4dHfVBCSxE4Ck)
# maximum-pool-size: 4
# pool-name: Read-HikariPool
jackson:
time-zone: Asia/Seoul
batch:
@ -40,7 +25,6 @@ spring:
enabled: false
profiles:
scheduler: Y
batch:
job:
enabled: true