diff --git a/src/main/java/com/interplug/qcast/batch/JobLauncherController.java b/src/main/java/com/interplug/qcast/batch/JobLauncherController.java
index b175df11..62751ac5 100644
--- a/src/main/java/com/interplug/qcast/batch/JobLauncherController.java
+++ b/src/main/java/com/interplug/qcast/batch/JobLauncherController.java
@@ -151,9 +151,9 @@ public class JobLauncherController {
JobParameters jobParameters =
new JobParametersBuilder().addDate("time", new Date()).toJobParameters();
- if (batchJobEnabled) {
+ //if (batchJobEnabled) {
jobLauncher.run(job, jobParameters);
- }
+ //}
}
return "OK";
}
@@ -181,9 +181,9 @@ public class JobLauncherController {
JobParameters jobParameters =
new JobParametersBuilder().addDate("time", new Date()).toJobParameters();
- if (batchJobEnabled) {
+ //if (batchJobEnabled) {
jobLauncher.run(job, jobParameters);
- }
+ //}
}
return "OK";
@@ -274,9 +274,9 @@ public class JobLauncherController {
JobParameters jobParameters =
new JobParametersBuilder().addDate("time", new Date()).toJobParameters();
- if (batchJobEnabled) {
+ // if (batchJobEnabled) {
jobLauncher.run(job, jobParameters);
- }
+ // }
}
return "OK";
diff --git a/src/main/java/com/interplug/qcast/config/batch/BatchConfiguration.java b/src/main/java/com/interplug/qcast/config/batch/BatchConfiguration.java
new file mode 100644
index 00000000..e26a9246
--- /dev/null
+++ b/src/main/java/com/interplug/qcast/config/batch/BatchConfiguration.java
@@ -0,0 +1,72 @@
+package com.interplug.qcast.config.batch;
+
+import javax.sql.DataSource;
+
+import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
+import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;
+import org.springframework.core.task.TaskExecutor;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+import org.springframework.transaction.PlatformTransactionManager;
+
+/**
+ * Spring Batch configuration for SQL Server 2008 without sequence support.
+ */
+@Configuration
+@EnableBatchProcessing
+public class BatchConfiguration {
+
+ /**
+ * Configures the JobRepository for SQL Server 2008, which lacks sequence
+ * support: execution IDs are generated from single-row tables by the custom
+ * incrementer factory instead of database sequences.
+ */
+ @Bean
+ @Primary
+ public JobRepository jobRepository(DataSource dataSource, PlatformTransactionManager transactionManager) throws Exception {
+ JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
+ factory.setDataSource(dataSource);
+ factory.setTransactionManager(transactionManager);
+ factory.setIncrementerFactory(new CustomDefaultDataFieldMaxValueIncrementerFactory(dataSource));
+
+ // Prefix of the Spring Batch metadata tables (BATCH_JOB_INSTANCE, BATCH_JOB_EXECUTION, ...)
+ factory.setTablePrefix("BATCH_");
+
+ // Isolation level setting for SQL Server
+ factory.setIsolationLevelForCreate("ISOLATION_SERIALIZABLE");
+
+ // Set to "SQLSERVER" for Spring Batch compatibility
+ factory.setDatabaseType("SQLSERVER");
+
+ // Finalize factory setup
+ factory.afterPropertiesSet();
+ return factory.getObject();
+ }
+
+ @Bean
+ public DataFieldMaxValueIncrementerFactory customIncrementerFactory(DataSource dataSource) {
+ return new CustomDefaultDataFieldMaxValueIncrementerFactory(dataSource);
+ }
+
+ /**
+ * TaskExecutor for batch processing (multithreading support).
+ */
+ @Bean
+ public TaskExecutor batchTaskExecutor() {
+ ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+ executor.setCorePoolSize(5);
+ executor.setMaxPoolSize(10);
+ executor.setQueueCapacity(25);
+ executor.setThreadNamePrefix("batch-thread-");
+ executor.initialize();
+ return executor;
+ }
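+
+ /*
+ * Usage sketch (not wired up in this configuration): a chunk-oriented step can
+ * opt into this executor so chunks are processed on multiple threads. The step
+ * name, item type, reader and writer below are placeholders.
+ *
+ * new StepBuilder("exampleStep", jobRepository)
+ *     .<Foo, Foo>chunk(100, transactionManager)
+ *     .reader(reader)
+ *     .writer(writer)
+ *     .taskExecutor(batchTaskExecutor())
+ *     .build();
+ */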
+}
\ No newline at end of file
diff --git a/src/main/java/com/interplug/qcast/config/batch/CustomDefaultDataFieldMaxValueIncrementerFactory.java b/src/main/java/com/interplug/qcast/config/batch/CustomDefaultDataFieldMaxValueIncrementerFactory.java
new file mode 100644
index 00000000..f005f042
--- /dev/null
+++ b/src/main/java/com/interplug/qcast/config/batch/CustomDefaultDataFieldMaxValueIncrementerFactory.java
@@ -0,0 +1,22 @@
+package com.interplug.qcast.config.batch;
+
+import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory;
+import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer;
+
+import javax.sql.DataSource;
+
+public class CustomDefaultDataFieldMaxValueIncrementerFactory extends DefaultDataFieldMaxValueIncrementerFactory {
+ private final DataSource dataSource;
+
+ public CustomDefaultDataFieldMaxValueIncrementerFactory(DataSource dataSource) {
+ super(dataSource);
+ this.dataSource = dataSource;
+ }
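+
+ /**
+ * Spring Batch requests incrementers named after the sequence tables (with the
+ * configured prefix: BATCH_JOB_SEQ, BATCH_JOB_EXECUTION_SEQ,
+ * BATCH_STEP_EXECUTION_SEQ). Regardless of databaseType, each name is served by
+ * a {@link CustomIncrementer} that walks the matching single-row table.
+ */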
+ @Override
+ public DataFieldMaxValueIncrementer getIncrementer(String databaseType, String incrementerName) {
+ return new CustomIncrementer(this.dataSource, incrementerName);
+ }
+
+}
\ No newline at end of file
diff --git a/src/main/java/com/interplug/qcast/config/batch/CustomIncrementer.java b/src/main/java/com/interplug/qcast/config/batch/CustomIncrementer.java
new file mode 100644
index 00000000..2f4ea0f8
--- /dev/null
+++ b/src/main/java/com/interplug/qcast/config/batch/CustomIncrementer.java
@@ -0,0 +1,39 @@
+package com.interplug.qcast.config.batch;
+
+import org.springframework.jdbc.support.incrementer.AbstractColumnMaxValueIncrementer;
+
+import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
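+
+/**
+ * Sequence replacement for SQL Server 2008: each "sequence" is a single-row
+ * table whose ID column is read and incremented under an update lock.
+ */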
+public class CustomIncrementer extends AbstractColumnMaxValueIncrementer {
+
+ public CustomIncrementer(DataSource dataSource, String incrementerName) {
+ super(dataSource, incrementerName, "ID");
+ }
+
+ @Override
+ protected long getNextKey() {
+   long nextKey = 0;
+   try (Connection conn = this.getDataSource().getConnection()) {
+     // Read the current value, taking an update/range lock on the single row.
+     // UPDLOCK/HOLDLOCK only block concurrent callers until the surrounding
+     // transaction ends, so this relies on running inside one transaction.
+     try (PreparedStatement psSelect = conn.prepareStatement(
+         "SELECT " + getColumnName() + " FROM " + getIncrementerName() + " WITH (UPDLOCK, HOLDLOCK)");
+         ResultSet rs = psSelect.executeQuery()) {
+       if (rs.next()) {
+         nextKey = rs.getLong(1);
+       }
+     }
+
+     // Persist the incremented value.
+     try (PreparedStatement psUpdate = conn.prepareStatement(
+         "UPDATE " + getIncrementerName() + " SET " + getColumnName() + " = ?")) {
+       psUpdate.setLong(1, nextKey + 1);
+       psUpdate.executeUpdate();
+     }
+   } catch (SQLException e) {
+     throw new RuntimeException("Could not get next key for " + getIncrementerName(), e);
+   }
+   return nextKey + 1;
+ }
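+
+ /*
+ * Usage sketch: with a backing table seeded as in SqlServer.sql, e.g.
+ *   CREATE TABLE BATCH_STEP_EXECUTION_SEQ (ID BIGINT NOT NULL PRIMARY KEY);
+ *   INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID) VALUES (0);
+ * successive calls to
+ *   new CustomIncrementer(dataSource, "BATCH_STEP_EXECUTION_SEQ").nextLongValue()
+ * return 1, 2, 3, ...
+ */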
+
+}
diff --git a/src/main/java/com/interplug/qcast/config/batch/SqlServer.sql b/src/main/java/com/interplug/qcast/config/batch/SqlServer.sql
new file mode 100644
index 00000000..34e5dfdc
--- /dev/null
+++ b/src/main/java/com/interplug/qcast/config/batch/SqlServer.sql
@@ -0,0 +1,94 @@
+CREATE TABLE BATCH_JOB_INSTANCE (
+ JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY ,
+ VERSION BIGINT NULL,
+ JOB_NAME VARCHAR(100) NOT NULL,
+ JOB_KEY VARCHAR(32) NOT NULL,
+ constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)
+) ;
+
+CREATE TABLE BATCH_JOB_EXECUTION (
+ JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
+ VERSION BIGINT NULL,
+ JOB_INSTANCE_ID BIGINT NOT NULL,
+ CREATE_TIME DATETIME NOT NULL,
+ START_TIME DATETIME DEFAULT NULL ,
+ END_TIME DATETIME DEFAULT NULL ,
+ STATUS VARCHAR(10) NULL,
+ EXIT_CODE VARCHAR(2500) NULL,
+ EXIT_MESSAGE VARCHAR(2500) NULL,
+ LAST_UPDATED DATETIME NULL,
+ constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)
+ references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)
+) ;
+
+CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (
+ JOB_EXECUTION_ID BIGINT NOT NULL ,
+ PARAMETER_NAME VARCHAR(100) NOT NULL ,
+ PARAMETER_TYPE VARCHAR(100) NOT NULL ,
+ PARAMETER_VALUE VARCHAR(2500) ,
+ IDENTIFYING CHAR(1) NOT NULL ,
+ constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)
+ references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
+) ;
+
+CREATE TABLE BATCH_STEP_EXECUTION (
+ STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
+ VERSION BIGINT NOT NULL,
+ STEP_NAME VARCHAR(100) NOT NULL,
+ JOB_EXECUTION_ID BIGINT NOT NULL,
+ CREATE_TIME DATETIME NOT NULL,
+ START_TIME DATETIME DEFAULT NULL ,
+ END_TIME DATETIME DEFAULT NULL ,
+ STATUS VARCHAR(10) NULL,
+ COMMIT_COUNT BIGINT NULL,
+ READ_COUNT BIGINT NULL,
+ FILTER_COUNT BIGINT NULL,
+ WRITE_COUNT BIGINT NULL,
+ READ_SKIP_COUNT BIGINT NULL,
+ WRITE_SKIP_COUNT BIGINT NULL,
+ PROCESS_SKIP_COUNT BIGINT NULL,
+ ROLLBACK_COUNT BIGINT NULL,
+ EXIT_CODE VARCHAR(2500) NULL,
+ EXIT_MESSAGE VARCHAR(2500) NULL,
+ LAST_UPDATED DATETIME NULL,
+ constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)
+ references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
+) ;
+
+CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT (
+ STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
+ SHORT_CONTEXT VARCHAR(2500) NOT NULL,
+ SERIALIZED_CONTEXT VARCHAR(MAX) NULL,
+ constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)
+ references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)
+) ;
+
+CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (
+ JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,
+ SHORT_CONTEXT VARCHAR(2500) NOT NULL,
+ SERIALIZED_CONTEXT VARCHAR(MAX) NULL,
+ constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)
+ references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
+) ;
+
+-- Single-row "sequence" tables walked by CustomIncrementer. Only the ID column
+-- is read and updated; each table is seeded with 0 so the first generated key is 1.
+CREATE TABLE BATCH_JOB_SEQ (
+ ID BIGINT NOT NULL PRIMARY KEY
+);
+
+INSERT INTO BATCH_JOB_SEQ (ID) VALUES (0);
+
+CREATE TABLE BATCH_JOB_EXECUTION_SEQ (
+ ID BIGINT NOT NULL PRIMARY KEY
+);
+
+INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID) VALUES (0);
+
+CREATE TABLE BATCH_STEP_EXECUTION_SEQ (
+ ID BIGINT NOT NULL PRIMARY KEY
+);
+
+INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID) VALUES (0);
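+
+-- For reference, CustomIncrementer obtains each new key roughly like this
+-- (the previous value is read under UPDLOCK/HOLDLOCK, then bumped by one):
+--   SELECT ID FROM BATCH_STEP_EXECUTION_SEQ WITH (UPDLOCK, HOLDLOCK);
+--   UPDATE BATCH_STEP_EXECUTION_SEQ SET ID = <previous value> + 1;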
diff --git a/src/main/resources/config/application-local.yml b/src/main/resources/config/application-local.yml
index b165146a..d70bbfa8 100644
--- a/src/main/resources/config/application-local.yml
+++ b/src/main/resources/config/application-local.yml
@@ -15,21 +15,6 @@ spring:
username: pvDBuser
maximum-pool-size: 4
pool-name: Master-HikariPool
- # datasource:
- # master:
- # driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
- # jdbc-url: jdbc:log4jdbc:sqlserver://1.248.227.176:1433;databaseName=NEWPVCAD;encrypt=true;trustServerCertificate=true
- # username: pvDBuser
- # password: ENC(W7owprYnvf7vqwO6Piw4dHfVBCSxE4Ck)
- # maximum-pool-size: 4
- # pool-name: Master-HikariPool
- # read:
- # driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
- # jdbc-url: jdbc:log4jdbc:sqlserver://1.248.227.176:1433;databaseName=NEWPVCAD;encrypt=true;trustServerCertificate=true
- # username: pvDBuser
- # password: ENC(W7owprYnvf7vqwO6Piw4dHfVBCSxE4Ck)
- # maximum-pool-size: 4
- # pool-name: Read-HikariPool
jackson:
time-zone: Asia/Seoul
batch:
@@ -40,7 +25,6 @@ spring:
enabled: false
profiles:
scheduler: Y
-
batch:
job:
enabled: true
diff --git a/src/main/resources/logback/logback-dev.xml b/src/main/resources/logback/logback-dev.xml
index 0356cb1d..d13a536a 100644
--- a/src/main/resources/logback/logback-dev.xml
+++ b/src/main/resources/logback/logback-dev.xml
@@ -36,4 +36,14 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/main/resources/logback/logback-local.xml b/src/main/resources/logback/logback-local.xml
index 2b75f05f..43c24fb9 100644
--- a/src/main/resources/logback/logback-local.xml
+++ b/src/main/resources/logback/logback-local.xml
@@ -36,4 +36,15 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file