One Developer

Reputation: 566

Spring Batch Deadlock - Could not increment identity; nested exception is com.microsoft.sqlserver.jdbc.SQLServerException

We are in the process of moving to Azure SQL Server from Oracle DB for our Spring Batch application.

I am getting the following error when executing two different jobs at the same time. The jobs update different tables, but they share the same common BATCH_ metadata tables:

Caused by: org.springframework.dao.DataAccessResourceFailureException: Could not increment identity; nested exception is com.microsoft.sqlserver.jdbc.SQLServerException: Transaction (Process ID 167) was deadlocked on lock resources with another process and has been chosen as the deadlock victim. Rerun the transaction.
    at org.springframework.jdbc.support.incrementer.SqlServerMaxValueIncrementer.getNextKey(SqlServerMaxValueIncrementer.java:124) ~[bat-applybatch-jobs-2.2.12-SNAPSHOT.jar:?]
    at org.springframework.jdbc.support.incrementer.AbstractDataFieldMaxValueIncrementer.nextLongValue(AbstractDataFieldMaxValueIncrementer.java:125)

My Job Repository configuration

<job-repository id="jobRepository" isolation-level-for-create="READ_COMMITTED" />

Database deadlock graph:

<deadlock>
  <victim-list>
    <victimProcess id="process2a41675a4e8" />
  </victim-list>
  <process-list>
    <process id="process2a41675a4e8" taskpriority="0" logused="280" waitresource="RID: 6:9:24682488:29" waittime="4984" ownerId="696000712" transactionname="implicit_transaction" lasttranstarted="2021-12-29T12:18:30.153" XDES="0x29a22bc4428" lockMode="U" schedulerid="4" kpid="52760" status="suspended" spid="173" sbid="0" ecid="0" priority="0" trancount="2" lastbatchstarted="2021-12-29T12:18:30.157" lastbatchcompleted="2021-12-29T12:18:30.153" lastattention="1900-01-01T00:00:00.153" clientapp="Microsoft JDBC Driver for SQL Server" hostname="ServerName" hostpid="0" loginname="LoginName" isolationlevel="read committed (2)" xactid="696000712" currentdb="6" currentdbname="Database" lockTimeout="4294967295" clientoption1="671088672" clientoption2="128058">
      <executionStack>
        <frame procname="unknown" queryhash="0xadc42a7474869694" queryplanhash="0x238c4f9df8a5d6cc" line="1" stmtstart="26" stmtend="146" sqlhandle="0x020000007654041849f4ffe980c136b592ccbe8260983e220000000000000000000000000000000000000000">
unknown    </frame>
        <frame procname="unknown" queryhash="0xadc42a7474869694" queryplanhash="0x238c4f9df8a5d6cc" line="1" stmtend="126" sqlhandle="0x0200000045a2af306ade799ae9ffa65edc0f722c526e26330000000000000000000000000000000000000000">
unknown    </frame>
      </executionStack>
      <inputbuf>
delete from LoginName.BATCH_STEP_EXECUTION_SEQ where ID &lt; 10899   </inputbuf>
    </process>
    <process id="process2a42d680ca8" taskpriority="0" logused="420" waitresource="RID: 6:9:24682490:8" waittime="4984" ownerId="696000707" transactionname="implicit_transaction" lasttranstarted="2021-12-29T12:18:30.153" XDES="0x2a41ae18428" lockMode="U" schedulerid="7" kpid="53280" status="suspended" spid="129" sbid="0" ecid="0" priority="0" trancount="2" lastbatchstarted="2021-12-29T12:18:30.153" lastbatchcompleted="2021-12-29T12:18:30.153" lastattention="1900-01-01T00:00:00.153" clientapp="Microsoft JDBC Driver for SQL Server" hostname="ServerName" hostpid="0" loginname="LoginName" isolationlevel="read committed (2)" xactid="696000707" currentdb="6" currentdbname="Database" lockTimeout="4294967295" clientoption1="671088672" clientoption2="128058">
      <executionStack>
        <frame procname="unknown" queryhash="0xadc42a7474869694" queryplanhash="0x238c4f9df8a5d6cc" line="1" stmtstart="26" stmtend="146" sqlhandle="0x020000007654041849f4ffe980c136b592ccbe8260983e220000000000000000000000000000000000000000">
unknown    </frame>
        <frame procname="unknown" queryhash="0xadc42a7474869694" queryplanhash="0x238c4f9df8a5d6cc" line="1" stmtend="126" sqlhandle="0x02000000a0f1f51de77e1eefa19367c42fc9d1938c2075020000000000000000000000000000000000000000">
unknown    </frame>
      </executionStack>
      <inputbuf>
delete from LoginName.BATCH_STEP_EXECUTION_SEQ where ID &lt; 10898   </inputbuf>
    </process>
  </process-list>
  <resource-list>
    <ridlock fileid="9" pageid="24682488" dbid="6" objectname="162589bb-bc36-4834-8bdc-e58a2deca742.LoginName.BATCH_STEP_EXECUTION_SEQ" id="lock2a043bbcc00" mode="X" associatedObjectId="72057594071547904">
      <owner-list>
        <owner id="process2a42d680ca8" mode="X" />
      </owner-list>
      <waiter-list>
        <waiter id="process2a41675a4e8" mode="U" requestType="wait" />
      </waiter-list>
    </ridlock>
    <ridlock fileid="9" pageid="24682490" dbid="6" objectname="162589bb-bc36-4834-8bdc-e58a2deca742.LoginName.BATCH_STEP_EXECUTION_SEQ" id="lock29f5f1b7f00" mode="X" associatedObjectId="72057594071547904">
      <owner-list>
        <owner id="process2a41675a4e8" mode="X" />
      </owner-list>
      <waiter-list>
        <waiter id="process2a42d680ca8" mode="U" requestType="wait" />
      </waiter-list>
    </ridlock>
  </resource-list>
</deadlock>
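
As far as I can tell, the delete statements in the inputbuf come from SqlServerMaxValueIncrementer (see the stack trace above), which appears to do roughly the following for each generated key. This is only a simplified sketch of my understanding, not the actual Spring Framework source, and the exact statements may differ:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Simplified sketch of what an identity-table-based incrementer does per key.
// The real SqlServerMaxValueIncrementer lives in
// org.springframework.jdbc.support.incrementer and may differ in detail.
public class IdentityIncrementerSketch {

    long nextKey(Connection con) throws SQLException {
        try (Statement stmt = con.createStatement()) {
            // 1. Insert a row so the IDENTITY column produces the next value.
            stmt.executeUpdate("insert into BATCH_STEP_EXECUTION_SEQ default values");

            // 2. Read the generated value back.
            long nextId;
            try (ResultSet rs = stmt.executeQuery("select @@identity")) {
                rs.next();
                nextId = rs.getLong(1);
            }

            // 3. Clean up older rows to keep the table small -- this is the
            //    "delete from ... where ID < n" visible in the deadlock graph,
            //    and the statement the two concurrent jobs deadlock on.
            stmt.executeUpdate("delete from BATCH_STEP_EXECUTION_SEQ where ID < " + nextId);
            return nextId;
        }
    }
}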

I have tried:

<job-repository id="jobRepository" isolation-level-for-create="READ_UNCOMMITTED" />

<job-repository id="jobRepository"
                isolation-level-for-create="ISOLATION_REPEATABLE_READ" />

<job-repository id="jobRepository"
                isolation-level-for-create="SERIALIZABLE" />

I have created the sequence tables as shown below:

CREATE TABLE BATCH_STEP_EXECUTION_SEQ (
    ID BIGINT IDENTITY(<last Oracle sequence value>, 1)
);

CREATE TABLE BATCH_JOB_EXECUTION_SEQ (
    ID BIGINT IDENTITY(<last Oracle sequence value>, 1)
);

CREATE TABLE BATCH_JOB_SEQ (
    ID BIGINT IDENTITY(<last Oracle sequence value>, 1)
);

What is the issue, and how do I fix it?

Update: Job Definition

<bean id="simpleStep" class="org.springframework.batch.core.step.factory.SimpleStepFactoryBean"
    abstract="true">
    <property name="transactionManager" ref="transactionManager" />
    <property name="jobRepository" ref="jobRepository" />
    <property name="startLimit" value="100" />
    <property name="commitInterval" value="1" />
</bean>

Update #2: Can I try something like this?

<bean id="informixIncrementer" class="com.bah.batch.informixsupport.InformixMaxValueIncrementerFactory"><property name="dataSource" ref="dataSource" />

<bean  id="jobRepository" class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean" isolation-level-for-create="READ_COMMITTED" table-prefix="BATCH_">
   <property name="incrementerFactory" ref="informixIncrementer"/>
</bean>

Upvotes: 10

Views: 5950

Answers (2)

momonari8

Reputation: 101

Basically, you need to override the incrementerFactory in the JobRepository, like so:

@Bean
public JobRepository jobRepository(DataSource dataSource, PlatformTransactionManager transactionManager) throws Exception {
    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
    factory.setDataSource(dataSource);
    factory.setTransactionManager(transactionManager);
    factory.setIncrementerFactory(new DefaultDataFieldMaxValueIncrementerFactory(dataSource) {
        @Override
        public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) {
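            // Always hand out a sequence-based incrementer instead of the
            // identity-table-based one that the default factory returns for SQL Server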
            return new SqlServerSequenceMaxValueIncrementer(dataSource, incrementerName);
        }
    });
    factory.afterPropertiesSet();
    return factory.getObject();
}

@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
    SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
    simpleJobLauncher.setJobRepository(jobRepository);
    return simpleJobLauncher;
}

And here is the source code of SqlServerSequenceMaxValueIncrementer which you can include in your codebase.
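
If your Spring Framework version does not already ship that class, a minimal sketch of such a sequence-based incrementer could look roughly like this (an illustration, not the verbatim source; it assumes the BATCH_* sequences exist in the database):

import javax.sql.DataSource;

import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer;

// Sketch of a sequence-based incrementer for SQL Server. Prefer the class from
// the fix commit / Spring Framework if it is available to you.
public class SqlServerSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer {

    public SqlServerSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) {
        super(dataSource, incrementerName);
    }

    @Override
    protected String getSequenceQuery() {
        // T-SQL syntax for fetching the next value of a sequence
        return "select next value for " + getIncrementerName();
    }
}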

Upvotes: 0

Henning

Reputation: 3889

I'm afraid this is a known bug in Spring Batch 4.x that is currently only planned to be resolved in Spring Batch 5, which is not expected for another couple of months: https://github.com/spring-projects/spring-batch/issues/3927

You can emulate the fix by making local adjustments as in the commit of the fix: https://github.com/spring-projects/spring-batch/commit/fe911c8456bb49a69b1c84c78c0a0e0fdf224803, i.e.

  • Adjust the schema to contain sequences (a rough migration sketch follows this list)
  • Change the DataFieldMaxValueIncrementerFactory that is used to build the JobRepository.
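
For the first point, the sketch below shows a rough one-off migration (class and method names are mine, not from the commit): it drops the IDENTITY-based *_SEQ tables and creates real SQL Server sequences in their place, which is what a sequence-based incrementer expects. The START WITH value is a placeholder and must be set above the current maximum IDs; the authoritative DDL is the schema-sqlserver.sql from the fix commit.

import javax.sql.DataSource;

import org.springframework.jdbc.core.JdbcTemplate;

// Rough migration sketch: replace the IDENTITY-based sequence tables with
// SQL Server sequences. Run once, outside of normal job execution.
public class BatchSequenceMigration {

    public static void migrate(DataSource dataSource) {
        JdbcTemplate jdbc = new JdbcTemplate(dataSource);
        String[] names = {"BATCH_STEP_EXECUTION_SEQ", "BATCH_JOB_EXECUTION_SEQ", "BATCH_JOB_SEQ"};
        for (String name : names) {
            // Dropping the table assumes nothing else reads it anymore.
            jdbc.execute("DROP TABLE " + name);
            // START WITH 1 is a placeholder; use a value above the current maximum ID.
            jdbc.execute("CREATE SEQUENCE " + name + " START WITH 1 INCREMENT BY 1");
        }
    }
}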

I don't think the latter is feasible with pure XML configuration. The discussion on this issue contains some hints on how it can be done in Java: https://github.com/spring-projects/spring-batch/issues/1448

The concrete customization depends on what you already have in place, but it should work roughly as follows. You can apply the incrementer factory with a BatchConfigurer:

@Bean
public BatchConfigurer batchConfigurer(DataSource dataSource) {
  return new DefaultBatchConfigurer(dataSource) {
    @Override
    protected JobRepository createJobRepository() throws Exception {
      JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
      factory.setDataSource(getDataSource());
      factory.setTransactionManager(getTransactionManager());
      factory.setIncrementerFactory(new MyIncrementerFactory(getDataSource()));
      factory.afterPropertiesSet();
      return factory.getObject();
    }
  };
}

where

import javax.sql.DataSource;

import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory;
import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer;

public class MyIncrementerFactory implements DataFieldMaxValueIncrementerFactory {

  private final DataSource dataSource;

  public MyIncrementerFactory(DataSource dataSource) {
    this.dataSource = dataSource;
  }

  @Override
  public DataFieldMaxValueIncrementer getIncrementer(String databaseType, String incrementerName) {
    return new SqlServerSequenceMaxValueIncrementer(dataSource, incrementerName);
  }

  @Override
  public boolean isSupportedIncrementerType(String databaseType) {
    return true;
  }

  @Override
  public String[] getSupportedIncrementerTypes() {
    return null; // method should not get called anyway
  }
}

and SqlServerSequenceMaxValueIncrementer should be the incrementer from the commit.

Upvotes: 6
