当此作业开始读取包含292条记录的文件时，从第200条到第211条记录抛出数据截断异常 java.sql.BatchUpdateException，但数据库表中却插入了460条记录（records）。抛出异常后没有发生回滚。我看过 this 以及许多其他答案；我的作业中 Autocommit 已设置为 false。

以下是数据库配置

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;

import net.sourceforge.jtds.jdbcx.JtdsDataSource;

@Configuration
public class TaxPackageBatchDbConfig {

    @Autowired
    Environment environment;

    /**
     * Builds the jTDS {@link DataSource} used by the batch job.
     *
     * <p>FIX: the original referenced five fields ({@code dataSourceServername},
     * {@code dataSourcePort}, ...) that were never declared anywhere in this class,
     * so it could not compile; the injected {@link Environment} was unused. The
     * connection settings are now resolved from the Environment.
     *
     * @return a configured {@link net.sourceforge.jtds.jdbcx.JtdsDataSource}
     * @throws IllegalStateException if a required property is missing
     */
    @Bean
    public DataSource dataSource() {

        JtdsDataSource dataSource = new JtdsDataSource();

        // Property keys assumed from the field names — TODO confirm against the
        // application's property files.
        dataSource.setServerName(environment.getRequiredProperty("datasource.servername"));
        dataSource.setPortNumber(Integer.parseInt(environment.getRequiredProperty("datasource.port")));
        dataSource.setDatabaseName(environment.getRequiredProperty("datasource.databasename"));
        dataSource.setUser(environment.getRequiredProperty("datasource.username"));
        dataSource.setPassword(environment.getRequiredProperty("datasource.password"));

        // NOTE(review): turning auto-commit off on the DataSource does NOT by itself
        // give transactional rollback in Spring Batch — the chunk transaction is
        // driven by the PlatformTransactionManager. Ensure a
        // DataSourceTransactionManager wraps this DataSource.
        dataSource.setAutoCommit(false);

        return dataSource;
    }
}

以下是作业配置

@EnableBatchProcessing
    public class LoadFileConfiguration extends ApplicationCommonConfig {

        private static final Logger logger = LoggerFactory.get

Logger(LoadFileConfiguration.class);

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public DataSource dataSource;

    @Autowired
    public AdapterService tpAdapterService;

    @Bean
    @StepScope
    public FlatFileItemReader<Item> reader() {
        FlatFileItemReader<Item> reader = new FlatFileItemReader<Item>();

        reader.setLinesToSkip(1);

        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer("|");
        tokenizer.setNames(
                new String[] { "one","two","three" });
        tokenizer.setStrict(false);

        DefaultLineMapper<Item> lineMapper = new DefaultLineMapper<Item>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(new DataItemMapper());
        reader.setLineMapper(lineMapper);

        return reader;
    }

    @Bean
    public MultiResourceItemReader<Item> multiResourceItemReader() {
        MultiResourceItemReader<Item> resourceItemReader = new MultiResourceItemReader<Item>();

        Resource[] fileResources;
        try {
            fileResources = ResourcePatternUtils.getResourcePatternResolver(new DefaultResourceLoader())
                    .getResources("file:///data.txt");
            resourceItemReader.setResources(fileResources);
        } catch (IOException e) {
            e.printStackTrace();
        }
        resourceItemReader.setDelegate(reader());
        return resourceItemReader;
    }

    @Bean
    @StepScope
    public JdbcBatchItemWriter<Item> writer(
            @Value("#{jobParameters}") Map<String, JobParameter> jobParameters, @Value("#{stepExecution}")

            StepExecution stepExecution) {

        final String QUERY = "INSERT INTO tp.tp_table (one,two,three ) VALUES (?, ?, ?)";

        JdbcBatchItemWriter<Item> writer = new JdbcBatchItemWriter<Item>();
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Item>());
        writer.setSql(QUERY);
        writer.setDataSource(dataSource);
        ItemPreparedStatementSetter<Item> valueSetter = new ItemDataPreparedStatementSetter();
        writer.setItemPreparedStatementSetter(valueSetter);
        return writer;
    }

    public class ItemFailureLoggerListener extends ItemListenerSupport<Item, Item> {

        @Override
        public void onReadError(Exception ex) {
            logger.error("Error reading " + ex.getMessage());
        }

        @Override
        public void onWriteError(Exception ex, List<? extends SourceManifestData> item) {
            logger.error("Error Writing" + ex.getMessage() " +item.toString());

        }

    }


    @Bean
    public ItemFailureLoggerListener itemListener() {
        return new ItemFailureLoggerListener();
    }

    @Bean
    public Step step1(StepBuilderFactory stepBuilderFactory, MultiResourceItemReader<Item> reader,
            ItemWriter<Item> fileItemWriter,
            ItemProcessListener<Item, Item> itemProcessListener) {
        return stepBuilderFactory.get("step1").<Item,Item>chunk(500).reader(reader)             .writer(fileItemWriter).listener(itemProcessListener).faultTolerant()
                .skipLimit(500).skip(RuntimeException.class).build();
    }

    @Bean
    public Job importLJob(JobBuilderFactory jobs, Step s1) {
        return jobBuilderFactory.get("importJob").incrementer(new RunIdIncrementer()).flow(s1).end()
                .build();
    }

    public class DataItemMapper implements FieldSetMapper<Item> {

        @Override
        public Item mapFieldSet(FieldSet fieldSet) throws BindException {

            Item record = new Item();
            record.setOne(fieldSet.readString("one"));
            record.setTwo(fieldSet.readString("two"));
            record.setThree(fieldSet.readString("three"));
            return record;
        }

    }

    final class ItemPreparedStatementSetter implements ItemPreparedStatementSetter<Item> {

        @Override
        public void setValues(Item data, PreparedStatement preparedStatement)
                throws SQLException {
            try {
                preparedStatement.setString(1, data.getOne());
                preparedStatement.setInt(2, data.getTwo());
                preparedStatement.setString(3,data.getThree());
            } catch (Exception e) {
                logger.error("error " + e.getMessage());
            }

        }
    }
}