2017-07-25 2 views
0

Spring Batch : pas de lecture depuis la base de données ni d'écriture dans le fichier. Je souhaite utiliser plusieurs sources de données : l'une pour les métadonnées Spring Batch et l'autre pour les données métier. Mon travail par lots s'exécute mais n'essaie même pas de se connecter à secondaryDataSource. Quelqu'un peut-il signaler ce qui ne va pas dans ma configuration ?

/**
 * Dedicates the Spring Batch metadata repository (JobRepository tables) to the
 * "batchDataSource" bean by overriding DefaultBatchConfigurer's datasource setter.
 * Business data is read through a different datasource wired elsewhere.
 */
@Configuration 
@EnableBatchProcessing 
public class BatchConfiguration extends DefaultBatchConfigurer { 

    // @Autowired + @Qualifier forces the batch metadata store onto "batchDataSource",
    // bypassing the @Primary datasource that unqualified injection would otherwise pick.
    @Override 
    @Autowired 
    public void setDataSource(
      @Qualifier("batchDataSource") DataSource batchDataSource) { 
     super.setDataSource(batchDataSource); 
    } 
} 

/**
 * Declares the batch job: a single chunk-oriented step that reads student rows
 * from the business ("secondaryDataSource") database, runs them through a
 * processor, and writes them as CSV lines to a flat file.
 */
@Configuration // was missing: without it this class is only processed in "lite" mode
public class SpringBatchConfig { 

    @Autowired 
    private JobBuilderFactory jobs; 

    @Autowired 
    private StepBuilderFactory steps; 

    private static final String QUERY_FIND_STUDENTS = "select * from ..."; 

    /**
     * Cursor-based reader bound explicitly to the business datasource
     * (not the batch-metadata one).
     */
    @Bean 
    ItemReader<DotDetailsDTO> reader(
      @Qualifier("secondaryDataSource") DataSource dataSource) 
      throws SQLException { 
     JdbcCursorItemReader<DotDetailsDTO> databaseReader = new JdbcCursorItemReader<>(); 

     databaseReader.setDataSource(dataSource); 
     databaseReader.setSql(QUERY_FIND_STUDENTS); 
     databaseReader.setRowMapper(new DOTRowMapper()); 

     return databaseReader; 
    } 

    @Bean 
    public ItemProcessor<DotDetailsDTO, DotDetailsDTO> itemProcessor() { 
     return new CustomItemProcessor(); 
    } 

    /**
     * CSV writer for the processed records.
     *
     * Bug fix: the original used {@code new ClassPathResource("file:test.csv")},
     * which mixes a "file:" URL prefix into a classpath lookup — the resource can
     * never be resolved, so nothing is written. An output file on disk must be a
     * FileSystemResource.
     */
    @Bean 
    public ItemWriter<DotDetailsDTO> writer() throws Exception { 
     FlatFileItemWriter<DotDetailsDTO> writer = new FlatFileItemWriter<DotDetailsDTO>(); 
     writer.setResource(new org.springframework.core.io.FileSystemResource("test.csv")); 
     DelimitedLineAggregator<DotDetailsDTO> delLineAgg = new DelimitedLineAggregator<DotDetailsDTO>(); 
     delLineAgg.setDelimiter(","); 
     BeanWrapperFieldExtractor<DotDetailsDTO> fieldExtractor = new BeanWrapperFieldExtractor<DotDetailsDTO>(); 
     fieldExtractor.setNames(new String[] { "airwayBillNumber", 
       "outboundDate", "orig", "dest", "lotNumber", 
       "lotFlightNumber", "lotOrig", "lotDest", "lotPcs", "lotWt", 
       "lotFlightDepartDate", "iataCode" }); 
     delLineAgg.setFieldExtractor(fieldExtractor); 
     writer.setLineAggregator(delLineAgg); 
     writer.afterPropertiesSet(); 
     return writer; 
    } 

    /** Chunk size 10: read/process ten items, then write them as one transaction. */
    @Bean 
    protected Step step1(ItemReader<DotDetailsDTO> reader, 
      ItemProcessor<DotDetailsDTO, DotDetailsDTO> processor, 
      ItemWriter<DotDetailsDTO> writer) throws SQLException { 
     return steps.get("step1").<DotDetailsDTO, DotDetailsDTO> chunk(10) 
       .reader(reader).processor(processor).writer(writer).build(); 
    } 

    @Bean(name = "firstBatchJob") 
    public Job job(@Qualifier("step1") Step step1) { 
     return jobs.get("firstBatchJob").start(step1).build(); 
    } 

} 

/**
 * Declares the two datasources: "batchDataSource" for Spring Batch metadata and
 * "secondaryDataSource" (Oracle) for business data, each with its own JdbcTemplate.
 *
 * NOTE(review): this class has no @Configuration annotation — confirm it is still
 * picked up as intended when registered directly on the context.
 */
public class DataSourceConfiguration { 

    // Backing store for the Spring Batch job repository tables.
    @Bean(name="batchDataSource") 
    public DataSource dataSource() throws SQLException { 
     BasicDataSource dataSource = new BasicDataSource(); 
     ... 
     return dataSource; 
    } 

    @Bean 
    public JdbcTemplate jdbcTemplate(
      @Qualifier("batchDataSource") final DataSource dataSource) { 
     return new JdbcTemplate(dataSource); 
    } 

    // @Primary: any *unqualified* DataSource injection elsewhere resolves to this
    // business datasource, which is why the batch configurer must qualify its own.
    @Primary 
    @Bean(name="secondaryDataSource") 
    public DataSource secondaryDataSource() throws SQLException { 
     OracleDataSource secondaryDataSource = new OracleDataSource(); 
     ... 
     return secondaryDataSource; 
    } 

    @Bean 
    public JdbcTemplate secondaryJdbcTemplate(
      @Qualifier("secondaryDataSource") final DataSource secondaryDataSource) { 
     return new JdbcTemplate(secondaryDataSource); 
    } 
} 

// Launcher as originally posted in the question.
//
// DEFECT (identified in the accepted answer further down this page): the job is
// launched with `new JobParameters()` — i.e. an empty, always-identical parameter
// set. Spring Batch identifies a JobInstance by job name + parameters, so every
// run targets the same old instance instead of starting a fresh one; the step is
// never re-executed, which is why nothing is read or written. The corrected
// version later in this file passes a unique parameter per run.
public static void main(String[] args) { 
    // Spring Java config 
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); 
    context.register(DataSourceConfiguration.class); 
    context.register(BatchConfiguration.class); 
    context.register(SpringBatchConfig.class); 
    context.refresh(); 

    JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher"); 
    Job job = (Job) context.getBean("firstBatchJob"); 
    System.out.println("Starting the batch job"); 
    try { 
     JobExecution execution = jobLauncher.run(job, new JobParameters()); 
     System.out.println("Job Status : " + execution.getStatus()); 
     System.out.println("Job completed"); 
    } catch (Exception e) { 
     e.printStackTrace(); 
     System.out.println("Job failed"); 
    } 

} 

Répondre

0

Après deux jours, j'ai enfin compris le problème : je ne fournissais pas de nouveaux JobParameters, de sorte que je relançais sans cesse la même ancienne instance de job, déjà terminée, encore et encore.

Voici le correctif de la méthode principale.

/**
 * Corrected launcher: every run gets a unique JobParameters value so Spring Batch
 * creates a fresh JobInstance instead of re-targeting the same completed one.
 *
 * Improvements over the posted fix:
 * - try-with-resources on the context (AutoCloseable) — the original leaked it,
 *   leaving datasource pools open after the job finished;
 * - the uniqueness parameter is the current epoch millis via addLong(), which is
 *   unique per millisecond; the original's "yyyy/MM/dd HH:mm:ss" string collides
 *   for two launches within the same second (and SimpleDateFormat is legacy API).
 */
public static void main(String[] args) { 

    try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext()) { 
     context.register(DataSourceConfiguration.class); 
     context.register(BatchConfiguration.class); 
     context.register(SpringBatchConfig.class); 
     context.refresh(); 

     JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher"); 
     Job job = (Job) context.getBean("firstBatchJob"); 
     System.out.println("Starting the batch job"); 
     try { 
      // Unique per run -> new JobInstance every launch.
      JobParameters jobParam = 
       new JobParametersBuilder().addLong("time", System.currentTimeMillis()).toJobParameters(); 
      JobExecution execution = jobLauncher.run(job, jobParam); 
      System.out.println("Job Status : " + execution.getStatus()); 
      System.out.println("Job completed : " + execution.getJobId()); 
     } catch (Exception e) { 
      e.printStackTrace(); 
      System.out.println("Job failed"); 
     } 
    } 

}