List of usage examples for org.springframework.batch.item.file.mapping DefaultLineMapper setLineTokenizer
public void setLineTokenizer(LineTokenizer tokenizer)
From source file:com.cocktail.initializer.ItemReader.java
/** * Read items.//from ww w. j a v a 2s.c o m * * @param <I> * the generic type * @param path * the path * @param itemMapper * the item mapper * @return the list * @throws Exception * the exception */ public static <I> List<I> readItems(String path, FieldSetMapper<I> itemMapper) throws Exception { ClassPathResource resource = new ClassPathResource(path); Scanner scanner = new Scanner(resource.getInputStream()); String line = scanner.nextLine(); scanner.close(); FlatFileItemReader<I> itemReader = new FlatFileItemReader<I>(); itemReader.setResource(resource); // DelimitedLineTokenizer defaults to | as its delimiter DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer("|"); tokenizer.setNames(line.split("\\|")); tokenizer.setStrict(false); DefaultLineMapper<I> lineMapper = new DefaultLineMapper<I>(); lineMapper.setLineTokenizer(tokenizer); lineMapper.setFieldSetMapper(itemMapper); itemReader.setLineMapper(lineMapper); itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy()); itemReader.setLinesToSkip(1); itemReader.open(new ExecutionContext()); List<I> items = new ArrayList<>(); I item = null; do { item = itemReader.read(); if (item != null) { items.add(item); } } while (item != null); return items; }
From source file:example.store.StoreInitializer.java
/** * Reads a file {@code starbucks.csv} from the class path and parses it into {@link Store} instances about to * persisted./* ww w. j a va 2s . co m*/ * * @return * @throws Exception */ public static List<Store> readStores() throws Exception { ClassPathResource resource = new ClassPathResource("starbucks.csv"); Scanner scanner = new Scanner(resource.getInputStream()); String line = scanner.nextLine(); scanner.close(); FlatFileItemReader<Store> itemReader = new FlatFileItemReader<Store>(); itemReader.setResource(resource); // DelimitedLineTokenizer defaults to comma as its delimiter DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(); tokenizer.setNames(line.split(",")); tokenizer.setStrict(false); DefaultLineMapper<Store> lineMapper = new DefaultLineMapper<Store>(); lineMapper.setLineTokenizer(tokenizer); lineMapper.setFieldSetMapper(StoreFieldSetMapper.INSTANCE); itemReader.setLineMapper(lineMapper); itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy()); itemReader.setLinesToSkip(1); itemReader.open(new ExecutionContext()); List<Store> stores = new ArrayList<>(); Store store = null; do { store = itemReader.read(); if (store != null) { stores.add(store); } } while (store != null); return stores; }
From source file:example.springdata.rest.stores.StoreInitializer.java
/** * Reads a file {@code starbucks.csv} from the class path and parses it into {@link Store} instances about to * persisted.//www . j a v a 2 s .c o m * * @return * @throws Exception */ public static List<Store> readStores() throws Exception { ClassPathResource resource = new ClassPathResource("starbucks.csv"); Scanner scanner = new Scanner(resource.getInputStream()); String line = scanner.nextLine(); scanner.close(); FlatFileItemReader<Store> itemReader = new FlatFileItemReader<Store>(); itemReader.setResource(resource); // DelimitedLineTokenizer defaults to comma as its delimiter DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(); tokenizer.setNames(line.split(",")); tokenizer.setStrict(false); DefaultLineMapper<Store> lineMapper = new DefaultLineMapper<Store>(); lineMapper.setFieldSetMapper(fields -> { Point location = new Point(fields.readDouble("Longitude"), fields.readDouble("Latitude")); Address address = new Address(fields.readString("Street Address"), fields.readString("City"), fields.readString("Zip"), location); return new Store(fields.readString("Name"), address); }); lineMapper.setLineTokenizer(tokenizer); itemReader.setLineMapper(lineMapper); itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy()); itemReader.setLinesToSkip(1); itemReader.open(new ExecutionContext()); List<Store> stores = new ArrayList<>(); Store store = null; do { store = itemReader.read(); if (store != null) { stores.add(store); } } while (store != null); return stores; }
From source file:my.sandbox.spring.batch.demo.readers.ProductReader.java
/**
 * Configures this reader to parse tab-separated lines with the columns
 * {@code description}, {@code price} and {@code purchaseDate}, mapping each
 * line onto a {@link Product} bean by property name.
 */
public ProductReader() {
    // Tab-delimited input; field names drive the bean-property mapping below.
    DelimitedLineTokenizer productTokenizer = new DelimitedLineTokenizer();
    productTokenizer.setDelimiter(DelimitedLineTokenizer.DELIMITER_TAB);
    productTokenizer.setNames(new String[] { "description", "price", "purchaseDate" });

    BeanWrapperFieldSetMapper<Product> productMapper = new BeanWrapperFieldSetMapper<>();
    productMapper.setTargetType(Product.class);

    DefaultLineMapper<Product> productLineMapper = new DefaultLineMapper<>();
    productLineMapper.setLineTokenizer(productTokenizer);
    productLineMapper.setFieldSetMapper(productMapper);

    setLineMapper(productLineMapper);
}
From source file:org.my.spring.batch.java.config.demo.readers.ProductReader.java
/**
 * Configures this reader for the file named by the {@code inputFile} job
 * parameter: tab-separated lines with the columns {@code description},
 * {@code price} and {@code purchaseDate}, mapped onto {@link Product} beans
 * by property name.
 *
 * @param inputFile filesystem path taken from the job parameters
 */
@Autowired
public ProductReader(@Value("#{jobParameters[inputFile]}") String inputFile) {
    setResource(new FileSystemResource(inputFile));

    // Tab-delimited input; field names drive the bean-property mapping below.
    DelimitedLineTokenizer productTokenizer = new DelimitedLineTokenizer();
    productTokenizer.setDelimiter(DelimitedLineTokenizer.DELIMITER_TAB);
    productTokenizer.setNames(new String[] { "description", "price", "purchaseDate" });

    BeanWrapperFieldSetMapper<Product> productMapper = new BeanWrapperFieldSetMapper<>();
    productMapper.setTargetType(Product.class);

    DefaultLineMapper<Product> productLineMapper = new DefaultLineMapper<>();
    productLineMapper.setLineTokenizer(productTokenizer);
    productLineMapper.setFieldSetMapper(productMapper);

    setLineMapper(productLineMapper);
}
From source file:com.apress.prospringintegration.batch.JobConfiguration.java
/**
 * Step-scoped reader for the comma-separated registration input file named by
 * the {@code input.file} job parameter. Each line is mapped onto a
 * {@link UserRegistration} bean by property name.
 *
 * <p>Parameterized generics replace the raw {@code FlatFileItemReader} /
 * {@code DefaultLineMapper} / {@code BeanWrapperFieldSetMapper} types of the
 * original, eliminating unchecked-conversion warnings.
 *
 * @param resource the input file, resolved from the job parameters
 * @return a configured, not-yet-opened reader
 */
@Bean
@Scope("step")
public FlatFileItemReader<UserRegistration> dataReader(
        @Value("file:#{jobParameters['input.file']}") Resource resource) {
    DelimitedLineTokenizer delimitedLineTokenizer = new DelimitedLineTokenizer(
            DelimitedLineTokenizer.DELIMITER_COMMA);
    delimitedLineTokenizer.setNames(new String[] { "firstName", "lastName", "company", "address", "city",
            "state", "zip", "county", "url", "phoneNumber", "fax" });

    BeanWrapperFieldSetMapper<UserRegistration> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
    fieldSetMapper.setTargetType(UserRegistration.class);

    DefaultLineMapper<UserRegistration> lineMapper = new DefaultLineMapper<>();
    lineMapper.setLineTokenizer(delimitedLineTokenizer);
    lineMapper.setFieldSetMapper(fieldSetMapper);

    FlatFileItemReader<UserRegistration> csvFileReader = new FlatFileItemReader<>();
    csvFileReader.setResource(resource);
    csvFileReader.setLineMapper(lineMapper);
    return csvFileReader;
}
From source file:com.apress.prospringintegration.springbatch.integration.JobConfiguration.java
@Bean @Scope("step")// w ww .ja va2 s .com public FlatFileItemReader dataReader( @Value("file:src/main/resources/sample/#{jobParameters['input.file']}.csv") Resource resource) { FlatFileItemReader csvFileReader = new FlatFileItemReader(); csvFileReader.setResource(resource); DelimitedLineTokenizer delimitedLineTokenizer = new DelimitedLineTokenizer( DelimitedLineTokenizer.DELIMITER_COMMA); delimitedLineTokenizer.setNames(new String[] { "firstName", "lastName", "company", "address", "city", "state", "zip", "county", "url", "phoneNumber", "fax" }); BeanWrapperFieldSetMapper beanWrapperFieldSetMapper = new BeanWrapperFieldSetMapper(); beanWrapperFieldSetMapper.setTargetType(UserRegistration.class); DefaultLineMapper defaultLineMapper = new DefaultLineMapper(); defaultLineMapper.setLineTokenizer(delimitedLineTokenizer); defaultLineMapper.setFieldSetMapper(beanWrapperFieldSetMapper); csvFileReader.setLineMapper(defaultLineMapper); return csvFileReader; }
From source file:de.langmi.spring.batch.examples.readers.file.csv.CsvFlatFileItemReaderTest.java
/** * Test should read succesfully./* w ww . j av a 2 s .c o m*/ * * @throws Exception */ @Test public void testSuccessfulReading() throws Exception { // init linetokenizer DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer(); lineTokenizer.setNames(new String[] { "id", "value" }); // init linemapper DefaultLineMapper<FieldSet> lineMapper = new DefaultLineMapper<FieldSet>(); lineMapper.setLineTokenizer(lineTokenizer); lineMapper.setFieldSetMapper(new PassThroughFieldSetMapper()); // init reader reader.setLineMapper(lineMapper); reader.setResource(new FileSystemResource(INPUT_FILE)); // open, provide "mock" ExecutionContext reader.open(MetaDataInstanceFactory.createStepExecution().getExecutionContext()); // read try { int count = 0; FieldSet line; while ((line = reader.read()) != null) { // really test for the fieldSet names and values assertEquals("id", line.getNames()[0]); assertEquals(String.valueOf(count), line.getValues()[0]); assertEquals("value", line.getNames()[1]); // csv contains entry like '0,foo0' assertEquals("foo" + String.valueOf(count), line.getValues()[1]); count++; } assertEquals(EXPECTED_COUNT, count); } catch (Exception e) { throw e; } finally { reader.close(); } }
From source file:io.spring.batch.configuration.BatchConfiguration.java
@Bean @StepScope// www . ja v a 2 s . co m protected FlatFileItemReader<Customer> reader(@Value("#{jobParameters['fileName']}") Resource fileName) throws Exception { DefaultLineMapper<Customer> defaultLineMapper = new DefaultLineMapper<>(); defaultLineMapper.setLineTokenizer(new DelimitedLineTokenizer()); defaultLineMapper.setFieldSetMapper(new FieldSetMapper<Customer>() { @Override public Customer mapFieldSet(FieldSet fieldSet) throws BindException { Customer cust = new Customer(); cust.setCustomerName(fieldSet.readString(0)); cust.setQty(fieldSet.readInt(1)); return cust; } }); defaultLineMapper.afterPropertiesSet(); FlatFileItemReader<Customer> reader = new FlatFileItemReader<>(); reader.setLineMapper(defaultLineMapper); reader.setResource(fileName); reader.afterPropertiesSet(); return reader; }
From source file:com.springsource.html5expense.config.BatchConfig.java
@Bean @Scope("step")//from w w w . j a v a 2 s . com public FlatFileItemReader reader(@Value("#{jobParameters[file]}") String resource) { File f = new File(this.batchFileDirectory, resource + ".csv"); DelimitedLineTokenizer del = new DelimitedLineTokenizer(); del.setNames("date,amount,category,merchant".split(",")); del.setDelimiter(DelimitedLineTokenizer.DELIMITER_COMMA); DefaultLineMapper<FieldSet> defaultLineMapper = new DefaultLineMapper<FieldSet>(); defaultLineMapper.setLineTokenizer(del); defaultLineMapper.setFieldSetMapper(new PassThroughFieldSetMapper()); defaultLineMapper.afterPropertiesSet(); FlatFileItemReader<FieldSet> fileItemReader = new FlatFileItemReader<FieldSet>(); fileItemReader.setLineMapper(defaultLineMapper); fileItemReader.setResource(new FileSystemResource(f)); return fileItemReader; }