Example usage for org.springframework.batch.item.file.transform DelimitedLineTokenizer setNames

List of usage examples for org.springframework.batch.item.file.transform DelimitedLineTokenizer setNames

Introduction

This page lists usage examples for the org.springframework.batch.item.file.transform DelimitedLineTokenizer setNames method.

Prototype

public void setNames(String... names) 

Source Link

Document

Setter for column names.

Usage

From source file:com.cocktail.initializer.ItemReader.java

/**
 * Read items./*  w w  w  .  java  2 s  .  com*/
 *
 * @param <I>
 *            the generic type
 * @param path
 *            the path
 * @param itemMapper
 *            the item mapper
 * @return the list
 * @throws Exception
 *             the exception
 */
public static <I> List<I> readItems(String path, FieldSetMapper<I> itemMapper) throws Exception {

    ClassPathResource resource = new ClassPathResource(path);
    Scanner scanner = new Scanner(resource.getInputStream());
    String line = scanner.nextLine();
    scanner.close();

    FlatFileItemReader<I> itemReader = new FlatFileItemReader<I>();
    itemReader.setResource(resource);

    // DelimitedLineTokenizer defaults to | as its delimiter
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer("|");
    tokenizer.setNames(line.split("\\|"));
    tokenizer.setStrict(false);

    DefaultLineMapper<I> lineMapper = new DefaultLineMapper<I>();
    lineMapper.setLineTokenizer(tokenizer);
    lineMapper.setFieldSetMapper(itemMapper);
    itemReader.setLineMapper(lineMapper);
    itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy());
    itemReader.setLinesToSkip(1);
    itemReader.open(new ExecutionContext());

    List<I> items = new ArrayList<>();
    I item = null;

    do {

        item = itemReader.read();

        if (item != null) {
            items.add(item);
        }

    } while (item != null);

    return items;
}

From source file:example.springdata.rest.stores.StoreInitializer.java

/**
 * Reads a file {@code starbucks.csv} from the class path and parses it into {@link Store} instances about to
 * persisted./*from  ww  w.  j  a  va 2s  .c  om*/
 * 
 * @return
 * @throws Exception
 */
public static List<Store> readStores() throws Exception {

    ClassPathResource resource = new ClassPathResource("starbucks.csv");
    Scanner scanner = new Scanner(resource.getInputStream());
    String line = scanner.nextLine();
    scanner.close();

    FlatFileItemReader<Store> itemReader = new FlatFileItemReader<Store>();
    itemReader.setResource(resource);

    // DelimitedLineTokenizer defaults to comma as its delimiter
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setNames(line.split(","));
    tokenizer.setStrict(false);

    DefaultLineMapper<Store> lineMapper = new DefaultLineMapper<Store>();
    lineMapper.setFieldSetMapper(fields -> {

        Point location = new Point(fields.readDouble("Longitude"), fields.readDouble("Latitude"));
        Address address = new Address(fields.readString("Street Address"), fields.readString("City"),
                fields.readString("Zip"), location);

        return new Store(fields.readString("Name"), address);
    });

    lineMapper.setLineTokenizer(tokenizer);
    itemReader.setLineMapper(lineMapper);
    itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy());
    itemReader.setLinesToSkip(1);
    itemReader.open(new ExecutionContext());

    List<Store> stores = new ArrayList<>();
    Store store = null;

    do {

        store = itemReader.read();

        if (store != null) {
            stores.add(store);
        }

    } while (store != null);

    return stores;
}

From source file:example.store.StoreInitializer.java

/**
 * Reads a file {@code starbucks.csv} from the class path and parses it into {@link Store}
 * instances about to be persisted.
 *
 * @return all stores parsed from the CSV, in file order
 * @throws Exception
 *             if the resource cannot be opened or a line cannot be parsed/mapped
 */
public static List<Store> readStores() throws Exception {

    ClassPathResource resource = new ClassPathResource("starbucks.csv");

    // Read only the header line: it supplies the column names for the tokenizer.
    // try-with-resources guarantees the stream is closed even if nextLine() throws.
    String headerLine;
    try (Scanner scanner = new Scanner(resource.getInputStream())) {
        headerLine = scanner.nextLine();
    }

    FlatFileItemReader<Store> itemReader = new FlatFileItemReader<Store>();
    itemReader.setResource(resource);

    // DelimitedLineTokenizer defaults to comma as its delimiter
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setNames(headerLine.split(","));
    tokenizer.setStrict(false); // tolerate rows with fewer tokens than the header

    DefaultLineMapper<Store> lineMapper = new DefaultLineMapper<Store>();
    lineMapper.setLineTokenizer(tokenizer);
    lineMapper.setFieldSetMapper(StoreFieldSetMapper.INSTANCE);
    itemReader.setLineMapper(lineMapper);
    itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy());
    itemReader.setLinesToSkip(1); // the header row was consumed above; skip it during the read
    itemReader.open(new ExecutionContext());

    try {
        List<Store> stores = new ArrayList<>();
        Store store;
        while ((store = itemReader.read()) != null) {
            stores.add(store);
        }
        return stores;
    } finally {
        itemReader.close(); // release the underlying input stream
    }
}

From source file:example.UserInitializer.java

/**
 * Reads a comma-separated user export whose first line is a header row and maps each record
 * to a {@link User}.
 *
 * @param resource the CSV resource to read; its first line must name the columns
 * @return all users parsed from the resource, in file order
 * @throws Exception if the resource cannot be opened or a line cannot be parsed/mapped
 */
private static List<User> readUsers(Resource resource) throws Exception {

    // Read only the header line: it supplies the column names for the tokenizer.
    // try-with-resources guarantees the stream is closed even if nextLine() throws.
    String headerLine;
    try (Scanner scanner = new Scanner(resource.getInputStream())) {
        headerLine = scanner.nextLine();
    }

    FlatFileItemReader<User> reader = new FlatFileItemReader<User>();
    reader.setResource(resource);

    // Comma-delimited (the tokenizer's default); column names come from the header row.
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setNames(headerLine.split(","));
    tokenizer.setStrict(false); // tolerate rows with fewer tokens than the header

    DefaultLineMapper<User> lineMapper = new DefaultLineMapper<User>();

    // Build a User from the named columns of each tokenized row.
    lineMapper.setFieldSetMapper(fields -> {

        User user = new User();

        user.setEmail(fields.readString("email"));
        user.setFirstname(capitalize(fields.readString("first")));
        user.setLastname(capitalize(fields.readString("last")));
        user.setNationality(fields.readString("nationality"));

        // Capitalize every word of the city and street ("main st" -> "Main St").
        String city = Arrays.stream(fields.readString("city").split(" "))//
                .map(StringUtils::capitalize)//
                .collect(Collectors.joining(" "));
        String street = Arrays.stream(fields.readString("street").split(" "))//
                .map(StringUtils::capitalize)//
                .collect(Collectors.joining(" "));

        // Some exports name the column "zip", others "postcode"; fall back on failure.
        try {
            user.setAddress(new Address(city, street, fields.readString("zip")));
        } catch (IllegalArgumentException e) {
            user.setAddress(new Address(city, street, fields.readString("postcode")));
        }

        user.setPicture(new Picture(fields.readString("large"), fields.readString("medium"),
                fields.readString("thumbnail")));
        user.setUsername(fields.readString("username"));
        user.setPassword(fields.readString("password"));

        return user;
    });

    lineMapper.setLineTokenizer(tokenizer);

    reader.setLineMapper(lineMapper);
    reader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy());
    reader.setLinesToSkip(1); // the header row was consumed above; skip it during the read
    reader.open(new ExecutionContext());

    try {
        List<User> users = new ArrayList<>();
        User user;
        while ((user = reader.read()) != null) {
            users.add(user);
        }
        return users;
    } finally {
        reader.close(); // release the underlying input stream
    }
}

From source file:my.sandbox.spring.batch.demo.readers.ProductReader.java

/**
 * Configures this reader for tab-separated product records with three columns:
 * description, price, and purchaseDate.
 */
public ProductReader() {

    // Tab-separated input; setNames is a varargs method, so the column
    // names can be passed directly.
    DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setNames("description", "price", "purchaseDate");
    lineTokenizer.setDelimiter(DelimitedLineTokenizer.DELIMITER_TAB);

    // Bind each tokenized record onto a Product bean by property name.
    BeanWrapperFieldSetMapper<Product> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
    fieldSetMapper.setTargetType(Product.class);

    DefaultLineMapper<Product> lineMapper = new DefaultLineMapper<>();
    lineMapper.setLineTokenizer(lineTokenizer);
    lineMapper.setFieldSetMapper(fieldSetMapper);

    setLineMapper(lineMapper);
}

From source file:org.my.spring.batch.java.config.demo.readers.ProductReader.java

/**
 * Configures this reader for the tab-separated product file supplied via the
 * {@code inputFile} job parameter; records have three columns: description,
 * price, and purchaseDate.
 *
 * @param inputFile filesystem path of the input file, injected from job parameters
 */
@Autowired
public ProductReader(@Value("#{jobParameters[inputFile]}") String inputFile) {

    setResource(new FileSystemResource(inputFile));

    // Tab-separated input; setNames is a varargs method, so the column
    // names can be passed directly.
    DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setNames("description", "price", "purchaseDate");
    lineTokenizer.setDelimiter(DelimitedLineTokenizer.DELIMITER_TAB);

    // Bind each tokenized record onto a Product bean by property name.
    BeanWrapperFieldSetMapper<Product> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
    fieldSetMapper.setTargetType(Product.class);

    DefaultLineMapper<Product> lineMapper = new DefaultLineMapper<>();
    lineMapper.setLineTokenizer(lineTokenizer);
    lineMapper.setFieldSetMapper(fieldSetMapper);

    setLineMapper(lineMapper);
}

From source file:com.create.batch.TicketReaderFactory.java

/**
 * Builds a {@link FlatFileItemReader} for CSV ticket files.
 *
 * @param source the CSV resource to read tickets from
 * @return a configured, not-yet-opened reader for the given resource
 */
public ItemStreamReader<Ticket> createReader(final Resource source) {

    final FlatFileItemReader<Ticket> reader = new FlatFileItemReader<>();
    reader.setResource(source);

    final DefaultLineMapper<Ticket> lineMapper = new DefaultLineMapper<>();
    final DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setNames(TICKET_FILE_CSV_FIELDS);
    lineMapper.setLineTokenizer(lineTokenizer);

    // Bind each tokenized record onto a Ticket bean by property name.
    final BeanWrapperFieldSetMapper<Ticket> fieldMapper = new BeanWrapperFieldSetMapper<>();
    fieldMapper.setTargetType(Ticket.class);

    // Register a custom editor so Date properties are parsed with DATE_FORMAT
    // (non-lenient: empty strings are not allowed). Class<?> instead of the raw
    // Class type keeps the map properly parameterized.
    final DateFormat df = new SimpleDateFormat(DATE_FORMAT);
    final Map<Class<?>, PropertyEditor> customEditors = Stream
            .<Map.Entry<Class<?>, PropertyEditor>>of(
                    new AbstractMap.SimpleEntry<>(Date.class, new CustomDateEditor(df, false)))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    fieldMapper.setCustomEditors(customEditors);

    lineMapper.setFieldSetMapper(fieldMapper);
    reader.setLineMapper(lineMapper);
    return reader;
}

From source file:de.langmi.spring.batch.examples.readers.file.csv.CsvFlatFileItemReaderTest.java

/**
 * Test should read successfully: every record must expose the configured column
 * names and the values expected from the CSV fixture.
 *
 * @throws Exception on any read failure
 */
@Test
public void testSuccessfulReading() throws Exception {
    // init linetokenizer with the two expected column names
    DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setNames(new String[] { "id", "value" });
    // init linemapper; PassThroughFieldSetMapper returns the raw FieldSet
    DefaultLineMapper<FieldSet> lineMapper = new DefaultLineMapper<FieldSet>();
    lineMapper.setLineTokenizer(lineTokenizer);
    lineMapper.setFieldSetMapper(new PassThroughFieldSetMapper());
    // init reader
    reader.setLineMapper(lineMapper);
    reader.setResource(new FileSystemResource(INPUT_FILE));
    // open, provide "mock" ExecutionContext
    reader.open(MetaDataInstanceFactory.createStepExecution().getExecutionContext());
    // read; the previous catch-and-rethrow added nothing, so a plain
    // try/finally is enough to guarantee the reader is closed
    try {
        int count = 0;
        FieldSet line;
        while ((line = reader.read()) != null) {
            // really test for the fieldSet names and values
            assertEquals("id", line.getNames()[0]);
            assertEquals(String.valueOf(count), line.getValues()[0]);
            assertEquals("value", line.getNames()[1]);
            // csv contains entry like '0,foo0'
            assertEquals("foo" + String.valueOf(count), line.getValues()[1]);
            count++;
        }
        assertEquals(EXPECTED_COUNT, count);
    } finally {
        reader.close();
    }
}

From source file:com.springsource.html5expense.config.BatchConfig.java

/**
 * Step-scoped reader for the expense CSV named by the {@code file} job parameter,
 * resolved against the configured batch file directory.
 *
 * @param resource base file name (without extension) from the job parameters
 * @return a reader producing one {@code FieldSet} per CSV row
 */
@Bean
@Scope("step")
public FlatFileItemReader reader(@Value("#{jobParameters[file]}") String resource) {

    File f = new File(this.batchFileDirectory, resource + ".csv");

    // Column names passed directly via the varargs setNames — no need to
    // split a literal at runtime.
    DelimitedLineTokenizer del = new DelimitedLineTokenizer();
    del.setNames("date", "amount", "category", "merchant");
    del.setDelimiter(DelimitedLineTokenizer.DELIMITER_COMMA);

    DefaultLineMapper<FieldSet> defaultLineMapper = new DefaultLineMapper<FieldSet>();
    defaultLineMapper.setLineTokenizer(del);
    defaultLineMapper.setFieldSetMapper(new PassThroughFieldSetMapper());
    defaultLineMapper.afterPropertiesSet();

    FlatFileItemReader<FieldSet> fileItemReader = new FlatFileItemReader<FieldSet>();
    fileItemReader.setLineMapper(defaultLineMapper);
    fileItemReader.setResource(new FileSystemResource(f));

    return fileItemReader;
}

From source file:com.apress.prospringintegration.batch.JobConfiguration.java

/**
 * Step-scoped reader for the comma-separated registration file named by the
 * {@code input.file} job parameter; each row is bound to a {@link UserRegistration}.
 *
 * @param resource the input file, injected from job parameters
 * @return a reader producing one UserRegistration per CSV row
 */
@Bean
@Scope("step")
public FlatFileItemReader dataReader(@Value("file:#{jobParameters['input.file']}") Resource resource) {
    // Locals are parameterized with UserRegistration (the original used raw
    // types); the bean method's declared return type is left unchanged.
    FlatFileItemReader<UserRegistration> csvFileReader = new FlatFileItemReader<>();
    csvFileReader.setResource(resource);

    DelimitedLineTokenizer delimitedLineTokenizer = new DelimitedLineTokenizer(
            DelimitedLineTokenizer.DELIMITER_COMMA);
    delimitedLineTokenizer.setNames(new String[] { "firstName", "lastName", "company", "address", "city",
            "state", "zip", "county", "url", "phoneNumber", "fax" });

    // Bind each tokenized record onto a UserRegistration bean by property name.
    BeanWrapperFieldSetMapper<UserRegistration> beanWrapperFieldSetMapper = new BeanWrapperFieldSetMapper<>();
    beanWrapperFieldSetMapper.setTargetType(UserRegistration.class);

    DefaultLineMapper<UserRegistration> defaultLineMapper = new DefaultLineMapper<>();
    defaultLineMapper.setLineTokenizer(delimitedLineTokenizer);
    defaultLineMapper.setFieldSetMapper(beanWrapperFieldSetMapper);

    csvFileReader.setLineMapper(defaultLineMapper);

    return csvFileReader;
}