List of usage examples for org.springframework.batch.item.file.FlatFileItemReader#read()
// Signature of ItemReader#read() as implemented by FlatFileItemReader:
// returns the next parsed item, or null once the end of input is reached.
// May throw parsing/resource exceptions for malformed lines or I/O failures.
@Nullable
T read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException;
From source file:com.cocktail.initializer.ItemReader.java
/** * Read items./* w ww.j a v a 2 s. co m*/ * * @param <I> * the generic type * @param path * the path * @param itemMapper * the item mapper * @return the list * @throws Exception * the exception */ public static <I> List<I> readItems(String path, FieldSetMapper<I> itemMapper) throws Exception { ClassPathResource resource = new ClassPathResource(path); Scanner scanner = new Scanner(resource.getInputStream()); String line = scanner.nextLine(); scanner.close(); FlatFileItemReader<I> itemReader = new FlatFileItemReader<I>(); itemReader.setResource(resource); // DelimitedLineTokenizer defaults to | as its delimiter DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer("|"); tokenizer.setNames(line.split("\\|")); tokenizer.setStrict(false); DefaultLineMapper<I> lineMapper = new DefaultLineMapper<I>(); lineMapper.setLineTokenizer(tokenizer); lineMapper.setFieldSetMapper(itemMapper); itemReader.setLineMapper(lineMapper); itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy()); itemReader.setLinesToSkip(1); itemReader.open(new ExecutionContext()); List<I> items = new ArrayList<>(); I item = null; do { item = itemReader.read(); if (item != null) { items.add(item); } } while (item != null); return items; }
From source file:example.store.StoreInitializer.java
/** * Reads a file {@code starbucks.csv} from the class path and parses it into {@link Store} instances about to * persisted./*from w ww . jav a 2s. co m*/ * * @return * @throws Exception */ public static List<Store> readStores() throws Exception { ClassPathResource resource = new ClassPathResource("starbucks.csv"); Scanner scanner = new Scanner(resource.getInputStream()); String line = scanner.nextLine(); scanner.close(); FlatFileItemReader<Store> itemReader = new FlatFileItemReader<Store>(); itemReader.setResource(resource); // DelimitedLineTokenizer defaults to comma as its delimiter DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(); tokenizer.setNames(line.split(",")); tokenizer.setStrict(false); DefaultLineMapper<Store> lineMapper = new DefaultLineMapper<Store>(); lineMapper.setLineTokenizer(tokenizer); lineMapper.setFieldSetMapper(StoreFieldSetMapper.INSTANCE); itemReader.setLineMapper(lineMapper); itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy()); itemReader.setLinesToSkip(1); itemReader.open(new ExecutionContext()); List<Store> stores = new ArrayList<>(); Store store = null; do { store = itemReader.read(); if (store != null) { stores.add(store); } } while (store != null); return stores; }
From source file:example.springdata.rest.stores.StoreInitializer.java
/** * Reads a file {@code starbucks.csv} from the class path and parses it into {@link Store} instances about to * persisted./*from w w w . j a v a 2 s .co m*/ * * @return * @throws Exception */ public static List<Store> readStores() throws Exception { ClassPathResource resource = new ClassPathResource("starbucks.csv"); Scanner scanner = new Scanner(resource.getInputStream()); String line = scanner.nextLine(); scanner.close(); FlatFileItemReader<Store> itemReader = new FlatFileItemReader<Store>(); itemReader.setResource(resource); // DelimitedLineTokenizer defaults to comma as its delimiter DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(); tokenizer.setNames(line.split(",")); tokenizer.setStrict(false); DefaultLineMapper<Store> lineMapper = new DefaultLineMapper<Store>(); lineMapper.setFieldSetMapper(fields -> { Point location = new Point(fields.readDouble("Longitude"), fields.readDouble("Latitude")); Address address = new Address(fields.readString("Street Address"), fields.readString("City"), fields.readString("Zip"), location); return new Store(fields.readString("Name"), address); }); lineMapper.setLineTokenizer(tokenizer); itemReader.setLineMapper(lineMapper); itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy()); itemReader.setLinesToSkip(1); itemReader.open(new ExecutionContext()); List<Store> stores = new ArrayList<>(); Store store = null; do { store = itemReader.read(); if (store != null) { stores.add(store); } } while (store != null); return stores; }
From source file:de.mediait.batch.FlatFileItemReaderTest.java
@Test public void testFlatFileReader2() throws Exception { final FlatFileItemReader<String[]> reader = createFlatFileReader(',', '\''); reader.setResource(new ClassPathResource("csv-fix-samples/fixcomma.txt")); final ExecutionContext executionContext = new ExecutionContext(); reader.open(executionContext);//from www .j a v a 2s . com final String[] object = (String[]) reader.read(); reader.close(); assertArrayEquals(new String[] { "begin", "abc' \"d\" 'ef", "end" }, object); }
From source file:de.mediait.batch.FlatFileItemReaderTest.java
/**
 * Reads the first record of a semicolon-delimited sample file using a single-quote
 * quote character and verifies the tokenized fields.
 */
@Test
public void testFlatFileReader() throws Exception {
    final FlatFileItemReader<String[]> reader = createFlatFileReader(';', '\'');
    reader.setResource(new ClassPathResource("csv-fix-samples/fixsemicolon.txt"));
    final ExecutionContext executionContext = new ExecutionContext();
    reader.open(executionContext);
    final String[] record;
    try {
        // Fix: removed the redundant (String[]) cast — the reader is already typed.
        record = reader.read();
    } finally {
        // Fix: close in finally so the resource is released even if read() throws.
        reader.close();
    }
    assertArrayEquals(new String[] { "begin", "abc' \"d\" 'ef", "end" }, record);
}
From source file:example.UserInitializer.java
private static List<User> readUsers(Resource resource) throws Exception { Scanner scanner = new Scanner(resource.getInputStream()); String line = scanner.nextLine(); scanner.close();// ww w .ja v a 2 s.c o m FlatFileItemReader<User> reader = new FlatFileItemReader<User>(); reader.setResource(resource); DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(); tokenizer.setNames(line.split(",")); tokenizer.setStrict(false); DefaultLineMapper<User> lineMapper = new DefaultLineMapper<User>(); lineMapper.setFieldSetMapper(fields -> { User user = new User(); user.setEmail(fields.readString("email")); user.setFirstname(capitalize(fields.readString("first"))); user.setLastname(capitalize(fields.readString("last"))); user.setNationality(fields.readString("nationality")); String city = Arrays.stream(fields.readString("city").split(" "))// .map(StringUtils::capitalize)// .collect(Collectors.joining(" ")); String street = Arrays.stream(fields.readString("street").split(" "))// .map(StringUtils::capitalize)// .collect(Collectors.joining(" ")); try { user.setAddress(new Address(city, street, fields.readString("zip"))); } catch (IllegalArgumentException e) { user.setAddress(new Address(city, street, fields.readString("postcode"))); } user.setPicture(new Picture(fields.readString("large"), fields.readString("medium"), fields.readString("thumbnail"))); user.setUsername(fields.readString("username")); user.setPassword(fields.readString("password")); return user; }); lineMapper.setLineTokenizer(tokenizer); reader.setLineMapper(lineMapper); reader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy()); reader.setLinesToSkip(1); reader.open(new ExecutionContext()); List<User> users = new ArrayList<>(); User user = null; do { user = reader.read(); if (user != null) { users.add(user); } } while (user != null); return users; }
From source file:org.cbioportal.annotation.pipeline.MutationRecordReader.java
/**
 * Loads all mutation records from {@code filename} (tab-delimited, first line is the
 * header), then annotates each record via the Genome Nexus annotator, accumulating
 * annotated records, the combined output header, counters, and per-record error
 * messages in instance state. Finally publishes the header into the execution context
 * under {@code "mutation_header"}.
 *
 * @param ec the execution context; receives the accumulated header on completion
 * @throws ItemStreamException if reading the input file fails
 */
@Override
public void open(ExecutionContext ec) throws ItemStreamException {
    processComments(ec);

    // Build a tab-delimited reader; column names come from the skipped header line
    // via the skipped-lines callback below.
    FlatFileItemReader<MutationRecord> reader = new FlatFileItemReader<>();
    reader.setResource(new FileSystemResource(filename));
    DefaultLineMapper<MutationRecord> mapper = new DefaultLineMapper<>();
    final DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setDelimiter("\t");
    mapper.setLineTokenizer(tokenizer);
    mapper.setFieldSetMapper(new MutationFieldSetMapper());
    reader.setLineMapper(mapper);
    reader.setLinesToSkip(1);
    reader.setSkippedLinesCallback(new LineCallbackHandler() {
        @Override
        public void handleLine(String line) {
            // The skipped header line supplies the tokenizer's column names.
            tokenizer.setNames(line.split("\t"));
        }
    });
    reader.open(ec);

    LOG.info("Loading records from: " + filename);
    MutationRecord mutationRecord;
    try {
        // Drain the whole file into memory before annotating.
        while ((mutationRecord = reader.read()) != null) {
            mutationRecords.add(mutationRecord);
        }
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }
    reader.close();

    int variantsToAnnotateCount = mutationRecords.size();
    int annotatedVariantsCount = 0;
    LOG.info(String.valueOf(variantsToAnnotateCount) + " records to annotate");
    for (MutationRecord record : mutationRecords) {
        annotatedVariantsCount++;
        // Progress log every 2000 records.
        if (annotatedVariantsCount % 2000 == 0) {
            LOG.info("\tOn record " + String.valueOf(annotatedVariantsCount) + " out of "
                    + String.valueOf(variantsToAnnotateCount) + ", annotation "
                    + String.valueOf((int) (((annotatedVariantsCount * 1.0) / variantsToAnnotateCount) * 100))
                    + "% complete");
        }
        // save variant details for logging
        String variantDetails = "(sampleId,chr,start,end,ref,alt,url)= (" + record.getTUMOR_SAMPLE_BARCODE()
                + "," + record.getCHROMOSOME() + "," + record.getSTART_POSITION() + ","
                + record.getEND_POSITION() + "," + record.getREFERENCE_ALLELE() + ","
                + record.getTUMOR_SEQ_ALLELE2() + "," + annotator.getUrlForRecord(record, isoformOverride)
                + ")";
        // Init annotated record without Genome Nexus in case a server error occurs;
        // if there is no error the record is overwritten with the Genome Nexus response.
        String serverErrorMessage = "";
        AnnotatedRecord annotatedRecord = new AnnotatedRecord(record);
        try {
            annotatedRecord = annotator.annotateRecord(record, replace, isoformOverride, true);
        } catch (HttpServerErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to internal server error";
        } catch (HttpClientErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to client error";
        } catch (HttpMessageNotReadableException ex) {
            serverErrorMessage = "Failed to annotate variant due to message not readable error";
        } catch (GenomeNexusAnnotationFailureException ex) {
            serverErrorMessage = "Failed to annotate variant due to Genome Nexus : " + ex.getMessage();
        }
        annotatedRecords.add(annotatedRecord);
        header.addAll(annotatedRecord.getHeaderWithAdditionalFields());

        // Log server failure message if applicable; skip further checks for this record.
        if (!serverErrorMessage.isEmpty()) {
            LOG.warn(serverErrorMessage);
            failedAnnotations++;
            failedServerAnnotations++;
            if (errorReportLocation != null)
                updateErrorMessages(record, record.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), serverErrorMessage);
            continue;
        }

        String annotationErrorMessage = "";
        if (MafUtil.variantContainsAmbiguousTumorSeqAllele(record.getREFERENCE_ALLELE(),
                record.getTUMOR_SEQ_ALLELE1(), record.getTUMOR_SEQ_ALLELE2())) {
            snpAndIndelVariants++;
            annotationErrorMessage = "Record contains ambiguous SNP and INDEL allele change - SNP allele will be used";
        }
        // Missing HGVSc/HGVSp means annotation failed, unless the classification is one
        // for which HGVSp is expected to be null.
        if (annotatedRecord.getHGVSC().isEmpty() && annotatedRecord.getHGVSP().isEmpty()) {
            if (annotator.isHgvspNullClassifications(annotatedRecord.getVARIANT_CLASSIFICATION())) {
                failedNullHgvspAnnotations++;
                annotationErrorMessage = "Ignoring record with HGVSp null classification '"
                        + annotatedRecord.getVARIANT_CLASSIFICATION() + "'";
            } else {
                annotationErrorMessage = "Failed to annotate variant";
            }
            failedAnnotations++;
        }
        if (!annotationErrorMessage.isEmpty()) {
            if (verbose)
                LOG.info(annotationErrorMessage + ": " + variantDetails);
            if (errorReportLocation != null)
                updateErrorMessages(record, annotatedRecord.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), annotationErrorMessage);
        }
    }
    // print summary statistics and save error messages to file if applicable
    printSummaryStatistics(failedAnnotations, failedNullHgvspAnnotations, snpAndIndelVariants,
            failedServerAnnotations);
    if (errorReportLocation != null) {
        saveErrorMessagesToFile(errorMessages);
    }
    // NOTE(review): raw ArrayList (unparameterized) — consider new ArrayList<>(header);
    // left unchanged here to preserve the code exactly.
    ec.put("mutation_header", new ArrayList(header));
}
From source file:org.springframework.batch.admin.sample.LeadRandomizer.java
/**
 * Reads the 7 seed leads from {@code /data/test.txt}, then writes 100 randomized
 * batches of leads (names, countries, products, and amounts shuffled/sampled from the
 * seed data) to {@code target/output/output.txt}, preserving the original header line.
 */
@Test
public void testLeads() throws Exception {
    FlatFileItemReader<FieldSet> reader = new FlatFileItemReader<FieldSet>();
    reader.setResource(new ClassPathResource("/data/test.txt"));
    DefaultLineMapper<FieldSet> lineMapper = new DefaultLineMapper<FieldSet>();
    lineMapper.setLineTokenizer(new DelimitedLineTokenizer());
    lineMapper.setFieldSetMapper(new PassThroughFieldSetMapper());
    reader.setLinesToSkip(1);
    // Capture the skipped header line so it can be replayed into the output file.
    final List<String> headers = new ArrayList<String>();
    reader.setSkippedLinesCallback(new LineCallbackHandler() {
        public void handleLine(String line) {
            headers.add(line);
        }
    });
    reader.setLineMapper(lineMapper);
    reader.open(new ExecutionContext());

    // Drain all seed records into memory.
    List<FieldSet> list = new ArrayList<FieldSet>();
    FieldSet item = reader.read();
    while (item != null) {
        list.add(item);
        item = reader.read();
    }
    // The sample data file is expected to contain exactly 7 records.
    assertEquals(7, list.size());

    FlatFileItemWriter<FieldSet> writer = new FlatFileItemWriter<FieldSet>();
    FileSystemResource resource = new FileSystemResource("target/output/output.txt");
    // Remove any output left over from a previous run.
    FileUtils.deleteQuietly(resource.getFile());
    writer.setResource(resource);
    writer.setHeaderCallback(new FlatFileHeaderCallback() {
        public void writeHeader(Writer writer) throws IOException {
            for (String header : headers) {
                writer.write(header);
            }
        }
    });
    writer.setLineAggregator(new DelimitedLineAggregator<FieldSet>());
    writer.open(new ExecutionContext());

    // Pools of values sampled from the seed records (columns 1-3) and the min/max
    // range of the amount column (4).
    String[] names = getFields(list, 1);
    String[] country = getFields(list, 2);
    String[] products = getFields(list, 3);
    double[] amounts = getMinMax(list, 4);
    NumberFormat formatter = new DecimalFormat("#.##");
    int count = 20; // running lead id, continues across all batches
    for (int i = 0; i < 100; i++) {
        List<FieldSet> items = new ArrayList<FieldSet>();
        for (FieldSet fieldSet : list) {
            String[] values = fieldSet.getValues();
            values[0] = "" + (count++);
            values[1] = choose(names);
            values[2] = choose(country);
            values[3] = choose(products);
            values[4] = formatter.format(random(amounts));
            items.add(new DefaultFieldSet(values));
        }
        writer.write(items);
    }
    writer.close();
}