Example usage for com.fasterxml.jackson.databind ObjectMapper readValues

List of usage examples for com.fasterxml.jackson.databind ObjectMapper readValues

Introduction

On this page you can find example usages of com.fasterxml.jackson.databind ObjectMapper readValues.

Prototype

@Override
public <T> MappingIterator<T> readValues(JsonParser jp, TypeReference<?> valueTypeRef)
        throws IOException, JsonProcessingException 

Source Link

Document

Method for reading sequence of Objects from parser stream.

Usage

From source file: com.streamsets.datacollector.execution.store.FilePipelineStateStore.java

@Override
public List<PipelineState> getHistory(String pipelineName, String rev, boolean fromBeginning)
        throws PipelineStoreException {
    // No history to report when the pipeline directory or its history file is absent.
    if (!pipelineDirExists(pipelineName, rev) || !pipelineStateHistoryFileExists(pipelineName, rev)) {
        return Collections.emptyList();
    }
    // NOTE(review): FileReader uses the platform-default charset — confirm the history
    // file is written with the same charset on the write path.
    try (Reader reader = new FileReader(getPipelineStateHistoryFile(pipelineName, rev))) {
        ObjectMapper objectMapper = ObjectMapperFactory.get();
        // The history file is a sequence of concatenated JSON documents, one per state
        // change. Close the parser and iterator so their buffers are released.
        try (JsonParser jsonParser = objectMapper.getFactory().createParser(reader);
                MappingIterator<PipelineStateJson> stateIterator = objectMapper.readValues(jsonParser,
                        PipelineStateJson.class)) {
            List<PipelineStateJson> pipelineStateJsons = stateIterator.readAll();
            // States are appended chronologically on disk; reverse so the newest is first.
            Collections.reverse(pipelineStateJsons);
            if (fromBeginning) {
                return BeanHelper.unwrapPipelineStatesNewAPI(pipelineStateJsons);
            }
            // Otherwise cap the result at the 100 most recent states.
            int toIndex = Math.min(pipelineStateJsons.size(), 100);
            return BeanHelper.unwrapPipelineStatesNewAPI(pipelineStateJsons.subList(0, toIndex));
        }
    } catch (IOException e) {
        throw new PipelineStoreException(ContainerError.CONTAINER_0115, pipelineName, rev, e.toString(), e);
    }
}

From source file: org.talend.components.localio.runtime.fixed.FixedDatasetRuntime.java

/**
 * Materializes up to {@code limit} records from the inline dataset value, decoding it
 * according to the configured format (CSV, JSON, or AVRO).
 *
 * @param limit maximum number of records to return (CSV ignores the limit and returns
 *              every parsed row, matching the original behavior)
 * @return the decoded records; empty when the format produced nothing
 */
public List<IndexedRecord> getValues(int limit) {
    List<IndexedRecord> values = new ArrayList<>();
    switch (properties.format.getValue()) {
    case CSV:
        try {
            CsvRecordToIndexedRecordConverter converter = new CsvRecordToIndexedRecordConverter(getSchema());
            // Parse the inline CSV text with the configured field/record delimiters.
            for (CSVRecord r : CSVFormat.RFC4180 //
                    .withDelimiter(properties.getFieldDelimiter().charAt(0)) //
                    .withRecordSeparator(properties.getRecordDelimiter())
                    .parse(new StringReader(properties.values.getValue()))) {
                values.add(converter.convertToAvro(r));
            }
        } catch (IOException e) {
            throw LocalIOErrorCode.createCannotParseSchema(e, properties.values.getValue());
        }
        break;
    case JSON:
        ObjectMapper mapper = new ObjectMapper();
        JsonSchemaInferrer jsonSchemaInferrer = new JsonSchemaInferrer(mapper);
        JsonGenericRecordConverter converter = null;
        JsonFactory jsonFactory = new JsonFactory();
        // readValues iterates over a stream of concatenated JSON documents. Closing the
        // MappingIterator also closes the underlying parser and (via AUTO_CLOSE_SOURCE)
        // the StringReader, so no resources leak.
        try (MappingIterator<JsonNode> value = mapper.readValues(
                jsonFactory.createParser(new StringReader(properties.values.getValue())), JsonNode.class)) {
            int count = 0;
            while (value.hasNext() && count++ < limit) {
                String json = value.next().toString();
                // Infer the Avro schema lazily from the first record seen.
                if (converter == null) {
                    Schema jsonSchema = jsonSchemaInferrer.inferSchema(json);
                    converter = new JsonGenericRecordConverter(jsonSchema);
                }
                values.add(converter.convertToAvro(json));
            }
        } catch (IOException e) {
            throw LocalIOErrorCode.createCannotParseJson(e, properties.schema.getValue(),
                    properties.values.getValue());
        }
        break;
    case AVRO:
        Schema schema = getSchema();
        if (isRandom()) {
            // Generate synthetic records from the schema rather than parsing input.
            GeneratorFunction<IndexedRecord> gf = (GeneratorFunction<IndexedRecord>) GeneratorFunctions
                    .of(getSchema());
            GeneratorFunction.GeneratorContext ctx = GeneratorFunction.GeneratorContext.of(0, 0L);
            for (int i = 0; i < limit; i++) {
                ctx.setRowId(i);
                values.add(gf.apply(ctx));
            }
        } else {
            // Explicit UTF-8: the no-arg getBytes() would use the platform-default
            // charset and corrupt non-ASCII input on some systems.
            try (ByteArrayInputStream bais = new ByteArrayInputStream(
                    properties.values.getValue().trim().getBytes(java.nio.charset.StandardCharsets.UTF_8))) {
                JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, bais);
                DatumReader<IndexedRecord> reader = new GenericDatumReader<>(schema);
                int count = 0;
                while (count++ < limit) {
                    values.add(reader.read(null, decoder));
                }
            } catch (EOFException e) {
                // Indicates the end of the values; expected termination, not an error.
            } catch (IOException e) {
                throw LocalIOErrorCode.createCannotParseAvroJson(e, properties.schema.getValue(),
                        properties.values.getValue());
            }
        }
        break;
    }
    return values;
}