Example usage for com.fasterxml.jackson.databind MappingIterator hasNextValue

List of usage examples for com.fasterxml.jackson.databind MappingIterator hasNextValue

Introduction

In this page you can find the example usage for com.fasterxml.jackson.databind MappingIterator hasNextValue.

Prototype

public boolean hasNextValue() throws IOException 

Source Link

Document

Equivalent of #hasNext but one that may throw checked exceptions from Jackson due to invalid input.

Usage

From source file:io.fabric8.forge.camel.commands.project.dto.NodeDtos.java

/**
 * Drains the given {@link MappingIterator} into a list.
 *
 * @param iter iterator to consume; may be {@code null}, in which case an empty list is returned
 * @return every remaining value, in iteration order
 * @throws java.io.IOException if Jackson fails to decode the next value
 */
protected static <T> List<T> toList(MappingIterator<T> iter) throws java.io.IOException {
    final List<T> result = new ArrayList<>();
    if (iter == null) {
        return result;
    }
    while (iter.hasNextValue()) {
        result.add(iter.nextValue());
    }
    return result;
}

From source file:com.github.fge.jackson.JsonNodeReader.java

/**
 * Reads exactly one JSON value from the iterator's input and fails if the
 * input is empty or contains trailing data after the first value.
 *
 * <p>The single {@link JsonParseExceptionBuilder} is deliberately reused:
 * its message is swapped from "no content" to "trailing data" once the first
 * value has been read successfully.
 *
 * @param iterator iterator positioned at the start of the input
 * @return the single JSON value contained in the input
 * @throws IOException on empty input, trailing data, or malformed JSON
 */
private static JsonNode readNode(final MappingIterator<JsonNode> iterator) throws IOException {
    final Object source = iterator.getParser().getInputSource();
    final JsonParseExceptionBuilder builder = new JsonParseExceptionBuilder(source);

    // First failure mode: the input holds no JSON value at all.
    builder.setMessage(BUNDLE.getMessage("read.noContent"));

    if (!iterator.hasNextValue())
        throw builder.build();

    final JsonNode ret = iterator.nextValue();

    // From here on, any further token means trailing garbage after the value.
    builder.setMessage(BUNDLE.getMessage("read.trailingData")).setLocation(iterator.getCurrentLocation());

    try {
        // hasNextValue() may itself throw if the trailing bytes are malformed;
        // in that case prefer the parse error's own location over the current one.
        if (iterator.hasNextValue())
            throw builder.build();
    } catch (JsonParseException e) {
        throw builder.setLocation(e.getLocation()).build();
    }

    return ret;
}

From source file:org.usrz.libs.riak.response.JsonContentHandler.java

@Override
protected T read(PartialResponse<T> partial, InputStream input) throws Exception {

    /* Use a MappingIterator, as we don't want to fail on empty JSON */
    final JsonParser parser = mapper.getFactory().createParser(input);
    final MappingIterator<T> iterator = mapper.readValues(parser, type);

    /*
     * Close the iterator (and its parser) on every path. The original code
     * only reached close() after a value had been read, leaking the parser
     * whenever the input contained no JSON value at all; it also used a
     * "while" loop for what is a single read.
     */
    try {
        /* Read only the first value, if any; empty input yields null. */
        return iterator.hasNextValue() ? iterator.next() : null;
    } finally {
        iterator.close();
    }
}

From source file:org.usrz.libs.riak.response.ChunkedContentHandler.java

/**
 * Streams chunked JSON results into the {@code puttable} sink.
 *
 * @param partial the partial response the chunks belong to
 * @param input   raw response body to parse as a sequence of chunks
 * @return false if any chunk rejects its data, otherwise the sink's close result
 * @throws Exception any parse or sink failure, after notifying the sink
 */
@Override
protected Boolean read(PartialResponse<Boolean> partial, InputStream input) throws Exception {
    try {
        final JsonParser parser = mapper.getFactory().createParser(input);
        final MappingIterator<? extends Chunk<T, H>> iterator = mapper.readValues(parser, chunkType);
        // Feed each chunk into the sink; bail out early if a chunk reports failure.
        while (iterator.hasNextValue()) {
            final Chunk<T, H> chunk = iterator.next();
            if (chunk.putAll(partial, thisInstance))
                continue;
            else
                return false;
        }
        // All chunks consumed; the sink's close() decides the overall outcome.
        return puttable.close();
    } catch (Throwable throwable) {
        // Notify the sink of the failure first, then rethrow. Non-Exception
        // throwables are wrapped so the declared signature is honored.
        puttable.fail(throwable);
        if (throwable instanceof Exception)
            throw (Exception) throwable;
        throw new ExecutionException(throwable);
    }
}

From source file:ro.fortsoft.dada.csv.CsvGenericDao.java

/**
 * Loads all entities from the configured CSV file into {@code this.entities}.
 *
 * <p>Best-effort: a missing file yields 0, and an I/O error mid-read logs the
 * problem and returns the count of entities read so far.
 *
 * @return the number of entities successfully read
 */
public long readFromCsv() {
    File file = new File(csvFile);
    if (!file.exists() || !file.isFile()) {
        return 0;
    }

    Class<T> persistentClass = getPersistentClass();

    // create mapper and schema; unknown CSV columns are tolerated
    CsvMapper mapper = new CsvMapper();
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    CsvSchema schema = mapper.schemaFor(persistentClass).withHeader();

    this.entities = new ArrayList<>();

    // read entities; try-with-resources closes the MappingIterator (and the
    // underlying file handle), which the original code leaked
    long count = 0;
    try (MappingIterator<T> it = mapper.reader(persistentClass).with(schema).readValues(file)) {
        while (it.hasNextValue()) {
            entities.add(it.nextValue());
            count++;
        }
    } catch (IOException e) {
        // Preserve original best-effort behavior: log and return partial count.
        e.printStackTrace();
    }

    return count;
}

From source file:web.StreamMeetupComTask.java

/**
 * Consumes the meetup.com RSVP stream and records metrics for each entry:
 * country distribution, guest-count histogram, and reservation lead time.
 *
 * @return always null (Callable contract); errors are logged, not thrown
 */
@Override
public Void call() {
    try {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectReader reader = objectMapper.reader(Map.class);
        MappingIterator<Map<String, Object>> iterator = reader.readValues(getInputStream());

        try {
            while (iterator.hasNextValue()) {
                Map<String, Object> entry = iterator.nextValue();

                // monitor the distribution of countries
                if (entry.containsKey("group") && entry.get("group") instanceof Map) {
                    Map<String, Object> group = (Map<String, Object>) entry.get("group");
                    if (group.containsKey("group_country")) {
                        metrics.meter("meetup.country." + group.get("group_country")).mark();
                        metrics.meter("meetup.country.total").mark();
                    }
                }

                // monitor the distribution of the number of guests
                if (entry.containsKey("guests") && entry.get("guests") instanceof Long) {
                    metrics.histogram("meetup.guests").update((Long) entry.get("guests"));
                }

                // monitor reservation time upfront, 1d, 4d, 1w, 2w, 1m, 2m, -
                if (entry.containsKey("event") && entry.get("event") instanceof Map) {
                    Map<String, Object> event = (Map<String, Object>) entry.get("event");
                    if (event.get("time") instanceof Long) {
                        metrics.counter("meetup.reservation.time.total").inc();
                        metrics.counter(
                                "meetup.reservation.time." + getUpfrontReservationTime((Long) event.get("time")))
                                .inc();
                    }
                }
            }
        } finally {
            // Release the parser and underlying stream; the original never closed it.
            iterator.close();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    return null;
}

From source file:org.wikidata.wdtk.datamodel.json.jackson.JsonSerializerTest.java

/**
 * Round-trip test: serializes two item documents and one property document
 * with {@link JsonSerializer}, re-reads the output with Jackson, and checks
 * that the deserialized documents equal the originals.
 */
@Test
public void testSerializer() throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonSerializer serializer = new JsonSerializer(out);

    // Item with one label and statement groups.
    ItemDocument id1 = Datamodel.makeItemDocument(DataObjectFactoryImplTest.getTestItemIdValue(1),
            Collections
                    .<MonolingualTextValue>singletonList(Datamodel.makeMonolingualTextValue("Label1", "lang1")),
            Collections.<MonolingualTextValue>emptyList(), Collections.<MonolingualTextValue>emptyList(),
            DataObjectFactoryImplTest.getTestStatementGroups(1, 24, 1, EntityIdValue.ET_ITEM),
            Collections.<String, SiteLink>emptyMap());
    // Item with a site link but no labels/descriptions/aliases.
    ItemDocument id2 = Datamodel.makeItemDocument(DataObjectFactoryImplTest.getTestItemIdValue(2),
            Collections.<MonolingualTextValue>emptyList(), Collections.<MonolingualTextValue>emptyList(),
            Collections.<MonolingualTextValue>emptyList(),
            DataObjectFactoryImplTest.getTestStatementGroups(2, 23, 1, EntityIdValue.ET_ITEM),
            Collections.<String, SiteLink>singletonMap("enwiki",
                    Datamodel.makeSiteLink("Title2", "enwiki", Collections.<String>emptyList())));
    // Property with a single alias and a commonsMedia datatype.
    PropertyDocument pd1 = Datamodel.makePropertyDocument(DataObjectFactoryImplTest.getTestPropertyIdValue(1),
            Collections.<MonolingualTextValue>emptyList(), Collections.<MonolingualTextValue>emptyList(),
            Collections
                    .<MonolingualTextValue>singletonList(Datamodel.makeMonolingualTextValue("Alias1", "lang1")),
            Collections.<StatementGroup>emptyList(),
            Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_COMMONS_MEDIA));

    serializer.open();
    serializer.processItemDocument(id1);
    serializer.processItemDocument(id2);
    serializer.processPropertyDocument(pd1);
    serializer.close();

    ArrayList<EntityDocument> inputDocuments = new ArrayList<>();
    inputDocuments.add(id1);
    inputDocuments.add(id2);
    inputDocuments.add(pd1);

    ArrayList<EntityDocument> outputDocuments = new ArrayList<>();

    ObjectMapper mapper = new ObjectMapper();
    ObjectReader documentReader = mapper.reader(JacksonTermedStatementDocument.class);

    // Deserialize everything the serializer wrote.
    MappingIterator<JacksonTermedStatementDocument> documentIterator = documentReader
            .readValues(out.toString());
    while (documentIterator.hasNextValue()) {
        JacksonTermedStatementDocument document = documentIterator.nextValue();
        // Site IRI is not part of the JSON payload; set it so equals() can succeed.
        document.setSiteIri("foo:");
        outputDocuments.add(document);
    }
    documentIterator.close();

    for (int i = 0; i < outputDocuments.size(); i++) {
        assertEquals(inputDocuments.get(i), outputDocuments.get(i));
    }
    assertEquals(serializer.getEntityDocumentCount(), 3);
}

From source file:edu.cmu.cs.lti.discoursedb.io.bazaar.converter.BazaarConverter.java

/**
 * Converts a Bazaar chat-room export (CSV room file + CSV message file) into
 * DiscourseDB entities.
 *
 * <p>Phase 0 preprocesses the raw message lines: rows broken across physical
 * lines by embedded line feeds are re-joined, and a few known problematic
 * escaped-quote patterns are normalized so the CSV parser can cope.
 * Phase 1 maps every room; phase 2 maps every message to its room.
 *
 * @param messageFileDir path to the message CSV file
 * @param roomFileDir    path to the room CSV file
 * @param agentname      agent name whose quoted occurrences are normalized
 * @throws ParseException on malformed input data
 * @throws IOException    on failure reading the message file
 */
private void convert(String messageFileDir, String roomFileDir, String agentname)
        throws ParseException, IOException {

    Map<String, String> roomIdNameMap = new HashMap<>();
    List<String> messages = new ArrayList<>();

    //Read input file and preprocess
    String lineFragment = null;
    for (String line : FileUtils.readLines(new File(messageFileDir))) {
        //line fragments occur in case we have line feeds in a column
        if (lineFragment != null) {
            line = lineFragment + line;
            lineFragment = null;
        }
        // A trailing backslash (or backslash + CR/FF) marks a row continued on
        // the next physical line: strip the artifact and stash the fragment.
        if (line.endsWith("\\") || line.endsWith("\\\r\f")) {
            line = line.replaceAll("\\\r\f", "");
            lineFragment = line;
        } else {
            // NOTE(review): these replacements strip escaped quotes around a few
            // known literals so the CSV parser doesn't choke — presumably
            // working around quoting bugs in the export; verify against sample data.
            if (line.contains("\\\"We're Ready\\\"")) {
                line = line.replaceAll("\"We're Ready\\\\\"", "We're Ready\\\\");
            }
            if (line.contains("\\\"ready\\\"")) {
                line = line.replaceAll("\\\\\"ready\\\\\"", "\\\\ready\\\\");
            }
            if (line.contains("\\\"" + agentname + "\\\"")) {
                line = line.replaceAll("\\\\\"" + agentname + "\\\\\"", "\\\\" + agentname + "\\\\");
            }
            messages.add(line);
        }
    }

    // Phase 1: read through input room file once and map all entities
    try (InputStream in = new FileInputStream(roomFileDir)) {
        CsvMapper mapper = new CsvMapper();
        CsvSchema schema = mapper.schemaFor(Room.class).withColumnSeparator(',');
        MappingIterator<Room> rIter = mapper.readerFor(Room.class).with(schema).readValues(in);
        while (rIter.hasNextValue()) {
            Room r = rIter.next();
            // First occurrence of a room id wins in the id -> name map.
            if (!roomIdNameMap.containsKey(r.getId()))
                roomIdNameMap.put(r.getId(), r.getName());
            converterService.mapRoom(r, dataSetName, discourseName);
        }
    } catch (IOException e) {
        log.error("Error reading room file", e);
    }

    // Phase 2: read through input message file and map relationships between room and message
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(Message.class).withColumnSeparator(',');
    for (String message : messages) {
        Message m = mapper.readerFor(Message.class).with(schema).readValue(message);
        // Content-bearing types become messages; everything else is an interaction.
        if (m.getType().equals("text") || m.getType().equals("image") || m.getType().equals("private")) {
            converterService.mapMessage(m, dataSetName, discourseName, roomIdNameMap);
        } else {
            converterService.mapInteraction(m, dataSetName, discourseName, roomIdNameMap);
        }
    }
}

From source file:logfile.LogfileStreamer.java

/**
 * Streams a JSON log feed into Elasticsearch, reporting metrics along the way.
 *
 * <p>Reads one JSON object at a time from the input stream; heartbeat entries
 * only bump a counter, while geo-tagged entries ("ll" + "t") are added to a
 * bulk indexing request.
 *
 * @throws Exception on any setup or streaming failure
 */
public void run() throws Exception {
    startElasticsearchIfNecessary();
    createIndexAndMappingIfNecessary();

    // index into the metrics index without date formatting
    ElasticsearchReporter reporter = ElasticsearchReporter.forRegistry(registry).hosts("localhost:9200")
            .indexDateFormat("").percolationNotifier(new HttpNotifier()).percolationFilter(MetricFilter.ALL)
            .build();
    reporter.start(60, TimeUnit.SECONDS);

    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
    ObjectReader reader = objectMapper.reader(Map.class);
    MappingIterator<Map<String, Object>> iterator = reader.readValues(getInputStream());

    try {
        while (iterator.hasNextValue()) {
            Map<String, Object> entry = iterator.nextValue();
            // Heartbeat entries carry no data; count and skip them.
            if (entry.containsKey("_heartbeat_")) {
                heartbeatCounter.inc();
                continue;
            }

            // "ll" = [latitude, longitude], "t" = epoch timestamp (seconds).
            if (entry.containsKey("ll") && entry.containsKey("t")) {
                long timestamp = ((Integer) entry.get("t")).longValue();
                List<Number> location = (List<Number>) entry.get("ll");
                double latitude = location.get(0).doubleValue();
                double longitude = location.get(1).doubleValue();

                addToBulkRequest(timestamp, latitude, longitude);
                entryMeter.mark(1);
            }
        }
    } finally {
        // Flush whatever accumulated, even if the stream failed mid-way.
        executeBulkRequest();
    }
}

From source file:org.wikidata.wdtk.dumpfiles.JsonDumpFileProcessor.java

/**
 * Process dump file data from the given input stream. This method uses the
 * efficient Jackson {@link MappingIterator}. However, this class cannot
 * recover from processing errors. If an error occurs in one entity, the
 * (presumably) less efficient processing method
 * {@link #processDumpFileContentsRecovery(InputStream)} is used instead.
 *
 * @see MwDumpFileProcessor#processDumpFileContents(InputStream, MwDumpFile)
 */
@Override
public void processDumpFileContents(InputStream inputStream, MwDumpFile dumpFile) {

    logger.info("Processing JSON dump file " + dumpFile.toString());

    try {
        try {
            MappingIterator<JacksonTermedStatementDocument> documentIterator = documentReader
                    .readValues(inputStream);
            // Keep the source open: the recovery path below re-reads the same
            // inputStream after a processing error, so the parser must not
            // close it.
            documentIterator.getParser().disable(Feature.AUTO_CLOSE_SOURCE);

            while (documentIterator.hasNextValue()) {
                JacksonTermedStatementDocument document = documentIterator.nextValue();
                handleDocument(document);
            }
            documentIterator.close();
        } catch (JsonProcessingException e) {
            // One malformed entity: log it and fall back to the slower,
            // line-by-line recovery processing of the remaining stream.
            logJsonProcessingException(e);
            processDumpFileContentsRecovery(inputStream);
        }
    } catch (IOException e) {
        throw new RuntimeException("Cannot read JSON input: " + e.getMessage(), e);
    }

}