Example usage for com.fasterxml.jackson.core JsonToken START_ARRAY

List of usage examples for com.fasterxml.jackson.core JsonToken START_ARRAY

Introduction

On this page you can find example usage for com.fasterxml.jackson.core JsonToken START_ARRAY.

Prototype

JsonToken START_ARRAY

To view the source code for com.fasterxml.jackson.core JsonToken START_ARRAY, click the Source Link below.

Document

START_ARRAY is returned when encountering '[', which signals the start of an Array value.

Usage

From source file:com.axibase.tsd.driver.jdbc.content.ContentMetadata.java

/**
 * Parses the given JSON string, choosing the binding type from its first
 * token: an object binds to {@link Map}, an array to {@link List}, anything
 * else to {@link String}.
 *
 * @param json the JSON document to parse
 * @return the parsed content, cast to a {@code Map<String, Object>}
 * @throws IOException if the document cannot be parsed
 */
@SuppressWarnings("unchecked") // readValueAs returns Object; callers expect a Map here.
// NOTE(review): when the document is an array or scalar, the cast to Map will
// fail at runtime with a ClassCastException — confirm callers only pass objects.
private static Map<String, Object> getJsonScheme(String json) throws IOException {
    try (final JsonParser parser = JsonMappingUtil.getParser(json)) {
        final JsonToken token = parser.nextToken();
        final Class<?> type;
        if (token == JsonToken.START_OBJECT) {
            type = Map.class;
        } else if (token == JsonToken.START_ARRAY) {
            type = List.class;
        } else {
            type = String.class;
        }
        return (Map<String, Object>) parser.readValueAs(type);
    }
}

From source file:org.lobid.lodmill.JsonDecoder.java

/**
 * Walks the parser forward over one record's content, dispatching field
 * values to {@code handleValue} / {@code handleValuesOfArrays} as they are
 * encountered.  Iteration stops at the end of input, at the start of a new
 * object (a nested record), or on a parse error at the end of non-JSON input.
 *
 * @param token the token the parser is currently positioned on
 * @return the token the loop stopped at, or {@code null} if input was exhausted
 * @throws IOException if reading from the underlying parser fails
 */
private JsonToken processRecordContent(JsonToken token) throws IOException, JsonParseException {
    JsonToken currentToken = token;
    String key = null;
    while (currentToken != null) {
        // Remember the most recent field name; it keys any following value(s).
        if (JsonToken.FIELD_NAME == currentToken)
            key = this.jsonParser.getCurrentName();
        if (JsonToken.START_ARRAY == currentToken) {
            currentToken = this.jsonParser.nextToken();
            if (this.JSONP)
                currentToken = this.jsonParser.nextToken();
            else {
                // break to treat objects in arrays as new objects
                if (JsonToken.START_OBJECT == currentToken)
                    break;
                currentToken = handleValuesOfArrays(currentToken, key);
            }
        }
        if (JsonToken.START_OBJECT == currentToken) {
            // An unnamed START_OBJECT marks the beginning of a new record.
            if (this.jsonParser.getCurrentName() == null)
                break;
        } else
            handleValue(currentToken, key);
        try {
            currentToken = this.jsonParser.nextToken();
        } catch (JsonParseException e) {
            // Trailing non-JSON content (e.g. a JSONP wrapper) ends the record.
            LOG.debug("Exception at the end of non JSON object, might be JSONP", e);
            currentToken = null;
            break;
        }
    }
    return currentToken;
}

From source file:com.tage.calcite.adapter.druid.DruidConnectionImpl.java

/**
 * Parses the output of a Druid query ({@code topN}, {@code select} or
 * {@code groupBy}), sending each decoded row to a {@link Sink}.
 *
 * @param queryType  the kind of Druid query whose response is being parsed
 * @param in         the raw JSON response stream
 * @param sink       receiver for the decoded rows
 * @param fieldNames names of the fields to extract from each event
 * @param fieldTypes primitive types corresponding to {@code fieldNames}
 * @param page       receives paging state (identifier and offset) for SELECT queries
 */
private void parse(com.tage.calcite.adapter.druid.QueryType queryType, InputStream in, Sink sink,
        List<String> fieldNames, List<Primitive> fieldTypes, Page page) {
    final JsonFactory factory = new JsonFactory();
    final Row.RowBuilder rowBuilder = Row.newBuilder(fieldNames.size());

    if (CalcitePrepareImpl.DEBUG) {
        // Buffer the whole response so it can be echoed and then re-read.
        try {
            final byte[] bytes = AvaticaUtils.readFullyToBytes(in);
            System.out.println("Response: " + new String(bytes));
            in = new ByteArrayInputStream(bytes);
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    try (final JsonParser parser = factory.createParser(in)) {
        switch (queryType) {
        case TOP_N:
            if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) {
                expectScalarField(parser, "timestamp");
                if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result")
                        && parser.nextToken() == JsonToken.START_ARRAY) {
                    while (parser.nextToken() == JsonToken.START_OBJECT) {
                        // loop until token equal to "}"
                        parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                        sink.send(rowBuilder.build());
                        rowBuilder.reset();
                    }
                }
            }
            break;

        case SELECT:
            if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) {
                page.pagingIdentifier = null;
                page.offset = -1;
                expectScalarField(parser, "timestamp");
                if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result")
                        && parser.nextToken() == JsonToken.START_OBJECT) {
                    if (parser.nextToken() == JsonToken.FIELD_NAME
                            && parser.getCurrentName().equals("pagingIdentifiers")
                            && parser.nextToken() == JsonToken.START_OBJECT) {
                        switch (parser.nextToken()) {
                        case FIELD_NAME:
                            page.pagingIdentifier = parser.getCurrentName();
                            if (parser.nextToken() == JsonToken.VALUE_NUMBER_INT) {
                                page.offset = parser.getIntValue();
                            }
                            expect(parser, JsonToken.END_OBJECT);
                            break;
                        case END_OBJECT:
                            // Empty pagingIdentifiers object: nothing to record.
                        }
                    }
                    if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("events")
                            && parser.nextToken() == JsonToken.START_ARRAY) {
                        while (parser.nextToken() == JsonToken.START_OBJECT) {
                            expectScalarField(parser, "segmentId");
                            expectScalarField(parser, "offset");
                            if (parser.nextToken() == JsonToken.FIELD_NAME
                                    && parser.getCurrentName().equals("event")
                                    && parser.nextToken() == JsonToken.START_OBJECT) {
                                parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                                sink.send(rowBuilder.build());
                                rowBuilder.reset();
                            }
                            expect(parser, JsonToken.END_OBJECT);
                        }
                        parser.nextToken();
                    }
                }
            }
            break;

        case GROUP_BY:
            if (parser.nextToken() == JsonToken.START_ARRAY) {
                while (parser.nextToken() == JsonToken.START_OBJECT) {
                    expectScalarField(parser, "version");
                    expectScalarField(parser, "timestamp");
                    if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("event")
                            && parser.nextToken() == JsonToken.START_OBJECT) {
                        parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                        sink.send(rowBuilder.build());
                        rowBuilder.reset();
                    }
                    expect(parser, JsonToken.END_OBJECT);
                }
            }
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag before converting to an unchecked throw,
        // so callers up the stack can still observe the interruption.
        Thread.currentThread().interrupt();
        throw Throwables.propagate(e);
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}

From source file:io.apiman.manager.api.migrator.JsonDataMigratorReader.java

/**
 * Verifies that the parser is positioned on the start of a JSON array, then
 * advances past it.
 *
 * @throws IOException if the current token is not {@link JsonToken#START_ARRAY}
 */
private void readArrayStart() throws IOException, JsonParseException {
    if (jp.getCurrentToken() != JsonToken.START_ARRAY) {
        throw new IOException("Unexpected token (array start expected)."); //$NON-NLS-1$
    }
    jp.nextToken();
}

From source file:org.apache.lucene.server.handlers.BulkUpdateDocumentsHandler.java

/**
 * Handles a streamed bulk-update request of the form
 * {@code {"indexName": ..., "documents": [...]}}, where each entry in
 * {@code documents} is a block with an update {@code term}, a {@code parent}
 * document and its {@code children}.  Each block is submitted as one atomic
 * add-documents job; the method waits until every block has been indexed.
 *
 * @param reader the JSON request body
 * @param params request parameters (not used by this handler)
 * @return a JSON string reporting {@code indexGen} and the number of
 *         document blocks indexed ({@code indexedDocumentBlockCount})
 * @throws Exception if the request is malformed or indexing fails
 */
@Override
public String handleStreamed(Reader reader, Map<String, List<String>> params) throws Exception {

    JsonFactory jfactory = new JsonFactory();

    // NOTE(review): the parser is never closed; closing it would by default also
    // close the caller-supplied reader — confirm stream ownership before changing.
    JsonParser parser = jfactory.createJsonParser(reader);

    if (parser.nextToken() != JsonToken.START_OBJECT) {
        throw new IllegalArgumentException("expected JSON object");
    }
    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (!parser.getText().equals("indexName")) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (parser.nextToken() != JsonToken.VALUE_STRING) {
        throw new IllegalArgumentException("indexName should be string");
    }

    IndexState indexState = globalState.get(parser.getText());
    indexState.verifyStarted(null);
    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected documents next");
    }
    if (!parser.getText().equals("documents")) {
        throw new IllegalArgumentException("expected documents after indexName");
    }

    if (parser.nextToken() != JsonToken.START_ARRAY) {
        throw new IllegalArgumentException("documents should be a list");
    }
    ShardState shardState = indexState.getShard(0);

    int count = 0;
    IndexingContext ctx = new IndexingContext();

    AddDocumentHandler addDocHandler = (AddDocumentHandler) globalState.getHandler("addDocument");

    // Parse as many doc blocks as there are:
    while (true) {

        List<Document> children = null;
        Document parent = null;
        Term updateTerm = null;

        JsonToken token = parser.nextToken();
        if (token == JsonToken.END_ARRAY) {
            break;
        }
        if (token != JsonToken.START_OBJECT) {
            throw new IllegalArgumentException("expected object");
        }

        // Parse term + parent + children for this one doc block:
        while (true) {
            token = parser.nextToken();
            if (token == JsonToken.END_OBJECT) {
                // Done with parent + child in this block
                break;
            }
            if (token != JsonToken.FIELD_NAME) {
                throw new IllegalArgumentException("missing field name: " + token);
            }
            String f = parser.getText();
            if (f.equals("term")) {
                if (parser.nextToken() != JsonToken.START_OBJECT) {
                    throw new IllegalArgumentException("missing object");
                }

                // TODO: allow field to be specified only once, then
                // only text per document

                String field = null, term = null;

                while (parser.nextToken() != JsonToken.END_OBJECT) {
                    String f2 = parser.getText();
                    if (f2.equals("field")) {
                        if (parser.nextToken() != JsonToken.VALUE_STRING) {
                            throw new IllegalArgumentException("missing string value");
                        }
                        field = parser.getText();
                        // Ensure field is valid:
                        indexState.getField(field);
                    } else if (f2.equals("term")) {
                        if (parser.nextToken() != JsonToken.VALUE_STRING) {
                            throw new IllegalArgumentException("missing string value");
                        }
                        term = parser.getText();
                    } else {
                        // Report the inner field name (f2) that was actually
                        // unexpected, not the enclosing "term" field (f).
                        throw new IllegalArgumentException("unexpected field " + f2);
                    }
                }
                updateTerm = new Term(field, term);
            } else if (f.equals("children")) {
                token = parser.nextToken();
                if (token != JsonToken.START_ARRAY) {
                    throw new IllegalArgumentException("expected array for children");
                }

                children = new ArrayList<Document>();

                // Parse each child:
                while (true) {
                    Document doc = addDocHandler.parseDocument(indexState, parser);
                    if (doc == null) {
                        break;
                    }
                    children.add(doc);
                }
            } else if (f.equals("parent")) {
                parent = addDocHandler.parseDocument(indexState, parser);
            } else {
                throw new IllegalArgumentException("unrecognized field name \"" + f + "\"");
            }
        }

        if (parent == null) {
            throw new IllegalArgumentException("missing parent");
        }
        if (children == null) {
            throw new IllegalArgumentException("missing children");
        }

        // Parent is last:
        children.add(parent);

        globalState.submitIndexingTask(shardState.getAddDocumentsJob(count, updateTerm, children, ctx));
        count++;
    }

    // nocommit this is ... lameish:
    // Busy-wait until every submitted block has been indexed.
    while (true) {
        if (ctx.addCount.get() == count) {
            break;
        }
        Thread.sleep(1);
    }

    Throwable t = ctx.getError();
    if (t != null) {
        IOUtils.reThrow(t);
    }

    JSONObject o = new JSONObject();
    o.put("indexGen", shardState.writer.getMaxCompletedSequenceNumber());
    o.put("indexedDocumentBlockCount", count);

    return o.toString();
}

From source file:com.amazonaws.services.cloudtrail.processinglibrary.serializer.AbstractEventSerializer.java

/**
 * Indicates whether the CloudTrail log has more events to read.
 *
 * <p>Advances the underlying parser by one token as a side effect, so this
 * must not be called more than once per event.
 *
 * @return {@code true} if the log contains more events; {@code false} otherwise.
 * @throws IOException if the log could not be opened or accessed.
 */
public boolean hasNextEvent() throws IOException {
    final JsonToken token = this.jsonParser.nextToken();
    return token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT;
}

From source file:com.cedarsoft.couchdb.io.ViewResponseSerializer.java

/**
 * Deserializes a CouchDB view response from the given stream.  Header fields
 * ({@code total_rows}, {@code offset}) are read until the {@code rows} field
 * is reached, then each row is deserialized with the supplied serializers.
 *
 * @param keySerializer      deserializer for row keys
 * @param valueSerializer    deserializer for row values
 * @param documentSerializer optional deserializer for included documents
 * @param in                 the JSON response stream
 * @return the deserialized view response
 * @throws IOException          if reading or parsing fails
 * @throws InvalidTypeException if a row cannot be deserialized
 */
public <K, V, D> ViewResponse<K, V, D> deserialize(@Nonnull JacksonSerializer<? super K> keySerializer,
        @Nonnull JacksonSerializer<? super V> valueSerializer,
        @Nullable JacksonSerializer<? extends D> documentSerializer, @Nonnull InputStream in)
        throws IOException, InvalidTypeException {
    JsonFactory jsonFactory = JacksonSupport.getJsonFactory();
    // NOTE(review): the parser is never closed; confirm whether the caller owns
    // and closes the input stream.
    JsonParser parser = jsonFactory.createJsonParser(in);

    JacksonParserWrapper parserWrapper = new JacksonParserWrapper(parser);
    parserWrapper.nextToken(JsonToken.START_OBJECT);

    parserWrapper.nextToken(JsonToken.FIELD_NAME);
    // If reduced, no total rows and no offset are available!

    String fieldName = parser.getText();

    // -1 marks "not present" (the case for reduced views).
    int totalRows = -1;
    int offset = -1;
    while (!fieldName.equals(PROPERTY_ROWS)) {
        if (fieldName.equals(PROPERTY_TOTAL_ROWS)) {
            parserWrapper.nextToken(JsonToken.VALUE_NUMBER_INT);
            totalRows = parser.getIntValue();
        }

        if (fieldName.equals(PROPERTY_OFFSET)) {
            parserWrapper.nextToken(JsonToken.VALUE_NUMBER_INT);
            offset = parser.getIntValue();
        }

        parserWrapper.nextToken(JsonToken.FIELD_NAME);
        fieldName = parser.getText();
    }

    // Now the rows...
    parserWrapper.nextToken(JsonToken.START_ARRAY);

    List<Row<K, V, D>> deserialized = new ArrayList<>();
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        Row<K, V, D> deserializedRow = rowSerializer.deserialize(keySerializer, valueSerializer,
                documentSerializer, parser);
        deserialized.add(deserializedRow);
    }

    return new ViewResponse<>(totalRows, offset, deserialized);
}

From source file:com.netflix.spectator.tdigest.Json.java

/**
 * Decodes a list of measurements from a range of a byte array.  The input is
 * expected to be a JSON array of arrays, one inner array per measurement.
 *
 * @param data   buffer containing the encoded measurements
 * @param offset start of the encoded region within {@code data}
 * @param length length of the encoded region
 * @return the decoded measurements
 * @throws IOException if the data cannot be parsed
 */
List<TDigestMeasurement> decode(byte[] data, int offset, int length) throws IOException {
    final List<TDigestMeasurement> ms = new ArrayList<>();
    // Close the parser when done so its internal buffers are released; the
    // source is an in-memory array, so closing has no external side effects.
    try (JsonParser parser = FACTORY.createParser(data, offset, length)) {
        expect(parser, JsonToken.START_ARRAY);
        while (parser.nextToken() == JsonToken.START_ARRAY) {
            ms.add(decode(parser));
        }
    }
    return ms;
}

From source file:com.addthis.codec.config.ConfigTraversingParser.java

/**
 * Advances this tree-traversing parser to the next token.  State is driven by
 * three pieces: {@code _nextToken} (a buffered token to replay),
 * {@code _startContainer} (we just emitted START_OBJECT/START_ARRAY and must
 * descend on the next call), and {@code _nodeCursor} (the current position in
 * the config tree).
 *
 * @return the next token, or {@code null} when the tree is exhausted
 */
@Override
public JsonToken nextToken() throws IOException, JsonParseException {
    // Replay a token buffered by an earlier peek, if any.
    if (_nextToken != null) {
        _currToken = _nextToken;
        _nextToken = null;
        return _currToken;
    }
    // are we to descend to a container child?
    if (_startContainer) {
        _startContainer = false;
        // minor optimization: empty containers can be skipped
        if (!_nodeCursor.currentHasChildren()) {
            _currToken = (_currToken == JsonToken.START_OBJECT) ? JsonToken.END_OBJECT : JsonToken.END_ARRAY;
            return _currToken;
        }
        _nodeCursor = _nodeCursor.iterateChildren();
        _currToken = _nodeCursor.nextToken();
        if (_currToken == JsonToken.START_OBJECT || _currToken == JsonToken.START_ARRAY) {
            // The child is itself a container: descend on the next call.
            _startContainer = true;
        }
        currentConfig = currentNode();
        return _currToken;
    }
    // No more content?
    if (_nodeCursor == null) {
        _closed = true; // if not already set
        currentConfig = null;
        return null;
    }
    // Otherwise, next entry from current cursor
    _currToken = _nodeCursor.nextToken();
    if (_currToken != null) {
        currentConfig = currentNode();
        if (_currToken == JsonToken.START_OBJECT || _currToken == JsonToken.START_ARRAY) {
            _startContainer = true;
        }
        return _currToken;
    }
    // null means no more children; need to return end marker
    _currToken = _nodeCursor.endToken();
    _nodeCursor = _nodeCursor.getParent();
    currentConfig = currentNode();
    return _currToken;
}

From source file:org.seedstack.seed.core.internal.data.DataManagerImpl.java

/**
 * Imports data definitions from a JSON stream.  The expected shape is an
 * array of objects, each with {@code group}, {@code name} and {@code items}
 * fields, validated by a small state machine ({@code ParsingState}).  Items
 * matching {@code acceptGroup}/{@code acceptName} are fed to their importer.
 * On any failure every importer used so far is rolled back; on success each
 * is committed (optionally clearing existing data first).
 *
 * @param inputStream the JSON data to import (read as UTF-8)
 * @param acceptGroup group filter passed to {@code consumeItems}
 * @param acceptName  name filter passed to {@code consumeItems}
 * @param clear       whether importers should clear existing data on commit
 */
@Override
public void importData(InputStream inputStream, String acceptGroup, String acceptName, boolean clear) {
    Set<DataImporter<Object>> usedDataImporters = new HashSet<>();

    try {
        ParsingState state = ParsingState.START;
        String group = null;
        String name = null;
        // NOTE(review): the parser (and inputStream) is never closed here —
        // confirm the caller owns and closes the stream.
        JsonParser jsonParser = this.jsonFactory
                .createParser(new InputStreamReader(inputStream, Charset.forName(UTF_8)));
        JsonToken jsonToken = jsonParser.nextToken();

        // Drive the state machine one token at a time until input is exhausted.
        while (jsonToken != null) {
            switch (state) {
            case START:
                if (jsonToken == JsonToken.START_ARRAY) {
                    state = ParsingState.DEFINITION_START;
                } else {
                    throwParsingError(jsonParser.getCurrentLocation(), "start array expected");
                }

                break;
            case DEFINITION_START:
                if (jsonToken == JsonToken.START_OBJECT) {
                    state = ParsingState.DEFINITION_GROUP;
                } else {
                    throwParsingError(jsonParser.getCurrentLocation(), "start object expected");
                }

                break;
            case DEFINITION_GROUP:
                if (jsonToken == JsonToken.FIELD_NAME && GROUP.equals(jsonParser.getCurrentName())) {
                    group = jsonParser.nextTextValue();
                    state = ParsingState.DEFINITION_NAME;
                } else {
                    throwParsingError(jsonParser.getCurrentLocation(), "group field expected");
                }

                break;
            case DEFINITION_NAME:
                if (jsonToken == JsonToken.FIELD_NAME && NAME.equals(jsonParser.getCurrentName())) {
                    name = jsonParser.nextTextValue();
                    state = ParsingState.DEFINITION_ITEMS;
                } else {
                    throwParsingError(jsonParser.getCurrentLocation(), "name field expected");
                }

                break;
            case DEFINITION_ITEMS:
                if (jsonToken == JsonToken.FIELD_NAME && ITEMS.equals(jsonParser.getCurrentName())) {
                    // consumeItems advances the parser through the items array
                    // and returns the importer it used.
                    usedDataImporters.add(consumeItems(jsonParser, group, name, acceptGroup, acceptName));
                    state = ParsingState.DEFINITION_END;
                } else {
                    throwParsingError(jsonParser.getCurrentLocation(), "items field expected");
                }

                break;
            case DEFINITION_END:
                if (jsonToken == JsonToken.END_OBJECT) {
                    group = null;
                    name = null;
                    state = ParsingState.END;
                } else {
                    throwParsingError(jsonParser.getCurrentLocation(), "end object expected");
                }

                break;
            case END:
                // Either another definition object follows, or the array ends.
                if (jsonToken == JsonToken.START_OBJECT) {
                    state = ParsingState.DEFINITION_GROUP;
                } else if (jsonToken == JsonToken.END_ARRAY) {
                    state = ParsingState.START;
                } else {
                    throwParsingError(jsonParser.getCurrentLocation(), "start object or end array expected");
                }

                break;
            default:
                throwParsingError(jsonParser.getCurrentLocation(), "unexpected parser state");
            }

            jsonToken = jsonParser.nextToken();
        }
    } catch (Exception e1) {
        // Roll back every importer touched so far; chain the original failure
        // onto any rollback failure so neither is lost.
        for (DataImporter<Object> usedDataImporter : usedDataImporters) {
            try {
                usedDataImporter.rollback();
            } catch (Exception e2) {
                e2.initCause(e1);
                throw SeedException.wrap(e2, DataErrorCode.FAILED_TO_ROLLBACK_IMPORT).put(IMPORTER_CLASS,
                        usedDataImporter.getClass().getName());
            }
        }

        throw SeedException.wrap(e1, DataErrorCode.IMPORT_FAILED);
    }

    // Parsing succeeded: commit every importer that was used.
    for (DataImporter<Object> usedDataImporter : usedDataImporters) {
        try {
            usedDataImporter.commit(clear);
        } catch (Exception e) {
            throw SeedException.wrap(e, DataErrorCode.FAILED_TO_COMMIT_IMPORT).put(IMPORTER_CLASS,
                    usedDataImporter.getClass().getName());
        }
    }
}