Example usage for com.fasterxml.jackson.core JsonToken START_ARRAY

List of usage examples for com.fasterxml.jackson.core JsonToken START_ARRAY

Introduction

On this page you can find usage examples for com.fasterxml.jackson.core JsonToken START_ARRAY.

Prototype

JsonToken START_ARRAY

Document

START_ARRAY is returned when the parser encounters '[', which signals the start of an Array value.
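
As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing where START_ARRAY appears while parsing a small JSON array:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.IOException;

public class StartArrayDemo {
    public static void main(String[] args) throws IOException {
        JsonParser parser = new JsonFactory().createParser("[1, 2, 3]");
        JsonToken first = parser.nextToken();           // START_ARRAY, produced by the leading '['
        System.out.println(first);
        while (parser.nextToken() != JsonToken.END_ARRAY) {
            System.out.println(parser.getIntValue());   // 1, 2, 3
        }
        parser.close();
    }
}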

Usage

From source file: org.mashti.jetson.util.JsonParserUtil.java

/**
 * Expects start array.
 *
 * @param parser the parser
 * @throws IOException Signals that an I/O exception has occurred.
 */
private static void expectStartArray(final JsonParser parser) throws IOException {

    if (parser.nextToken() != JsonToken.START_ARRAY) {
        throw new JsonParseException("expected start array", parser.getCurrentLocation());
    }
}
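
A hypothetical caller in the same class might invoke it before iterating the array elements; this is only an illustrative sketch (readElement is a placeholder, not an actual method of JsonParserUtil):

private static void readArray(final JsonParser parser) throws IOException {
    expectStartArray(parser);
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        readElement(parser); // placeholder for whatever per-element handling the caller needs
    }
}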

From source file: com.amazonaws.services.cloudtrail.processinglibrary.serializer.AbstractEventSerializer.java

/**
 * Read the header of an AWS CloudTrail log.
 *
 * @throws JsonParseException if the log could not be parsed.
 * @throws IOException if the log could not be opened or accessed.
 */
protected void readArrayHeader() throws JsonParseException, IOException {
    if (this.jsonParser.nextToken() != JsonToken.START_OBJECT) {
        throw new JsonParseException("Not a Json object", this.jsonParser.getCurrentLocation());
    }

    this.jsonParser.nextToken();
    if (!jsonParser.getText().equals(RECORDS)) {
        throw new JsonParseException("Not a CloudTrail log", this.jsonParser.getCurrentLocation());
    }

    if (this.jsonParser.nextToken() != JsonToken.START_ARRAY) {
        throw new JsonParseException("Not a CloudTrail log", this.jsonParser.getCurrentLocation());
    }
}
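
Judging from the token checks above, the log this method accepts starts with a single top-level object whose first field (named by the RECORDS constant, shown here as "Records" for illustration) holds the array of events, roughly:

{
  "Records": [
    { "eventName": "...", "eventTime": "..." },
    { "eventName": "...", "eventTime": "..." }
  ]
}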

From source file: com.blackducksoftware.bdio.io.BdioReader.java

public BdioReader(LinkedDataContext context, Reader in) throws IOException {
    // Store the context, we may need to overwrite it later
    this.context = checkNotNull(context);

    // Setup the JSON parser
    jp = new ObjectMapper().getFactory().createParser(in);

    // Start by finding the list of nodes
    if (jp.nextToken() != JsonToken.START_ARRAY) {
        throw new IOException("expected input to start with an array");
    }
}

From source file: org.apache.lucene.server.handlers.BulkAddDocumentsHandler.java

@Override
public String handleStreamed(Reader reader, Map<String, List<String>> params) throws Exception {

    JsonFactory jfactory = new JsonFactory();

    JsonParser parser = jfactory.createJsonParser(reader);

    if (parser.nextToken() != JsonToken.START_OBJECT) {
        throw new IllegalArgumentException("expected JSON object");
    }
    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (!parser.getText().equals("indexName")) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (parser.nextToken() != JsonToken.VALUE_STRING) {
        throw new IllegalArgumentException("indexName should be string");
    }

    IndexState indexState = globalState.get(parser.getText());
    indexState.verifyStarted(null);
    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected documents next");
    }
    if (!parser.getText().equals("documents")) {
        throw new IllegalArgumentException("expected documents after indexName");
    }

    ShardState shardState = indexState.getShard(0);

    if (parser.nextToken() != JsonToken.START_ARRAY) {
        throw new IllegalArgumentException("documents should be a list");
    }

    int count = 0;
    IndexingContext ctx = new IndexingContext();

    AddDocumentHandler addDocHandler = (AddDocumentHandler) globalState.getHandler("addDocument");

    // Parse as many doc blocks as there are:
    while (true) {

        List<Document> children = null;
        Document parent = null;

        JsonToken token = parser.nextToken();
        if (token == JsonToken.END_ARRAY) {
            break;
        }
        if (token != JsonToken.START_OBJECT) {
            throw new IllegalArgumentException("expected object");
        }

        // Parse parent + children for this one doc block:
        while (true) {
            token = parser.nextToken();
            if (token == JsonToken.END_OBJECT) {
                // Done with parent + child in this block
                break;
            }
            if (token != JsonToken.FIELD_NAME) {
                throw new IllegalArgumentException("missing field name: " + token);
            }
            String f = parser.getText();
            if (f.equals("children")) {
                token = parser.nextToken();
                if (token != JsonToken.START_ARRAY) {
                    throw new IllegalArgumentException("expected array for children");
                }

                children = new ArrayList<Document>();

                // Parse each child:
                while (true) {
                    Document doc = addDocHandler.parseDocument(indexState, parser);
                    if (doc == null) {
                        break;
                    }
                    children.add(doc);
                }
            } else if (f.equals("parent")) {
                parent = addDocHandler.parseDocument(indexState, parser);
            } else {
                throw new IllegalArgumentException("unrecognized field name \"" + f + "\"");
            }
        }

        if (parent == null) {
            throw new IllegalArgumentException("missing parent");
        }
        if (children == null) {
            throw new IllegalArgumentException("missing children");
        }

        // Parent is last:
        children.add(parent);

        globalState.submitIndexingTask(shardState.getAddDocumentsJob(count, null, children, ctx));
        count++;
    }

    // nocommit this is ... lameish:
    while (true) {
        if (ctx.addCount.get() == count) {
            break;
        }
        Thread.sleep(1);
    }

    Throwable t = ctx.getError();
    if (t != null) {
        IOUtils.reThrow(t);
    }

    JSONObject o = new JSONObject();
    o.put("indexGen", shardState.writer.getMaxCompletedSequenceNumber());
    o.put("indexedDocumentBlockCount", count);
    return o.toString();
}

From source file: org.apache.lucene.server.handlers.BulkUpdateDocumentHandler.java

@Override
public String handleStreamed(Reader reader, Map<String, List<String>> params) throws Exception {
    JsonFactory jfactory = new JsonFactory();

    JsonParser parser = jfactory.createJsonParser(reader);

    if (parser.nextToken() != JsonToken.START_OBJECT) {
        throw new IllegalArgumentException("expected JSON object");
    }
    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (!parser.getText().equals("indexName")) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (parser.nextToken() != JsonToken.VALUE_STRING) {
        throw new IllegalArgumentException("indexName should be string");
    }

    IndexState indexState = globalState.get(parser.getText());
    indexState.verifyStarted(null);

    ShardState shardState = indexState.getShard(0);

    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected documents next");
    }
    if (!parser.getText().equals("documents")) {
        throw new IllegalArgumentException("expected documents after indexName");
    }
    if (parser.nextToken() != JsonToken.START_ARRAY) {
        throw new IllegalArgumentException("documents should be a list");
    }

    IndexingContext ctx = new IndexingContext();

    AddDocumentHandler addDocHandler = (AddDocumentHandler) globalState.getHandler("addDocument");

    // Parse any number of documents to update:
    int count = 0;

    while (true) {
        JsonToken token = parser.nextToken();
        if (token == JsonToken.END_ARRAY) {
            break;
        }
        if (token != JsonToken.START_OBJECT) {
            throw new IllegalArgumentException("missing object");
        }

        // Parse term: and fields:
        Term updateTerm = null;

        final Document doc = new Document();

        while (true) {
            token = parser.nextToken();
            if (token == JsonToken.END_OBJECT) {
                break;
            }
            if (token != JsonToken.FIELD_NAME) {
                throw new IllegalArgumentException("missing field name");
            }
            String f = parser.getText();
            if (f.equals("term")) {
                if (parser.nextToken() != JsonToken.START_OBJECT) {
                    throw new IllegalArgumentException("missing object");
                }

                // TODO: allow field to be specified only once, then
                // only text per document

                String field = null, term = null;

                while (parser.nextToken() != JsonToken.END_OBJECT) {
                    String f2 = parser.getText();
                    if (f2.equals("field")) {
                        if (parser.nextToken() != JsonToken.VALUE_STRING) {
                            throw new IllegalArgumentException("missing string value");
                        }
                        field = parser.getText();
                        // Ensure field is valid:
                        indexState.getField(field);
                    } else if (f2.equals("term")) {
                        if (parser.nextToken() != JsonToken.VALUE_STRING) {
                            throw new IllegalArgumentException("missing string value");
                        }
                        term = parser.getText();
                    } else {
                        throw new IllegalArgumentException("unexpected field " + f);
                    }
                }
                updateTerm = new Term(field, term);
            } else if (f.equals("fields")) {
                addDocHandler.parseFields(indexState, doc, parser);
            } else {
                boolean handled = false;
                for (AddDocumentHandler.PostHandle postHandle : addDocHandler.postHandlers) {
                    if (postHandle.invoke(indexState, f, parser, doc)) {
                        handled = true;
                        break;
                    }
                }
                if (!handled) {
                    throw new IllegalArgumentException("unrecognized field " + parser.getText());
                }
            }
        }

        if (updateTerm == null) {
            throw new IllegalArgumentException("missing term");
        }

        // TODO: this is dup'd code ... share better w/ AddDocHandler
        globalState.submitIndexingTask(shardState.getAddDocumentJob(count, updateTerm, doc, ctx));
        count++;
    }

    // nocommit this is ... lameish:
    while (true) {
        if (ctx.addCount.get() == count) {
            break;
        }
        Thread.sleep(1);
    }

    JSONObject o = new JSONObject();
    o.put("indexGen", shardState.writer.getMaxCompletedSequenceNumber());
    o.put("indexedDocumentCount", count);
    return o.toString();
}

From source file: com.basistech.rosette.dm.jackson.MorphoAnalysisListDeserializer.java

@Override
public List<MorphoAnalysis> deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {

    if (!cached) {
        throw new JsonMappingException(jp,
                "attempt to deserialize with un-contextualized MorphoAnalysisListDeserializer");
    }

    /*
     * This will be entered pointing to the array start.
     */
    if (jp.getCurrentToken() != JsonToken.START_ARRAY) {
        throw ctxt.wrongTokenException(jp, JsonToken.START_ARRAY, "Expected array of items");
    }

    JsonDeserializer<Object> currentDeserializer = castDeserializer(
            ctxt.getAttribute(MorphoAnalysisListDeserializer.class));
    if (currentDeserializer == null) {
        currentDeserializer = maDeserializer;
        ctxt.setAttribute(MorphoAnalysisListDeserializer.class, maDeserializer);
    }

    List<MorphoAnalysis> result = Lists.newArrayList();
    while (jp.nextToken() != JsonToken.END_ARRAY) {
        // if we just read it in as the wrong class, any leftovers will end up in extendedAttributes, and we can cope.

        MorphoAnalysis analysis = (MorphoAnalysis) currentDeserializer.deserialize(jp, ctxt);
        if (analysis.getExtendedProperties().size() != 0) {
            // so, we have leftovers. Note that this will not trim han and arabic down. Tough

            if (analysis.getExtendedProperties().containsKey("morphemes")) {
                KoreanMorphoAnalysis.Builder builder = new KoreanMorphoAnalysis.Builder();
                copyBasic(analysis, builder);

                List<String> morphemes = cast(analysis.getExtendedProperties().get("morphemes"));
                List<String> morphemeTags = cast(analysis.getExtendedProperties().get("morphemeTags"));
                for (int x = 0; x < morphemes.size(); x++) {
                    builder.addMorpheme(morphemes.get(x), morphemeTags.get(x));
                }

                for (Map.Entry<String, Object> me : analysis.getExtendedProperties().entrySet()) {
                    if (!"morphemes".equals(me.getKey()) && !"morphemeTags".equals(me.getKey())) {
                        builder.extendedProperty(me.getKey(), me.getValue());
                    }
                }

                analysis = builder.build();
                ctxt.setAttribute(MorphoAnalysisListDeserializer.class, korMaDeserializer);

            } else if (analysis.getExtendedProperties().containsKey("readings")) {
                // convert to Han.
                HanMorphoAnalysis.Builder builder = new HanMorphoAnalysis.Builder();
                copyBasic(analysis, builder);

                for (String reading : cast(analysis.getExtendedProperties().get("readings"))) {
                    builder.addReading(reading);
                }

                for (Map.Entry<String, Object> me : analysis.getExtendedProperties().entrySet()) {
                    if (!"readings".equals(me.getKey())) {
                        builder.extendedProperty(me.getKey(), me.getValue());
                    }
                }

                analysis = builder.build();
                ctxt.setAttribute(MorphoAnalysisListDeserializer.class, hanMaDeserializer);
            } else if (anyArabicFields(analysis.getExtendedProperties().keySet())) {
                ArabicMorphoAnalysis.Builder builder = new ArabicMorphoAnalysis.Builder();
                copyBasic(analysis, builder);

                Integer prefixLength = (Integer) analysis.getExtendedProperties().get("prefixLength");
                Integer stemLength = (Integer) analysis.getExtendedProperties().get("stemLength");
                if (prefixLength != null && stemLength != null) {
                    builder.lengths(prefixLength, stemLength);
                }
                String root = (String) analysis.getExtendedProperties().get("root");
                if (root != null) {
                    builder.root(root);
                }
                Boolean definiteArticle = (Boolean) analysis.getExtendedProperties().get("definiteArticle");
                if (definiteArticle != null) {
                    builder.definiteArticle(definiteArticle);
                }
                Boolean strippablePrefix = (Boolean) analysis.getExtendedProperties().get("strippablePrefix");
                if (strippablePrefix != null) {
                    builder.strippablePrefix(strippablePrefix);
                }

                List<String> prefixes = cast(analysis.getExtendedProperties().get("prefixes"));
                if (prefixes != null) {
                    List<String> prefixTags = cast(analysis.getExtendedProperties().get("prefixTags"));
                    for (int x = 0; x < prefixes.size(); x++) {
                        builder.addPrefix(prefixes.get(x), prefixTags.get(x));
                    }
                }

                List<String> stems = cast(analysis.getExtendedProperties().get("stems"));
                if (stems != null) {
                    List<String> stemTags = cast(analysis.getExtendedProperties().get("stemTags"));
                    for (int x = 0; x < stems.size(); x++) {
                        builder.addStem(stems.get(x), stemTags.get(x));
                    }
                }

                List<String> suffixes = cast(analysis.getExtendedProperties().get("suffixes"));
                if (suffixes != null) {
                    List<String> suffixTags = cast(analysis.getExtendedProperties().get("suffixTags"));
                    for (int x = 0; x < suffixes.size(); x++) {
                        builder.addSuffix(suffixes.get(x), suffixTags.get(x));
                    }
                }

                for (Map.Entry<String, Object> me : analysis.getExtendedProperties().entrySet()) {
                    if (!ARABIC_FIELDS.contains(me.getKey())) {
                        builder.extendedProperty(me.getKey(), me.getValue());
                    }
                }

                analysis = builder.build();
                ctxt.setAttribute(MorphoAnalysisListDeserializer.class, arMaDeserializer);
            }
        }
        result.add(analysis);
    }
    return ImmutableList.copyOf(result);
}

From source file: org.springframework.data.couchbase.core.convert.translation.JacksonTranslationService.java

/**
 * Decode a JSON string into the {@link CouchbaseStorable} structure.
 *
 * @param source the source formatted document.
 * @param target the target of the populated data.
 *
 * @return the decoded structure.
 */
@Override
public final CouchbaseStorable decode(final Object source, final CouchbaseStorable target) {
    try {
        JsonParser parser = factory.createParser((String) source);
        while (parser.nextToken() != null) {
            JsonToken currentToken = parser.getCurrentToken();

            if (currentToken == JsonToken.START_OBJECT) {
                return decodeObject(parser, (CouchbaseDocument) target);
            } else if (currentToken == JsonToken.START_ARRAY) {
                return decodeArray(parser, new CouchbaseList());
            } else {
                throw new MappingException("JSON to decode needs to start as array or object!");
            }
        }
        parser.close();
    } catch (IOException ex) {
        throw new RuntimeException("Could not decode JSON", ex);
    }
    return target;
}

From source file: org.killbill.billing.plugin.meter.timeline.persistent.Replayer.java

@VisibleForTesting
public void read(final File file, final Function<SourceSamplesForTimestamp, Void> fn) throws IOException {
    final JsonParser smileParser = smileFactory.createJsonParser(file);
    if (smileParser.nextToken() != JsonToken.START_ARRAY) {
        return;
    }

    while (!shuttingDown.get() && smileParser.nextToken() != JsonToken.END_ARRAY) {
        final SourceSamplesForTimestamp sourceSamplesForTimestamp = smileParser
                .readValueAs(SourceSamplesForTimestamp.class);
        fn.apply(sourceSamplesForTimestamp);
    }

    smileParser.close();
}

From source file: com.basistech.rosette.dm.jackson.ListAttributeDeserializer.java

@SuppressWarnings("unchecked")
private ListAttribute deserialize(JsonParser jp, DeserializationContext ctxt, TokenBuffer tb)
        throws IOException {
    jp.nextToken();
    String keyName = jp.getText();

    if (tb != null) { // need to put back skipped properties?
        jp = JsonParserSequence.createFlattened(tb.asParser(jp), jp);
    }
    // Must point to the next value; tb had no current, jp pointed to VALUE_STRING:

    KnownAttribute attribute = KnownAttribute.getAttributeForKey(keyName);
    if (attribute == null) {
        attribute = KnownAttribute.UNKNOWN;
    }
    Class<? extends BaseAttribute> itemClass = attribute.attributeClass();

    ListAttribute.Builder<BaseAttribute> builder = new ListAttribute.Builder<>(attribute.attributeClass());
    List<BaseAttribute> items = Lists.newArrayList();

    JsonToken nextToken;
    while ((nextToken = jp.nextToken()) != JsonToken.END_OBJECT) {
        if (nextToken != JsonToken.FIELD_NAME) {
            throw ctxt.wrongTokenException(jp, JsonToken.END_OBJECT, "Expected field name.");
        } else {
            String name = jp.getCurrentName();
            if ("items".equals(name)) {
                // the actual list items.
                nextToken = jp.nextToken();
                if (nextToken == JsonToken.VALUE_EMBEDDED_OBJECT) {
                    Object o = jp.getEmbeddedObject();
                    if (o instanceof List) { // could it be an array, also?!?
                        // when using JsonTree, sometimes Jackson just sticks the entire Java object in here.
                        items.addAll((List) o);
                    } else {
                        throw ctxt.mappingException(
                                "List contains VALUE_EMBEDDED_OBJECT for items, but it wasn't a list.");
                    }
                } else if (nextToken != JsonToken.START_ARRAY) { // what about nothing?
                    throw ctxt.wrongTokenException(jp, JsonToken.START_ARRAY, "Expected array of items");
                } else {
                    // the START_ARRAY case, which is _normal_. Read the elements.
                    while (jp.nextToken() != JsonToken.END_ARRAY) {
                        items.add(jp.readValueAs(itemClass));
                    }
                }
            } else {
                nextToken = jp.nextToken();
                Object value;
                if (nextToken == JsonToken.VALUE_EMBEDDED_OBJECT) {
                    value = jp.getEmbeddedObject();
                } else {
                    value = jp.readValueAs(Object.class);
                }
                builder.extendedProperty(name, value);
            }
        }
    }
    builder.setItems(items);
    return builder.build();
}

From source file: com.cedarsoft.serialization.jackson.test.UserSerializer.java

@Nonnull
@Override
public User deserialize(@Nonnull JsonParser deserializeFrom, @Nonnull Version formatVersion)
        throws IOException, VersionException, JsonProcessingException {
    verifyVersionWritable(formatVersion);

    List<? extends Email> mails = null;
    List<? extends Role> roles = null;
    String name = null;
    UserDetails userDetails = null;
    Email singleEmail = null;

    JacksonParserWrapper parser = new JacksonParserWrapper(deserializeFrom);
    while (parser.nextToken() == JsonToken.FIELD_NAME) {
        String currentName = parser.getCurrentName();

        if (currentName.equals(PROPERTY_NAME)) {
            parser.nextToken(JsonToken.VALUE_STRING);
            name = deserializeFrom.getText();
            continue;
        }
        if (currentName.equals(PROPERTY_EMAILS)) {
            parser.nextToken(JsonToken.START_ARRAY);
            mails = deserializeArray(Email.class, deserializeFrom, formatVersion);
            continue;
        }
        if (currentName.equals(PROPERTY_ROLES)) {
            parser.nextToken(JsonToken.START_ARRAY);
            roles = deserializeArray(Role.class, deserializeFrom, formatVersion);
            continue;
        }
        if (currentName.equals(PROPERTY_USER_DETAILS)) {
            parser.nextToken(JsonToken.START_OBJECT);
            userDetails = deserialize(UserDetails.class, formatVersion, deserializeFrom);
            continue;
        }
        if (currentName.equals(PROPERTY_SINGLE_EMAIL)) {
            parser.nextToken(JsonToken.VALUE_STRING);
            singleEmail = deserialize(Email.class, formatVersion, deserializeFrom);
            continue;
        }

        throw new IllegalStateException("Unexpected field reached <" + currentName + ">");
    }

    //Verify deserialization
    parser.verifyDeserialized(mails, PROPERTY_EMAILS);
    parser.verifyDeserialized(roles, PROPERTY_ROLES);
    parser.verifyDeserialized(userDetails, PROPERTY_USER_DETAILS);
    parser.verifyDeserialized(singleEmail, PROPERTY_SINGLE_EMAIL);
    parser.verifyDeserialized(name, PROPERTY_NAME);
    assert mails != null;
    assert roles != null;
    assert name != null;
    assert userDetails != null;
    assert singleEmail != null;

    parser.ensureObjectClosed();

    return new User(name, mails, roles, singleEmail, userDetails);
}