Example usage for com.fasterxml.jackson.core JsonToken END_ARRAY

List of usage examples for com.fasterxml.jackson.core JsonToken END_ARRAY

Introduction

This page lists example usages of com.fasterxml.jackson.core JsonToken END_ARRAY, collected from real-world source files.

Prototype

JsonToken END_ARRAY

Document

END_ARRAY is returned when the parser encounters ']', which signals the end of an Array value.
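
A minimal sketch of the typical pattern, assuming a small hypothetical input string ("[1, 2, 3]") of integer elements, is shown below; it calls nextToken() in a loop until END_ARRAY is returned.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class EndArrayExample {
    public static void main(String[] args) throws IOException {
        JsonFactory factory = new JsonFactory();
        List<Integer> values = new ArrayList<>();
        // createParser accepts raw JSON content; the array literal here is illustrative only.
        try (JsonParser parser = factory.createParser("[1, 2, 3]")) {
            if (parser.nextToken() != JsonToken.START_ARRAY) {
                throw new IllegalArgumentException("expected a JSON array");
            }
            // nextToken() returns END_ARRAY once the closing ']' is reached.
            while (parser.nextToken() != JsonToken.END_ARRAY) {
                values.add(parser.getIntValue());
            }
        }
        System.out.println(values); // prints [1, 2, 3]
    }
}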

Usage

From source file:com.kaaprotech.satu.jackson.SatuDeserializers.java

@Override
public JsonDeserializer<?> findCollectionDeserializer(final CollectionType type,
        final DeserializationConfig config, final BeanDescription beanDesc,
        final TypeDeserializer elementTypeDeserializer, final JsonDeserializer<?> elementDeserializer)
        throws JsonMappingException {

    if (ImmutableSet.class.isAssignableFrom(type.getRawClass())) {
        return new StdDeserializer<Object>(type) {
            private static final long serialVersionUID = 1L;

            @Override
            public Object deserialize(JsonParser jp, DeserializationContext context) throws IOException {

                if (jp.isExpectedStartArrayToken()) {
                    JsonToken t;

                    MutableSet<Object> s = Sets.mutable.of();

                    while ((t = jp.nextToken()) != JsonToken.END_ARRAY) {
                        Object value;
                        if (t == JsonToken.VALUE_NULL) {
                            value = null;
                        } else if (elementDeserializer == null) {
                            value = jp.readValueAs(type.getContentType().getRawClass());
                        } else if (elementTypeDeserializer == null) {
                            value = elementDeserializer.deserialize(jp, context);
                        } else {
                            value = elementDeserializer.deserializeWithType(jp, context,
                                    elementTypeDeserializer);
                        }
                        s.add(value);
                    }
                    return s.toImmutable();
                }
                throw context.mappingException(type.getRawClass());
            }
        };
    }

    return super.findCollectionDeserializer(type, config, beanDesc, elementTypeDeserializer,
            elementDeserializer);
}

From source file:org.apache.lucene.server.handlers.BulkAddDocumentsHandler.java

@Override
public String handleStreamed(Reader reader, Map<String, List<String>> params) throws Exception {

    JsonFactory jfactory = new JsonFactory();

    JsonParser parser = jfactory.createJsonParser(reader);

    if (parser.nextToken() != JsonToken.START_OBJECT) {
        throw new IllegalArgumentException("expected JSON object");
    }
    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (!parser.getText().equals("indexName")) {
        throw new IllegalArgumentException("expected indexName first");
    }
    if (parser.nextToken() != JsonToken.VALUE_STRING) {
        throw new IllegalArgumentException("indexName should be string");
    }

    IndexState indexState = globalState.get(parser.getText());
    indexState.verifyStarted(null);
    if (parser.nextToken() != JsonToken.FIELD_NAME) {
        throw new IllegalArgumentException("expected documents next");
    }
    if (!parser.getText().equals("documents")) {
        throw new IllegalArgumentException("expected documents after indexName");
    }

    ShardState shardState = indexState.getShard(0);

    if (parser.nextToken() != JsonToken.START_ARRAY) {
        throw new IllegalArgumentException("documents should be a list");
    }

    int count = 0;
    IndexingContext ctx = new IndexingContext();

    AddDocumentHandler addDocHandler = (AddDocumentHandler) globalState.getHandler("addDocument");

    // Parse as many doc blocks as there are:
    while (true) {

        List<Document> children = null;
        Document parent = null;

        JsonToken token = parser.nextToken();
        if (token == JsonToken.END_ARRAY) {
            break;
        }
        if (token != JsonToken.START_OBJECT) {
            throw new IllegalArgumentException("expected object");
        }

        // Parse parent + children for this one doc block:
        while (true) {
            token = parser.nextToken();
            if (token == JsonToken.END_OBJECT) {
                // Done with parent + child in this block
                break;
            }
            if (token != JsonToken.FIELD_NAME) {
                throw new IllegalArgumentException("missing field name: " + token);
            }
            String f = parser.getText();
            if (f.equals("children")) {
                token = parser.nextToken();
                if (token != JsonToken.START_ARRAY) {
                    throw new IllegalArgumentException("expected array for children");
                }

                children = new ArrayList<Document>();

                // Parse each child:
                while (true) {
                    Document doc = addDocHandler.parseDocument(indexState, parser);
                    if (doc == null) {
                        break;
                    }
                    children.add(doc);
                }
            } else if (f.equals("parent")) {
                parent = addDocHandler.parseDocument(indexState, parser);
            } else {
                throw new IllegalArgumentException("unrecognized field name \"" + f + "\"");
            }
        }

        if (parent == null) {
            throw new IllegalArgumentException("missing parent");
        }
        if (children == null) {
            throw new IllegalArgumentException("missing children");
        }

        // Parent is last:
        children.add(parent);

        globalState.submitIndexingTask(shardState.getAddDocumentsJob(count, null, children, ctx));
        count++;
    }

    // nocommit this is ... lameish:
    while (true) {
        if (ctx.addCount.get() == count) {
            break;
        }
        Thread.sleep(1);
    }

    Throwable t = ctx.getError();
    if (t != null) {
        IOUtils.reThrow(t);
    }

    JSONObject o = new JSONObject();
    o.put("indexGen", shardState.writer.getMaxCompletedSequenceNumber());
    o.put("indexedDocumentBlockCount", count);
    return o.toString();
}

From source file:com.basistech.rosette.dm.jackson.ListAttributeDeserializer.java

@SuppressWarnings("unchecked")
private ListAttribute deserialize(JsonParser jp, DeserializationContext ctxt, TokenBuffer tb)
        throws IOException {
    jp.nextToken();
    String keyName = jp.getText();

    if (tb != null) { // need to put back skipped properties?
        jp = JsonParserSequence.createFlattened(tb.asParser(jp), jp);
    }
    // Must point to the next value; tb had no current, jp pointed to VALUE_STRING:

    KnownAttribute attribute = KnownAttribute.getAttributeForKey(keyName);
    if (attribute == null) {
        attribute = KnownAttribute.UNKNOWN;
    }
    Class<? extends BaseAttribute> itemClass = attribute.attributeClass();

    ListAttribute.Builder<BaseAttribute> builder = new ListAttribute.Builder<>(attribute.attributeClass());
    List<BaseAttribute> items = Lists.newArrayList();

    JsonToken nextToken;
    while ((nextToken = jp.nextToken()) != JsonToken.END_OBJECT) {
        if (nextToken != JsonToken.FIELD_NAME) {
            throw ctxt.wrongTokenException(jp, JsonToken.END_OBJECT, "Expected field name.");
        } else {
            String name = jp.getCurrentName();
            if ("items".equals(name)) {
                // the actual list items.
                nextToken = jp.nextToken();
                if (nextToken == JsonToken.VALUE_EMBEDDED_OBJECT) {
                    Object o = jp.getEmbeddedObject();
                    if (o instanceof List) { // could it be an array, also?!?
                        // when using JsonTree, sometimes Jackson just sticks the entire Java object in here.
                        items.addAll((List) o);
                    } else {
                        throw ctxt.mappingException(
                                "List contains VALUE_EMBEDDED_OBJECT for items, but it wasn't a list.");
                    }
                } else if (nextToken != JsonToken.START_ARRAY) { // what about nothing?
                    throw ctxt.wrongTokenException(jp, JsonToken.START_ARRAY, "Expected array of items");
                } else {
                    // the START_ARRAY case, which is _normal_. Read the elements.
                    while (jp.nextToken() != JsonToken.END_ARRAY) {
                        items.add(jp.readValueAs(itemClass));
                    }
                }
            } else {
                nextToken = jp.nextToken();
                Object value;
                if (nextToken == JsonToken.VALUE_EMBEDDED_OBJECT) {
                    value = jp.getEmbeddedObject();
                } else {
                    value = jp.readValueAs(Object.class);
                }
                builder.extendedProperty(name, value);
            }
        }
    }
    builder.setItems(items);
    return builder.build();
}

From source file:com.basistech.rosette.dm.jackson.MorphoAnalysisListDeserializer.java

@Override
public List<MorphoAnalysis> deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {

    if (!cached) {
        throw new JsonMappingException(jp,
                "attempt to deserialize with un-contextualized MorphoAnalysisListDeserializer");
    }

    /*
     * This will be entered pointing to the array start.
     */
    if (jp.getCurrentToken() != JsonToken.START_ARRAY) {
        throw ctxt.wrongTokenException(jp, JsonToken.START_ARRAY, "Expected array of items");
    }

    JsonDeserializer<Object> currentDeserializer = castDeserializer(
            ctxt.getAttribute(MorphoAnalysisListDeserializer.class));
    if (currentDeserializer == null) {
        currentDeserializer = maDeserializer;
        ctxt.setAttribute(MorphoAnalysisListDeserializer.class, maDeserializer);
    }

    List<MorphoAnalysis> result = Lists.newArrayList();
    while (jp.nextToken() != JsonToken.END_ARRAY) {
        // if we just read it in as the wrong class, any leftovers will end up in extendedAttributes, and we can cope.

        MorphoAnalysis analysis = (MorphoAnalysis) currentDeserializer.deserialize(jp, ctxt);
        if (analysis.getExtendedProperties().size() != 0) {
            // so, we have leftovers. Note that this will not trim han and arabic down. Tough

            if (analysis.getExtendedProperties().containsKey("morphemes")) {
                KoreanMorphoAnalysis.Builder builder = new KoreanMorphoAnalysis.Builder();
                copyBasic(analysis, builder);

                List<String> morphemes = cast(analysis.getExtendedProperties().get("morphemes"));
                List<String> morphemeTags = cast(analysis.getExtendedProperties().get("morphemeTags"));
                for (int x = 0; x < morphemes.size(); x++) {
                    builder.addMorpheme(morphemes.get(x), morphemeTags.get(x));
                }

                for (Map.Entry<String, Object> me : analysis.getExtendedProperties().entrySet()) {
                    if (!"morphemes".equals(me.getKey()) && !"morphemeTags".equals(me.getKey())) {
                        builder.extendedProperty(me.getKey(), me.getValue());
                    }
                }

                analysis = builder.build();
                ctxt.setAttribute(MorphoAnalysisListDeserializer.class, korMaDeserializer);

            } else if (analysis.getExtendedProperties().containsKey("readings")) {
                // convert to Han.
                HanMorphoAnalysis.Builder builder = new HanMorphoAnalysis.Builder();
                copyBasic(analysis, builder);

                for (String reading : cast(analysis.getExtendedProperties().get("readings"))) {
                    builder.addReading(reading);
                }

                for (Map.Entry<String, Object> me : analysis.getExtendedProperties().entrySet()) {
                    if (!"readings".equals(me.getKey())) {
                        builder.extendedProperty(me.getKey(), me.getValue());
                    }
                }

                analysis = builder.build();
                ctxt.setAttribute(MorphoAnalysisListDeserializer.class, hanMaDeserializer);
            } else if (anyArabicFields(analysis.getExtendedProperties().keySet())) {
                ArabicMorphoAnalysis.Builder builder = new ArabicMorphoAnalysis.Builder();
                copyBasic(analysis, builder);

                Integer prefixLength = (Integer) analysis.getExtendedProperties().get("prefixLength");
                Integer stemLength = (Integer) analysis.getExtendedProperties().get("stemLength");
                if (prefixLength != null && stemLength != null) {
                    builder.lengths(prefixLength, stemLength);
                }
                String root = (String) analysis.getExtendedProperties().get("root");
                if (root != null) {
                    builder.root(root);
                }
                Boolean definiteArticle = (Boolean) analysis.getExtendedProperties().get("definiteArticle");
                if (definiteArticle != null) {
                    builder.definiteArticle(definiteArticle);
                }
                Boolean strippablePrefix = (Boolean) analysis.getExtendedProperties().get("strippablePrefix");
                if (strippablePrefix != null) {
                    builder.strippablePrefix(strippablePrefix);
                }

                List<String> prefixes = cast(analysis.getExtendedProperties().get("prefixes"));
                if (prefixes != null) {
                    List<String> prefixTags = cast(analysis.getExtendedProperties().get("prefixTags"));
                    for (int x = 0; x < prefixes.size(); x++) {
                        builder.addPrefix(prefixes.get(x), prefixTags.get(x));
                    }
                }

                List<String> stems = cast(analysis.getExtendedProperties().get("stems"));
                if (stems != null) {
                    List<String> stemTags = cast(analysis.getExtendedProperties().get("stemTags"));
                    for (int x = 0; x < stems.size(); x++) {
                        builder.addStem(stems.get(x), stemTags.get(x));
                    }
                }

                List<String> suffixes = cast(analysis.getExtendedProperties().get("suffixes"));
                if (suffixes != null) {
                    List<String> suffixTags = cast(analysis.getExtendedProperties().get("suffixTags"));
                    for (int x = 0; x < suffixes.size(); x++) {
                        builder.addSuffix(suffixes.get(x), suffixTags.get(x));
                    }
                }

                for (Map.Entry<String, Object> me : analysis.getExtendedProperties().entrySet()) {
                    if (!ARABIC_FIELDS.contains(me.getKey())) {
                        builder.extendedProperty(me.getKey(), me.getValue());
                    }
                }

                analysis = builder.build();
                ctxt.setAttribute(MorphoAnalysisListDeserializer.class, arMaDeserializer);
            }
        }
        result.add(analysis);
    }
    return ImmutableList.copyOf(result);
}

From source file:com.blackducksoftware.bdio.io.BdioReader.java

/**
 * Just keep calling this until it returns {@code null}.
     */
@Nullable
public synchronized Node read() throws IOException {
    Node result = null;
    if (jp.nextToken() != JsonToken.END_ARRAY) {
        Map<String, Object> nodeMap = jp.readValueAs(MAP_TYPE);
        result = context.expandToNode(nodeMap);
        if (result.types().contains(BlackDuckType.BILL_OF_MATERIALS)) {
            // Replace the current context based on the specification version
            // NOTE: if the specVersion is absent, it means it is the initial version
            Object value = result.data().get(BlackDuckTerm.SPEC_VERSION);
            String specVersion = value != null ? value.toString() : null;
            context = context.newContextForReading(specVersion);
        }
    }
    return result;
}
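
The Javadoc above suggests a caller loop such as the following sketch; the reader variable and the process(...) call are assumed placeholders for illustration, not part of the original source.

// Hypothetical caller of BdioReader.read(): keep reading until null is returned.
Node node;
while ((node = reader.read()) != null) {
    process(node); // application-specific handling of each expanded node
}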

From source file:org.debezium.core.doc.JacksonReader.java

private Array parseArray(JsonParser parser) throws IOException {
    // Iterate over the fields in the top-level document ...
    BasicArray array = new BasicArray();
    JsonToken token = parser.nextToken();
    while (token != JsonToken.END_ARRAY) {
        switch (token) {
        case START_OBJECT:
            array.add(parseDocument(parser, true));
            break;
        case START_ARRAY:
            array.add(parseArray(parser));
            break;
        case VALUE_STRING:
            array.add(parser.getValueAsString());
            break;
        case VALUE_TRUE:
            array.add(true);
            break;
        case VALUE_FALSE:
            array.add(false);
            break;
        case VALUE_NULL:
            array.addNull();
            break;
        case VALUE_NUMBER_FLOAT:
        case VALUE_NUMBER_INT:
            switch (parser.getNumberType()) {
            case FLOAT:
                array.add(parser.getFloatValue());
                break;
            case DOUBLE:
                array.add(parser.getDoubleValue());
                break;
            case BIG_DECIMAL:
                array.add(parser.getDecimalValue());
                break;
            case INT:
                array.add(parser.getIntValue());
                break;
            case LONG:
                array.add(parser.getLongValue());
                break;
            case BIG_INTEGER:
                array.add(parser.getBigIntegerValue());
                break;
            }
            break;
        case VALUE_EMBEDDED_OBJECT:
            // disregard this, since it's an extension ...
            break;
        case NOT_AVAILABLE:
            throw new JsonParseException("Non-blocking parsers are not supported", parser.getCurrentLocation());
        case FIELD_NAME:
            throw new JsonParseException("Not expecting a FIELD_NAME token", parser.getCurrentLocation());
        case END_ARRAY:
            throw new JsonParseException("Not expecting an END_ARRAY token", parser.getCurrentLocation());
        case END_OBJECT:
            throw new JsonParseException("Not expecting an END_OBJECT token", parser.getCurrentLocation());
        }
        token = parser.nextToken();
    }
    return array;
}

From source file:com.addthis.codec.config.ConfigTraversingParser.java

@Override
public JsonToken nextToken() throws IOException, JsonParseException {
    if (_nextToken != null) {
        _currToken = _nextToken;
        _nextToken = null;
        return _currToken;
    }
    // are we to descend to a container child?
    if (_startContainer) {
        _startContainer = false;
        // minor optimization: empty containers can be skipped
        if (!_nodeCursor.currentHasChildren()) {
            _currToken = (_currToken == JsonToken.START_OBJECT) ? JsonToken.END_OBJECT : JsonToken.END_ARRAY;
            return _currToken;
        }
        _nodeCursor = _nodeCursor.iterateChildren();
        _currToken = _nodeCursor.nextToken();
        if (_currToken == JsonToken.START_OBJECT || _currToken == JsonToken.START_ARRAY) {
            _startContainer = true;
        }
        currentConfig = currentNode();
        return _currToken;
    }
    // No more content?
    if (_nodeCursor == null) {
        _closed = true; // if not already set
        currentConfig = null;
        return null;
    }
    // Otherwise, next entry from current cursor
    _currToken = _nodeCursor.nextToken();
    if (_currToken != null) {
        currentConfig = currentNode();
        if (_currToken == JsonToken.START_OBJECT || _currToken == JsonToken.START_ARRAY) {
            _startContainer = true;
        }
        return _currToken;
    }
    // null means no more children; need to return end marker
    _currToken = _nodeCursor.endToken();
    _nodeCursor = _nodeCursor.getParent();
    currentConfig = currentNode();
    return _currToken;
}

From source file:io.apiman.manager.api.exportimport.json.JsonImportReader.java

public void readOrgs() throws Exception {
    current = nextToken();
    if (current == JsonToken.END_ARRAY) {
        return;
    }
    while (nextToken() != JsonToken.END_ARRAY) {
        // Traverse each org definition
        while (nextToken() != JsonToken.END_OBJECT) {
            if (jp.getCurrentName().equals(OrganizationBean.class.getSimpleName())) {
                current = nextToken();
                OrganizationBean orgBean = jp.readValueAs(OrganizationBean.class);
                dispatcher.organization(orgBean);
            } else {
                OrgElementsEnum fieldName = OrgElementsEnum.valueOf(jp.getCurrentName());
                current = nextToken();

                switch (fieldName) {
                case Memberships:
                    processEntities(RoleMembershipBean.class, dispatcher::membership);
                    break;
                case Plans:
                    readPlans();
                    break;
                case Apis:
                    readApis();
                    break;
                case Clients:
                    readClients();
                    break;
                case Audits:
                    processEntities(AuditEntryBean.class, dispatcher::audit);
                    break;
                default:
                    throw new RuntimeException("Unhandled entity " + fieldName + " with token " + current);
                }
            }
        }
    }
}

From source file:net.troja.eve.crest.CrestDataProcessor.java

private <T> void processItems(final CrestApiProcessor<T> processor, final CrestContainer<T> container,
        final JsonParser jsonParser) throws IOException {
    if (jsonParser.isExpectedStartArrayToken()) {
        while (jsonParser.nextToken() != JsonToken.END_ARRAY) {
            container.addEntry(processor.parseEntry((JsonNode) mapper.readTree(jsonParser)));
        }
    }
}

From source file:com.cedarsoft.couchdb.io.ViewResponseSerializer.java

public <K, V, D> ViewResponse<K, V, D> deserialize(@Nonnull JacksonSerializer<? super K> keySerializer,
        @Nonnull JacksonSerializer<? super V> valueSerializer,
        @Nullable JacksonSerializer<? extends D> documentSerializer, @Nonnull InputStream in)
        throws IOException, InvalidTypeException {
    JsonFactory jsonFactory = JacksonSupport.getJsonFactory();
    JsonParser parser = jsonFactory.createJsonParser(in);

    JacksonParserWrapper parserWrapper = new JacksonParserWrapper(parser);
    parserWrapper.nextToken(JsonToken.START_OBJECT);

    parserWrapper.nextToken(JsonToken.FIELD_NAME);
    //If reduced, no total rows and no offset are availlable!

    String fieldName = parser.getText();

    int totalRows = -1;
    int offset = -1;
    while (!fieldName.equals(PROPERTY_ROWS)) {
        if (fieldName.equals(PROPERTY_TOTAL_ROWS)) {
            parserWrapper.nextToken(JsonToken.VALUE_NUMBER_INT);
            totalRows = parser.getIntValue();
        }

        if (fieldName.equals(PROPERTY_OFFSET)) {
            parserWrapper.nextToken(JsonToken.VALUE_NUMBER_INT);
            offset = parser.getIntValue();
        }

        parserWrapper.nextToken(JsonToken.FIELD_NAME);
        fieldName = parser.getText();
    }

    //Now the rows...
    parserWrapper.nextToken(JsonToken.START_ARRAY);

    List<Row<K, V, D>> deserialized = new ArrayList<>();
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        Row<K, V, D> deserializedRow = rowSerializer.deserialize(keySerializer, valueSerializer,
                documentSerializer, parser);
        deserialized.add(deserializedRow);
    }

    return new ViewResponse<>(totalRows, offset, deserialized);
}