Example usage for com.fasterxml.jackson.core JsonToken START_OBJECT

List of usage examples for com.fasterxml.jackson.core JsonToken START_OBJECT

Introduction

On this page you can find example usage for com.fasterxml.jackson.core JsonToken START_OBJECT.

Prototype

JsonToken START_OBJECT

Document

START_OBJECT is returned when the parser encounters '{', which signals the start of an Object value.
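
A minimal stand-alone sketch of the typical pattern (the JSON input and the printed output are illustrative assumptions, not taken from the projects listed under Usage):

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.IOException;

public class StartObjectExample {
    public static void main(String[] args) throws IOException {
        // Illustrative input; any JSON document whose root is an Object works here.
        String json = "{\"name\":\"value\"}";
        JsonFactory factory = new JsonFactory();
        try (JsonParser parser = factory.createParser(json)) {
            // The first token of an Object document is START_OBJECT ('{').
            if (parser.nextToken() != JsonToken.START_OBJECT) {
                throw new IOException("Expected the document to start with an Object");
            }
            // Read field/value pairs until the matching END_OBJECT ('}').
            while (parser.nextToken() != JsonToken.END_OBJECT) {
                String fieldName = parser.getCurrentName();
                parser.nextToken(); // advance to the field value
                System.out.println(fieldName + " = " + parser.getText());
            }
        }
    }
}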

Usage

From source file:org.jmxtrans.embedded.output.CopperEggWriter.java

/**
 * read_config()
 * The copperegg_config.json file contains a specification for the metric groups and dashboards to be created or updated.
 * Mandatory.
 */
public void read_config(InputStream in) throws Exception {

    JsonFactory f = new MappingJsonFactory();
    JsonParser jp = f.createJsonParser(in);

    JsonToken current;

    current = jp.nextToken();
    if (current != JsonToken.START_OBJECT) {
        logger.warn("read_config: Error:  START_OBJECT not found : quiting.");
        return;
    }
    current = jp.nextToken();
    String fieldName = jp.getCurrentName();
    current = jp.nextToken();
    if (fieldName.equals("config")) {
        if (current != JsonToken.START_OBJECT) {
            logger.warn("read_config: Error:  START_OBJECT not found after config : quiting.");
            return;
        }
        current = jp.nextToken();
        String fieldName2 = jp.getCurrentName();
        if (fieldName2.equals("metric_groups")) {
            current = jp.nextToken();
            if (current != JsonToken.START_ARRAY) {
                logger.warn("read_config: Error:  START_ARRAY not found after metric_groups : quiting.");
                return;
            }

            current = jp.nextToken();
            while (current != JsonToken.END_ARRAY) {
                if (current != JsonToken.START_OBJECT) {
                    logger.warn(
                            "read_config: Error:  START_OBJECT not found after metric_groups START_ARRAY : quiting.");
                    return;
                }
                current = jp.nextToken();
                JsonNode node1 = jp.readValueAsTree();
                String node1string = write_tostring(node1);
                metricgroupMap.put(node1.get("name").asText(), node1string);
                current = jp.nextToken();
            }

            current = jp.nextToken();
            String fieldName3 = jp.getCurrentName();

            if (fieldName3.equals("dashboards")) {
                current = jp.nextToken();
                if (current != JsonToken.START_ARRAY) {
                    logger.warn("read_config: Error:  START_ARRAY not found after dashboards : quiting.");
                    return;
                }
                current = jp.nextToken();
                while (current != JsonToken.END_ARRAY) {
                    if (current != JsonToken.START_OBJECT) {
                        logger.warn(
                                "read_config: Error:  START_OBJECT not found after dashboards START_ARRAY : quiting.");
                        return;
                    }
                    current = jp.nextToken();
                    JsonNode node = jp.readValueAsTree();
                    String nodestring = write_tostring(node);
                    dashMap.put(node.get("name").asText(), nodestring);
                    current = jp.nextToken();

                }
                if (jp.nextToken() != JsonToken.END_OBJECT) {
                    logger.warn("read_config: Error:  END_OBJECT expected, not found (1): quiting.");
                    return;
                }
                if (jp.nextToken() != JsonToken.END_OBJECT) {
                    logger.warn("read_config: Error:  END_OBJECT expected, not found (2): quiting.");
                    return;
                }
            } else {
                logger.warn("read_config: Error:  Expected dashboards : quiting.");
                return;
            }
        } else {
            logger.warn("read_config: Error:  Expected metric_groups : quiting.");
            return;
        }
    }
}

From source file:com.adobe.communities.ugc.migration.importer.UGCImportHelper.java

protected static void importTranslation(final JsonParser jsonParser, final Resource post) throws IOException {
    JsonToken token = jsonParser.getCurrentToken();
    final Map<String, Object> properties = new HashMap<String, Object>();
    if (token != JsonToken.START_OBJECT) {
        throw new IOException("expected a start object token, got " + token.asString());
    }
    properties.put("jcr:primaryType", "social:asiResource");
    Resource translationFolder = null;
    token = jsonParser.nextToken();
    while (token == JsonToken.FIELD_NAME) {
        token = jsonParser.nextToken(); //advance to the field value
        if (jsonParser.getCurrentName().equals((ContentTypeDefinitions.LABEL_TRANSLATIONS))) {
            if (null == translationFolder) {
                // begin by creating the translation folder resource
                translationFolder = post.getResourceResolver().create(post, "translation", properties);
            }
            //now check to see if any translations exist
            if (token == JsonToken.START_OBJECT) {
                token = jsonParser.nextToken();
                if (token == JsonToken.FIELD_NAME) {
                    while (token == JsonToken.FIELD_NAME) { // each new field represents another translation
                        final Map<String, Object> translationProperties = new HashMap<String, Object>();
                        translationProperties.put("jcr:primaryType", "social:asiResource");
                        String languageLabel = jsonParser.getCurrentName();
                        token = jsonParser.nextToken();
                        if (token != JsonToken.START_OBJECT) {
                            throw new IOException("expected a start object token for translation item, got "
                                    + token.asString());
                        }
                        token = jsonParser.nextToken();
                        while (token != JsonToken.END_OBJECT) {
                            jsonParser.nextToken(); //get next field value
                            if (jsonParser.getCurrentName()
                                    .equals(ContentTypeDefinitions.LABEL_TIMESTAMP_FIELDS)) {
                                jsonParser.nextToken(); // advance to first field name
                                while (!jsonParser.getCurrentToken().equals(JsonToken.END_ARRAY)) {
                                    final String timestampLabel = jsonParser.getValueAsString();
                                    if (translationProperties.containsKey(timestampLabel)) {
                                        final Calendar calendar = new GregorianCalendar();
                                        calendar.setTimeInMillis(Long
                                                .parseLong((String) translationProperties.get(timestampLabel)));
                                        translationProperties.put(timestampLabel, calendar.getTime());
                                    }
                                    jsonParser.nextToken();
                                }
                            } else if (jsonParser.getCurrentName()
                                    .equals(ContentTypeDefinitions.LABEL_SUBNODES)) {
                                jsonParser.skipChildren();
                            } else {
                                translationProperties.put(jsonParser.getCurrentName(),
                                        URLDecoder.decode(jsonParser.getValueAsString(), "UTF-8"));
                            }
                            token = jsonParser.nextToken(); //get next field label
                        }
                        // add the language-specific translation under the translation folder resource
                        Resource translation = post.getResourceResolver().create(post.getChild("translation"),
                                languageLabel, translationProperties);
                        if (null == translation) {
                            throw new IOException("translation not actually imported");
                        }
                    }
                    jsonParser.nextToken(); //skip END_OBJECT token for translation
                } else if (token == JsonToken.END_OBJECT) {
                    // no actual translation to import, so we're done here
                    jsonParser.nextToken();
                }
            } else {
                throw new IOException(
                        "expected translations to be contained in an object, saw instead: " + token.asString());
            }
        } else if (jsonParser.getCurrentName().equals("mtlanguage")
                || jsonParser.getCurrentName().equals("jcr:createdBy")) {
            properties.put(jsonParser.getCurrentName(), jsonParser.getValueAsString());
        } else if (jsonParser.getCurrentName().equals("jcr:created")) {
            final Calendar calendar = new GregorianCalendar();
            calendar.setTimeInMillis(jsonParser.getLongValue());
            properties.put("jcr:created", calendar.getTime());
        }
        token = jsonParser.nextToken();
    }
    if (null == translationFolder && properties.containsKey("mtlanguage")) {
        // it's possible that no translations existed, so we need to make sure the translation resource (which
        // includes the original post's detected language) is created anyway
        post.getResourceResolver().create(post, "translation", properties);
    }
}

From source file:org.jmxtrans.embedded.output.CopperEggWriter.java

public String groupFind(String findName, String findIndex, Integer ExpectInt) throws Exception {
    JsonFactory f = new MappingJsonFactory();
    JsonParser jp = f.createJsonParser(findIndex);

    int count = 0;
    int foundit = 0;
    String Result = null;

    JsonToken current = jp.nextToken();
    if (current != JsonToken.START_ARRAY) {
        logger.warn("groupFind: Error: START_ARRAY expected, not found : quiting.");
        return (Result);
    }
    current = jp.nextToken();
    while (current != JsonToken.END_ARRAY) {
        if (current != JsonToken.START_OBJECT) {
            logger.warn("groupFind: Error: START_OBJECT expected, not found : quiting.");
            return (Result);
        }
        current = jp.nextToken();
        JsonNode node = jp.readValueAsTree();
        String tmpStr = node.get("name").asText().toString();
        if (findName.equals(node.get("name").asText().toString())) {
            if (ExpectInt != 0) {
                foundit = node.get("id").asInt();
                Result = String.valueOf(foundit);
            } else {
                Result = node.get("id").asText().toString();
            }
            break;
        }
        current = jp.nextToken();
        count = count + 1;
    }
    return (Result);
}

From source file:org.apache.hadoop.hbase.rest.TestTableScan.java

@Test
public void testStreamingJSON() throws Exception {
    //Test with start row and end row.
    StringBuilder builder = new StringBuilder();
    builder.append("/*");
    builder.append("?");
    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
    builder.append("&");
    builder.append(Constants.SCAN_START_ROW + "=aaa");
    builder.append("&");
    builder.append(Constants.SCAN_END_ROW + "=aay");
    Response response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_JSON);
    assertEquals(200, response.getCode());

    int count = 0;
    ObjectMapper mapper = new JacksonJaxbJsonProvider().locateMapper(CellSetModel.class,
            MediaType.APPLICATION_JSON_TYPE);
    JsonFactory jfactory = new JsonFactory(mapper);
    JsonParser jParser = jfactory.createJsonParser(response.getStream());
    boolean found = false;
    while (jParser.nextToken() != JsonToken.END_OBJECT) {
        if (jParser.getCurrentToken() == JsonToken.START_OBJECT && found) {
            RowModel row = jParser.readValueAs(RowModel.class);
            assertNotNull(row.getKey());
            for (int i = 0; i < row.getCells().size(); i++) {
                if (count == 0) {
                    assertEquals("aaa", Bytes.toString(row.getKey()));
                }
                if (count == 23) {
                    assertEquals("aax", Bytes.toString(row.getKey()));
                }
                count++;
            }
            jParser.skipChildren();
        } else {
            found = jParser.getCurrentToken() == JsonToken.START_ARRAY;
        }
    }
    assertEquals(24, count);
}

From source file:org.eclipse.rdf4j.rio.rdfjson.RDFJSONParser.java

private void rdfJsonToHandlerInternal(final RDFHandler handler, final ValueFactory vf, final JsonParser jp)
        throws IOException, JsonParseException, RDFParseException, RDFHandlerException {
    if (jp.nextToken() != JsonToken.START_OBJECT) {
        reportFatalError("Expected RDF/JSON document to start with an Object", jp.getCurrentLocation());
    }

    while (jp.nextToken() != JsonToken.END_OBJECT) {
        final String subjStr = jp.getCurrentName();
        Resource subject = null;

        subject = subjStr.startsWith("_:") ? createNode(subjStr.substring(2)) : vf.createIRI(subjStr);
        if (jp.nextToken() != JsonToken.START_OBJECT) {
            reportFatalError("Expected subject value to start with an Object", jp.getCurrentLocation());
        }

        boolean foundPredicate = false;
        while (jp.nextToken() != JsonToken.END_OBJECT) {
            final String predStr = jp.getCurrentName();

            final IRI predicate = vf.createIRI(predStr);
            foundPredicate = true;

            if (jp.nextToken() != JsonToken.START_ARRAY) {
                reportFatalError("Expected predicate value to start with an array", jp.getCurrentLocation());
            }

            boolean foundObject = false;

            while (jp.nextToken() != JsonToken.END_ARRAY) {
                if (jp.getCurrentToken() != JsonToken.START_OBJECT) {
                    reportFatalError("Expected object value to start with an Object: subject=<" + subjStr
                            + "> predicate=<" + predStr + ">", jp.getCurrentLocation());
                }

                String nextValue = null;
                String nextType = null;
                String nextDatatype = null;
                String nextLanguage = null;
                final Set<String> nextContexts = new HashSet<String>(2);

                while (jp.nextToken() != JsonToken.END_OBJECT) {
                    final String fieldName = jp.getCurrentName();
                    if (RDFJSONUtility.VALUE.equals(fieldName)) {
                        if (nextValue != null) {
                            reportError(
                                    "Multiple values found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_VALUES);
                        }

                        jp.nextToken();

                        nextValue = jp.getText();
                    } else if (RDFJSONUtility.TYPE.equals(fieldName)) {
                        if (nextType != null) {
                            reportError(
                                    "Multiple types found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_TYPES);
                        }

                        jp.nextToken();

                        nextType = jp.getText();
                    } else if (RDFJSONUtility.LANG.equals(fieldName)) {
                        if (nextLanguage != null) {
                            reportError(
                                    "Multiple languages found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_LANGUAGES);
                        }

                        jp.nextToken();

                        nextLanguage = jp.getText();
                    } else if (RDFJSONUtility.DATATYPE.equals(fieldName)) {
                        if (nextDatatype != null) {
                            reportError(
                                    "Multiple datatypes found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_DATATYPES);
                        }

                        jp.nextToken();

                        nextDatatype = jp.getText();
                    } else if (RDFJSONUtility.GRAPHS.equals(fieldName)) {
                        if (jp.nextToken() != JsonToken.START_ARRAY) {
                            reportError("Expected graphs to start with an array", jp.getCurrentLocation(),
                                    RDFJSONParserSettings.SUPPORT_GRAPHS_EXTENSION);
                        }

                        while (jp.nextToken() != JsonToken.END_ARRAY) {
                            final String nextGraph = jp.getText();
                            nextContexts.add(nextGraph);
                        }
                    } else {
                        reportError(
                                "Unrecognised JSON field name for object: subject=" + subjStr + " predicate="
                                        + predStr + " fieldname=" + fieldName,
                                jp.getCurrentLocation(), RDFJSONParserSettings.FAIL_ON_UNKNOWN_PROPERTY);
                    }
                }

                Value object = null;

                if (nextType == null) {
                    reportFatalError("No type for object: subject=" + subjStr + " predicate=" + predStr,
                            jp.getCurrentLocation());
                }

                if (nextValue == null) {
                    reportFatalError("No value for object: subject=" + subjStr + " predicate=" + predStr,
                            jp.getCurrentLocation());
                }

                if (RDFJSONUtility.LITERAL.equals(nextType)) {
                    if (nextLanguage != null) {
                        object = this.createLiteral(nextValue, nextLanguage, null, jp.getCurrentLocation());
                    } else if (nextDatatype != null) {
                        object = this.createLiteral(nextValue, null, this.createURI(nextDatatype),
                                jp.getCurrentLocation());
                    } else {
                        object = this.createLiteral(nextValue, null, null, jp.getCurrentLocation());
                    }
                } else if (RDFJSONUtility.BNODE.equals(nextType)) {
                    if (nextLanguage != null) {
                        reportFatalError("Language was attached to a blank node object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    if (nextDatatype != null) {
                        reportFatalError("Datatype was attached to a blank node object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    object = createNode(nextValue.substring(2));
                } else if (RDFJSONUtility.URI.equals(nextType)) {
                    if (nextLanguage != null) {
                        reportFatalError("Language was attached to a uri object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    if (nextDatatype != null) {
                        reportFatalError("Datatype was attached to a uri object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    object = vf.createIRI(nextValue);
                }
                foundObject = true;

                if (!nextContexts.isEmpty()) {
                    for (final String nextContext : nextContexts) {
                        final Resource context;

                        if (nextContext.equals(RDFJSONUtility.NULL)) {
                            context = null;
                        } else if (nextContext.startsWith("_:")) {
                            context = createNode(nextContext.substring(2));
                        } else {
                            context = vf.createIRI(nextContext);
                        }
                        Statement st = vf.createStatement(subject, predicate, object, context);
                        if (handler != null) {
                            handler.handleStatement(st);
                        }
                    }
                } else {
                    Statement st = vf.createStatement(subject, predicate, object);
                    if (handler != null) {
                        handler.handleStatement(st);
                    }
                }

            }
            if (!foundObject) {
                reportFatalError("No object for predicate: subject=" + subjStr + " predicate=" + predStr,
                        jp.getCurrentLocation());
            }
        }

        if (!foundPredicate) {
            reportFatalError("No predicate for object: subject=" + subjStr, jp.getCurrentLocation());
        }
    }
}

From source file:org.flockdata.integration.FileProcessor.java

private int processJsonEntities(String fileName, ExtractProfile extractProfile) throws FlockException {
    int rows = 0;

    File file = new File(fileName);
    InputStream stream = null;
    if (!file.exists()) {
        stream = ClassLoader.class.getResourceAsStream(fileName);
        if (stream == null) {
            logger.error("{} does not exist", fileName);
            return 0;
        }
    }
    StopWatch watch = new StopWatch();

    JsonFactory jfactory = new JsonFactory();
    JsonParser jParser;
    List<EntityToEntityLinkInput> referenceInputBeans = new ArrayList<>();

    try {

        //String docType = mappable.getDataType();
        watch.start();
        ObjectMapper om = FdJsonObjectMapper.getObjectMapper();
        try {

            if (stream != null)
                jParser = jfactory.createParser(stream);
            else
                jParser = jfactory.createParser(file);

            JsonToken currentToken = jParser.nextToken();
            long then = new DateTime().getMillis();
            JsonNode node;
            if (currentToken == JsonToken.START_ARRAY || currentToken == JsonToken.START_OBJECT) {
                while (currentToken != null && currentToken != JsonToken.END_OBJECT) {

                    while (currentToken != null && jParser.nextToken() != JsonToken.END_ARRAY) {
                        node = om.readTree(jParser);
                        if (node != null) {
                            processJsonNode(node, extractProfile.getContentModel(), referenceInputBeans);
                            if (stopProcessing(rows++, then)) {
                                break;
                            }

                        }
                        currentToken = jParser.nextToken();

                    }
                }
            } else if (currentToken == JsonToken.START_OBJECT) {
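                // Note: this branch cannot be reached, since START_OBJECT already satisfies the condition above.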

                //om.readTree(jParser);
                node = om.readTree(jParser);
                processJsonNode(node, extractProfile.getContentModel(), referenceInputBeans);
            }

        } catch (IOException e1) {
            logger.error("Unexpected", e1);
        }

    } finally {
        getPayloadWriter().flush();
    }

    return endProcess(watch, rows, 0);
}

From source file:org.openhab.binding.weather.internal.parser.JsonWeatherParser.java

/**
 * Iterates through the JSON structure and collects weather data.
 */
private void handleToken(JsonParser jp, String property, Weather weather) throws Exception {
    JsonToken token = jp.getCurrentToken();
    String prop = PropertyResolver.add(property, jp.getCurrentName());

    if (token.isStructStart()) {
        boolean isObject = token == JsonToken.START_OBJECT || token == JsonToken.END_OBJECT;

        Weather forecast = !isObject ? weather : startIfForecast(weather, prop);
        while (!jp.nextValue().isStructEnd()) {
            handleToken(jp, prop, forecast);
        }
        if (isObject) {
            endIfForecast(weather, forecast, prop);
        }
    } else {
        try {
            setValue(weather, prop, jp.getValueAsString());
        } catch (Exception ex) {
            logger.error("Error setting property '{}' with value '{}' ({})", prop, jp.getValueAsString(),
                    ex.getMessage());
        }
    }
}

From source file:org.openrdf.rio.rdfjson.RDFJSONParser.java

private void rdfJsonToHandlerInternal(final RDFHandler handler, final ValueFactory vf, final JsonParser jp)
        throws IOException, JsonParseException, RDFParseException, RDFHandlerException {
    if (jp.nextToken() != JsonToken.START_OBJECT) {
        reportFatalError("Expected RDF/JSON document to start with an Object", jp.getCurrentLocation());
    }

    while (jp.nextToken() != JsonToken.END_OBJECT) {
        final String subjStr = jp.getCurrentName();
        Resource subject = null;

        subject = subjStr.startsWith("_:") ? vf.createBNode(subjStr.substring(2)) : vf.createIRI(subjStr);
        if (jp.nextToken() != JsonToken.START_OBJECT) {
            reportFatalError("Expected subject value to start with an Object", jp.getCurrentLocation());
        }

        boolean foundPredicate = false;
        while (jp.nextToken() != JsonToken.END_OBJECT) {
            final String predStr = jp.getCurrentName();

            final IRI predicate = vf.createIRI(predStr);
            foundPredicate = true;

            if (jp.nextToken() != JsonToken.START_ARRAY) {
                reportFatalError("Expected predicate value to start with an array", jp.getCurrentLocation());
            }

            boolean foundObject = false;

            while (jp.nextToken() != JsonToken.END_ARRAY) {
                if (jp.getCurrentToken() != JsonToken.START_OBJECT) {
                    reportFatalError("Expected object value to start with an Object: subject=<" + subjStr
                            + "> predicate=<" + predStr + ">", jp.getCurrentLocation());
                }

                String nextValue = null;
                String nextType = null;
                String nextDatatype = null;
                String nextLanguage = null;
                final Set<String> nextContexts = new HashSet<String>(2);

                while (jp.nextToken() != JsonToken.END_OBJECT) {
                    final String fieldName = jp.getCurrentName();
                    if (RDFJSONUtility.VALUE.equals(fieldName)) {
                        if (nextValue != null) {
                            reportError(
                                    "Multiple values found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_VALUES);
                        }

                        jp.nextToken();

                        nextValue = jp.getText();
                    } else if (RDFJSONUtility.TYPE.equals(fieldName)) {
                        if (nextType != null) {
                            reportError(
                                    "Multiple types found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_TYPES);
                        }

                        jp.nextToken();

                        nextType = jp.getText();
                    } else if (RDFJSONUtility.LANG.equals(fieldName)) {
                        if (nextLanguage != null) {
                            reportError(
                                    "Multiple languages found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_LANGUAGES);
                        }

                        jp.nextToken();

                        nextLanguage = jp.getText();
                    } else if (RDFJSONUtility.DATATYPE.equals(fieldName)) {
                        if (nextDatatype != null) {
                            reportError(
                                    "Multiple datatypes found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_DATATYPES);
                        }

                        jp.nextToken();

                        nextDatatype = jp.getText();
                    } else if (RDFJSONUtility.GRAPHS.equals(fieldName)) {
                        if (jp.nextToken() != JsonToken.START_ARRAY) {
                            reportError("Expected graphs to start with an array", jp.getCurrentLocation(),
                                    RDFJSONParserSettings.SUPPORT_GRAPHS_EXTENSION);
                        }

                        while (jp.nextToken() != JsonToken.END_ARRAY) {
                            final String nextGraph = jp.getText();
                            nextContexts.add(nextGraph);
                        }
                    } else {
                        reportError(
                                "Unrecognised JSON field name for object: subject=" + subjStr + " predicate="
                                        + predStr + " fieldname=" + fieldName,
                                jp.getCurrentLocation(), RDFJSONParserSettings.FAIL_ON_UNKNOWN_PROPERTY);
                    }
                }

                Value object = null;

                if (nextType == null) {
                    reportFatalError("No type for object: subject=" + subjStr + " predicate=" + predStr,
                            jp.getCurrentLocation());
                }

                if (nextValue == null) {
                    reportFatalError("No value for object: subject=" + subjStr + " predicate=" + predStr,
                            jp.getCurrentLocation());
                }

                if (RDFJSONUtility.LITERAL.equals(nextType)) {
                    if (nextLanguage != null) {
                        object = this.createLiteral(nextValue, nextLanguage, null, jp.getCurrentLocation());
                    } else if (nextDatatype != null) {
                        object = this.createLiteral(nextValue, null, this.createURI(nextDatatype),
                                jp.getCurrentLocation());
                    } else {
                        object = this.createLiteral(nextValue, null, null, jp.getCurrentLocation());
                    }
                } else if (RDFJSONUtility.BNODE.equals(nextType)) {
                    if (nextLanguage != null) {
                        reportFatalError("Language was attached to a blank node object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    if (nextDatatype != null) {
                        reportFatalError("Datatype was attached to a blank node object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    object = vf.createBNode(nextValue.substring(2));
                } else if (RDFJSONUtility.URI.equals(nextType)) {
                    if (nextLanguage != null) {
                        reportFatalError("Language was attached to a uri object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    if (nextDatatype != null) {
                        reportFatalError("Datatype was attached to a uri object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    object = vf.createIRI(nextValue);
                }
                foundObject = true;

                if (!nextContexts.isEmpty()) {
                    for (final String nextContext : nextContexts) {
                        final Resource context = nextContext.equals(RDFJSONUtility.NULL) ? null
                                : vf.createIRI(nextContext);
                        Statement st = vf.createStatement(subject, predicate, object, context);
                        if (handler != null) {
                            handler.handleStatement(st);
                        }
                    }
                } else {
                    Statement st = vf.createStatement(subject, predicate, object);
                    if (handler != null) {
                        handler.handleStatement(st);
                    }
                }

            }
            if (!foundObject) {
                reportFatalError("No object for predicate: subject=" + subjStr + " predicate=" + predStr,
                        jp.getCurrentLocation());
            }
        }

        if (!foundPredicate) {
            reportFatalError("No predicate for object: subject=" + subjStr, jp.getCurrentLocation());
        }
    }
}

From source file:org.sead.repositories.reference.RefRepository.java

private static void indexChildren(File index, JsonParser jp) throws IOException {

    JsonGenerator generator = new JsonFactory().createGenerator(index, JsonEncoding.UTF8);
    generator.useDefaultPrettyPrinter();

    generator.writeStartObject();

    JsonToken cur = jp.nextToken();
    while (cur.equals(JsonToken.START_OBJECT)) {
        long start = jp.getTokenLocation().getByteOffset();
        int depth = 1;
        while (depth > 0) {
            cur = jp.nextToken();
            if (cur.equals(JsonToken.START_OBJECT)) {
                depth++;
            } else if (cur.equals(JsonToken.END_OBJECT)) {
                depth--;
            } else if (cur.equals(JsonToken.FIELD_NAME) && depth == 1) {
                if (jp.getText().equals("@id")) {
                    cur = jp.nextToken();

                    String vName = jp.getText();
                    generator.writeNumberField(vName, start);
                } else {
                    report(jp, cur);
                }
            }
        }
        cur = jp.nextToken();
    }
    generator.writeEndObject();
    generator.close();

}

From source file:org.talend.dataprep.api.dataset.DataSetDataReader.java

/**
 * Reads and Maps the data set from the specified input stream.
 * <p><strong>Does NOT close the supplied {@link InputStream}</strong></p>
 *
 * @param inputStream  the input stream containing the data set
 * @param joinOnColumn the column used to join the lookup data set
 * @return a map which associates to each value of the joint column its corresponding data set row
 * @throws IOException              In case of JSON exception related error.
 * @throws IllegalArgumentException If the input stream is not of the expected JSON structure.
 */
public LightweightExportableDataSet parseAndMapLookupDataSet(InputStream inputStream, String joinOnColumn)
        throws IOException {
    Validate.isTrue(inputStream != null, "The provided input stream must not be null");

    try (JsonParser jsonParser = mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
            .getFactory().createParser(inputStream)) {
        LightweightExportableDataSet lookupDataset = new LightweightExportableDataSet();
        RowMetadata rowMetadata = new RowMetadata();

        JsonToken currentToken = jsonParser.nextToken();
        Validate.isTrue(currentToken == JsonToken.START_OBJECT, INCORRECT_OBJECT_STRUCTURE_ERROR_MESSAGE);

        while (currentToken != JsonToken.END_OBJECT && !jsonParser.isClosed()) {
            currentToken = jsonParser.nextToken();
            String currentField = jsonParser.getCurrentName();
            if ("metadata".equalsIgnoreCase(currentField)) {
                JsonToken metadataStartToken = jsonParser.nextToken(); // advance to start object
                Validate.isTrue(metadataStartToken == JsonToken.START_OBJECT,
                        INCORRECT_OBJECT_STRUCTURE_ERROR_MESSAGE);
                rowMetadata = parseDataSetMetadataAndReturnRowMetadata(jsonParser);
                lookupDataset.setMetadata(rowMetadata);
            } else if ("records".equalsIgnoreCase(currentField)) {
                JsonToken recordsStartToken = jsonParser.nextToken(); // advance to start object
                Validate.isTrue(recordsStartToken == JsonToken.START_ARRAY,
                        INCORRECT_OBJECT_STRUCTURE_ERROR_MESSAGE);
                lookupDataset.setRecords(parseRecords(jsonParser, rowMetadata, joinOnColumn));
            }
        }
        if (lookupDataset.isEmpty()) {
            throw new IOException(
                    "No lookup data has been retrieved when trying to parse the specified data set.");
        }
        return lookupDataset;

    }
}