List of usage examples for com.fasterxml.jackson.core JsonToken END_ARRAY
JsonToken END_ARRAY
To view the source code for com.fasterxml.jackson.core JsonToken END_ARRAY, click the Source Link shown with each example below.
From source file:com.zenesis.qx.remote.RequestHandler.java
/** * Reads an array from JSON, where each value is of the class clazz. Note that while the result * is an array, you cannot assume that it is an array of Object, or use generics because generics * are always Objects - this is because arrays of primitive types are not arrays of Objects * @param jp/*from w w w . j a va 2s . co m*/ * @param clazz * @return * @throws IOException */ private Object readArray(JsonParser jp, Class clazz) throws IOException { if (jp.getCurrentToken() == JsonToken.VALUE_NULL) return null; boolean isProxyClass = Proxied.class.isAssignableFrom(clazz); ArrayList result = new ArrayList(); for (; jp.nextToken() != JsonToken.END_ARRAY;) { if (isProxyClass) { Integer id = jp.readValueAs(Integer.class); if (id != null) { Proxied obj = getProxied(id); if (obj == null) log.fatal("Cannot read object of class " + clazz + " from id=" + id); else if (!clazz.isInstance(obj)) throw new ClassCastException( "Cannot cast " + obj + " class " + obj.getClass() + " to " + clazz); else result.add(obj); } else result.add(null); } else { Object obj = readSimpleValue(jp, clazz); result.add(obj); } } Object arr = Array.newInstance(clazz, result.size()); for (int i = 0; i < result.size(); i++) Array.set(arr, i, result.get(i)); return arr; //return result.toArray(Array.newInstance(clazz, result.size())); }
From source file:com.ntsync.shared.RawContact.java
/**
 * Parses a JSON array of list-type contact entries (e.g. phone numbers, emails) into
 * {@link ListRawData} items, appending to {@code listData}.
 * <p>
 * Each array element is an object whose recognised fields are DATA, TYPE, SUPERPRIMARY,
 * PRIMARY and LABEL (see {@code ContactConstants}). Entries without a DATA value are skipped.
 *
 * @param rowId       row identifier, used only for error logging
 * @param listData    existing list to append to; may be null (a list is created lazily)
 * @param jp          parser positioned on the array start token
 * @param fieldname   name of the enclosing field, used only for error logging
 * @param defaultType type to use when TYPE is absent or unrecognised
 * @param typeClass   concrete ListType subclass used to decode the TYPE value
 * @return {@code listData} (possibly newly created) with the parsed entries appended,
 *         or null if it was null and nothing was parsed
 * @throws IOException if the underlying stream fails or the JSON is malformed
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private static <T extends ListType> List<ListRawData<T>> readJsonList(String rowId, List<ListRawData<T>> listData,
        JsonParser jp, String fieldname, ListType defaultType, Class<T> typeClass) throws IOException {
    List<ListRawData<T>> newListData = listData;
    while (jp.nextToken() != JsonToken.END_ARRAY) {
        String number = null;
        ListType type = defaultType;
        String label = null;
        boolean isSuperPrimary = false;
        boolean isPrimary = false;
        // One iteration per field of the current entry object
        while (jp.nextToken() != JsonToken.END_OBJECT) {
            String namefield = jp.getCurrentName();
            // move to value
            if (jp.nextToken() == null) {
                throw new JsonParseException("Invalid JSON-Structure. End of Array missing.",
                        jp.getCurrentLocation());
            }
            if (ContactConstants.DATA.equals(namefield)) {
                number = jp.getValueAsString();
            } else if (ContactConstants.TYPE.equals(namefield)) {
                type = ContactConstants.fromVal(typeClass, jp.getValueAsInt());
                if (type == null) {
                    // unrecognised numeric type code: fall back to the caller's default
                    type = defaultType;
                }
            } else if (ContactConstants.SUPERPRIMARY.equals(namefield)) {
                isSuperPrimary = jp.getValueAsBoolean();
            } else if (ContactConstants.PRIMARY.equals(namefield)) {
                isPrimary = jp.getValueAsBoolean();
            } else if (ContactConstants.LABEL.equals(namefield)) {
                label = jp.getValueAsString();
            } else {
                // NOTE(review): this break leaves the parser positioned inside the current
                // object; the outer loop then resumes mid-object — confirm this is intended
                // for unrecognised fields rather than jp.skipChildren()-style recovery.
                LOG.error(JSON_FIELDNOTRECOGNIZED + rowId + " Fieldname:" + fieldname + " Unrecognized: "
                        + namefield);
                break;
            }
        }
        if (number != null) {
            if (newListData == null) {
                // created lazily so callers get null back when the array held no usable entries
                newListData = new ArrayList();
            }
            newListData.add(new ListRawData(number, type, label, isPrimary, isSuperPrimary));
        }
    }
    return newListData;
}
From source file:org.jmxtrans.embedded.output.CopperEggWriter.java
/**
 * Reads the copperegg_config.json specification from {@code in} and caches the metric groups
 * and dashboards it declares (keyed by their "name" field) into {@code metricgroupMap} and
 * {@code dashMap} for later creation / update. Mandatory configuration step.
 * <p>
 * Expected document shape: {@code {"config": {"metric_groups": [...], "dashboards": [...]}}}.
 * Any structural deviation is logged via {@code logger.warn} and aborts the method.
 * <p>
 * NOTE(review): the parser is never closed, {@code createJsonParser} is the pre-2.x Jackson
 * name for {@code createParser}, and {@code fieldName.equals("config")} will NPE on an empty
 * top-level object — confirm these are acceptable for this internal loader.
 *
 * @param in stream containing the copperegg_config.json document
 * @throws Exception propagated from the JSON parser
 */
public void read_config(InputStream in) throws Exception {
    JsonFactory f = new MappingJsonFactory();
    JsonParser jp = f.createJsonParser(in);
    JsonToken current;
    current = jp.nextToken();
    if (current != JsonToken.START_OBJECT) {
        logger.warn("read_config: Error: START_OBJECT not found : quiting.");
        return;
    }
    current = jp.nextToken();
    String fieldName = jp.getCurrentName();
    current = jp.nextToken();
    if (fieldName.equals("config")) {
        if (current != JsonToken.START_OBJECT) {
            logger.warn("read_config: Error: START_OBJECT not found after config : quiting.");
            return;
        }
        current = jp.nextToken();
        String fieldName2 = jp.getCurrentName();
        if (fieldName2.equals("metric_groups")) {
            current = jp.nextToken();
            if (current != JsonToken.START_ARRAY) {
                logger.warn("read_config: Error: START_ARRAY not found after metric_groups : quiting.");
                return;
            }
            // Cache each metric-group object as a JSON string keyed by its "name"
            current = jp.nextToken();
            while (current != JsonToken.END_ARRAY) {
                if (current != JsonToken.START_OBJECT) {
                    logger.warn(
                            "read_config: Error: START_OBJECT not found after metric_groups START_ARRAY : quiting.");
                    return;
                }
                current = jp.nextToken();
                JsonNode node1 = jp.readValueAsTree();
                String node1string = write_tostring(node1);
                metricgroupMap.put(node1.get("name").asText(), node1string);
                current = jp.nextToken();
            }
            current = jp.nextToken();
            String fieldName3 = jp.getCurrentName();
            if (fieldName3.equals("dashboards")) {
                current = jp.nextToken();
                if (current != JsonToken.START_ARRAY) {
                    logger.warn("read_config: Error: START_ARRAY not found after dashboards : quiting.");
                    return;
                }
                // Cache each dashboard object as a JSON string keyed by its "name"
                current = jp.nextToken();
                while (current != JsonToken.END_ARRAY) {
                    if (current != JsonToken.START_OBJECT) {
                        logger.warn(
                                "read_config: Error: START_OBJECT not found after dashboards START_ARRAY : quiting.");
                        return;
                    }
                    current = jp.nextToken();
                    JsonNode node = jp.readValueAsTree();
                    String nodestring = write_tostring(node);
                    dashMap.put(node.get("name").asText(), nodestring);
                    current = jp.nextToken();
                }
                // Expect the closing braces of "config" and of the document itself
                if (jp.nextToken() != JsonToken.END_OBJECT) {
                    logger.warn("read_config: Error: END_OBJECT expected, not found (1): quiting.");
                    return;
                }
                if (jp.nextToken() != JsonToken.END_OBJECT) {
                    logger.warn("read_config: Error: END_OBJECT expected, not found (2): quiting.");
                    return;
                }
            } else {
                logger.warn("read_config: Error: Expected dashboards : quiting.");
                return;
            }
        } else {
            logger.warn("read_config: Error: Expected metric_groups : quiting.");
            return;
        }
    }
}
From source file:com.adobe.communities.ugc.migration.importer.UGCImportHelper.java
/**
 * Imports a "translation" JSON object for a UGC post, creating a {@code translation} child
 * resource under {@code post} plus one child per translated language.
 * <p>
 * Recognised top-level fields: the translations container
 * ({@code ContentTypeDefinitions.LABEL_TRANSLATIONS}), {@code mtlanguage},
 * {@code jcr:createdBy} and {@code jcr:created}. Timestamp-valued translation properties
 * listed under {@code LABEL_TIMESTAMP_FIELDS} are converted from millisecond strings to
 * {@link java.util.Date}; subnodes are skipped; all other values are URL-decoded as UTF-8.
 *
 * @param jsonParser parser positioned on the translation object's START_OBJECT token
 * @param post       the post resource to attach the translation folder to
 * @throws IOException on malformed input or when resource creation fails
 */
protected static void importTranslation(final JsonParser jsonParser, final Resource post) throws IOException {
    JsonToken token = jsonParser.getCurrentToken();
    final Map<String, Object> properties = new HashMap<String, Object>();
    if (token != JsonToken.START_OBJECT) {
        throw new IOException("expected a start object token, got " + token.asString());
    }
    properties.put("jcr:primaryType", "social:asiResource");
    Resource translationFolder = null;
    token = jsonParser.nextToken();
    while (token == JsonToken.FIELD_NAME) {
        token = jsonParser.nextToken(); //advance to the field value
        if (jsonParser.getCurrentName().equals((ContentTypeDefinitions.LABEL_TRANSLATIONS))) {
            if (null == translationFolder) {
                // begin by creating the translation folder resource
                translationFolder = post.getResourceResolver().create(post, "translation", properties);
            }
            //now check to see if any translations exist
            if (token == JsonToken.START_OBJECT) {
                token = jsonParser.nextToken();
                if (token == JsonToken.FIELD_NAME) {
                    // NOTE(review): token is END_OBJECT when this loop re-tests its condition,
                    // so at most one translation appears to be imported per call — confirm
                    // against the export format whether multiple languages are expected here.
                    while (token == JsonToken.FIELD_NAME) { // each new field represents another translation
                        final Map<String, Object> translationProperties = new HashMap<String, Object>();
                        translationProperties.put("jcr:primaryType", "social:asiResource");
                        String languageLabel = jsonParser.getCurrentName();
                        token = jsonParser.nextToken();
                        if (token != JsonToken.START_OBJECT) {
                            throw new IOException("expected a start object token for translation item, got "
                                    + token.asString());
                        }
                        token = jsonParser.nextToken();
                        while (token != JsonToken.END_OBJECT) {
                            jsonParser.nextToken(); //get next field value
                            if (jsonParser.getCurrentName()
                                    .equals(ContentTypeDefinitions.LABEL_TIMESTAMP_FIELDS)) {
                                // array of property names whose string values are epoch millis;
                                // convert each previously-read value to a Date
                                jsonParser.nextToken(); // advance to first field name
                                while (!jsonParser.getCurrentToken().equals(JsonToken.END_ARRAY)) {
                                    final String timestampLabel = jsonParser.getValueAsString();
                                    if (translationProperties.containsKey(timestampLabel)) {
                                        final Calendar calendar = new GregorianCalendar();
                                        calendar.setTimeInMillis(Long
                                                .parseLong((String) translationProperties.get(timestampLabel)));
                                        translationProperties.put(timestampLabel, calendar.getTime());
                                    }
                                    jsonParser.nextToken();
                                }
                            } else if (jsonParser.getCurrentName()
                                    .equals(ContentTypeDefinitions.LABEL_SUBNODES)) {
                                // subnodes are not imported here; skip the whole subtree
                                jsonParser.skipChildren();
                            } else {
                                translationProperties.put(jsonParser.getCurrentName(),
                                        URLDecoder.decode(jsonParser.getValueAsString(), "UTF-8"));
                            }
                            token = jsonParser.nextToken(); //get next field label
                        }
                        // add the language-specific translation under the translation folder resource
                        Resource translation = post.getResourceResolver().create(post.getChild("translation"),
                                languageLabel, translationProperties);
                        if (null == translation) {
                            throw new IOException("translation not actually imported");
                        }
                    }
                    jsonParser.nextToken(); //skip END_OBJECT token for translation
                } else if (token == JsonToken.END_OBJECT) {
                    // no actual translation to import, so we're done here
                    jsonParser.nextToken();
                }
            } else {
                throw new IOException(
                        "expected translations to be contained in an object, saw instead: " + token.asString());
            }
        } else if (jsonParser.getCurrentName().equals("mtlanguage")
                || jsonParser.getCurrentName().equals("jcr:createdBy")) {
            properties.put(jsonParser.getCurrentName(), jsonParser.getValueAsString());
        } else if (jsonParser.getCurrentName().equals("jcr:created")) {
            final Calendar calendar = new GregorianCalendar();
            calendar.setTimeInMillis(jsonParser.getLongValue());
            properties.put("jcr:created", calendar.getTime());
        }
        token = jsonParser.nextToken();
    }
    if (null == translationFolder && properties.containsKey("mtlanguage")) {
        // it's possible that no translations existed, so we need to make sure the translation resource (which
        // includes the original post's detected language) is created anyway
        post.getResourceResolver().create(post, "translation", properties);
    }
}
From source file:org.jmxtrans.embedded.output.CopperEggWriter.java
public String groupFind(String findName, String findIndex, Integer ExpectInt) throws Exception { JsonFactory f = new MappingJsonFactory(); JsonParser jp = f.createJsonParser(findIndex); int count = 0; int foundit = 0; String Result = null;// ww w . j a v a2s . c o m JsonToken current = jp.nextToken(); if (current != JsonToken.START_ARRAY) { logger.warn("groupFind: Error: START_ARRAY expected, not found : quiting."); return (Result); } current = jp.nextToken(); while (current != JsonToken.END_ARRAY) { if (current != JsonToken.START_OBJECT) { logger.warn("groupFind: Error: START_OBJECT expected, not found : quiting."); return (Result); } current = jp.nextToken(); JsonNode node = jp.readValueAsTree(); String tmpStr = node.get("name").asText().toString(); if (findName.equals(node.get("name").asText().toString())) { if (ExpectInt != 0) { foundit = node.get("id").asInt(); Result = String.valueOf(foundit); } else { Result = node.get("id").asText().toString(); } break; } current = jp.nextToken(); count = count + 1; } return (Result); }
From source file:org.apache.druid.query.aggregation.AggregationTestHelper.java
private List readQueryResultArrayFromString(String str) throws Exception { List result = new ArrayList(); JsonParser jp = mapper.getFactory().createParser(str); if (jp.nextToken() != JsonToken.START_ARRAY) { throw new IAE("not an array [%s]", str); }//from ww w. ja va 2 s . c o m ObjectCodec objectCodec = jp.getCodec(); while (jp.nextToken() != JsonToken.END_ARRAY) { result.add(objectCodec.readValue(jp, toolChest.getResultTypeReference())); } return result; }
From source file:org.eclipse.rdf4j.rio.rdfjson.RDFJSONParser.java
/**
 * Core RDF/JSON parse loop. The document is a JSON object keyed by subject (blank node when
 * prefixed "_:", IRI otherwise); each subject maps to an object keyed by predicate IRI; each
 * predicate maps to an array of object values. Every object value is itself a JSON object
 * with mandatory "value"/"type" fields and optional "lang"/"datatype"/"graphs" fields.
 * Statements are emitted to {@code handler} as soon as each object value is complete.
 *
 * @param handler receives each parsed statement; may be null (validate without emitting)
 * @param vf      factory used to create IRIs, literals and statements
 * @param jp      parser positioned before the document's first token
 */
private void rdfJsonToHandlerInternal(final RDFHandler handler, final ValueFactory vf, final JsonParser jp)
        throws IOException, JsonParseException, RDFParseException, RDFHandlerException {
    if (jp.nextToken() != JsonToken.START_OBJECT) {
        reportFatalError("Expected RDF/JSON document to start with an Object", jp.getCurrentLocation());
    }
    // One iteration per subject
    while (jp.nextToken() != JsonToken.END_OBJECT) {
        final String subjStr = jp.getCurrentName();
        Resource subject = null;
        subject = subjStr.startsWith("_:") ? createNode(subjStr.substring(2)) : vf.createIRI(subjStr);
        if (jp.nextToken() != JsonToken.START_OBJECT) {
            reportFatalError("Expected subject value to start with an Object", jp.getCurrentLocation());
        }
        boolean foundPredicate = false;
        // One iteration per predicate of the current subject
        while (jp.nextToken() != JsonToken.END_OBJECT) {
            final String predStr = jp.getCurrentName();
            final IRI predicate = vf.createIRI(predStr);
            foundPredicate = true;
            if (jp.nextToken() != JsonToken.START_ARRAY) {
                reportFatalError("Expected predicate value to start with an array", jp.getCurrentLocation());
            }
            boolean foundObject = false;
            // One iteration per object value of the current predicate
            while (jp.nextToken() != JsonToken.END_ARRAY) {
                if (jp.getCurrentToken() != JsonToken.START_OBJECT) {
                    reportFatalError("Expected object value to start with an Object: subject=<" + subjStr
                            + "> predicate=<" + predStr + ">", jp.getCurrentLocation());
                }
                String nextValue = null;
                String nextType = null;
                String nextDatatype = null;
                String nextLanguage = null;
                final Set<String> nextContexts = new HashSet<String>(2);
                // Collect the fields of this object value; duplicates are reported as
                // (possibly non-fatal) errors but the last value read wins
                while (jp.nextToken() != JsonToken.END_OBJECT) {
                    final String fieldName = jp.getCurrentName();
                    if (RDFJSONUtility.VALUE.equals(fieldName)) {
                        if (nextValue != null) {
                            reportError(
                                    "Multiple values found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_VALUES);
                        }
                        jp.nextToken();
                        nextValue = jp.getText();
                    } else if (RDFJSONUtility.TYPE.equals(fieldName)) {
                        if (nextType != null) {
                            reportError(
                                    "Multiple types found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_TYPES);
                        }
                        jp.nextToken();
                        nextType = jp.getText();
                    } else if (RDFJSONUtility.LANG.equals(fieldName)) {
                        if (nextLanguage != null) {
                            reportError(
                                    "Multiple languages found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_LANGUAGES);
                        }
                        jp.nextToken();
                        nextLanguage = jp.getText();
                    } else if (RDFJSONUtility.DATATYPE.equals(fieldName)) {
                        if (nextDatatype != null) {
                            reportError(
                                    "Multiple datatypes found for a single object: subject=" + subjStr
                                            + " predicate=" + predStr,
                                    jp.getCurrentLocation(),
                                    RDFJSONParserSettings.FAIL_ON_MULTIPLE_OBJECT_DATATYPES);
                        }
                        jp.nextToken();
                        nextDatatype = jp.getText();
                    } else if (RDFJSONUtility.GRAPHS.equals(fieldName)) {
                        // non-standard named-graphs extension: an array of context IRIs/bnodes
                        if (jp.nextToken() != JsonToken.START_ARRAY) {
                            reportError("Expected graphs to start with an array", jp.getCurrentLocation(),
                                    RDFJSONParserSettings.SUPPORT_GRAPHS_EXTENSION);
                        }
                        while (jp.nextToken() != JsonToken.END_ARRAY) {
                            final String nextGraph = jp.getText();
                            nextContexts.add(nextGraph);
                        }
                    } else {
                        reportError(
                                "Unrecognised JSON field name for object: subject=" + subjStr + " predicate="
                                        + predStr + " fieldname=" + fieldName,
                                jp.getCurrentLocation(), RDFJSONParserSettings.FAIL_ON_UNKNOWN_PROPERTY);
                    }
                }
                Value object = null;
                if (nextType == null) {
                    reportFatalError("No type for object: subject=" + subjStr + " predicate=" + predStr,
                            jp.getCurrentLocation());
                }
                if (nextValue == null) {
                    reportFatalError("No value for object: subject=" + subjStr + " predicate=" + predStr,
                            jp.getCurrentLocation());
                }
                // Materialise the object according to its declared type:
                // literal (with optional lang xor datatype), bnode, or uri
                if (RDFJSONUtility.LITERAL.equals(nextType)) {
                    if (nextLanguage != null) {
                        object = this.createLiteral(nextValue, nextLanguage, null, jp.getCurrentLocation());
                    } else if (nextDatatype != null) {
                        object = this.createLiteral(nextValue, null, this.createURI(nextDatatype),
                                jp.getCurrentLocation());
                    } else {
                        object = this.createLiteral(nextValue, null, null, jp.getCurrentLocation());
                    }
                } else if (RDFJSONUtility.BNODE.equals(nextType)) {
                    if (nextLanguage != null) {
                        reportFatalError("Language was attached to a blank node object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    if (nextDatatype != null) {
                        reportFatalError("Datatype was attached to a blank node object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    object = createNode(nextValue.substring(2));
                } else if (RDFJSONUtility.URI.equals(nextType)) {
                    if (nextLanguage != null) {
                        reportFatalError("Language was attached to a uri object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    if (nextDatatype != null) {
                        reportFatalError("Datatype was attached to a uri object: subject=" + subjStr
                                + " predicate=" + predStr, jp.getCurrentLocation());
                    }
                    object = vf.createIRI(nextValue);
                }
                foundObject = true;
                if (!nextContexts.isEmpty()) {
                    // Emit one statement per declared context ("null" means the default graph)
                    for (final String nextContext : nextContexts) {
                        final Resource context;
                        if (nextContext.equals(RDFJSONUtility.NULL)) {
                            context = null;
                        } else if (nextContext.startsWith("_:")) {
                            context = createNode(nextContext.substring(2));
                        } else {
                            context = vf.createIRI(nextContext);
                        }
                        Statement st = vf.createStatement(subject, predicate, object, context);
                        if (handler != null) {
                            handler.handleStatement(st);
                        }
                    }
                } else {
                    Statement st = vf.createStatement(subject, predicate, object);
                    if (handler != null) {
                        handler.handleStatement(st);
                    }
                }
            }
            if (!foundObject) {
                reportFatalError("No object for predicate: subject=" + subjStr + " predicate=" + predStr,
                        jp.getCurrentLocation());
            }
        }
        if (!foundPredicate) {
            reportFatalError("No predicate for object: subject=" + subjStr, jp.getCurrentLocation());
        }
    }
}
From source file:org.flockdata.integration.FileProcessor.java
private int processJsonEntities(String fileName, ExtractProfile extractProfile) throws FlockException { int rows = 0; File file = new File(fileName); InputStream stream = null;//from w ww . jav a 2 s.c om if (!file.exists()) { stream = ClassLoader.class.getResourceAsStream(fileName); if (stream == null) { logger.error("{} does not exist", fileName); return 0; } } StopWatch watch = new StopWatch(); JsonFactory jfactory = new JsonFactory(); JsonParser jParser; List<EntityToEntityLinkInput> referenceInputBeans = new ArrayList<>(); try { //String docType = mappable.getDataType(); watch.start(); ObjectMapper om = FdJsonObjectMapper.getObjectMapper(); try { if (stream != null) jParser = jfactory.createParser(stream); else jParser = jfactory.createParser(file); JsonToken currentToken = jParser.nextToken(); long then = new DateTime().getMillis(); JsonNode node; if (currentToken == JsonToken.START_ARRAY || currentToken == JsonToken.START_OBJECT) { while (currentToken != null && currentToken != JsonToken.END_OBJECT) { while (currentToken != null && jParser.nextToken() != JsonToken.END_ARRAY) { node = om.readTree(jParser); if (node != null) { processJsonNode(node, extractProfile.getContentModel(), referenceInputBeans); if (stopProcessing(rows++, then)) { break; } } currentToken = jParser.nextToken(); } } } else if (currentToken == JsonToken.START_OBJECT) { //om.readTree(jParser); node = om.readTree(jParser); processJsonNode(node, extractProfile.getContentModel(), referenceInputBeans); } } catch (IOException e1) { logger.error("Unexpected", e1); } } finally { getPayloadWriter().flush(); } return endProcess(watch, rows, 0); }
From source file:org.hippoecm.frontend.service.restproxy.custom.json.deserializers.AnnotationJsonDeserializer.java
protected Byte[] deserializeByteArrayAnnotationAttribute(JsonParser jsonParser) throws JsonParseException, IOException { List<Byte> byteArray = new ArrayList<Byte>(); while (jsonParser.nextToken() != JsonToken.END_ARRAY) { byteArray.add(jsonParser.getByteValue()); }//from w ww .ja v a 2 s.c o m return byteArray.toArray(new Byte[byteArray.size()]); }
From source file:org.hippoecm.frontend.service.restproxy.custom.json.deserializers.AnnotationJsonDeserializer.java
protected Short[] deserializeShortArrayAnnotationAttribute(JsonParser jsonParser) throws JsonParseException, IOException { List<Short> integerArray = new ArrayList<Short>(); while (jsonParser.nextToken() != JsonToken.END_ARRAY) { integerArray.add(jsonParser.getShortValue()); }//from w ww . ja va 2 s. com return integerArray.toArray(new Short[integerArray.size()]); }