List of usage examples for com.fasterxml.jackson.core JsonToken END_ARRAY
JsonToken END_ARRAY
To view the source code for com.fasterxml.jackson.core JsonToken END_ARRAY, click the Source Link below.
From source file:io.apiman.manager.api.exportimport.json.JsonImportReader.java
public void readApiVersions() throws Exception { current = nextToken();/*from w w w . jav a 2 s . co m*/ if (current == JsonToken.END_ARRAY) { return; } while (nextToken() != JsonToken.END_ARRAY) { // Traverse each api definition while (nextToken() != JsonToken.END_OBJECT) { if (jp.getCurrentName().equals(ApiVersionBean.class.getSimpleName())) { current = nextToken(); ApiVersionBean apiBean = jp.readValueAs(ApiVersionBean.class); dispatcher.apiVersion(apiBean); } else { OrgElementsEnum fieldName = OrgElementsEnum.valueOf(jp.getCurrentName()); current = nextToken(); switch (fieldName) { case Policies: processEntities(PolicyBean.class, dispatcher::apiPolicy); break; default: throw new RuntimeException("Unhandled entity " + fieldName + " with token " + current); } } } } }
From source file:com.github.heuermh.personalgenome.client.converter.JacksonPersonalGenomeConverter.java
@Override public Haplogroup parseHaplogroups(final InputStream inputStream) { checkNotNull(inputStream);// w ww. j av a 2 s. c o m JsonParser parser = null; try { parser = jsonFactory.createParser(inputStream); parser.nextToken(); String id = null; String maternal = null; String paternal = null; String rsid = null; String rcrsPosition = null; String snp = null; List<PaternalTerminalSnp> paternalTerminalSnps = new ArrayList<PaternalTerminalSnp>(); List<MaternalTerminalSnp> maternalTerminalSnps = new ArrayList<MaternalTerminalSnp>(); while (parser.nextToken() != JsonToken.END_OBJECT) { String field = parser.getCurrentName(); parser.nextToken(); if ("id".equals(field)) { id = parser.getText(); } else if ("maternal".equals(field)) { maternal = parser.getText(); } else if ("paternal".equals(field)) { paternal = "null" == parser.getText() ? null : parser.getText(); } else if ("maternal_terminal_snps".equals(field)) { while (parser.nextToken() != JsonToken.END_ARRAY) { while (parser.nextToken() != JsonToken.END_OBJECT) { String maternalTerminalSnpsField = parser.getCurrentName(); parser.nextToken(); if ("rsid".equals(maternalTerminalSnpsField)) { rsid = parser.getText(); } else if ("rcrs_position".equals(maternalTerminalSnpsField)) { rcrsPosition = parser.getText(); } } maternalTerminalSnps.add(new MaternalTerminalSnp(rsid, rcrsPosition)); } } else if ("paternal_terminal_snps".equals(field)) { while (parser.nextToken() != JsonToken.END_ARRAY) { while (parser.nextToken() != JsonToken.END_OBJECT) { String paternalTerminalSnpsField = parser.getCurrentName(); parser.nextToken(); if ("rsid".equals(paternalTerminalSnpsField)) { rsid = parser.getText(); } else if ("snp".equals(paternalTerminalSnpsField)) { snp = parser.getText(); } } paternalTerminalSnps.add(new PaternalTerminalSnp(rsid, snp)); } } } return new Haplogroup(id, paternal, maternal, paternalTerminalSnps, maternalTerminalSnps); } catch (IOException e) { logger.warn("could not parse haplogroups", e); } finally { try { 
inputStream.close(); } catch (Exception e) { // ignored } try { parser.close(); } catch (Exception e) { // ignored } } return null; }
From source file:org.apache.manifoldcf.agents.output.amazoncloudsearch.AmazonCloudSearchConnector.java
/**
 * Scans forward through the current JSON array looking for a "message"
 * field and returns its text value.
 *
 * @param parser parser positioned inside the array to scan
 * @return the value of the first (case-insensitive) "message" field, or
 *         null if the array ends without one
 * @throws IOException if the underlying stream cannot be read
 */
private String parseMessage(JsonParser parser) throws JsonParseException, IOException {
    for (JsonToken token = parser.nextToken(); token != JsonToken.END_ARRAY; token = parser.nextToken()) {
        String fieldName = parser.getCurrentName();
        if (!"message".equalsIgnoreCase(fieldName)) {
            continue;
        }
        // Advance onto the field's value and return it.
        parser.nextToken();
        return parser.getText();
    }
    return null;
}
From source file:org.seedstack.seed.core.internal.data.DataManagerImpl.java
/**
 * Looks up the importer registered for (group, name) and, when the
 * (acceptGroup, acceptName) filter matches, feeds it every object of the
 * "items" JSON array the parser is positioned before.
 * <p>
 * NOTE(review): when the accept filter does NOT match, the items array is
 * left unconsumed and null is returned — presumably the caller skips the
 * remaining tokens; confirm against the call site.
 *
 * @param jsonParser  parser positioned just before the "items" array
 * @param group       data-set group declared in the stream
 * @param name        data-set name declared in the stream
 * @param acceptGroup group filter; null accepts any group
 * @param acceptName  name filter; null accepts any name
 * @return the importer that received the data, or null if the filter rejected the set
 * @throws IOException if the stream cannot be read
 */
private DataImporter<Object> consumeItems(JsonParser jsonParser, String group, String name, String acceptGroup,
        String acceptName) throws IOException {
    Map<String, DataImporterDefinition<Object>> dataImporterDefinitionMap = allDataImporters.get(group);
    if (dataImporterDefinitionMap == null) {
        throw SeedException.createNew(DataErrorCode.NO_IMPORTER_FOUND).put(GROUP, group).put(NAME, name);
    }
    DataImporterDefinition<Object> currentImporterDefinition = dataImporterDefinitionMap.get(name);
    if (currentImporterDefinition == null) {
        throw SeedException.createNew(DataErrorCode.NO_IMPORTER_FOUND).put(GROUP, group).put(NAME, name);
    }
    // Sanity-check that the definition we looked up really declares the same
    // group/name it was registered under.
    if (!group.equals(currentImporterDefinition.getGroup())) {
        throw SeedException.createNew(DataErrorCode.UNEXPECTED_DATA_TYPE)
                .put(DATA_SET, String.format(CLASSES_MAP_KEY, group, name))
                .put(IMPORTER_CLASS, currentImporterDefinition.getDataImporterClass().getName());
    }
    if (!name.equals(currentImporterDefinition.getName())) {
        throw SeedException.createNew(DataErrorCode.UNEXPECTED_DATA_TYPE)
                .put(DATA_SET, String.format(CLASSES_MAP_KEY, group, name))
                .put(IMPORTER_CLASS, currentImporterDefinition.getDataImporterClass().getName());
    }
    DataImporter<Object> currentDataImporter = null;
    // A null filter component matches everything; otherwise it must equal the declared value.
    if ((acceptGroup == null || acceptGroup.equals(group)) && (acceptName == null || acceptName.equals(name))) {
        currentDataImporter = injector.getInstance(currentImporterDefinition.getDataImporterClass());

        // Check if items contains an array
        if (jsonParser.nextToken() != JsonToken.START_ARRAY) {
            throw new IllegalArgumentException("Items should be an array");
        }

        jsonParser.nextToken();

        // If the array is not empty consume it
        if (jsonParser.getCurrentToken() != JsonToken.END_ARRAY) {
            // readValuesAs streams each element as an instance of the imported class.
            Iterator<Object> objectIterator = jsonParser
                    .readValuesAs(currentImporterDefinition.getImportedClass());
            while (objectIterator.hasNext()) {
                currentDataImporter.importData(objectIterator.next());
            }
            // The array should end correctly
            if (jsonParser.getCurrentToken() != JsonToken.END_ARRAY) {
                throw new IllegalArgumentException("end array expected");
            }
        }
    }
    // the data importer containing the data
    return currentDataImporter;
}
From source file:com.cedarsoft.serialization.jackson.AbstractJacksonSerializer.java
@Nonnull protected <T> List<? extends T> deserializeArray(@Nonnull Class<T> type, @Nullable String propertyName, @Nonnull JsonParser deserializeFrom, @Nonnull Version formatVersion) throws IOException { JacksonParserWrapper parserWrapper = new JacksonParserWrapper(deserializeFrom); if (propertyName == null) { parserWrapper.verifyCurrentToken(JsonToken.START_ARRAY); } else {//from w w w .ja v a 2 s . co m parserWrapper.nextToken(); parserWrapper.verifyCurrentToken(JsonToken.FIELD_NAME); String currentName = parserWrapper.getCurrentName(); if (!propertyName.equals(currentName)) { throw new JsonParseException( "Invalid field. Expected <" + propertyName + "> but was <" + currentName + ">", parserWrapper.getCurrentLocation()); } parserWrapper.nextToken(); } List<T> deserialized = new ArrayList<T>(); while (deserializeFrom.nextToken() != JsonToken.END_ARRAY) { deserialized.add(deserialize(type, formatVersion, deserializeFrom)); } return deserialized; }
From source file:io.apiman.manager.api.exportimport.json.JsonImportReader.java
public void readClientVersions() throws Exception { current = nextToken();//w ww . ja va2 s . c o m if (current == JsonToken.END_ARRAY) { return; } while (nextToken() != JsonToken.END_ARRAY) { // Traverse each client definition while (nextToken() != JsonToken.END_OBJECT) { if (jp.getCurrentName().equals(ClientVersionBean.class.getSimpleName())) { current = nextToken(); ClientVersionBean clientBean = jp.readValueAs(ClientVersionBean.class); dispatcher.clientVersion(clientBean); } else { OrgElementsEnum fieldName = OrgElementsEnum.valueOf(jp.getCurrentName()); current = nextToken(); switch (fieldName) { case Policies: processEntities(PolicyBean.class, dispatcher::clientPolicy); break; case Contracts: processEntities(ContractBean.class, dispatcher::clientContract); break; default: throw new RuntimeException("Unhandled entity " + fieldName + " with token " + current); } } } } }
From source file:com.quinsoft.zeidon.standardoe.ActivateOisFromJsonStream.java
/**
 * Reads one entity (and, recursively, its children) from the JSON stream into
 * the current view, creating an EntityInstanceImpl per twin and applying
 * attribute values, ".meta"/".attr" metadata, linking, and incremental flags.
 * <p>
 * The parser is expected to be positioned on either START_ARRAY (a list of
 * twins) or START_OBJECT (a single entity) when called.
 *
 * @param entityName name of the entity in the LOD definition
 * @throws Exception on parse errors or an unknown/non-child entity name
 */
private void readEntity(String entityName) throws Exception {
    // Keeps track of whether the entity list starts with a [ or not. If there
    // is no [ then we are done reading entities of this type when we find the
    // end of the object.
    boolean entityArray = false;
    int twinCount = 0;
    JsonToken token = jp.getCurrentToken();
    if (token == JsonToken.START_ARRAY) {
        token = jp.nextToken();
        entityArray = true; // Entity list started with [
    }
    assert token == JsonToken.START_OBJECT;
    EntityDef entityDef = lodDef.getEntityDef(entityName, true, true);

    // Read tokens until we find the token that ends the current list of entities.
    while ((token = jp.nextToken()) != null) {
        twinCount++;
        if (token == JsonToken.END_ARRAY)
            break;

        if (token == JsonToken.END_OBJECT) {
            // If we get here then this should indicate an empty OI. Get the next
            // token, verify that it's an END_ARRAY, and return.
            token = jp.nextToken();
            assert token == JsonToken.END_ARRAY;
            break;
        }

        // If there are multiple twins then the token is START_OBJECT to
        // indicate a new EI.
        if (token == JsonToken.START_OBJECT) {
            assert twinCount > 1; // Assert that we already created at least one EI.
            token = jp.nextToken();
        }

        assert token == JsonToken.FIELD_NAME;
        // Each twin becomes a new entity instance appended at the end of the cursor.
        EntityInstanceImpl ei = (EntityInstanceImpl) view.cursor(entityDef).createEntity(CursorPosition.LAST,
                CREATE_FLAGS);
        // Attribute metadata is collected and applied only after all attribute
        // values have been set (order matters for the flag bookkeeping below).
        List<AttributeMeta> attributeMetas = new ArrayList<>();

        // Read tokens until we find the token that ends the current entity.
        EntityMeta entityMeta = DEFAULT_ENTITY_META;
        while ((token = jp.nextToken()) != JsonToken.END_OBJECT) {
            String fieldName = jp.getCurrentName();
            if (token == JsonToken.FIELD_NAME || token == JsonToken.START_OBJECT)
                token = jp.nextToken();

            if (StringUtils.equals(fieldName, ".meta")) {
                entityMeta = readEntityMeta(ei);
                // Now that we have everything we can perform some processing.
                if (entityMeta.isLinkedSource)
                    linkSources.put(entityMeta.entityKey, ei);
                else if (entityMeta.linkedSource != null)
                    ei.linkInstances(linkSources.get(entityMeta.linkedSource));
                continue;
            }

            // Any other dotted field is per-attribute metadata; defer applying it.
            if (fieldName.startsWith(".")) {
                AttributeMeta am = readAttributeMeta(ei, fieldName);
                attributeMetas.add(am);
                continue;
            }

            // Is this the start of an entity.
            if (token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT) {
                boolean recursiveChild = false;
                // Validate that the entity name is valid.
                EntityDef childEntity = lodDef.getEntityDef(fieldName, true, true);
                if (childEntity.getParent() != entityDef) {
                    // Check to see the childEntity is a recursive child.
                    if (entityDef.isRecursive()) {
                        view.cursor(entityDef).setToSubobject();
                        recursiveChild = true;
                    } else
                        throw new ZeidonException("Parse error: %s is not a child of %s", fieldName, entityName);
                }

                readEntity(fieldName);

                if (recursiveChild)
                    view.resetSubobject();
                continue;
            }

            if (StringUtils.equals(jp.getText(), fieldName)) // If jp points to attr name, get next token.
                token = jp.nextToken(); // This better be an attribute

            // Try getting the attribute. We won't throw an exception (yet) if there
            // is no attribute with a matching name.
            AttributeDef attributeDef = entityDef.getAttribute(fieldName, false, true);
            if (attributeDef == null) {
                // We didn't find an attribute with a name matching fieldName. Do we allow
                // dynamic attributes for this entity?
                if (options.getAllowableDynamicEntities() == null
                        || !options.getAllowableDynamicEntities().contains(entityDef.getName())) {
                    entityDef.getAttribute(fieldName); // This will throw the exception.
                }

                // We are allowing dynamic attributes. Create one.
                DynamicAttributeDefConfiguration config = new DynamicAttributeDefConfiguration();
                config.setAttributeName(fieldName);
                attributeDef = entityDef.createDynamicAttributeDef(config);
            } else if (attributeDef.isDerived()) // We'll ignore derived attributes.
                continue;

            Domain domain = attributeDef.getDomain();
            Object internalValue = domain.convertExternalValue(task, ei.getAttribute(attributeDef), attributeDef,
                    null, jp.getText());
            ei.getAttribute(attributeDef).setInternalValue(internalValue, !attributeDef.isKey());

            if (incremental) {
                // Since incremental flags are set, assume the attribute hasn't been
                // updated. We'll be told later if it has.
                AttributeValue attrib = ei.getInternalAttribute(attributeDef);
                attrib.setUpdated(false);
            } else {
                // If we just set the key then we'll assume the entity has
                // already been created.
                if (attributeDef.isKey())
                    ei.setIncrementalFlags(IncrementalEntityFlags.UPDATED);
            }
        } // while ( ( token = jp.nextToken() ) != JsonToken.END_OBJECT )...

        // Apply all the attribute metas to correctly set the attribute flags.
        for (AttributeMeta am : attributeMetas)
            am.apply(ei);

        // Now that we've updated everything, set the flags.
        if (incremental) {
            ei.setCreated(entityMeta.created);
            ei.setUpdated(entityMeta.updated);
            ei.setDeleted(entityMeta.deleted);
            ei.setIncluded(entityMeta.included);
            ei.setExcluded(entityMeta.excluded);
            if (entityMeta.incomplete)
                ei.setIncomplete(null);
            if (entityMeta.lazyLoaded != null) {
                // lazyLoaded is a comma-separated list of entity names.
                String[] names = entityMeta.lazyLoaded.split(",");
                for (String name : names)
                    ei.getEntitiesLoadedLazily().add(lodDef.getEntityDef(name, true, true));
            }
        }

        // If the entity list didn't start with a [ then there is only one entity
        // in the list of twins so exit.
        if (entityArray == false)
            break;
    } // while ( ( token = jp.nextToken() ) != null )...
}
From source file:org.apache.lucene.server.handlers.AddDocumentHandler.java
/** Parse a Document using Jackson's streaming parser * API. The document should look like {indexName: 'foo', * fields: {..., ...}} *//*from ww w .j ava2 s .co m*/ public static Document parseDocument(IndexState state, JsonParser p) throws IOException { JsonToken token = p.nextToken(); if (token == JsonToken.END_ARRAY) { // nocommit hackish.. caller should tell us this means "end"? return null; } else if (token != JsonToken.START_OBJECT) { throw new IllegalArgumentException("expected JSON Object"); } final Document doc = new Document(); while (true) { token = p.nextToken(); if (token == JsonToken.END_OBJECT) { break; } assert token == JsonToken.FIELD_NAME : token; String fieldName = p.getText(); if (fieldName.equals("fields")) { parseFields(state, doc, p); } else { // Let a plugin handle it: boolean handled = false; for (PostHandle postHandle : postHandlers) { if (postHandle.invoke(state, fieldName, p, doc)) { handled = true; break; } } if (!handled) { throw new IllegalArgumentException("unrecognized field " + p.getText()); } } // nocommit need test that same field name can't // appear more than once? app must put all values for // a given field into an array (for a multi-valued // field) } return doc; }
From source file:com.netflix.hollow.jsonadapter.HollowJsonAdapter.java
/**
 * Reads a JSON array into the supplied Hollow LIST/SET write record and
 * registers the populated record with the state engine.
 * <p>
 * Object/array elements are parsed recursively via parseSubType; scalar
 * elements are funneled through a lazily-created single-field mapping.
 *
 * @param parser    parser positioned just before the array contents
 * @param arrayType Hollow type name of the array (keys hollowSchemas)
 * @param arrayRec  LIST or SET write record to populate (reset first)
 * @return the ordinal assigned by the state engine for the populated record
 * @throws IOException if the stream cannot be read
 */
private int addSubArray(JsonParser parser, String arrayType, HollowWriteRecord arrayRec) throws IOException {
    arrayRec.reset();
    HollowCollectionSchema schema = (HollowCollectionSchema) hollowSchemas.get(arrayType);

    // Lazily initialized; only needed when elements are scalars.
    ObjectFieldMapping scalarMapping = null;
    ObjectMappedFieldPath scalarField = null;

    for (JsonToken token = parser.nextToken(); token != JsonToken.END_ARRAY; token = parser.nextToken()) {
        final int elementOrdinal;
        if (token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY) {
            // Nested element: recurse through the generic sub-type parser.
            elementOrdinal = parseSubType(parser, token, schema.getElementType());
        } else {
            if (scalarMapping == null) {
                scalarMapping = getObjectFieldMapping(schema.getElementType());
                scalarField = scalarMapping.getSingleFieldMapping();
            }
            addObjectField(parser, token, scalarField);
            elementOrdinal = scalarMapping.build(-1);
        }

        if (arrayRec instanceof HollowListWriteRecord) {
            ((HollowListWriteRecord) arrayRec).addElement(elementOrdinal);
        } else {
            ((HollowSetWriteRecord) arrayRec).addElement(elementOrdinal);
        }
    }
    return stateEngine.add(arrayType, arrayRec);
}
From source file:com.github.heuermh.personalgenome.client.converter.JacksonPersonalGenomeConverter.java
/**
 * Recursively parses a JSON array of ancestry sub-populations, appending one
 * Ancestry per array element to {@code ancestries}. Nested
 * "sub_populations" arrays are parsed recursively into each element's own
 * sub-population list.
 *
 * @param id         ancestry id propagated to every parsed Ancestry
 * @param ancestries list that parsed ancestries are appended to (also returned)
 * @param parser     parser positioned just inside the sub-populations array
 * @return {@code ancestries}, with the parsed entries appended
 * @throws IOException if the stream cannot be read
 */
List<Ancestry> parseSubPopulation(final String id, final List<Ancestry> ancestries, final JsonParser parser)
        throws IOException {
    String label = null;
    double proportion = 0.0d;
    double unassigned = 0.0d;
    List<Ancestry> subPopulations = new ArrayList<Ancestry>();
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        while (parser.nextToken() != JsonToken.END_OBJECT) {
            String field = parser.getCurrentName();
            parser.nextToken();
            if ("label".equals(field)) {
                label = parser.getText();
            } else if ("proportion".equals(field)) {
                proportion = Double.parseDouble(parser.getText());
            } else if ("unassigned".equals(field)) {
                unassigned = Double.parseDouble(parser.getText());
            } else if ("sub_populations".equals(field)) {
                subPopulations = parseSubPopulation(id, subPopulations, parser);
            }
        }
        ancestries.add(new Ancestry(id, label, proportion, unassigned, subPopulations));
        // Reset per-element state for the next array entry.
        // BUG FIX: the original called subPopulations.clear() here, which
        // emptied the very list just handed to the Ancestry above (unless
        // Ancestry defensively copies — TODO confirm). Allocating a fresh
        // list preserves each ancestry's sub-populations while still giving
        // the next iteration a clean list.
        label = null;
        proportion = 0.0d;
        unassigned = 0.0d;
        subPopulations = new ArrayList<Ancestry>();
    }
    return ancestries;
}