Usage examples for `com.fasterxml.jackson.core.JsonToken.START_OBJECT`.
Each example below is taken from the source file named above it; follow the
source link to view the complete original file.
From source file:msearch.filmlisten.MSFilmlisteLesen.java
public void readFilmListe(String source, final ListeFilme listeFilme, int days) { JsonToken jsonToken;//from w ww. j av a 2 s . c om String sender = "", thema = ""; listeFilme.clear(); this.notifyStart(source, PROGRESS_MAX); // fr die Progressanzeige if (days > 0) { final long maxDays = 1000L * 60L * 60L * 24L * days; seconds = new Date().getTime() - maxDays; } else { seconds = 0; } try { InputStream in = selectDecompressor(source, getInputStreamForLocation(source)); JsonParser jp = new JsonFactory().createParser(in); if (jp.nextToken() != JsonToken.START_OBJECT) { throw new IllegalStateException("Expected data to start with an Object"); } while ((jsonToken = jp.nextToken()) != null) { if (jsonToken == JsonToken.END_OBJECT) { break; } if (jp.isExpectedStartArrayToken()) { for (int k = 0; k < ListeFilme.MAX_ELEM; ++k) { listeFilme.metaDaten[k] = jp.nextTextValue(); } break; } } while ((jsonToken = jp.nextToken()) != null) { if (jsonToken == JsonToken.END_OBJECT) { break; } if (jp.isExpectedStartArrayToken()) { // sind nur die Feldbeschreibungen, brauch mer nicht jp.nextToken(); break; } } while (!MSConfig.getStop() && (jsonToken = jp.nextToken()) != null) { if (jsonToken == JsonToken.END_OBJECT) { break; } if (jp.isExpectedStartArrayToken()) { DatenFilm datenFilm = new DatenFilm(); for (int i = 0; i < DatenFilm.COLUMN_NAMES_JSON.length; ++i) { //if we are in FASTAUTO mode, we dont need film descriptions. //this should speed up loading on low end devices... 
if (workMode == WorkMode.FASTAUTO) { if (DatenFilm.COLUMN_NAMES_JSON[i] == DatenFilm.FILM_BESCHREIBUNG_NR || DatenFilm.COLUMN_NAMES_JSON[i] == DatenFilm.FILM_WEBSEITE_NR || DatenFilm.COLUMN_NAMES_JSON[i] == DatenFilm.FILM_GEO_NR) { jp.nextToken(); continue; } } datenFilm.arr[DatenFilm.COLUMN_NAMES_JSON[i]] = jp.nextTextValue(); /// fr die Entwicklungszeit if (datenFilm.arr[DatenFilm.COLUMN_NAMES_JSON[i]] == null) { datenFilm.arr[DatenFilm.COLUMN_NAMES_JSON[i]] = ""; } } if (datenFilm.arr[DatenFilm.FILM_SENDER_NR].isEmpty()) { datenFilm.arr[DatenFilm.FILM_SENDER_NR] = sender; } else { sender = datenFilm.arr[DatenFilm.FILM_SENDER_NR]; } if (datenFilm.arr[DatenFilm.FILM_THEMA_NR].isEmpty()) { datenFilm.arr[DatenFilm.FILM_THEMA_NR] = thema; } else { thema = datenFilm.arr[DatenFilm.FILM_THEMA_NR]; } listeFilme.importFilmliste(datenFilm); if (seconds > 0) { // muss "rckwrts" laufen, da das Datum sonst 2x gebaut werden muss // wenns drin bleibt, kann mans noch ndern if (!checkDate(datenFilm)) { listeFilme.remove(datenFilm); } } } } } catch (FileNotFoundException ex) { listeFilme.clear(); } catch (Exception ex) { MSLog.fehlerMeldung(945123641, MSLog.FEHLER_ART_PROG, "MSearchIoXmlFilmlisteLesen.readFilmListe: " + source, ex); listeFilme.clear(); } if (MSConfig.getStop()) { listeFilme.clear(); } notifyFertig(source, listeFilme); }
From source file:org.elasticsearch.client.sniff.ElasticsearchHostsSniffer.java
private List<HttpHost> readHosts(HttpEntity entity) throws IOException { try (InputStream inputStream = entity.getContent()) { JsonParser parser = jsonFactory.createParser(inputStream); if (parser.nextToken() != JsonToken.START_OBJECT) { throw new IOException("expected data to start with an object"); }/*from w ww .j a v a2 s. c o m*/ List<HttpHost> hosts = new ArrayList<>(); while (parser.nextToken() != JsonToken.END_OBJECT) { if (parser.getCurrentToken() == JsonToken.START_OBJECT) { if ("nodes".equals(parser.getCurrentName())) { while (parser.nextToken() != JsonToken.END_OBJECT) { JsonToken token = parser.nextToken(); assert token == JsonToken.START_OBJECT; String nodeId = parser.getCurrentName(); HttpHost sniffedHost = readHost(nodeId, parser, this.scheme); if (sniffedHost != null) { logger.trace("adding node [" + nodeId + "]"); hosts.add(sniffedHost); } } } else { parser.skipChildren(); } } } return hosts; } }
From source file:com.sdl.odata.unmarshaller.json.core.JsonProcessor.java
/** * Parse the complex values.//from w ww .ja v a2 s . com * * @param jsonParser the parser * @return list of parsed result objects * @throws IOException If unable to read input parser */ private List<Object> getCollectionValue(JsonParser jsonParser) throws IOException { LOG.info("Start parsing {} array", jsonParser.getCurrentName()); List<Object> list = new ArrayList<>(); while (jsonParser.nextToken() != JsonToken.END_ARRAY) { if (jsonParser.getCurrentToken() == JsonToken.START_OBJECT) { Object embedded = getEmbeddedObject(jsonParser); list.add(embedded); } if (!"}".equals(jsonParser.getText())) { list.add(jsonParser.getText()); } else { LOG.info("Array is over."); } } return list; }
From source file:com.tage.calcite.adapter.druid.DruidConnectionImpl.java
/** Parses the output of a {@code topN} query, sending the results to a * {@link Sink}. *//*w w w .j a v a2 s.c o m*/ private void parse(com.tage.calcite.adapter.druid.QueryType queryType, InputStream in, Sink sink, List<String> fieldNames, List<Primitive> fieldTypes, Page page) { final JsonFactory factory = new JsonFactory(); final Row.RowBuilder rowBuilder = Row.newBuilder(fieldNames.size()); if (CalcitePrepareImpl.DEBUG) { try { final byte[] bytes = AvaticaUtils.readFullyToBytes(in); System.out.println("Response: " + new String(bytes)); in = new ByteArrayInputStream(bytes); } catch (IOException e) { throw Throwables.propagate(e); } } try (final JsonParser parser = factory.createParser(in)) { switch (queryType) { case TOP_N: if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) { expectScalarField(parser, "timestamp"); if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result") && parser.nextToken() == JsonToken.START_ARRAY) { while (parser.nextToken() == JsonToken.START_OBJECT) { // loop until token equal to "}" parseFields(fieldNames, fieldTypes, rowBuilder, parser); sink.send(rowBuilder.build()); rowBuilder.reset(); } } } break; case SELECT: if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) { page.pagingIdentifier = null; page.offset = -1; expectScalarField(parser, "timestamp"); if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result") && parser.nextToken() == JsonToken.START_OBJECT) { if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("pagingIdentifiers") && parser.nextToken() == JsonToken.START_OBJECT) { switch (parser.nextToken()) { case FIELD_NAME: page.pagingIdentifier = parser.getCurrentName(); if (parser.nextToken() == JsonToken.VALUE_NUMBER_INT) { page.offset = parser.getIntValue(); } expect(parser, JsonToken.END_OBJECT); break; case END_OBJECT: } } if (parser.nextToken() 
== JsonToken.FIELD_NAME && parser.getCurrentName().equals("events") && parser.nextToken() == JsonToken.START_ARRAY) { while (parser.nextToken() == JsonToken.START_OBJECT) { expectScalarField(parser, "segmentId"); expectScalarField(parser, "offset"); if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("event") && parser.nextToken() == JsonToken.START_OBJECT) { parseFields(fieldNames, fieldTypes, rowBuilder, parser); sink.send(rowBuilder.build()); rowBuilder.reset(); } expect(parser, JsonToken.END_OBJECT); } parser.nextToken(); } } } break; case GROUP_BY: if (parser.nextToken() == JsonToken.START_ARRAY) { while (parser.nextToken() == JsonToken.START_OBJECT) { expectScalarField(parser, "version"); expectScalarField(parser, "timestamp"); if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("event") && parser.nextToken() == JsonToken.START_OBJECT) { parseFields(fieldNames, fieldTypes, rowBuilder, parser); sink.send(rowBuilder.build()); rowBuilder.reset(); } expect(parser, JsonToken.END_OBJECT); } } } } catch (IOException | InterruptedException e) { throw Throwables.propagate(e); } }
From source file:com.boundary.zoocreeper.Restore.java
private void doRestore(JsonParser jp, ZooKeeper zk) throws IOException, KeeperException, InterruptedException { expectNextToken(jp, JsonToken.START_OBJECT); final Set<String> createdPaths = Sets.newHashSet(); while (jp.nextToken() != JsonToken.END_OBJECT) { final BackupZNode zNode = readZNode(jp, jp.getCurrentName()); // We are the root if (path.isEmpty()) { path.add(zNode);/*from www . ja v a 2 s. c o m*/ } else { for (ListIterator<BackupZNode> it = path.listIterator(path.size()); it.hasPrevious();) { final BackupZNode parent = it.previous(); if (zNode.path.startsWith(parent.path)) { break; } it.remove(); } path.add(zNode); } if (zNode.ephemeralOwner != 0) { LOGGER.info("Skipping ephemeral ZNode: {}", zNode.path); continue; } if (!zNode.path.startsWith(options.rootPath)) { LOGGER.info("Skipping ZNode (not under root path '{}'): {}", options.rootPath, zNode.path); continue; } if (options.isPathExcluded(LOGGER, zNode.path) || !options.isPathIncluded(LOGGER, zNode.path)) { continue; } for (BackupZNode pathComponent : path) { if (createdPaths.add(pathComponent.path)) { restoreNode(zk, pathComponent); } } } }
From source file:com.microsoft.azure.storage.table.CEKReturn.java
/** * Reserved for internal use. Parses the operation response as a collection of entities. Reads entity data from the * specified input stream using the specified class type and optionally projects each entity result with the * specified resolver into an {@link ODataPayload} containing a collection of {@link TableResult} objects. * //from w ww . jav a2 s . c o m * @param inStream * The <code>InputStream</code> to read the data to parse from. * @param clazzType * The class type <code>T</code> implementing {@link TableEntity} for the entities returned. Set to * <code>null</code> to ignore the returned entities and copy only response properties into the * {@link TableResult} objects. * @param resolver * An {@link EntityResolver} instance to project the entities into instances of type <code>R</code>. Set * to <code>null</code> to return the entities as instances of the class type <code>T</code>. * @param options * A {@link TableRequestOptions} object that specifies execution options such as retry policy and timeout * settings for the operation. * @param opContext * An {@link OperationContext} object used to track the execution of the operation. * @return * An {@link ODataPayload} containing a collection of {@link TableResult} objects with the parsed operation * response. * @throws InstantiationException * if an error occurs while constructing the result. * @throws IllegalAccessException * if an error occurs in reflection while parsing the result. * @throws StorageException * if a storage service error occurs. * @throws IOException * if an error occurs while accessing the stream. * @throws JsonParseException * if an error occurs while parsing the stream. 
*/ @SuppressWarnings("unchecked") static <T extends TableEntity, R> ODataPayload<?> parseQueryResponse(final InputStream inStream, final TableRequestOptions options, final Class<T> clazzType, final EntityResolver<R> resolver, final OperationContext opContext) throws JsonParseException, IOException, InstantiationException, IllegalAccessException, StorageException { ODataPayload<T> corePayload = null; ODataPayload<R> resolvedPayload = null; ODataPayload<?> commonPayload = null; JsonParser parser = Utility.getJsonParser(inStream); try { if (resolver != null) { resolvedPayload = new ODataPayload<R>(); commonPayload = resolvedPayload; } else { corePayload = new ODataPayload<T>(); commonPayload = corePayload; } if (!parser.hasCurrentToken()) { parser.nextToken(); } JsonUtilities.assertIsStartObjectJsonToken(parser); // move into data parser.nextToken(); // if there is a clazz type and if JsonNoMetadata, create a classProperties dictionary to use for type inference once // instead of querying the cache many times HashMap<String, PropertyPair> classProperties = null; if (options.getTablePayloadFormat() == TablePayloadFormat.JsonNoMetadata && clazzType != null) { classProperties = PropertyPair.generatePropertyPairs(clazzType); } while (parser.getCurrentToken() != null) { if (parser.getCurrentToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals(ODataConstants.VALUE)) { // move to start of array parser.nextToken(); JsonUtilities.assertIsStartArrayJsonToken(parser); // go to properties parser.nextToken(); while (parser.getCurrentToken() == JsonToken.START_OBJECT) { final TableResult res = parseJsonEntity(parser, clazzType, classProperties, resolver, options, opContext); if (corePayload != null) { corePayload.tableResults.add(res); } if (resolver != null) { resolvedPayload.results.add((R) res.getResult()); } else { corePayload.results.add((T) res.getResult()); } parser.nextToken(); } JsonUtilities.assertIsEndArrayJsonToken(parser); } parser.nextToken(); } } finally 
{ parser.close(); } return commonPayload; }
From source file:com.cedarsoft.couchdb.io.ActionResponseSerializer.java
@Nonnull public UniqueId deserialize(@Nonnull InputStream in) throws VersionException, IOException { JsonFactory jsonFactory = JacksonSupport.getJsonFactory(); JsonParser parser = jsonFactory.createJsonParser(in); JacksonParserWrapper parserWrapper = new JacksonParserWrapper(parser); parserWrapper.nextToken(JsonToken.START_OBJECT); UniqueId deserialized = deserialize(parser); parserWrapper.ensureObjectClosed();/*from w w w. j a v a 2 s .com*/ return deserialized; }
From source file:com.amazonaws.services.cloudtrail.processinglibrary.serializer.AbstractEventSerializer.java
/** * Indicates whether the CloudTrail log has more events to read. * * @return <code>true</code> if the log contains more events; <code>false</code> otherwise. * @throws IOException if the log could not be opened or accessed. *///from w ww . j a v a 2s.co m public boolean hasNextEvent() throws IOException { /* In Fasterxml parser, hasNextEvent will consume next token. So do not call it multiple times. */ JsonToken nextToken = this.jsonParser.nextToken(); return nextToken == JsonToken.START_OBJECT || nextToken == JsonToken.START_ARRAY; }
From source file:org.lobid.lodmill.JsonDecoder.java
private JsonToken processRecordContent(JsonToken token) throws IOException, JsonParseException { JsonToken currentToken = token;//from w w w .j av a2 s.c o m String key = null; while (currentToken != null) { if (JsonToken.FIELD_NAME == currentToken) key = this.jsonParser.getCurrentName(); if (JsonToken.START_ARRAY == currentToken) { currentToken = this.jsonParser.nextToken(); if (this.JSONP) currentToken = this.jsonParser.nextToken(); else { // break to treat objects in arrays as new objects if (JsonToken.START_OBJECT == currentToken) break; currentToken = handleValuesOfArrays(currentToken, key); } } if (JsonToken.START_OBJECT == currentToken) { if (this.jsonParser.getCurrentName() == null) break; } else handleValue(currentToken, key); try { currentToken = this.jsonParser.nextToken(); } catch (JsonParseException e) { LOG.debug("Exception at the end of non JSON object, might be JSONP", e); currentToken = null; break; } } return currentToken; }