Usage examples for com.fasterxml.jackson.core.JsonParser#nextToken
public abstract JsonToken nextToken() throws IOException, JsonParseException;
From source file:com.github.heuermh.personalgenome.client.converter.JacksonPersonalGenomeConverter.java
/**
 * Parses a user-names JSON response from the given stream into a {@link UserName}.
 *
 * Expects an object of the shape {"id":..., "first_name":..., "last_name":...,
 * "profiles":[{"id":..., "first_name":..., "last_name":...}, ...]} — inferred from
 * the field names matched below; confirm against the service's API docs.
 *
 * Closes both the stream and the parser before returning. Returns null if the
 * stream could not be parsed (the IOException is logged, not rethrown).
 *
 * @param inputStream input stream to parse, must not be null; closed by this method
 * @return the parsed UserName, or null on parse failure
 */
@Override
public UserName parseNames(final InputStream inputStream) {
    checkNotNull(inputStream);
    JsonParser parser = null;
    try {
        parser = jsonFactory.createParser(inputStream);
        parser.nextToken(); // advance onto the root START_OBJECT
        String id = null;
        String firstName = null;
        String lastName = null;
        String profileId = null;
        String profileFirstName = null;
        String profileLastName = null;
        List<ProfileName> profileNames = new ArrayList<ProfileName>();
        // walk the root object field by field until its END_OBJECT
        while (parser.nextToken() != JsonToken.END_OBJECT) {
            String field = parser.getCurrentName();
            parser.nextToken(); // advance onto the field's value token
            if ("id".equals(field)) {
                id = parser.getText();
            } else if ("first_name".equals(field)) {
                firstName = parser.getText();
            } else if ("last_name".equals(field)) {
                lastName = parser.getText();
            } else if ("profiles".equals(field)) {
                // outer loop: one iteration per element of the "profiles" array
                while (parser.nextToken() != JsonToken.END_ARRAY) {
                    // inner loop: fields of a single profile object
                    while (parser.nextToken() != JsonToken.END_OBJECT) {
                        String profileNameField = parser.getCurrentName();
                        parser.nextToken();
                        if ("id".equals(profileNameField)) {
                            profileId = parser.getText();
                        } else if ("first_name".equals(profileNameField)) {
                            profileFirstName = parser.getText();
                        } else if ("last_name".equals(profileNameField)) {
                            profileLastName = parser.getText();
                        }
                    }
                    // NOTE(review): profileId/FirstName/LastName are not reset between
                    // array elements, so a profile missing a field inherits the previous
                    // element's value — confirm whether that is intended.
                    profileNames.add(new ProfileName(profileId, profileFirstName, profileLastName));
                }
            }
        }
        return new UserName(id, firstName, lastName, profileNames);
    } catch (IOException e) {
        logger.warn("could not parse names", e);
    } finally {
        try {
            inputStream.close();
        } catch (Exception e) {
            // ignored
        }
        try {
            // NOTE(review): parser is null here if createParser threw; the resulting
            // NPE is swallowed by this catch, but a null check would be cleaner.
            parser.close();
        } catch (Exception e) {
            // ignored
        }
    }
    return null;
}
From source file:com.github.heuermh.personalgenome.client.converter.JacksonPersonalGenomeConverter.java
/**
 * Parses a risks JSON response from the given stream into a list of {@link Risk}s.
 *
 * Expects an object of the shape {"id":..., "risks":[{"report_id":...,
 * "description":..., "risk":..., "population_risk":...}, ...]} — inferred from the
 * field names matched below; confirm against the service's API docs.
 *
 * Closes both the stream and the parser before returning. Returns null if the
 * stream could not be parsed (the IOException is logged, not rethrown).
 *
 * @param inputStream input stream to parse, must not be null; closed by this method
 * @return the parsed risks, or null on parse failure
 */
@Override
public List<Risk> parseRisks(final InputStream inputStream) {
    checkNotNull(inputStream);
    JsonParser parser = null;
    try {
        parser = jsonFactory.createParser(inputStream);
        parser.nextToken(); // advance onto the root START_OBJECT
        String id = null;
        String reportId = null;
        String description = null;
        double risk = 0.0d;
        double populationRisk = 0.0d;
        List<Risk> risks = new ArrayList<Risk>();
        // walk the root object field by field until its END_OBJECT
        while (parser.nextToken() != JsonToken.END_OBJECT) {
            String field = parser.getCurrentName();
            parser.nextToken(); // advance onto the field's value token
            if ("id".equals(field)) {
                id = parser.getText();
            } else if ("risks".equals(field)) {
                // outer loop: one iteration per element of the "risks" array
                while (parser.nextToken() != JsonToken.END_ARRAY) {
                    // inner loop: fields of a single risk object
                    while (parser.nextToken() != JsonToken.END_OBJECT) {
                        String riskField = parser.getCurrentName();
                        parser.nextToken();
                        if ("report_id".equals(riskField)) {
                            reportId = parser.getText();
                        } else if ("description".equals(riskField)) {
                            description = parser.getText();
                        } else if ("risk".equals(riskField)) {
                            risk = Double.parseDouble(parser.getText());
                        } else if ("population_risk".equals(riskField)) {
                            populationRisk = Double.parseDouble(parser.getText());
                        }
                    }
                    risks.add(new Risk(id, reportId, description, risk, populationRisk));
                    // reset the per-element fields so a later element missing a field
                    // does not inherit the previous element's value
                    reportId = null;
                    description = null;
                    risk = 0.0d;
                    populationRisk = 0.0d;
                }
            }
        }
        return risks;
    } catch (IOException e) {
        logger.warn("could not parse risks", e);
    } finally {
        try {
            inputStream.close();
        } catch (Exception e) {
            // ignored
        }
        try {
            // NOTE(review): parser is null here if createParser threw; the resulting
            // NPE is swallowed by this catch, but a null check would be cleaner.
            parser.close();
        } catch (Exception e) {
            // ignored
        }
    }
    return null;
}
From source file:com.github.heuermh.personalgenome.client.converter.JacksonPersonalGenomeConverter.java
@Override public List<Trait> parseTraits(final InputStream inputStream) { checkNotNull(inputStream);// www .j a v a 2s . com JsonParser parser = null; try { parser = jsonFactory.createParser(inputStream); parser.nextToken(); String id = null; String reportId = null; String description = null; String trait = null; Set<String> possibleTraits = new HashSet<String>(); List<Trait> traits = new ArrayList<Trait>(); while (parser.nextToken() != JsonToken.END_OBJECT) { String field = parser.getCurrentName(); parser.nextToken(); if ("id".equals(field)) { id = parser.getText(); } else if ("traits".equals(field)) { while (parser.nextToken() != JsonToken.END_ARRAY) { while (parser.nextToken() != JsonToken.END_OBJECT) { String traitField = parser.getCurrentName(); parser.nextToken(); if ("report_id".equals(traitField)) { reportId = parser.getText(); } else if ("description".equals(traitField)) { description = parser.getText(); } else if ("trait".equals(traitField)) { trait = parser.getText(); } else if ("possible_traits".equals(traitField)) { while (parser.nextToken() != JsonToken.END_ARRAY) { possibleTraits.add(parser.getText()); } } } traits.add(new Trait(id, reportId, description, trait, possibleTraits)); reportId = null; description = null; trait = null; possibleTraits.clear(); } } } return traits; } catch (IOException e) { logger.warn("could not parse traits", e); } finally { try { inputStream.close(); } catch (Exception e) { // ignored } try { parser.close(); } catch (Exception e) { // ignored } } return null; }
From source file:org.h2gis.drivers.geojson.GeoJsonReaderDriver.java
/**
 * We skip the CRS because it has been already parsed.
 *
 * Assumes the parser is positioned on the "crs" field name: advances onto the
 * CRS object's START_OBJECT, skips its entire subtree, then advances to the
 * next field name (expected to be "features" — TODO confirm against caller).
 *
 * @param jp the JSON parser, positioned on the "crs" field
 * @return the text of the token following the skipped CRS object
 * @throws IOException if reading the stream fails
 */
private String skipCRS(JsonParser jp) throws IOException {
    jp.nextToken(); // START_OBJECT {
    jp.skipChildren(); // skip the whole CRS subtree in one call
    jp.nextToken(); // go to features
    return jp.getText();
}
From source file:com.evolveum.midpoint.prism.lex.json.AbstractJsonLexicalProcessor.java
/**
 * Parses all root-level values from the stream, either collecting them into the
 * returned list or (when a handler is given) feeding them to the handler one by one.
 *
 * The do/while loop runs once per top-level document; the trailing
 * {@code nextToken() != null} condition supports multi-document YAML files.
 * When {@code handler} is non-null, an IterativeParsingContext is used so values
 * can be streamed to the handler; if the handler (or parseValue) signals abort,
 * the loop stops early. IOExceptions are wrapped into SchemaException with the
 * current parser position appended when available.
 *
 * @param unconfiguredParser fresh parser; configured via configureParser before use
 * @param parsingContext context propagated into the per-document JsonParsingContext
 * @param handler optional sink for roots; null means "collect and return"
 * @return the parsed roots (empty of collected roots when everything was sent to the handler)
 * @throws SchemaException on empty input or on any underlying I/O error
 */
@NotNull
private List<RootXNode> parseFromStart(JsonParser unconfiguredParser, ParsingContext parsingContext,
        RootXNodeHandler handler) throws SchemaException {
    List<RootXNode> rv = new ArrayList<>();
    JsonParsingContext ctx = null;
    try {
        JsonParser parser = configureParser(unconfiguredParser);
        parser.nextToken();
        if (parser.currentToken() == null) {
            throw new SchemaException("Nothing to parse: the input is empty.");
        }
        do {
            // a fresh parsing context per top-level document
            ctx = new JsonParsingContext(parser, parsingContext);
            IterativeParsingContext ipc = handler != null ? new IterativeParsingContext(handler) : null;
            XNode xnode = parseValue(ctx, ipc);
            if (ipc != null && ipc.dataSent) {
                // all the objects were sent to the handler, nothing more to do
            } else {
                List<RootXNode> roots = valueToRootList(xnode, null, ctx);
                if (ipc == null) {
                    rv.addAll(roots);
                } else {
                    // handler returning false means "stop processing"
                    for (RootXNode root : roots) {
                        if (!ipc.handler.handleData(root)) {
                            ipc.abortProcessing = true;
                            break;
                        }
                    }
                }
            }
            if (ipc != null && ipc.abortProcessing) {
                // set either here or in parseValue
                break;
            }
        } while (ctx.parser.nextToken() != null); // for multi-document YAML files
        return rv;
    } catch (IOException e) {
        throw new SchemaException(
                "Cannot parse JSON/YAML object: " + e.getMessage() + getPositionSuffixIfPresent(ctx), e);
    }
}
From source file:org.h2gis.drivers.geojson.GeoJsonReaderDriver.java
/** * Parses a GeoJSON coordinate array and returns a JTS coordinate. The first * token corresponds to the first X value. The last token correponds to the * end of the coordinate array "]"./*from w w w . ja v a 2s .c o m*/ * * Parsed syntax: * * 100.0, 0.0] * * @param jp * @throws IOException * @return Coordinate */ private Coordinate parseCoordinate(JsonParser jp) throws IOException { jp.nextToken(); double x = jp.getDoubleValue();// VALUE_NUMBER_FLOAT jp.nextToken(); // second value double y = jp.getDoubleValue(); Coordinate coord; //We look for a z value jp.nextToken(); if (jp.getCurrentToken() == JsonToken.END_ARRAY) { coord = new Coordinate(x, y); } else { double z = jp.getDoubleValue(); jp.nextToken(); // exit array coord = new Coordinate(x, y, z); } jp.nextToken(); return coord; }
From source file:data.DefaultExchanger.java
/**
 * Imports this exchanger's table from a JSON stream inside a single transaction.
 *
 * The parser is expected to be positioned inside an object whose next field is
 * the table name followed by an array of records. The table is truncated first,
 * records are batch-inserted, and the sequence is restored. On any failure the
 * transaction is rolled back. For MySQL, foreign-key checks are disabled for the
 * duration of the import and re-enabled in the finally block.
 *
 * @param dbName database product name ("MySQL" triggers FK/charset handling)
 * @param parser JSON parser positioned before the table's field name
 * @param jdbcTemplate template used for all SQL work
 * @throws IOException if reading the JSON stream fails
 */
public void importData(String dbName, JsonParser parser, JdbcTemplate jdbcTemplate) throws IOException {
    PlatformTransactionManager tm = new DataSourceTransactionManager(jdbcTemplate.getDataSource());
    TransactionStatus ts = tm.getTransaction(new DefaultTransactionDefinition());
    try {
        if (dbName.equals("MySQL")) {
            // allow rows to load in any order, and force full-range UTF-8
            jdbcTemplate.update("SET FOREIGN_KEY_CHECKS = 0");
            jdbcTemplate.update("SET NAMES \'utf8mb4\'");
        }
        final Configuration config = Configuration.root();
        int batchSize = config.getInt(DATA_BATCH_SIZE_KEY, DEFAULT_BATCH_SIZE);
        if (parser.nextToken() != JsonToken.END_OBJECT) {
            String fieldName = parser.getCurrentName();
            play.Logger.debug("importing {}", fieldName);
            if (fieldName.equalsIgnoreCase(getTable())) {
                truncateTable(jdbcTemplate);
                JsonToken current = parser.nextToken();
                if (current == JsonToken.START_ARRAY) {
                    importDataFromArray(parser, jdbcTemplate, batchSize);
                    importSequence(dbName, parser, jdbcTemplate);
                } else {
                    play.Logger.info("Error: records should be an array: skipping.");
                    parser.skipChildren();
                }
            }
        }
        tm.commit(ts);
    } catch (Exception e) {
        // log through the same facility the rest of this method uses,
        // instead of printing the stack trace to stderr
        play.Logger.error("could not import data", e);
        tm.rollback(ts);
    } finally {
        if (dbName.equals("MySQL")) {
            jdbcTemplate.update("SET FOREIGN_KEY_CHECKS = 1");
        }
    }
}
From source file:com.github.heuermh.personalgenome.client.converter.JacksonPersonalGenomeConverter.java
@Override public Haplogroup parseHaplogroups(final InputStream inputStream) { checkNotNull(inputStream);/*from w w w.j av a 2s . c o m*/ JsonParser parser = null; try { parser = jsonFactory.createParser(inputStream); parser.nextToken(); String id = null; String maternal = null; String paternal = null; String rsid = null; String rcrsPosition = null; String snp = null; List<PaternalTerminalSnp> paternalTerminalSnps = new ArrayList<PaternalTerminalSnp>(); List<MaternalTerminalSnp> maternalTerminalSnps = new ArrayList<MaternalTerminalSnp>(); while (parser.nextToken() != JsonToken.END_OBJECT) { String field = parser.getCurrentName(); parser.nextToken(); if ("id".equals(field)) { id = parser.getText(); } else if ("maternal".equals(field)) { maternal = parser.getText(); } else if ("paternal".equals(field)) { paternal = "null" == parser.getText() ? null : parser.getText(); } else if ("maternal_terminal_snps".equals(field)) { while (parser.nextToken() != JsonToken.END_ARRAY) { while (parser.nextToken() != JsonToken.END_OBJECT) { String maternalTerminalSnpsField = parser.getCurrentName(); parser.nextToken(); if ("rsid".equals(maternalTerminalSnpsField)) { rsid = parser.getText(); } else if ("rcrs_position".equals(maternalTerminalSnpsField)) { rcrsPosition = parser.getText(); } } maternalTerminalSnps.add(new MaternalTerminalSnp(rsid, rcrsPosition)); } } else if ("paternal_terminal_snps".equals(field)) { while (parser.nextToken() != JsonToken.END_ARRAY) { while (parser.nextToken() != JsonToken.END_OBJECT) { String paternalTerminalSnpsField = parser.getCurrentName(); parser.nextToken(); if ("rsid".equals(paternalTerminalSnpsField)) { rsid = parser.getText(); } else if ("snp".equals(paternalTerminalSnpsField)) { snp = parser.getText(); } } paternalTerminalSnps.add(new PaternalTerminalSnp(rsid, snp)); } } } return new Haplogroup(id, paternal, maternal, paternalTerminalSnps, maternalTerminalSnps); } catch (IOException e) { logger.warn("could not parse haplogroups", e); } finally { 
try { inputStream.close(); } catch (Exception e) { // ignored } try { parser.close(); } catch (Exception e) { // ignored } } return null; }
From source file:com.cedarsoft.serialization.test.performance.XmlParserPerformance.java
/**
 * Benchmarks Jackson streaming parsing by re-parsing the same fixed sample BIG
 * times, asserting the exact token sequence and values on every iteration.
 *
 * The sample is expected to be the "Canon Raw" FileType document with a nested
 * "extension" object; any deviation fails the assertions rather than the timing.
 *
 * @param factory JSON factory used to create a fresh parser per iteration
 * @param contentSample the JSON document to parse repeatedly
 * @throws XMLStreamException declared for parity with sibling benchmarks — TODO
 *         confirm; nothing in this body appears to throw it
 * @throws IOException if parsing fails
 */
private void benchParse(@Nonnull JsonFactory factory, @Nonnull String contentSample)
        throws XMLStreamException, IOException {
    for (int i = 0; i < BIG; i++) {
        JsonParser parser = factory.createParser(new StringReader(contentSample));
        // root object: { "dependent": false, ...
        assertEquals(JsonToken.START_OBJECT, parser.nextToken());
        assertEquals(JsonToken.FIELD_NAME, parser.nextToken());
        assertEquals("dependent", parser.getCurrentName());
        assertEquals(JsonToken.VALUE_FALSE, parser.nextToken());
        boolean dependent = parser.getBooleanValue();
        assertFalse(dependent);
        // "id": "Canon Raw"
        assertEquals(JsonToken.FIELD_NAME, parser.nextToken());
        assertEquals("id", parser.getCurrentName());
        assertEquals(JsonToken.VALUE_STRING, parser.nextToken());
        String id = parser.getText();
        assertEquals("Canon Raw", id);
        // nested "extension" object
        assertEquals(JsonToken.FIELD_NAME, parser.nextToken());
        assertEquals("extension", parser.getCurrentName());
        assertEquals(JsonToken.START_OBJECT, parser.nextToken());
        assertEquals(JsonToken.FIELD_NAME, parser.nextToken());
        assertEquals("isDefault", parser.getCurrentName());
        assertEquals(JsonToken.VALUE_TRUE, parser.nextToken());
        boolean isDefault = parser.getBooleanValue();
        assertTrue(isDefault);
        assertEquals(JsonToken.FIELD_NAME, parser.nextToken());
        assertEquals("delimiter", parser.getCurrentName());
        assertEquals(JsonToken.VALUE_STRING, parser.nextToken());
        String delimiter = parser.getText();
        assertEquals(".", delimiter);
        assertEquals(JsonToken.FIELD_NAME, parser.nextToken());
        assertEquals("extension", parser.getCurrentName());
        assertEquals(JsonToken.VALUE_STRING, parser.nextToken());
        String extension = parser.getText();
        assertEquals("cr2", extension);
        // close both objects, then end of input
        assertEquals(JsonToken.END_OBJECT, parser.nextToken());
        assertEquals(JsonToken.END_OBJECT, parser.nextToken());
        assertNull(parser.nextToken());
        parser.close();
        // rebuild the domain object from the parsed values to keep the work honest
        FileType type = new FileType(id, new Extension(delimiter, extension, isDefault), dependent);
        assertNotNull(type);
    }
}
From source file:com.couchbase.lite.replicator.ChangeTracker.java
/**
 * Main work loop of the change tracker.
 *
 * While {@code running}, builds a request for the changes feed (POST with JSON
 * body when {@code usePOST}, with preemptive BASIC auth when configured) and
 * executes it. In LongPoll mode each successful response body is read as a
 * single JSON map and handed to {@code receivedPollResponse}; in one-shot mode
 * the body is streamed object-by-object to {@code receivedChange}. Failures go
 * through {@code backoff}; the loop exits when {@code running} is cleared or
 * the client is told the tracker finished. Continuous mode is explicitly
 * rejected (see comment below).
 */
protected void runLoop() {
    paused = false;
    if (client == null) {
        // This is a race condition that can be reproduced by calling cbpuller.start() and cbpuller.stop()
        // directly afterwards. What happens is that by the time the Changetracker thread fires up,
        // the cbpuller has already set this.client to null. See issue #109
        Log.w(Log.TAG_CHANGE_TRACKER, "%s: ChangeTracker run() loop aborting because client == null", this);
        return;
    }
    if (mode == ChangeTrackerMode.Continuous) {
        // there is a failing unit test for this, and from looking at the code the Replication
        // object will never use Continuous mode anyway. Explicitly prevent its use until
        // it is demonstrated to actually work.
        throw new RuntimeException("ChangeTracker does not correctly support continuous mode");
    }
    OkHttpClient httpClient = client.getOkHttpClient();
    backoff = new ChangeTrackerBackoff();
    while (running) {
        startTime = System.currentTimeMillis();
        Request.Builder builder = new Request.Builder();
        URL url = getChangesFeedURL();
        builder.url(url);
        if (usePOST) {
            builder.header("Content-Type", "application/json").addHeader("User-Agent", Manager.getUserAgent())
                    .addHeader("Accept-Encoding", "gzip").post(RequestBody.create(JSON, changesFeedPOSTBody()));
        }
        addRequestHeaders(builder);
        // Perform BASIC Authentication if needed
        builder = RequestUtils.preemptivelySetAuthCredentials(builder, url, authenticator);
        request = builder.build();
        try {
            // mask credentials before logging the URL
            String maskedRemoteWithoutCredentials = getChangesFeedURL().toString();
            maskedRemoteWithoutCredentials = maskedRemoteWithoutCredentials.replaceAll("://.*:.*@",
                    "://---:---@");
            Log.v(Log.TAG_CHANGE_TRACKER, "%s: Making request to %s", this, maskedRemoteWithoutCredentials);
            call = httpClient.newCall(request);
            Response response = call.execute();
            try {
                // In case response status is Error, ChangeTracker stops here
                if (isResponseFailed(response)) {
                    RequestUtils.closeResponseBody(response);
                    if (retryIfFailedPost(response))
                        continue;
                    break;
                }
                // Parse response body
                ResponseBody responseBody = response.body();
                Log.v(Log.TAG_CHANGE_TRACKER, "%s: got response. status: %s mode: %s", this,
                        response.message(), mode);
                if (responseBody != null) {
                    try {
                        Log.v(Log.TAG_CHANGE_TRACKER, "%s: /entity.getContent(). mode: %s", this, mode);
                        //inputStream = entity.getContent();
                        inputStream = responseBody.byteStream();
                        // decompress if contentEncoding is gzip
                        if (Utils.isGzip(response))
                            inputStream = new GZIPInputStream(inputStream);
                        if (mode == ChangeTrackerMode.LongPoll) { // continuous replications
                            // NOTE: 1. check content length, ObjectMapper().readValue() throws Exception if size is 0.
                            // NOTE: 2. HttpEntity.getContentLength() returns the number of bytes of the content, or a negative number if unknown.
                            // NOTE: 3. If Http Status is error, not parse response body
                            boolean responseOK = false; // default value
                            if (responseBody.contentLength() != 0 && response.code() < 300) {
                                try {
                                    Log.v(Log.TAG_CHANGE_TRACKER, "%s: readValue", this);
                                    Map<String, Object> fullBody = Manager.getObjectMapper()
                                            .readValue(inputStream, Map.class);
                                    Log.v(Log.TAG_CHANGE_TRACKER, "%s: /readValue. fullBody: %s", this,
                                            fullBody);
                                    responseOK = receivedPollResponse(fullBody);
                                } catch (JsonParseException jpe) {
                                    Log.w(Log.TAG_CHANGE_TRACKER, "%s: json parsing error; %s", this,
                                            jpe.toString());
                                } catch (JsonMappingException jme) {
                                    Log.w(Log.TAG_CHANGE_TRACKER, "%s: json mapping error; %s", this,
                                            jme.toString());
                                }
                            }
                            Log.v(Log.TAG_CHANGE_TRACKER, "%s: responseOK: %s", this, responseOK);
                            if (responseOK) {
                                // TODO: this logic is questionable, there's lots
                                // TODO: of differences in the iOS changetracker code,
                                if (!caughtUp) {
                                    caughtUp = true;
                                    client.changeTrackerCaughtUp();
                                }
                                Log.v(Log.TAG_CHANGE_TRACKER, "%s: Starting new longpoll", this);
                                backoff.resetBackoff();
                                continue;
                            } else {
                                long elapsed = (System.currentTimeMillis() - startTime) / 1000;
                                Log.w(Log.TAG_CHANGE_TRACKER, "%s: Longpoll connection closed (by proxy?) after %d sec",
                                        this, elapsed);
                                if (elapsed >= 30) {
                                    // Looks like the connection got closed by a proxy (like AWS' load balancer) while the
                                    // server was waiting for a change to send, due to lack of activity.
                                    // Lower the heartbeat time to work around this, and reconnect:
                                    this.heartBeatSeconds = Math.min(this.heartBeatSeconds,
                                            (int) (elapsed * 0.75));
                                    Log.v(Log.TAG_CHANGE_TRACKER, "%s: Starting new longpoll", this);
                                    backoff.resetBackoff();
                                    continue;
                                } else {
                                    Log.d(Log.TAG_CHANGE_TRACKER, "%s: Change tracker calling stop (LongPoll)",
                                            this);
                                    client.changeTrackerFinished(this);
                                    break;
                                }
                            }
                        } else { // one-shot replications
                            Log.v(Log.TAG_CHANGE_TRACKER, "%s: readValue (oneshot)", this);
                            JsonFactory factory = new JsonFactory();
                            JsonParser jp = factory.createParser(inputStream);
                            JsonToken token;
                            // nextToken() is null => no more token
                            while (((token = jp.nextToken()) != JsonToken.START_ARRAY) && (token != null)) {
                                // ignore these tokens
                            }
                            while (jp.nextToken() == JsonToken.START_OBJECT) {
                                Map<String, Object> change = (Map) Manager.getObjectMapper().readValue(jp,
                                        Map.class);
                                if (!receivedChange(change)) {
                                    Log.w(Log.TAG_CHANGE_TRACKER,
                                            "Received unparseable change line from server: %s", change);
                                }
                                // if not running state anymore, exit from loop.
                                if (!running)
                                    break;
                            }
                            if (jp != null)
                                jp.close();
                            Log.v(Log.TAG_CHANGE_TRACKER, "%s: /readValue (oneshot)", this);
                            client.changeTrackerCaughtUp();
                            if (isContinuous()) { // if enclosing replication is continuous
                                mode = ChangeTrackerMode.LongPoll;
                            } else {
                                Log.d(Log.TAG_CHANGE_TRACKER, "%s: Change tracker calling stop (OneShot)",
                                        this);
                                client.changeTrackerFinished(this);
                                break;
                            }
                        }
                        backoff.resetBackoff();
                    } finally {
                        // always release the body stream before the next iteration
                        try {
                            if (inputStream != null) {
                                inputStream.close();
                                inputStream = null;
                            }
                        } catch (IOException e) {
                            // ignored: nothing useful to do if close fails
                        }
                    }
                }
            } finally {
                RequestUtils.closeResponseBody(response);
            }
        } catch (Exception e) {
            if (!running && e instanceof IOException) {
                // in this case, just silently absorb the exception because it
                // frequently happens when we're shutting down and have to
                // close the socket underneath our read.
            } else {
                Log.w(Log.TAG_CHANGE_TRACKER, this + ": Exception in change tracker", e);
                this.error = e;
            }
            backoff.sleepAppropriateAmountOfTime();
        }
    }
    Log.v(Log.TAG_CHANGE_TRACKER, "%s: Change tracker run loop exiting", this);
}