List of usage examples for com.fasterxml.jackson.core JsonFactory createGenerator
public JsonGenerator createGenerator(Writer out) throws IOException
From source file:com.codealot.url2text.Response.java
/** * Renders this object as JSON.//from w w w . jav a 2 s .c o m * <p> * Beware. This method consumes the internal Reader, creating a buffer of * unlimited size. * * @return * @throws Url2TextException */ public String toJson() throws Url2TextException { final JsonFactory jFactory = new JsonFactory(); final ByteArrayOutputStream destination = new ByteArrayOutputStream(); try (final JsonGenerator jsonGenerator = jFactory.createGenerator(destination);) { jsonGenerator.writeStartObject(); // transaction metadata jsonGenerator.writeFieldName(HDR_TRANSACTION_METADATA); jsonGenerator.writeStartObject(); jsonGenerator.writeStringField(HDR_REQUEST_PAGE, this.requestPage); jsonGenerator.writeStringField(HDR_LANDING_PAGE, this.landingPage); jsonGenerator.writeNumberField(HDR_STATUS, this.status); jsonGenerator.writeStringField(HDR_STATUS_MESSAGE, this.statusMessage); jsonGenerator.writeStringField(HDR_FETCH_DATE, DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.format(this.fetchDate)); jsonGenerator.writeNumberField(HDR_FETCH_DURATION, this.fetchDuration); jsonGenerator.writeStringField(HDR_CONTENT_TYPE, this.contentType); jsonGenerator.writeStringField(HDR_CONTENT_CHARSET, this.contentCharset); jsonGenerator.writeNumberField(HDR_CONTENT_LENGTH, this.contentLength); jsonGenerator.writeStringField(HDR_ETAG, this.etag); jsonGenerator.writeStringField(HDR_LAST_MODIFIED, this.lastModified); jsonGenerator.writeNumberField(HDR_CONVERSION_DURATION, this.conversionDuration); jsonGenerator.writeEndObject(); // response headers if (!this.responseHeaders.isEmpty()) { outputNameAndValueArray(jsonGenerator, HDR_RESPONSE_HEADERS, this.responseHeaders); } // content metadata if (!this.contentMetadata.isEmpty()) { outputNameAndValueArray(jsonGenerator, HDR_CONTENT_METADATA, this.contentMetadata); } // text jsonGenerator.writeStringField(HDR_CONVERTED_TEXT, this.getText()); jsonGenerator.writeEndObject(); jsonGenerator.close(); String result = destination.toString(UTF_8); return result; } 
catch (IOException e) { throw new Url2TextException("Error emitting JSON", e); } }
From source file:com.castlemock.web.mock.rest.converter.swagger.SwaggerRestDefinitionConverter.java
/** * The method generates a body based on the provided {@link Response} and a map of {@link Model}. * @param response The response which the body will be based on. * @param definitions The map of definitions that might be required to generate the response. * @return A HTTP response body based on the provided {@link Response}. * @since 1.13/*ww w .ja va 2 s.co m*/ * @see {@link #generateJsonBody(String, Property, Map, JsonGenerator)} */ private String generateJsonBody(final Response response, final Map<String, Model> definitions) { final StringWriter writer = new StringWriter(); final Property schema = response.getSchema(); if (schema == null) { return writer.toString(); } final JsonFactory factory = new JsonFactory(); JsonGenerator generator = null; try { generator = factory.createGenerator(writer); generateJsonBody(null, schema, definitions, generator); } catch (IOException e) { LOGGER.error("Unable to generate a response body", e); } finally { if (generator != null) { try { generator.close(); } catch (IOException e) { LOGGER.error("Unable to close the JsonGenerator", e); } } } return writer.toString(); }
From source file:org.dswarm.xsd2jsonschema.model.test.JSRootTest.java
@Test public void testOverloadedRender() throws Exception { final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); final StringWriter writer = new StringWriter(); File tmpFile;/*w w w .j a v a 2s . c o m*/ final String render = obj.render(); doRenderTest(render); obj.render(BaseJSTest.om, outputStream); doRenderTest(outputStream); outputStream.reset(); obj.render(BaseJSTest.om, outputStream, JsonEncoding.UTF8); doRenderTest(outputStream); outputStream.reset(); obj.render(BaseJSTest.om, writer); doRenderTest(writer); outputStream.reset(); tmpFile = File.createTempFile("dmp-test", "tmp"); obj.render(BaseJSTest.om, tmpFile, JsonEncoding.UTF8); doRenderTest(BaseJSTest.om.readTree(tmpFile)); tmpFile.deleteOnExit(); final JsonFactory factory = BaseJSTest.om.getFactory(); obj.render(factory, outputStream); doRenderTest(outputStream); outputStream.reset(); obj.render(factory, outputStream, JsonEncoding.UTF8); doRenderTest(outputStream); outputStream.reset(); obj.render(factory, writer); doRenderTest(writer); outputStream.reset(); tmpFile = File.createTempFile("dmp-test", "tmp"); obj.render(factory, tmpFile, JsonEncoding.UTF8); doRenderTest(BaseJSTest.om.readTree(tmpFile)); tmpFile.deleteOnExit(); final JsonGenerator generator = factory.createGenerator(outputStream); obj.render(generator); generator.flush(); doRenderTest(outputStream); }
From source file:com.tage.calcite.adapter.druid.DruidQuery.java
private QuerySpec getQuery(RelDataType rowType, RexNode filter, List<RexNode> projects, ImmutableBitSet groupSet, List<AggregateCall> aggCalls, List<String> aggNames) { QueryType queryType = QueryType.SELECT; final Translator translator = new Translator(druidTable, rowType); List<String> fieldNames = rowType.getFieldNames(); Json jsonFilter = null;/*w w w . ja v a 2 s .c o m*/ if (filter != null) { jsonFilter = translator.translateFilter(filter); translator.metrics.clear(); translator.dimensions.clear(); } if (projects != null) { final ImmutableList.Builder<String> builder = ImmutableList.builder(); for (RexNode project : projects) { builder.add(translator.translate(project)); } fieldNames = builder.build(); } final List<String> dimensions = new ArrayList<>(); final List<JsonAggregation> aggregations = new ArrayList<>(); if (groupSet != null) { assert aggCalls != null; assert aggNames != null; assert aggCalls.size() == aggNames.size(); queryType = QueryType.GROUP_BY; final ImmutableList.Builder<String> builder = ImmutableList.builder(); for (int groupKey : groupSet) { final String s = fieldNames.get(groupKey); dimensions.add(s); builder.add(s); } for (Pair<AggregateCall, String> agg : Pair.zip(aggCalls, aggNames)) { final JsonAggregation jsonAggregation = getJsonAggregation(fieldNames, agg.right, agg.left); aggregations.add(jsonAggregation); builder.add(jsonAggregation.name); } fieldNames = builder.build(); } else { assert aggCalls == null; assert aggNames == null; } final StringWriter sw = new StringWriter(); final JsonFactory factory = new JsonFactory(); try { final JsonGenerator generator = factory.createGenerator(sw); switch (queryType) { case GROUP_BY: generator.writeStartObject(); if (aggregations.isEmpty()) { // Druid requires at least one aggregation, otherwise gives: // Must have at least one AggregatorFactory aggregations.add(new JsonAggregation("longSum", "unit_sales", "unit_sales")); } generator.writeStringField("queryType", "groupBy"); 
generator.writeStringField("dataSource", druidTable.dataSource); generator.writeStringField("granularity", "all"); writeField(generator, "dimensions", dimensions); writeFieldIf(generator, "limitSpec", null); writeFieldIf(generator, "filter", jsonFilter); writeField(generator, "aggregations", aggregations); writeFieldIf(generator, "postAggregations", null); writeField(generator, "intervals", druidTable.intervals); writeFieldIf(generator, "having", null); generator.writeEndObject(); break; case SELECT: generator.writeStartObject(); generator.writeStringField("queryType", "select"); generator.writeStringField("dataSource", druidTable.dataSource); generator.writeStringField("descending", "false"); writeField(generator, "intervals", druidTable.intervals); writeFieldIf(generator, "filter", jsonFilter); writeField(generator, "dimensions", translator.dimensions); writeField(generator, "metrics", translator.metrics); generator.writeStringField("granularity", "all"); generator.writeFieldName("pagingSpec"); generator.writeStartObject(); final int fetch = CalciteConnectionProperty.DRUID_FETCH.wrap(new Properties()).getInt(); generator.writeNumberField("threshold", fetch); generator.writeEndObject(); generator.writeEndObject(); break; default: throw new AssertionError("unknown query type " + queryType); } generator.close(); } catch (IOException e) { e.printStackTrace(); } return new QuerySpec(queryType, sw.toString(), fieldNames); }
From source file:de.tudarmstadt.ukp.dkpro.core.io.brat.BratWriter.java
/**
 * Serialises all annotations in the given CAS to the configured brat output
 * format ({@code .ann} stand-off file, or {@code .html}/{@code .json}
 * visualisation data).
 *
 * @param aJCas the CAS whose annotations are written.
 * @throws IOException if the output cannot be written.
 */
private void writeAnnotations(JCas aJCas) throws IOException {
    BratAnnotationDocument doc = new BratAnnotationDocument();

    // Annotations referencing other annotations are deferred until all spans
    // have been assigned IDs.
    List<FeatureStructure> relationFS = new ArrayList<>();
    Map<BratEventAnnotation, FeatureStructure> eventFS = new LinkedHashMap<>();

    // Go through all the annotations but only handle the ones that have no references to
    // other annotations.
    for (FeatureStructure fs : selectAll(aJCas)) {
        // Skip document annotation
        if (fs == aJCas.getDocumentAnnotationFs()) {
            continue;
        }

        // Skip excluded types
        if (excludeTypes.contains(fs.getType().getName())) {
            getLogger().debug("Excluding [" + fs.getType().getName() + "]");
            continue;
        }

        if (spanTypes.contains(fs.getType().getName())) {
            writeTextAnnotation(doc, (AnnotationFS) fs);
        } else if (parsedRelationTypes.containsKey(fs.getType().getName())) {
            // Deferred: relation targets need span IDs first.
            relationFS.add(fs);
        } else if (hasNonPrimitiveFeatures(fs) && (fs instanceof AnnotationFS)) {
            // else if (parsedEventTypes.containsKey(fs.getType().getName())) {
            // Annotations with non-primitive features are written as events;
            // their slots are filled in once all targets have IDs.
            BratEventAnnotation event = writeEventAnnotation(doc, (AnnotationFS) fs);
            eventFS.put(event, fs);
        } else if (fs instanceof AnnotationFS) {
            warnings.add("Assuming annotation type [" + fs.getType().getName() + "] is span");
            writeTextAnnotation(doc, (AnnotationFS) fs);
        } else {
            warnings.add("Skipping annotation with type [" + fs.getType().getName() + "]");
        }
    }

    // Handle relations now since now we can resolve their targets to IDs.
    for (FeatureStructure fs : relationFS) {
        writeRelationAnnotation(doc, fs);
    }

    // Handle event slots now since now we can resolve their targets to IDs.
    for (Entry<BratEventAnnotation, FeatureStructure> e : eventFS.entrySet()) {
        writeSlots(doc, e.getKey(), e.getValue());
    }

    switch (filenameSuffix) {
    case ".ann":
        // Plain brat stand-off annotation file.
        try (Writer out = new OutputStreamWriter(getOutputStream(aJCas, filenameSuffix), "UTF-8")) {
            doc.write(out);
            break;
        }
    case ".html":
    case ".json":
        // Embed collection and document data as JSON into a template: a full
        // HTML visualisation page for .html, a bare JSON wrapper for .json.
        String template;
        if (filenameSuffix.equals(".html")) {
            template = IOUtils.toString(getClass().getResource("html/template.html"));
        } else {
            template = "{ \"collData\" : ##COLL-DATA## , \"docData\" : ##DOC-DATA## }";
        }

        JsonFactory jfactory = new JsonFactory();
        try (Writer out = new OutputStreamWriter(getOutputStream(aJCas, filenameSuffix), "UTF-8")) {
            // Render the annotation document as pretty-printed JSON.
            String docData;
            try (StringWriter buf = new StringWriter()) {
                try (JsonGenerator jg = jfactory.createGenerator(buf)) {
                    jg.useDefaultPrettyPrinter();
                    doc.write(jg, aJCas.getDocumentText());
                }
                docData = buf.toString();
            }

            // Render the brat collection configuration as pretty-printed JSON.
            String collData;
            try (StringWriter buf = new StringWriter()) {
                try (JsonGenerator jg = jfactory.createGenerator(buf)) {
                    jg.useDefaultPrettyPrinter();
                    conf.write(jg);
                }
                collData = buf.toString();
            }

            template = StringUtils.replaceEach(template, new String[] { "##COLL-DATA##", "##DOC-DATA##" },
                    new String[] { collData, docData });
            out.write(template);
        }
        // Reset the configuration for the next document.
        conf = new BratConfiguration();
        break;
    default:
        throw new IllegalArgumentException("Unknown file format: [" + filenameSuffix + "]");
    }
}
From source file:squash.booking.lambdas.core.PageManager.java
/** * Returns JSON-encoded valid-dates data for a specified date. * //from w ww . ja va 2 s .c o m * <p>This is not private only so that it can be unit-tested. * * @param validDates the dates for which bookings can be made, in YYYY-MM-DD format. * @throws IOException */ protected String createValidDatesData(List<String> validDates) throws IllegalArgumentException, IOException { // N.B. we assume that the date is known to be a valid date logger.log("About to create cached valid dates data"); // Encode valid dates as JSON // Create the node factory that gives us nodes. JsonNodeFactory factory = new JsonNodeFactory(false); // Create a json factory to write the treenode as json. JsonFactory jsonFactory = new JsonFactory(); ObjectNode rootNode = factory.objectNode(); ArrayNode validDatesNode = rootNode.putArray("dates"); for (int i = 0; i < validDates.size(); i++) { validDatesNode.add(validDates.get(i)); } ByteArrayOutputStream validDatesStream = new ByteArrayOutputStream(); PrintStream printStream = new PrintStream(validDatesStream); try (JsonGenerator generator = jsonFactory.createGenerator(printStream)) { ObjectMapper mapper = new ObjectMapper(); mapper.writeTree(generator, rootNode); } String validDatesString = validDatesStream.toString(StandardCharsets.UTF_8.name()); logger.log("Created cached valid dates data : " + validDatesString); return validDatesString; }
From source file:com.ntsync.shared.RawContact.java
/**
 * Convert the RawContact object into a DTO. From the JSONString interface.
 *
 * @param secret key used to encrypt the text data and photo.
 * @param pwdSaltBase64 password salt, mixed into the contact hash.
 * @return a serialised (partially encrypted) representation of the contact,
 *         or null if conversion fails.
 */
public byte[] toDTO(Key secret, String pwdSaltBase64) {
    try {
        ByteArrayOutputStream out = new ByteArrayOutputStream(DEFAULT_BYTEARRAY_SIZE);
        AEADBlockCipher ecipher = CryptoHelper.getCipher();
        byte[] iv = new byte[CryptoHelper.IV_LEN];
        SecureRandom random = new SecureRandom();

        // Hash input: salt plus the name fields; list values are appended to
        // it as they are written below.
        StringBuilder hashValue = new StringBuilder();
        hashValue.append(pwdSaltBase64);
        hashValue.append(displayName);
        hashValue.append(lastName);
        hashValue.append(firstName);
        hashValue.append(middleName);

        // Row id is written unencrypted as a length-prefixed field.
        out.write(ContactConstants.ROWID);
        byte[] rowId = String.valueOf(mRawContactId).getBytes(SyncDataHelper.DEFAULT_CHARSET_NAME);
        SyncDataHelper.writeInt(out, rowId.length);
        out.write(rowId);

        JsonFactory json = new JsonFactory();
        StringWriter writer = new StringWriter();
        // Fix: the original never closed the generator on exception paths;
        // try-with-resources closes it (flushing into the writer) always.
        try (JsonGenerator g = json.createGenerator(writer)) {
            g.writeStartObject();
            writeStructuredName(g);
            writeList(hashValue, g, ContactConstants.PHONE, phones, true);
            writeList(hashValue, g, ContactConstants.EMAIL, emails, true);
            writeList(hashValue, g, ContactConstants.EVENT, events, false);
            writeList(hashValue, g, ContactConstants.RELATION, relations, false);
            writeList(hashValue, g, ContactConstants.SIPADDRESS, sipAddresses, false);
            writeList(hashValue, g, ContactConstants.NICKNAME, nicknames, false);
            writeList(hashValue, g, ContactConstants.WEBSITE, websites, false);
            writeAddress(hashValue, g, addresses);
            writeImList(g, imAddresses);
            writeOrganization(g, organization);
            writeField(g, ContactConstants.NOTE, note);
            if (starred) {
                g.writeBooleanField(ContactConstants.STARRED, true);
            }
            if (sendToVoiceMail) {
                g.writeBooleanField(ContactConstants.SEND_TO_VOICE_MAIL, true);
            }
            writeField(g, ContactConstants.DROID_CUSTOM_RINGTONE, droidCustomRingtone);
            if (photoSuperPrimary) {
                g.writeBooleanField(ContactConstants.PHOTO_SUPERPRIMARY, true);
            }
            writeStringList(g, ContactConstants.GROUPMEMBERSHIP, groupSourceIds);
            g.writeEndObject();
        }
        String textData = writer.toString();

        // Encrypted payload fields.
        CryptoHelper.writeValue(secret, out, ecipher, iv, random, ContactConstants.TEXTDATA, textData);
        CryptoHelper.writeValue(secret, out, ecipher, iv, random, ContactConstants.PHOTO, photo);

        if (lastModified != null) {
            writeRawValue(out, ContactConstants.MODIFIED,
                    String.valueOf(lastModified.getTime()).getBytes(SyncDataHelper.DEFAULT_CHARSET_NAME));
        }
        if (mDeleted) {
            writeRawValue(out, ContactConstants.DELETED, "1".getBytes(SyncDataHelper.DEFAULT_CHARSET_NAME));
        }
        writeRawValue(out, ContactConstants.HASH, createHash(hashValue));
        return out.toByteArray();
    } catch (IOException | GeneralSecurityException | InvalidCipherTextException ex) {
        // Multi-catch replaces three identical catch blocks; same behavior:
        // log and fall through to return null.
        LOG.error(ERROR_CONVERT_TOJSON + ex.toString(), ex);
    }
    return null;
}
From source file:squash.booking.lambdas.core.BackupManager.java
@Override public final ImmutablePair<List<Booking>, List<BookingRule>> backupAllBookingsAndBookingRules() throws Exception { if (!initialised) { throw new IllegalStateException("The backup manager has not been initialised"); }//from w ww. ja v a2 s . c o m // Encode bookings and booking rules as JSON JsonNodeFactory factory = new JsonNodeFactory(false); // Create a json factory to write the treenode as json. JsonFactory jsonFactory = new JsonFactory(); ObjectNode rootNode = factory.objectNode(); ArrayNode bookingsNode = rootNode.putArray("bookings"); List<Booking> bookings = bookingManager.getAllBookings(false); for (Booking booking : bookings) { bookingsNode.add((JsonNode) (mapper.valueToTree(booking))); } ArrayNode bookingRulesNode = rootNode.putArray("bookingRules"); List<BookingRule> bookingRules = ruleManager.getRules(false); for (BookingRule bookingRule : bookingRules) { bookingRulesNode.add((JsonNode) (mapper.valueToTree(bookingRule))); } // Add this, as will be needed for restore in most common case. 
rootNode.put("clearBeforeRestore", true); ByteArrayOutputStream backupDataStream = new ByteArrayOutputStream(); PrintStream printStream = new PrintStream(backupDataStream); try (JsonGenerator generator = jsonFactory.createGenerator(printStream)) { mapper.writeTree(generator, rootNode); } String backupString = backupDataStream.toString(StandardCharsets.UTF_8.name()); logger.log("Backing up all bookings and booking rules to S3 bucket"); IS3TransferManager transferManager = getS3TransferManager(); byte[] backupAsBytes = backupString.getBytes(StandardCharsets.UTF_8); ByteArrayInputStream backupAsStream = new ByteArrayInputStream(backupAsBytes); ObjectMetadata metadata = new ObjectMetadata(); metadata.setContentLength(backupAsBytes.length); PutObjectRequest putObjectRequest = new PutObjectRequest(databaseBackupBucketName, "AllBookingsAndBookingRules", backupAsStream, metadata); TransferUtils.waitForS3Transfer(transferManager.upload(putObjectRequest), logger); logger.log("Backed up all bookings and booking rules to S3 bucket: " + backupString); // Backup to the SNS topic logger.log("Backing up all bookings and booking rules to SNS topic: " + adminSnsTopicArn); getSNSClient().publish(adminSnsTopicArn, backupString, "Sqawsh all-bookings and booking rules backup"); return new ImmutablePair<>(bookings, bookingRules); }