List of usage examples for com.fasterxml.jackson.core JsonGenerator writeStringField
public void writeStringField(String fieldName, String value) throws IOException, JsonGenerationException
From source file:com.google.openrtb.json.OpenRtbJsonWriter.java
/**
 * Serializes the fields of an OpenRTB {@code Banner} message as JSON fields on the
 * given generator. The caller must have already opened the enclosing JSON object;
 * this method writes fields only (no writeStartObject/writeEndObject).
 * Optional scalar fields are emitted only when present ({@code has*()} checks),
 * producing the sparse output typical of OpenRTB JSON.
 *
 * @param banner the protobuf Banner message to serialize
 * @param gen the generator positioned inside an open JSON object
 * @throws IOException if the underlying generator fails to write
 */
@SuppressWarnings("deprecation") // wmax/hmax/wmin/hmin getters are deprecated upstream but still serialized — presumably for OpenRTB <2.4 compatibility; TODO confirm
protected void writeBannerFields(Banner banner, JsonGenerator gen) throws IOException {
    if (banner.hasW()) {
        gen.writeNumberField("w", banner.getW());
    }
    if (banner.hasH()) {
        gen.writeNumberField("h", banner.getH());
    }
    if (banner.hasWmax()) {
        gen.writeNumberField("wmax", banner.getWmax());
    }
    if (banner.hasHmax()) {
        gen.writeNumberField("hmax", banner.getHmax());
    }
    if (banner.hasWmin()) {
        gen.writeNumberField("wmin", banner.getWmin());
    }
    if (banner.hasHmin()) {
        gen.writeNumberField("hmin", banner.getHmin());
    }
    if (banner.hasId()) {
        gen.writeStringField("id", banner.getId());
    }
    // Repeated enum fields are written as JSON arrays of their numeric codes by the helper.
    writeEnums("btype", banner.getBtypeList(), gen);
    writeEnums("battr", banner.getBattrList(), gen);
    if (banner.hasPos()) {
        // pos is serialized by its protobuf enum number, not its name
        gen.writeNumberField("pos", banner.getPos().getNumber());
    }
    writeStrings("mimes", banner.getMimesList(), gen);
    if (banner.hasTopframe()) {
        // booleans are emitted as 0/1 integers per OpenRTB convention
        writeIntBoolField("topframe", banner.getTopframe(), gen);
    }
    writeEnums("expdir", banner.getExpdirList(), gen);
    writeEnums("api", banner.getApiList(), gen);
    if (checkRequired(banner.getFormatCount())) {
        gen.writeArrayFieldStart("format");
        for (Format format : banner.getFormatList()) {
            writeFormat(format, gen);
        }
        gen.writeEndArray();
    }
}
From source file:de.escalon.hypermedia.spring.hydra.PagedResourcesSerializer.java
protected void serializeContext(Object bean, JsonGenerator jgen, SerializerProvider serializerProvider, Deque<LdContext> contextStack) throws IOException { // TODO: this code is duplicated from JacksonHydraSerializer, see there for considerations if (proxyUnwrapper != null) { bean = proxyUnwrapper.unwrapProxy(bean); }/*from w w w .j a v a 2s .c o m*/ MixinSource mixinSource = new JacksonMixinSource(serializerProvider.getConfig()); final Class<?> mixInClass = mixinSource.findMixInClassFor(bean.getClass()); final LdContext parentContext = contextStack.peek(); LdContext currentContext = new LdContext(parentContext, ldContextFactory.getVocab(mixinSource, bean, mixInClass), ldContextFactory.getTerms(mixinSource, bean, mixInClass)); contextStack.push(currentContext); // check if we need to write a context for the current bean at all // If it is in the same vocab: no context // If the terms are already defined in the context: no context boolean mustWriteContext; if (parentContext == null || !parentContext.contains(currentContext)) { mustWriteContext = true; } else { mustWriteContext = false; } if (mustWriteContext) { // begin context // default context: schema.org vocab or vocab package annotation jgen.writeObjectFieldStart("@context"); // do not repeat vocab if already defined in current context if (parentContext == null || parentContext.vocab == null || (currentContext.vocab != null && !currentContext.vocab.equals(parentContext.vocab))) { jgen.writeStringField(JsonLdKeywords.AT_VOCAB, currentContext.vocab); } for (Map.Entry<String, Object> termEntry : currentContext.terms.entrySet()) { if (termEntry.getValue() instanceof String) { jgen.writeStringField(termEntry.getKey(), termEntry.getValue().toString()); } else { jgen.writeObjectField(termEntry.getKey(), termEntry.getValue()); } } jgen.writeEndObject(); // end context } }
From source file:de.escalon.hypermedia.spring.hydra.PagedResourcesSerializer.java
@Override public void serialize(PagedResources pagedResources, JsonGenerator jgen, SerializerProvider serializerProvider) throws IOException { final SerializationConfig config = serializerProvider.getConfig(); JavaType javaType = config.constructType(pagedResources.getClass()); JsonSerializer<Object> serializer = BeanSerializerFactory.instance.createSerializer(serializerProvider, javaType);/*from w w w. ja v a 2s .c o m*/ // replicate pretty much everything from JacksonHydraSerializer // since we must reorganize the internals of pagedResources to get a hydra collection // with partial page view, we have to serialize pagedResources with an // unwrapping serializer Deque<LdContext> contextStack = (Deque<LdContext>) serializerProvider.getAttribute(KEY_LD_CONTEXT); if (contextStack == null) { contextStack = new ArrayDeque<LdContext>(); serializerProvider.setAttribute(KEY_LD_CONTEXT, contextStack); } // TODO: filter next/previous/first/last from link list - maybe create new PagedResources without them? 
List<Link> links = pagedResources.getLinks(); List<Link> filteredLinks = new ArrayList<Link>(); for (Link link : links) { String rel = link.getRel(); if (navigationRels.contains(rel)) { continue; } else { filteredLinks.add(link); } } PagedResources toRender = new PagedResources(pagedResources.getContent(), pagedResources.getMetadata(), filteredLinks); jgen.writeStartObject(); serializeContext(toRender, jgen, serializerProvider, contextStack); jgen.writeStringField(JsonLdKeywords.AT_TYPE, "hydra:Collection"); // serialize with PagedResourcesMixin serializer.unwrappingSerializer(NameTransformer.NOP).serialize(toRender, jgen, serializerProvider); PagedResources.PageMetadata metadata = pagedResources.getMetadata(); jgen.writeNumberField("hydra:totalItems", metadata.getTotalElements()); // begin hydra:view jgen.writeObjectFieldStart("hydra:view"); jgen.writeStringField(JsonLdKeywords.AT_TYPE, "hydra:PartialCollectionView"); writeRelLink(pagedResources, jgen, Link.REL_NEXT); writeRelLink(pagedResources, jgen, "previous"); // must also translate prev to its synonym previous writeRelLink(pagedResources, jgen, Link.REL_PREVIOUS, "previous"); writeRelLink(pagedResources, jgen, Link.REL_FIRST); writeRelLink(pagedResources, jgen, Link.REL_LAST); jgen.writeEndObject(); // end hydra:view jgen.writeEndObject(); contextStack = (Deque<LdContext>) serializerProvider.getAttribute(KEY_LD_CONTEXT); if (!contextStack.isEmpty()) { contextStack.pop(); } }
From source file:org.apache.olingo.server.core.serializer.json.ODataJsonSerializer.java
/**
 * Serializes a collection of entity references as OData JSON: a context URL,
 * an optional inline count, a "value" array of {"@odata.id": <canonical URL>}
 * objects, and an optional next link.
 *
 * @param metadata service metadata (unused here beyond the interface contract)
 * @param edmEntitySet entity set used to build each entity's canonical URL
 * @param entityCollection the entities to reference
 * @param options serializer options; may be null (then no context URL check input, no count)
 * @return the serialized payload wrapped in a SerializerResult
 * @throws SerializerException if writing fails; I/O errors are wrapped and cached so the
 *         finally block can report them alongside any close failure
 */
@Override
public SerializerResult referenceCollection(final ServiceMetadata metadata, final EdmEntitySet edmEntitySet,
        final AbstractEntityCollection entityCollection, final ReferenceCollectionSerializerOptions options)
        throws SerializerException {
    OutputStream outputStream = null;
    // Cached so the finally block knows whether close errors should be suppressed
    // in favor of the original failure.
    SerializerException cachedException = null;
    // Always false in this method — reference collections here are not server-paginated;
    // TODO confirm against writeNextLink's contract.
    boolean pagination = false;
    try {
        final ContextURL contextURL = checkContextURL(options == null ? null : options.getContextURL());
        CircleStreamBuffer buffer = new CircleStreamBuffer();
        final UriHelper uriHelper = new UriHelperImpl();
        outputStream = buffer.getOutputStream();
        final JsonGenerator json = new JsonFactory().createGenerator(outputStream);
        json.writeStartObject();

        writeContextURL(contextURL, json);
        // Inline count only when explicitly requested via $count=true.
        if (options != null && options.getCount() != null && options.getCount().getValue()) {
            writeInlineCount("", entityCollection.getCount(), json);
        }

        json.writeArrayFieldStart(Constants.VALUE);
        for (final Entity entity : entityCollection) {
            // Each reference is an object holding only the entity's canonical id URL.
            json.writeStartObject();
            json.writeStringField(constants.getId(), uriHelper.buildCanonicalURL(edmEntitySet, entity));
            json.writeEndObject();
        }
        json.writeEndArray();

        writeNextLink(entityCollection, json, pagination);

        json.writeEndObject();

        json.close();
        outputStream.close();
        return SerializerResultImpl.with().content(buffer.getInputStream()).build();
    } catch (final IOException e) {
        cachedException = new SerializerException(IO_EXCEPTION_TEXT, e,
                SerializerException.MessageKeys.IO_EXCEPTION);
        throw cachedException;
    } finally {
        // Closes the stream; suppresses close errors when cachedException is already set.
        closeCircleStreamBufferOutput(outputStream, cachedException);
    }
}
From source file:com.baidubce.services.bos.BosClient.java
/**
 * Completes a multipart upload by assembling previously uploaded parts.
 *
 * <p>Builds a JSON body of the form {@code {"parts":[{"partNumber":n,"eTag":"..."}...]}}
 * from the request's part ETags and POSTs it with the uploadId query parameter.
 *
 * @param request The CompleteMultipartUploadRequest object that specifies all the parameters of this operation.
 * @return A CompleteMultipartUploadResponse from Bos containing the ETag for
 *         the new object composed of the individual parts.
 */
public CompleteMultipartUploadResponse completeMultipartUpload(CompleteMultipartUploadRequest request) {
    checkNotNull(request, "request should not be null.");

    InternalRequest internalRequest = this.createRequest(request, HttpMethodName.POST);
    internalRequest.addParameter("uploadId", request.getUploadId());
    ObjectMetadata metadata = request.getObjectMetadata();
    if (metadata != null) {
        populateRequestMetadata(internalRequest, metadata);
    }

    byte[] json = null;
    List<PartETag> partETags = request.getPartETags();
    StringWriter writer = new StringWriter();
    try {
        JsonGenerator jsonGenerator = JsonUtils.jsonGeneratorOf(writer);
        jsonGenerator.writeStartObject();
        jsonGenerator.writeArrayFieldStart("parts");
        for (PartETag partETag : partETags) {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeNumberField("partNumber", partETag.getPartNumber());
            jsonGenerator.writeStringField("eTag", partETag.getETag());
            jsonGenerator.writeEndObject();
        }
        jsonGenerator.writeEndArray();
        jsonGenerator.writeEndObject();
        jsonGenerator.close();
    } catch (IOException e) {
        // StringWriter never throws in practice; this guards the generator's contract.
        throw new BceClientException("Fail to generate json", e);
    }
    try {
        // DEFAULT_ENCODING is presumably "UTF-8" (see error message) — TODO confirm constant.
        json = writer.toString().getBytes(DEFAULT_ENCODING);
    } catch (UnsupportedEncodingException e) {
        throw new BceClientException("Fail to get UTF-8 bytes", e);
    }

    internalRequest.addHeader(Headers.CONTENT_LENGTH, String.valueOf(json.length));
    internalRequest.addHeader(Headers.CONTENT_TYPE, "application/json");
    internalRequest.setContent(RestartableInputStream.wrap(json));

    CompleteMultipartUploadResponse response = this.invokeHttpClient(internalRequest,
            CompleteMultipartUploadResponse.class);
    // The service response does not echo the bucket; copy it from the request for callers.
    response.setBucketName(request.getBucketName());
    return response;
}
From source file:de.escalon.hypermedia.hydra.serialize.JacksonHydraSerializer.java
private void serializeContext(Object bean, JsonGenerator jgen, SerializerProvider serializerProvider, Deque<String> deque) throws IOException { try {/* ww w .j a va 2s . co m*/ // TODO use serializerProvider.getAttributes to hold a stack of contexts // and check if we need to write a context for the current bean at all // If it is in the same vocab: no context // If the terms are already defined in the context: no context SerializationConfig config = serializerProvider.getConfig(); final Class<?> mixInClass = config.findMixInClassFor(bean.getClass()); String vocab = getVocab(bean, mixInClass); Map<String, Object> terms = getTerms(bean, mixInClass); final String currentVocab = deque.peek(); deque.push(vocab); boolean mustWriteContext; if (currentVocab == null || !vocab.equals(currentVocab)) { mustWriteContext = true; } else { // only write if bean has terms if (terms.isEmpty()) { mustWriteContext = false; } else { // TODO actually, need not repeat vocab in context if same mustWriteContext = true; } } if (mustWriteContext) { // begin context // default context: schema.org vocab or vocab package annotation jgen.writeObjectFieldStart("@context"); // TODO do not repeat vocab if already defined in current context if (currentVocab == null || !vocab.equals(currentVocab)) { jgen.writeStringField(AT_VOCAB, vocab); } for (Map.Entry<String, Object> termEntry : terms.entrySet()) { if (termEntry.getValue() instanceof String) { jgen.writeStringField(termEntry.getKey(), termEntry.getValue().toString()); } else { jgen.writeObjectField(termEntry.getKey(), termEntry.getValue()); } } jgen.writeEndObject(); } // end context // TODO build the context from @Vocab and @Term and @Expose and write it as local or external context with // TODO jsonld extension (using apt?) // TODO also allow manually created jsonld contexts // TODO how to define a context containing several context objects? 
@context is then an array of // TODO external context strings pointing to json-ld, and json objects containing terms // TODO another option: create custom vocabulary without reference to public vocabs // TODO support additionalType from goodrelations } catch (Exception e) { throw new RuntimeException(e); } }
From source file:org.apache.olingo.server.core.serializer.json.ODataJsonSerializer.java
/**
 * Writes a geospatial value following the GeoJSON specification defined in RFC 7946.
 *
 * <p>Emits an object with a "type" field, a "coordinates" (or, for collections,
 * "geometries") array filled according to the geometry kind, and — when the value's
 * SRID is non-default and differs from the parent's — a "crs" member via {@code srid}.
 * Geospatial collections recurse into this method for each element, passing their own
 * SRID down as the parent SRID.
 *
 * @param name property name, used only when recursing for collection elements
 * @param type EDM primitive type used to validate the runtime geometry class
 * @param geoValue the geometry to write; null is allowed only when nullable
 * @param isNullable whether a null value is permitted (null means nullable)
 * @param json the target generator
 * @param parentSrid SRID of the enclosing geometry, or null at the top level
 * @throws EdmPrimitiveTypeException if the value is null-but-not-nullable or of the wrong class
 * @throws IOException if the underlying generator fails to write
 * @throws SerializerException propagated from recursive collection serialization
 */
protected void writeGeoValue(final String name, final EdmPrimitiveType type, final Geospatial geoValue,
        final Boolean isNullable, JsonGenerator json, SRID parentSrid)
        throws EdmPrimitiveTypeException, IOException, SerializerException {
    if (geoValue == null) {
        if (isNullable == null || isNullable) {
            json.writeNull();
        } else {
            throw new EdmPrimitiveTypeException("The literal 'null' is not allowed.");
        }
    } else {
        // The runtime geometry class must match the declared EDM type.
        if (!type.getDefaultType().isAssignableFrom(geoValue.getClass())) {
            throw new EdmPrimitiveTypeException("The value type " + geoValue.getClass() + " is not supported.");
        }
        json.writeStartObject();
        json.writeStringField(Constants.ATTR_TYPE, geoValueTypeToJsonName.get(geoValue.getGeoType()));
        // Collections carry "geometries"; all other kinds carry "coordinates".
        json.writeFieldName(
                geoValue.getGeoType() == Geospatial.Type.GEOSPATIALCOLLECTION ? Constants.JSON_GEOMETRIES
                        : Constants.JSON_COORDINATES);
        json.writeStartArray();
        switch (geoValue.getGeoType()) {
        case POINT:
            writeGeoPoint(json, (Point) geoValue);
            break;
        case MULTIPOINT:
            writeGeoPoints(json, (MultiPoint) geoValue);
            break;
        case LINESTRING:
            writeGeoPoints(json, (LineString) geoValue);
            break;
        case MULTILINESTRING:
            // Each line string is its own nested coordinate array.
            for (final LineString lineString : (MultiLineString) geoValue) {
                json.writeStartArray();
                writeGeoPoints(json, lineString);
                json.writeEndArray();
            }
            break;
        case POLYGON:
            writeGeoPolygon(json, (Polygon) geoValue);
            break;
        case MULTIPOLYGON:
            for (final Polygon polygon : (MultiPolygon) geoValue) {
                json.writeStartArray();
                writeGeoPolygon(json, polygon);
                json.writeEndArray();
            }
            break;
        case GEOSPATIALCOLLECTION:
            // Recurse per element with this collection's SRID as the new parent SRID.
            for (final Geospatial element : (GeospatialCollection) geoValue) {
                writeGeoValue(name, EdmPrimitiveTypeFactory.getInstance(element.getEdmPrimitiveTypeKind()),
                        element, isNullable, json, geoValue.getSrid());
            }
            break;
        }
        json.writeEndArray();

        // Only write the SRID when it is non-default and not already implied by the parent.
        if (geoValue.getSrid() != null && geoValue.getSrid().isNotDefault()
                && (parentSrid == null || !parentSrid.equals(geoValue.getSrid()))) {
            srid(json, geoValue.getSrid());
        }
        json.writeEndObject();
    }
}
From source file:com.baidubce.services.bos.BosClient.java
/**
 * Sets the Acl for the specified Bos bucket.
 *
 * <p>Exactly one of the request's canned ACL or explicit access control list must be
 * set. A canned ACL is sent as the {@code x-bce-acl} header with an empty body; an
 * explicit list is serialized as a JSON {@code accessControlList} document.
 *
 * @param request The request object containing the bucket to modify and the ACL to set.
 */
public void setBucketAcl(SetBucketAclRequest request) {
    checkNotNull(request, "request should not be null.");

    InternalRequest internalRequest = this.createRequest(request, HttpMethodName.PUT);
    internalRequest.addParameter("acl", null);
    if (request.getCannedAcl() != null) {
        // Canned ACL variant: header only, no request body.
        internalRequest.addHeader(Headers.BCE_ACL, request.getCannedAcl().toString());
        this.setZeroContentLength(internalRequest);
    } else if (request.getAccessControlList() != null) {
        byte[] json = null;
        List<Grant> grants = request.getAccessControlList();
        StringWriter writer = new StringWriter();
        try {
            JsonGenerator jsonGenerator = JsonUtils.jsonGeneratorOf(writer);
            jsonGenerator.writeStartObject();
            jsonGenerator.writeArrayFieldStart("accessControlList");
            for (Grant grant : grants) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeArrayFieldStart("grantee");
                for (Grantee grantee : grant.getGrantee()) {
                    jsonGenerator.writeStartObject();
                    jsonGenerator.writeStringField("id", grantee.getId());
                    jsonGenerator.writeEndObject();
                }
                jsonGenerator.writeEndArray();
                jsonGenerator.writeArrayFieldStart("permission");
                for (Permission permission : grant.getPermission()) {
                    // Permissions are plain strings in the array, not field/value pairs.
                    jsonGenerator.writeString(permission.toString());
                }
                jsonGenerator.writeEndArray();
                jsonGenerator.writeEndObject();
            }
            jsonGenerator.writeEndArray();
            jsonGenerator.writeEndObject();
            jsonGenerator.close();
        } catch (IOException e) {
            // StringWriter never throws in practice; this guards the generator's contract.
            throw new BceClientException("Fail to generate json", e);
        }
        try {
            json = writer.toString().getBytes(DEFAULT_ENCODING);
        } catch (UnsupportedEncodingException e) {
            throw new BceClientException("Fail to get UTF-8 bytes", e);
        }
        internalRequest.addHeader(Headers.CONTENT_LENGTH, String.valueOf(json.length));
        internalRequest.addHeader(Headers.CONTENT_TYPE, "application/json");
        internalRequest.setContent(RestartableInputStream.wrap(json));
    } else {
        // Neither ACL variant present: deliberately passing null so checkNotNull always
        // throws with this message. NOTE(review): exception type depends on the project's
        // checkNotNull helper — confirm whether callers expect NPE or IllegalArgumentException.
        checkNotNull(null, "request.acl should not be null.");
    }
    this.invokeHttpClient(internalRequest, BosResponse.class);
}
From source file:com.boundary.zoocreeper.Backup.java
/**
 * Dumps a single znode as a JSON object field keyed by its path: all Stat counters,
 * the node data (base64 via writeBinaryField, or null), and its ACL entries.
 *
 * @param jgen generator positioned inside an open JSON object
 * @param path znode path, used as the JSON field name
 * @param stat ZooKeeper Stat for the node
 * @param acls ACL entries attached to the node
 * @param data node payload; null is written as a JSON null
 * @throws IOException if the underlying generator fails to write
 */
private void dumpNode(JsonGenerator jgen, String path, Stat stat, List<ACL> acls, byte[] data)
        throws IOException {
    jgen.writeObjectFieldStart(path);

    // The number of changes to the ACL of this znode.
    jgen.writeNumberField(FIELD_AVERSION, stat.getAversion());
    // The time in milliseconds from epoch when this znode was created.
    jgen.writeNumberField(FIELD_CTIME, stat.getCtime());
    // The number of changes to the children of this znode.
    jgen.writeNumberField(FIELD_CVERSION, stat.getCversion());
    // The zxid of the change that caused this znode to be created.
    jgen.writeNumberField(FIELD_CZXID, stat.getCzxid());
    // The length of the data field of this znode (intentionally omitted — it is
    // implied by the data itself).
    // jgen.writeNumberField("dataLength", stat.getDataLength());
    // The session id of the owner of this znode if the znode is an ephemeral node.
    // If it is not an ephemeral node, it will be zero.
    jgen.writeNumberField(FIELD_EPHEMERAL_OWNER, stat.getEphemeralOwner());
    // The time in milliseconds from epoch when this znode was last modified.
    jgen.writeNumberField(FIELD_MTIME, stat.getMtime());
    // The zxid of the change that last modified this znode.
    jgen.writeNumberField(FIELD_MZXID, stat.getMzxid());
    // The number of children of this znode.
    jgen.writeNumberField("numChildren", stat.getNumChildren());
    // The zxid of the change that last modified this znode's children.
    jgen.writeNumberField(FIELD_PZXID, stat.getPzxid());
    // The number of changes to the data of this znode.
    jgen.writeNumberField(FIELD_VERSION, stat.getVersion());

    if (data != null) {
        jgen.writeBinaryField(FIELD_DATA, data);
    } else {
        jgen.writeNullField(FIELD_DATA);
    }

    jgen.writeArrayFieldStart(FIELD_ACLS);
    for (ACL acl : acls) {
        jgen.writeStartObject();
        jgen.writeStringField(FIELD_ACL_ID, acl.getId().getId());
        jgen.writeStringField(FIELD_ACL_SCHEME, acl.getId().getScheme());
        jgen.writeNumberField(FIELD_ACL_PERMS, acl.getPerms());
        jgen.writeEndObject();
    }
    jgen.writeEndArray();

    jgen.writeEndObject();
}
From source file:com.buaa.cfs.conf.Configuration.java
/** * Writes out all the parameters and their properties (final and resource) to the given {@link Writer} The format of * the output would be { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2, * key2.isFinal,key2.resource}... ] } It does not output the parameters of the configuration object which is loaded * from an input stream./*from w ww . ja v a 2 s.c o m*/ * * @param out the Writer to write to * * @throws IOException */ public static void dumpConfiguration(Configuration config, Writer out) throws IOException { JsonFactory dumpFactory = new JsonFactory(); JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out); dumpGenerator.writeStartObject(); dumpGenerator.writeFieldName("properties"); dumpGenerator.writeStartArray(); dumpGenerator.flush(); synchronized (config) { for (Entry<Object, Object> item : config.getProps().entrySet()) { dumpGenerator.writeStartObject(); dumpGenerator.writeStringField("key", (String) item.getKey()); dumpGenerator.writeStringField("value", config.get((String) item.getKey())); dumpGenerator.writeBooleanField("isFinal", config.finalParameters.contains(item.getKey())); String[] resources = config.updatingResource.get(item.getKey()); String resource = UNKNOWN_RESOURCE; if (resources != null && resources.length > 0) { resource = resources[0]; } dumpGenerator.writeStringField("resource", resource); dumpGenerator.writeEndObject(); } } dumpGenerator.writeEndArray(); dumpGenerator.writeEndObject(); dumpGenerator.flush(); }