Usage examples for `com.fasterxml.jackson.core.JsonGenerator#writeEndArray()`
public abstract void writeEndArray() throws IOException, JsonGenerationException;
From source file:org.lamop.riche.model.jsonserialize.RelationWorkSourceSerialize.java
@Override public void serialize(List<RelationWorkSource> list, JsonGenerator jg, SerializerProvider sp) throws IOException, JsonProcessingException { jg.writeStartArray();//from ww w.j a v a 2 s. co m List<RelationWorkSource> aSerialiser = new ArrayList<>(); for (int i = 0; i < list.size(); i++) { RelationWorkSource get = list.get(i); RelationWorkSource newtruc = new RelationWorkSource(); newtruc.setSource(get.getSource()); newtruc.setNote(get.getNote()); newtruc.setExtract(get.getExtract()); aSerialiser.add(newtruc); } for (int i = 0; i < aSerialiser.size(); i++) { RelationWorkSource get = aSerialiser.get(i); jg.writeObject(get); } // for (int i = 0; i < list.size(); i++) { // RelationWorkSource get = list.get(i); // //// jg.writeObject(get.getSource()); //// // } jg.writeEndArray(); // jg.write // jg.writeObjectField("source", t.getSource()); // jg.writeNumberField("id", t.getId()); // jg.writeEndObject(); }
From source file:org.eclipse.winery.repository.resources.servicetemplates.plans.PlansResourceData.java
/** * Data object for the JSP/*from w w w .ja v a 2 s. c o m*/ * * @param plans the plans this resource manages */ public PlansResourceData(List<TPlan> plans) { if (plans.isEmpty()) { this.embeddedPlansTableData = "[]"; this.linkedPlansTableData = "[]"; return; } JsonFactory jsonFactory = new JsonFactory(); StringWriter embeddedPlansTableDataSW = new StringWriter(); StringWriter linkedPlansTableDataSW = new StringWriter(); try { JsonGenerator jGeneratorEmbedded = jsonFactory.createGenerator(embeddedPlansTableDataSW); JsonGenerator jGeneratorLinked = jsonFactory.createGenerator(linkedPlansTableDataSW); jGeneratorEmbedded.writeStartArray(); jGeneratorLinked.writeStartArray(); for (TPlan plan : plans) { String name = plan.getName(); if (name == null) { // name defaults to id name = plan.getId(); } String type = PlanTypesManager.INSTANCE.getShortName(plan.getPlanType()); String language = PlanLanguagesManager.INSTANCE.getShortName(plan.getPlanLanguage()); PlanModelReference planModelReference = plan.getPlanModelReference(); String reference = planModelReference != null ? 
planModelReference.getReference() : null; JsonGenerator gen; boolean writeReference; if (reference == null) { gen = jGeneratorEmbedded; writeReference = false; } else if (reference.startsWith("../")) { gen = jGeneratorEmbedded; writeReference = false; } else { gen = jGeneratorLinked; writeReference = true; } gen.writeStartArray(); gen.writeString(plan.getId()); gen.writeString(""); // precondition gen.writeString(name); gen.writeString(type); gen.writeString(language); if (writeReference) { gen.writeString(reference); } gen.writeEndArray(); } jGeneratorEmbedded.writeEndArray(); jGeneratorLinked.writeEndArray(); jGeneratorEmbedded.close(); embeddedPlansTableDataSW.close(); jGeneratorLinked.close(); linkedPlansTableDataSW.close(); } catch (JsonGenerationException e) { PlansResourceData.LOGGER.error(e.getMessage(), e); this.embeddedPlansTableData = "[]"; this.linkedPlansTableData = "[]"; return; } catch (IOException e) { PlansResourceData.LOGGER.error("", e); this.embeddedPlansTableData = "[]"; this.linkedPlansTableData = "[]"; return; } this.embeddedPlansTableData = embeddedPlansTableDataSW.toString(); this.linkedPlansTableData = linkedPlansTableDataSW.toString(); }
From source file:io.mesosphere.mesos.frameworks.cassandra.scheduler.api.NodeController.java
/** * Retrieve a list of all nodes including their status. * * <pre>{@code {//from w w w. j a v a2 s . c o m * "replaceNodes" : [ ], * "nodesToAcquire" : 0, * "nodes" : [ { * "tasks" : { * "METADATA" : { * "cpuCores" : 0.1, * "diskMb" : 16, * "memMb" : 16, * "taskId" : "cassandra.node.0.executor" * }, * "SERVER" : { * "cpuCores" : 2.0, * "diskMb" : 2048, * "memMb" : 2048, * "taskId" : "cassandra.node.0.executor.server" * } * }, * "executorId" : "cassandra.node.0.executor", * "ip" : "127.0.0.2", * "hostname" : "127.0.0.2", * "targetRunState" : "RUN", * "jmxPort" : 64112, * "seedNode" : true, * "cassandraDaemonPid" : 6104, * "lastHealthCheck" : 1426686217128, * "healthCheckDetails" : { * "healthy" : true, * "msg" : "", * "version" : "2.1.4", * "operationMode" : "NORMAL", * "clusterName" : "cassandra", * "dataCenter" : "DC1", * "rack" : "RAC1", * "endpoint" : "127.0.0.2", * "hostId" : "4207396e-6aa0-432e-97d9-1a4df3c1057f", * "joined" : true, * "gossipInitialized" : true, * "gossipRunning" : true, * "nativeTransportRunning" : true, * "rpcServerRunning" : true, * "tokenCount" : 256, * "uptimeMillis" : 29072 * } * }, { * "tasks" : { * "METADATA" : { * "cpuCores" : 0.1, * "diskMb" : 16, * "memMb" : 16, * "taskId" : "cassandra.node.1.executor" * }, * "SERVER" : { * "cpuCores" : 2.0, * "diskMb" : 2048, * "memMb" : 2048, * "taskId" : "cassandra.node.1.executor.server" * } * }, * "executorId" : "cassandra.node.1.executor", * "ip" : "127.0.0.1", * "hostname" : "localhost", * "targetRunState" : "RUN", * "jmxPort" : 64113, * "seedNode" : false, * "cassandraDaemonPid" : 6127, * "lastHealthCheck" : 1426686217095, * "healthCheckDetails" : { * "healthy" : true, * "msg" : "", * "version" : "2.1.4", * "operationMode" : "JOINING", * "clusterName" : "cassandra", * "dataCenter" : "", * "rack" : "", * "endpoint" : "", * "hostId" : "", * "joined" : true, * "gossipInitialized" : true, * "gossipRunning" : true, * "nativeTransportRunning" : false, * "rpcServerRunning" : false, * "tokenCount" 
: 0, * "uptimeMillis" : 16936 * } * } ] * }}</pre> */ @GET @Path("/all") public Response nodes() { return JaxRsUtils.buildStreamingResponse(factory, new StreamingJsonResponse() { @Override public void write(final JsonGenerator json) throws IOException { final CassandraFrameworkProtos.CassandraClusterState clusterState = cluster.getClusterState().get(); json.writeArrayFieldStart("replaceNodes"); for (final String ip : clusterState.getReplaceNodeIpsList()) { json.writeString(ip); } json.writeEndArray(); final NodeCounts nodeCounts = cluster.getClusterState().nodeCounts(); json.writeNumberField("nodesToAcquire", CassandraCluster.numberOfNodesToAcquire(nodeCounts, cluster.getConfiguration())); json.writeArrayFieldStart("nodes"); for (final CassandraFrameworkProtos.CassandraNode cassandraNode : clusterState.getNodesList()) { json.writeStartObject(); if (cassandraNode.hasReplacementForIp()) { json.writeStringField("replacementForIp", cassandraNode.getReplacementForIp()); } json.writeObjectFieldStart("tasks"); for (final CassandraFrameworkProtos.CassandraNodeTask cassandraNodeTask : cassandraNode .getTasksList()) { JaxRsUtils.writeTask(json, cassandraNodeTask); } json.writeEndObject(); // TODO cassandraNode.getDataVolumesList(); if (!cassandraNode.hasCassandraNodeExecutor()) { json.writeNullField("executorId"); json.writeNullField("workdir"); } else { json.writeStringField("executorId", cassandraNode.getCassandraNodeExecutor().getExecutorId()); final CassandraFrameworkProtos.ExecutorMetadata executorMetadata = cluster .metadataForExecutor(cassandraNode.getCassandraNodeExecutor().getExecutorId()); if (executorMetadata != null) { json.writeStringField("workdir", executorMetadata.getWorkdir()); } else { json.writeNullField("workdir"); } } json.writeStringField("ip", cassandraNode.getIp()); json.writeStringField("hostname", cassandraNode.getHostname()); json.writeStringField("targetRunState", cassandraNode.getTargetRunState().name()); json.writeNumberField("jmxPort", 
cassandraNode.getJmxConnect().getJmxPort()); json.writeBooleanField("seedNode", cassandraNode.getSeed()); CassandraFrameworkProtos.RackDc rackDc = cassandraNode.getRackDc(); json.writeObjectFieldStart("rackDc"); json.writeStringField("rack", rackDc.getRack()); json.writeStringField("dc", rackDc.getDc()); json.writeEndObject(); if (!cassandraNode.hasCassandraDaemonPid()) { json.writeNullField("cassandraDaemonPid"); } else { json.writeNumberField("cassandraDaemonPid", cassandraNode.getCassandraDaemonPid()); } final CassandraFrameworkProtos.HealthCheckHistoryEntry lastHealthCheck = cassandraNode .hasCassandraNodeExecutor() ? cluster.lastHealthCheck( cassandraNode.getCassandraNodeExecutor().getExecutorId()) : null; if (lastHealthCheck != null) { json.writeNumberField("lastHealthCheck", lastHealthCheck.getTimestampEnd()); } else { json.writeNullField("lastHealthCheck"); } if (lastHealthCheck != null) { json.writeObjectFieldStart("healthCheckDetails"); final CassandraFrameworkProtos.HealthCheckDetails hcd = lastHealthCheck.getDetails(); json.writeBooleanField("healthy", hcd.getHealthy()); json.writeStringField("msg", hcd.getMsg()); json.writeStringField("version", hcd.getInfo().getVersion()); json.writeStringField("operationMode", hcd.getInfo().getOperationMode()); json.writeStringField("clusterName", hcd.getInfo().getClusterName()); json.writeStringField("dataCenter", hcd.getInfo().getDataCenter()); json.writeStringField("rack", hcd.getInfo().getRack()); json.writeStringField("endpoint", hcd.getInfo().getEndpoint()); json.writeStringField("hostId", hcd.getInfo().getHostId()); json.writeBooleanField("joined", hcd.getInfo().getJoined()); json.writeBooleanField("gossipInitialized", hcd.getInfo().getGossipInitialized()); json.writeBooleanField("gossipRunning", hcd.getInfo().getGossipRunning()); json.writeBooleanField("nativeTransportRunning", hcd.getInfo().getNativeTransportRunning()); json.writeBooleanField("rpcServerRunning", hcd.getInfo().getRpcServerRunning()); 
json.writeNumberField("tokenCount", hcd.getInfo().getTokenCount()); json.writeNumberField("uptimeMillis", hcd.getInfo().getUptimeMillis()); json.writeEndObject(); } else { json.writeNullField("healthCheckDetails"); } final List<CassandraFrameworkProtos.DataVolume> dataVolumes = cassandraNode .getDataVolumesList(); json.writeArrayFieldStart("dataVolumes"); for (final CassandraFrameworkProtos.DataVolume volume : dataVolumes) { json.writeStartObject(); json.writeStringField("path", volume.getPath()); if (volume.hasSizeMb()) { json.writeNumberField("size", volume.getSizeMb()); } json.writeEndObject(); } json.writeEndArray(); json.writeEndObject(); } json.writeEndArray(); } }); }
From source file:org.pentaho.metaverse.impl.model.kettle.json.AbstractStepMetaJsonSerializer.java
/**
 * Writes the step's field mappings as a JSON array under
 * {@code JSON_PROPERTY_MAPPINGS}. If lineage analysis fails, the failure is
 * logged as a warning and an empty array is emitted.
 */
protected void writeFieldMappings(T meta, JsonGenerator json, SerializerProvider serializerProvider)
        throws IOException {
    json.writeArrayFieldStart(JSON_PROPERTY_MAPPINGS);
    IFieldLineageMetadataProvider lineageProvider = getFieldLineageMetadataProvider(meta);
    try {
        Set<IFieldMapping> mappings = lineageProvider.getFieldMappings(meta);
        if (mappings != null) {
            for (IFieldMapping mapping : mappings) {
                json.writeObject(mapping);
            }
        }
    } catch (MetaverseAnalyzerException e) {
        // Best effort: a step whose mappings cannot be analyzed still serializes.
        LOGGER.warn(Messages.getString("WARNING.Serialization.Step.WriteFieldMappings",
                meta.getParentStepMeta().getName()), e);
    }
    json.writeEndArray();
}
From source file:de.escalon.hypermedia.spring.hydra.LinkListSerializer.java
/**
 * Writes a hydra:IriTemplate object under the given relation name:
 * {@code {rel: {"@type": "hydra:IriTemplate", "hydra:template": href,
 * "hydra:mapping": [...]}}}.
 */
private void writeIriTemplate(String rel, String href, List<String> variableNames,
        ActionDescriptor actionDescriptorForHttpGet, JsonGenerator jgen) throws IOException {
    jgen.writeObjectFieldStart(rel);
    jgen.writeStringField("@type", "hydra:IriTemplate");
    jgen.writeStringField("hydra:template", href);
    jgen.writeArrayFieldStart("hydra:mapping");
    writeHydraVariableMapping(jgen, actionDescriptorForHttpGet, variableNames);
    jgen.writeEndArray();
    jgen.writeEndObject();
}
From source file:org.h2gis.drivers.geojson.GeoJsonWriteDriver.java
/** * * * Coordinates of a MultiPolygon are an array of Polygon coordinate arrays: * * { "type": "MultiPolygon", "coordinates": [ [[[102.0, 2.0], [103.0, 2.0], * [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]], [[[100.0, 0.0], [101.0, 0.0], * [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], * [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]] ] } * * @param geom/* w ww .j ava2 s. c o m*/ * @param gen * @throws IOException */ private void write(MultiPolygon geom, JsonGenerator gen) throws IOException { gen.writeStringField("type", "MultiPolygon"); gen.writeFieldName("coordinates"); gen.writeStartArray(); for (int i = 0; i < geom.getNumGeometries(); ++i) { Polygon p = (Polygon) geom.getGeometryN(i); gen.writeStartArray(); writeCoordinates(p.getExteriorRing().getCoordinates(), gen); for (int j = 0; j < p.getNumInteriorRing(); ++j) { writeCoordinates(p.getInteriorRingN(j).getCoordinates(), gen); } gen.writeEndArray(); } gen.writeEndArray(); }
From source file:org.commonjava.maven.atlas.graph.jackson.ProjectRelationshipSerializer.java
/**
 * Serializes a project relationship: the common fields, the non-null source
 * URIs, the declaring/target refs, then type-specific fields per relationship
 * type, and finally the index.
 */
@SuppressWarnings("incomplete-switch")
@Override
public void serialize(final T value, final JsonGenerator gen, final SerializerProvider provider)
        throws IOException, JsonGenerationException {
    gen.writeStartObject();
    gen.writeStringField(RELATIONSHIP_TYPE, value.getType().name());
    gen.writeStringField(POM_LOCATION_URI, value.getPomLocation().toString());
    gen.writeBooleanField(INHERITED, value.isInherited());

    Set<URI> sources = value.getSources();
    if (sources != null) {
        // Skip null entries WITHOUT mutating the set: the original removed them
        // in place via Iterator.remove(), silently altering the relationship's
        // own source set as a side effect of serialization. The emitted JSON
        // is identical either way.
        boolean hasSource = false;
        for (URI uri : sources) {
            if (uri != null) {
                hasSource = true;
                break;
            }
        }
        if (hasSource) {
            gen.writeArrayFieldStart(SOURCE_URIS);
            for (URI uri : sources) {
                if (uri != null) {
                    gen.writeString(uri.toString());
                }
            }
            gen.writeEndArray();
        }
    }

    provider.defaultSerializeField(DECLARING_REF, value.getDeclaring(), gen);
    provider.defaultSerializeField(TARGET_REF, value.getTarget(), gen);

    // Only the types below carry extra fields; the switch is intentionally
    // incomplete (see the suppression above).
    switch (value.getType()) {
    case BOM:
        gen.writeBooleanField(MIXIN, value.isMixin());
        break;
    case DEPENDENCY: {
        gen.writeStringField(SCOPE, ((DependencyRelationship) value).getScope().realName());
        gen.writeBooleanField(MANAGED, value.isManaged());
        gen.writeBooleanField(OPTIONAL, ((DependencyRelationship) value).isOptional());
        break;
    }
    case PLUGIN_DEP: {
        provider.defaultSerializeField(PLUGIN_REF, ((PluginDependencyRelationship) value).getPlugin(), gen);
        gen.writeBooleanField(MANAGED, value.isManaged());
        break;
    }
    case PLUGIN: {
        gen.writeBooleanField(MANAGED, value.isManaged());
        gen.writeBooleanField(REPORTING, ((PluginRelationship) value).isReporting());
        break;
    }
    }

    gen.writeNumberField(INDEX, value.getIndex());
    gen.writeEndObject();
}
From source file:com.baasbox.configuration.PropertiesConfigurationHelper.java
/*** * * Returns a json representation of the Enumerator * The Enumerator must implements the IProperties interface * @param en the Enumerator to serialize. It must implements the IProperties interface * @return the representation of the Enumerator *///from www. j ava 2 s. com @SuppressWarnings("unchecked") public static String dumpConfigurationAsJson(String section) { Class en = CONFIGURATION_SECTIONS.get(section); try { JsonFactory jfactory = new JsonFactory(); StringWriter sw = new StringWriter(); String enumDescription = ""; JsonGenerator gen = jfactory.createJsonGenerator(sw); Method getEnumDescription = en.getMethod("getEnumDescription"); if (getEnumDescription != null && getEnumDescription.getReturnType() == String.class && Modifier.isStatic(getEnumDescription.getModifiers())) enumDescription = (String) getEnumDescription.invoke(null); gen.writeStartObject(); //{ gen.writeStringField("section", section); // "configuration":"EnumName" gen.writeStringField("description", enumDescription); // ,"description": "EnumDescription" gen.writeFieldName("sub sections"); // ,"sections": gen.writeStartObject(); // { String lastSection = ""; EnumSet values = EnumSet.allOf(en); for (Object v : values) { String key = (String) (en.getMethod("getKey")).invoke(v); boolean isVisible = (Boolean) (en.getMethod("isVisible")).invoke(v); String valueAsString; if (isVisible) valueAsString = (String) (en.getMethod("getValueAsString")).invoke(v); else valueAsString = "--HIDDEN--"; boolean isEditable = (Boolean) (en.getMethod("isEditable")).invoke(v); String valueDescription = (String) (en.getMethod("getValueDescription")).invoke(v); Class type = (Class) en.getMethod("getType").invoke(v); String subsection = key.substring(0, key.indexOf('.')); if (!lastSection.equals(subsection)) { if (gen.getOutputContext().inArray()) gen.writeEndArray(); gen.writeFieldName(subsection); // "sectionName": gen.writeStartArray(); // [ lastSection = subsection; } boolean isOverridden = (Boolean) 
(en.getMethod("isOverridden")).invoke(v); gen.writeStartObject(); // { gen.writeStringField(key, valueAsString); // "key": "value" gen.writeStringField("description", valueDescription); // ,"description":"description" gen.writeStringField("type", type.getSimpleName()); // ,"type":"type" gen.writeBooleanField("editable", isEditable); // ,"editable":"true|false" gen.writeBooleanField("visible", isVisible); // ,"visible":"true|false" gen.writeBooleanField("overridden", isOverridden); // ,"overridden":"true|false" gen.writeEndObject(); // } } if (gen.getOutputContext().inArray()) gen.writeEndArray(); // ] gen.writeEndObject(); // } gen.writeEndObject(); //} gen.close(); return sw.toString(); } catch (Exception e) { BaasBoxLogger.error("Cannot generate a json for " + en.getSimpleName() + " Enum. Is it an Enum that implements the IProperties interface?", e); } return "{}"; }
From source file:org.apache.arrow.vector.ipc.JsonFileWriter.java
/**
 * Writes every used dictionary batch under the {@code "dictionaries"} JSON
 * array field: one object per id, with the dictionary's vector data written
 * via {@code writeBatch}.
 */
private void writeDictionaryBatches(JsonGenerator generator, Set<Long> dictionaryIdsUsed,
        DictionaryProvider provider) throws IOException {
    generator.writeArrayFieldStart("dictionaries");
    for (Long dictionaryId : dictionaryIdsUsed) {
        generator.writeStartObject();
        generator.writeObjectField("id", dictionaryId);
        generator.writeFieldName("data");
        FieldVector vector = provider.lookup(dictionaryId).getVector();
        List<Field> fields = ImmutableList.of(vector.getField());
        List<FieldVector> vectors = ImmutableList.of(vector);
        // NOTE(review): the VectorSchemaRoot is never closed here — verify
        // whether ownership of the buffers stays with the dictionary vector.
        VectorSchemaRoot root = new VectorSchemaRoot(fields, vectors, vector.getValueCount());
        writeBatch(root);
        generator.writeEndObject();
    }
    generator.writeEndArray();
}
From source file:com.ning.metrics.action.hdfs.reader.HdfsEntry.java
public void toJson(final JsonGenerator generator) throws IOException { Iterator<Row> content = null; try {//from ww w. j ava 2s . c om content = getContent(); } catch (IOException ignored) { } generator.writeStartObject(); generator.writeObjectField(JSON_ENTRY_PATH, getPath()); generator.writeObjectField(JSON_ENTRY_MTIME, getModificationDate().getMillis()); generator.writeObjectField(JSON_ENTRY_SIZE, getSize()); generator.writeObjectField(JSON_ENTRY_REPLICATION, getReplication()); generator.writeObjectField(JSON_ENTRY_IS_DIR, isDirectory()); // Important: need to flush before appending pre-serialized events generator.flush(); generator.writeArrayFieldStart(JSON_ENTRY_CONTENT); if (content != null) { while (content.hasNext()) { content.next().toJSON(generator); } } generator.writeEndArray(); generator.writeEndObject(); generator.flush(); }