Example usage for com.fasterxml.jackson.core JsonGenerator flush

Introduction

This page collects usage examples for the flush() method of com.fasterxml.jackson.core.JsonGenerator, drawn from various open-source projects.

Prototype

@Override
public abstract void flush() throws IOException;

Document

Method called to flush any buffered content to the underlying target (output stream, writer), and to flush the target itself as well.
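
For orientation, here is a minimal sketch (not taken from any of the projects below) that writes a small JSON document to a StringWriter and then calls flush() so the buffered content becomes visible in the writer:

import java.io.StringWriter;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class FlushSketch {
    public static void main(String[] args) throws Exception {
        StringWriter writer = new StringWriter();
        JsonGenerator generator = new JsonFactory().createGenerator(writer);
        generator.writeStartObject();
        generator.writeStringField("message", "hello");
        generator.writeEndObject();
        // Drain Jackson's internal buffer to the writer and flush the writer itself.
        generator.flush();
        System.out.println(writer.toString()); // prints {"message":"hello"}
        generator.close();
    }
}

Whether flush() is also propagated to the underlying stream or writer is governed by JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM, which is enabled by default.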

Usage

From source file: com.ibm.ws.lars.rest.ErrorHandler.java

@Override
protected void service(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    response.setStatus(500);
    response.setContentType(MediaType.APPLICATION_JSON);
    PrintWriter printWriter = response.getWriter();
    JsonGenerator frontPageJsonGenerator = new JsonFactory().createGenerator(printWriter);
    frontPageJsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter());

    frontPageJsonGenerator.writeStartObject();
    frontPageJsonGenerator.writeStringField("message",
            "Internal server error, please contact the server administrator");
    frontPageJsonGenerator.writeNumberField("statusCode", response.getStatus());
    frontPageJsonGenerator.writeEndObject();

    frontPageJsonGenerator.flush();
    frontPageJsonGenerator.close();
}
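
In a servlet handler like the one above, flush() also flushes the response's PrintWriter. A minimal variation (not part of the original source) that drains only Jackson's internal buffer without flushing the PrintWriter would disable the propagation feature:

JsonGenerator frontPageJsonGenerator = new JsonFactory().createGenerator(printWriter);
// With FLUSH_PASSED_TO_STREAM disabled, flush() still writes out buffered JSON content,
// but does not call printWriter.flush().
frontPageJsonGenerator.disable(JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM);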

From source file: org.mashti.jetson.json.JsonRequestEncoder.java

@Override
protected void encodeRequest(final ChannelHandlerContext context, final Integer id, final Method method,
        final Object[] arguments, final ByteBuf out) throws RPCException {

    JsonGenerator generator = null;
    try {
        generator = createJsonGenerator(out);
        generator.writeStartObject();
        generator.writeObjectField(ID_KEY, id);
        generator.writeObjectField(VERSION_KEY, DEFAULT_VERSION);
        generator.writeObjectField(METHOD_NAME_KEY, dispatch.get(method));
        writeRequestParameters(method, arguments, generator);
        generator.writeEndObject();
        generator.flush();
        generator.close();
    } catch (final JsonProcessingException e) {

        LOGGER.debug("failed to encode request", e);
        throw new InternalServerException(e);
    } catch (final IOException e) {
        LOGGER.debug("IO error occurred while encoding request", e);
        throw new TransportException(e);
    } finally {
        CloseableUtil.closeQuietly(generator);
    }
}

From source file: org.canova.api.conf.Configuration.java

/**
 *  Writes out all the parameters and their properties (final and resource) to
 *  the given {@link Writer}
 *  The format of the output would be
 *  { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 *  key2.isFinal,key2.resource}... ] }
 *  It does not output the parameters of the configuration object which is
 *  loaded from an input stream.
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration conf, Writer out) throws IOException {
    Configuration config = new Configuration(conf, true);
    config.reloadConfiguration();
    JsonFactory dumpFactory = new JsonFactory();
    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
    dumpGenerator.writeStartObject();
    dumpGenerator.writeFieldName("properties");
    dumpGenerator.writeStartArray();
    dumpGenerator.flush();
    for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
        dumpGenerator.writeStartObject();
        dumpGenerator.writeStringField("key", (String) item.getKey());
        dumpGenerator.writeStringField("value", config.get((String) item.getKey()));
        dumpGenerator.writeBooleanField("isFinal", config.finalParameters.contains(item.getKey()));
        dumpGenerator.writeStringField("resource", config.updatingResource.get(item.getKey()));
        dumpGenerator.writeEndObject();
    }
    dumpGenerator.writeEndArray();
    dumpGenerator.writeEndObject();
    dumpGenerator.flush();
}
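
Note the intermediate flush() immediately after writeStartArray(): presumably it pushes the opening of the JSON document to the Writer before the property loop runs, so a consumer reading the stream sees output early.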

From source file: org.springframework.cloud.netflix.metrics.atlas.AtlasMetricObserver.java

private void sendMetricsBatch(List<Metric> metrics) {
    try {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        JsonGenerator gen = smileFactory.createGenerator(output, JsonEncoding.UTF8);

        gen.writeStartObject();

        writeCommonTags(gen);
        if (writeMetrics(gen, metrics) == 0)
            return; // short circuit this batch if no valid/numeric metrics existed

        gen.writeEndObject();
        gen.flush();

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.valueOf("application/x-jackson-smile"));
        HttpEntity<byte[]> entity = new HttpEntity<>(output.toByteArray(), headers);
        try {
            restTemplate.exchange(uri, HttpMethod.POST, entity, Map.class);
        } catch (HttpClientErrorException e) {
            logger.error("Failed to write metrics to atlas: " + e.getResponseBodyAsString(), e);
        } catch (RestClientException e) {
            logger.error("Failed to write metrics to atlas", e);
        }
    } catch (IOException e) {
        // an IOException stemming from the generator writing to a
        // ByteArrayOutputStream is impossible
        throw new RuntimeException(e);
    }
}

From source file: com.viridiansoftware.metrics.elasticsearch.ElasticsearchReporter.java

private void reportCounter(String index, long timestamp, String name, Counter counter) throws IOException {
    StringWriter writer = new StringWriter();
    JsonGenerator jsonGenerator = jsonFactory.createGenerator(writer);
    jsonGenerator.writeStartObject();
    jsonGenerator.writeNumberField(timestampFieldName, timestamp);
    jsonGenerator.writeStringField("@name", prefixMetricName(name));
    jsonGenerator.writeNumberField("count", counter.getCount());
    jsonGenerator.writeEndObject();
    jsonGenerator.flush();
    addReportToBulkRequest(index, MetricElasticsearchTypes.COUNTER, writer.toString());
}

From source file: ratpack.codahale.metrics.internal.WebSocketReporter.java

@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
        SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters,
        SortedMap<String, Timer> timers) {
    try {
        OutputStream out = new ByteArrayOutputStream();
        JsonGenerator json = factory.createGenerator(out);

        json.writeStartObject();
        json.writeNumberField("timestamp", clock.getTime());
        writeTimers(json, timers);
        writeGauges(json, gauges);
        writeMeters(json, meters);
        writeCounters(json, counters);
        writeHistograms(json, histograms);
        json.writeEndObject();

        json.flush();
        json.close();

        metricsBroadcaster.broadcast(out.toString());
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Exception encountered while reporting metrics: " + e.getLocalizedMessage());
    }
}

From source file: org.fluentd.jvmwatcher.parser.JsonSimpleLogParser.java

@Override
public boolean parseState(PrintWriter out, JvmWatchState state) {
    boolean ret = false;
    JsonFactory jsonFactory = new JsonFactory();
    JsonGenerator generator = null;
    try {
        generator = jsonFactory.createGenerator(out);
        // convert to JSON stream.
        this.outSimpleLog(generator, state);
        ret = true;
    } catch (IOException ex) {
        log.error("Parse output error.", ex);
        ret = false;
    } finally {
        if (null != generator) {
            try {
                // flush to JSON stream.
                generator.flush();
            } catch (IOException ex) {
                log.error("writer flush error.", ex);
            }
        }
    }

    return ret;
}

From source file: org.talend.dataprep.schema.csv.CSVSerializer.java

@Override
public InputStream serialize(InputStream rawContent, DataSetMetadata metadata, long limit) {
    try {
        PipedInputStream pipe = new PipedInputStream();
        PipedOutputStream jsonOutput = new PipedOutputStream(pipe);
        // Serialize asynchronously for better performance (especially if caller doesn't consume all, see sampling).
        Runnable r = () -> {
            final Map<String, String> parameters = metadata.getContent().getParameters();
            final String separator = parameters.get(CSVFormatFamily.SEPARATOR_PARAMETER);
            try (CSVReader reader = new CSVReader(new InputStreamReader(rawContent, metadata.getEncoding()),
                    separator.charAt(0), '\"', '\0')) {
                JsonGenerator generator = new JsonFactory().createGenerator(jsonOutput);
                int i = 0;
                while (i++ < metadata.getContent().getNbLinesInHeader()) {
                    reader.readNext(); // Skip all header lines
                }
                generator.writeStartArray();
                writeLineContent(reader, metadata, generator, separator, limit);
                generator.writeEndArray();
                generator.flush();
            } catch (Exception e) {
                // Consumer may very well interrupt consumption of stream (in case of limit(n) use for sampling).
                // This is not an issue as consumer is allowed to partially consumes results, it's up to the
                // consumer to ensure data it consumed is consistent.
                LOGGER.debug("Unable to continue serialization for {}. Skipping remaining content.",
                        metadata.getId(), e);
            } finally {
                try {
                    jsonOutput.close();
                } catch (IOException e) {
                    LOGGER.error("Unable to close output", e);
                }
            }
        };
        executor.execute(r);
        return pipe;
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.UNABLE_TO_SERIALIZE_TO_JSON, e);
    }
}

From source file: com.attribyte.essem.model.StoredGraph.java

/**
 * Gets this stored graph as a JSON object.
 * @return The JSON as a string.
 * @throws IOException on JSON generation error.
 */
public final String getAsJSON() throws IOException {
    StringWriter writer = new StringWriter();
    JsonGenerator generator = parserFactory.createGenerator(writer);
    generateJSON(generator);
    generator.flush();
    generator.close();
    return writer.toString();
}
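
The flush-then-close sequence above can also be written with try-with-resources, since JsonGenerator implements Closeable and close() flushes any remaining buffered content by default. A minimal sketch of the same pattern (not part of the original class):

StringWriter writer = new StringWriter();
try (JsonGenerator generator = parserFactory.createGenerator(writer)) {
    generateJSON(generator);
    generator.flush(); // optional here: close() flushes remaining content as well
}
return writer.toString();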

From source file: com.cinnober.msgcodec.json.JsonCodec.java

/**
 * Write the group to the stream, but without adding the '$type' field.
 * To decode the JSON the receiver must know what group type to expect.
 *
 * @param group the group to encode.
 * @param out the stream to write to, not null.
 * @throws IOException if the underlying byte sink throws an exception.
 * @throws IllegalArgumentException if the group is not correct or complete, e.g. a required field is missing.
 * Partial data may have been written to the stream.
 */
public void encodeStatic(Object group, OutputStream out) throws IOException {
    if (group == null) {
        out.write(NULL_BYTES);
    } else {
        JsonFactory f = new JsonFactory();
        JsonGenerator g = f.createGenerator(out);
        StaticGroupHandler groupHandler = lookupGroupByValue(group);
        if (groupHandler == null) {
            throw new IllegalArgumentException("Cannot encode group (unknown type)");
        }
        groupHandler.writeValue(group, g, false);
        g.flush();
    }
}