Example usage for com.fasterxml.jackson.core JsonGenerator flush

List of usage examples for com.fasterxml.jackson.core JsonGenerator flush

Introduction

On this page you can find example usage for com.fasterxml.jackson.core JsonGenerator flush.

Prototype

@Override
public abstract void flush() throws IOException;

Documentation

Method called to flush any buffered content to the underlying target (output stream, writer), and to flush the target itself as well.
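
As a minimal, self-contained sketch (not taken from the sources below; it assumes only a default JsonFactory writing to a StringWriter), flush() is what makes buffered output visible on the target before the generator is closed:

import java.io.IOException;
import java.io.StringWriter;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class JsonGeneratorFlushExample {
    public static void main(String[] args) throws IOException {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(writer);

        gen.writeStartObject();
        gen.writeStringField("status", "ok");
        gen.writeEndObject();

        // The generator buffers output internally; without flush() (or close()),
        // the writer may still be empty at this point.
        gen.flush();
        System.out.println(writer.toString()); // prints {"status":"ok"}

        gen.close();
    }
}

Note that close() also flushes any remaining buffered content, so an explicit flush() is mainly useful when the generator must stay open, or when the underlying stream or writer is read or handed off before the generator is closed.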

Usage

From source file: com.joliciel.jochre.search.highlight.HighlightTerm.java

public void toJson(JsonGenerator jsonGen, DecimalFormat df) {
    try {
        jsonGen.writeStartObject();
        jsonGen.writeStringField("field", this.getField());
        jsonGen.writeNumberField("start", this.getStartOffset());
        jsonGen.writeNumberField("end", this.getEndOffset());
        jsonGen.writeNumberField("pageIndex", this.getPageIndex());
        jsonGen.writeNumberField("imageIndex", this.getImageIndex());
        double roundedWeight = df.parse(df.format(this.getWeight())).doubleValue();
        jsonGen.writeNumberField("weight", roundedWeight);
        jsonGen.writeEndObject();

        jsonGen.flush();
    } catch (java.text.ParseException e) {
        LogUtils.logError(LOG, e);
        throw new RuntimeException(e);
    } catch (IOException ioe) {
        LogUtils.logError(LOG, ioe);
        throw new RuntimeException(ioe);
    }
}

From source file: com.proofpoint.event.client.JsonEventWriter.java

public <T> void writeEvents(EventGenerator<T> events, @Nullable final String token, OutputStream out)
        throws IOException {
    checkNotNull(events, "events is null");
    checkNotNull(out, "out is null");

    final JsonGenerator jsonGenerator = jsonFactory.createGenerator(out, JsonEncoding.UTF8);

    jsonGenerator.writeStartArray();

    events.generate(new EventClient.EventPoster<T>() {
        @Override
        public void post(T event) throws IOException {
            JsonSerializer<T> serializer = getSerializer(event, token);
            if (serializer == null) {
                throw new InvalidEventException("Event class [%s] has not been registered as an event",
                        event.getClass().getName());
            }

            serializer.serialize(event, jsonGenerator, null);
        }
    });

    jsonGenerator.writeEndArray();
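    // Flush buffered JSON to the caller-supplied OutputStream; the generator is left open, so the stream is not closed here.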
    jsonGenerator.flush();
}

From source file: org.springframework.data.elasticsearch.core.DefaultResultMapper.java

private String buildJSONFromFields(Collection<SearchHitField> values) {
    JsonFactory nodeFactory = new JsonFactory();
    try {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        JsonGenerator generator = nodeFactory.createGenerator(stream, JsonEncoding.UTF8);
        generator.writeStartObject();
        for (SearchHitField value : values) {
            if (value.getValues().size() > 1) {
                generator.writeArrayFieldStart(value.getName());
                for (Object val : value.getValues()) {
                    generator.writeObject(val);
                }
                generator.writeEndArray();
            } else {
                generator.writeObjectField(value.getName(), value.getValue());
            }
        }
        generator.writeEndObject();
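        // Flush so the buffered JSON reaches the ByteArrayOutputStream before it is converted to a String.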
        generator.flush();
        return new String(stream.toByteArray(), Charset.forName("UTF-8"));
    } catch (IOException e) {
        return null;
    }
}

From source file: com.ning.metrics.action.hdfs.reader.HdfsEntry.java

public void toJson(final JsonGenerator generator) throws IOException {
    Iterator<Row> content = null;
    try {
        content = getContent();
    } catch (IOException ignored) {
    }

    generator.writeStartObject();

    generator.writeObjectField(JSON_ENTRY_PATH, getPath());
    generator.writeObjectField(JSON_ENTRY_MTIME, getModificationDate().getMillis());
    generator.writeObjectField(JSON_ENTRY_SIZE, getSize());
    generator.writeObjectField(JSON_ENTRY_REPLICATION, getReplication());
    generator.writeObjectField(JSON_ENTRY_IS_DIR, isDirectory());
    // Important: need to flush before appending pre-serialized events
    generator.flush();

    generator.writeArrayFieldStart(JSON_ENTRY_CONTENT);
    if (content != null) {
        while (content.hasNext()) {
            content.next().toJSON(generator);
        }
    }
    generator.writeEndArray();

    generator.writeEndObject();
    generator.flush();
}

From source file: net.opentsdb.contrib.tsquare.web.controller.ExtendedApiController.java

@RequestMapping(value = "/grep", method = RequestMethod.GET)
public void grep(@RequestParam(required = false, defaultValue = "") String type,
        @RequestParam(required = false, defaultValue = "wildcard") String method,
        @RequestParam(required = true) String q, final HttpServletResponse servletResponse) throws IOException {

    if (log.isInfoEnabled()) {
        log.info("Suggest {} using {} expression: {}", type, method, q);
    }

    // Do we have a valid type? Note that an empty "type" is valid.
    if (!Strings.isNullOrEmpty(type)) {
        Preconditions.checkArgument(getTsdbManager().getKnownUidKinds().contains(type), "Unknown type: %s",
                type);
    }

    // We can only query hbase using regex, so convert a wildcard query into
    // a regex if necessary.
    final String regex;
    if ("wildcard".equalsIgnoreCase(method)) {
        regex = TsWebUtils.wildcardToRegex(q);
        log.debug("Converted wildcard expression {} to regex: {}", q, regex);
    } else {
        regex = q;
    }

    final UidQuery query = getTsdbManager().newUidQuery();
    query.setRegex(regex);

    if (Strings.isNullOrEmpty(type)) {
        query.includeAllKinds();
    } else {
        query.includeKind(type);
    }

    servletResponse.setContentType("application/json");
    final OutputStream stream = servletResponse.getOutputStream();
    final JsonGenerator json = new JsonFactory().createJsonGenerator(stream);

    try {
        json.writeStartArray();

        query.run(new QueryCallback<Uid>() {
            @Override
            public boolean onResult(final Uid resultObject) {
                try {
                    json.writeString(resultObject.getName());
                    return true;
                } catch (IOException e) {
                    throw new IllegalArgumentException("Unable to serialize " + resultObject + " to JSON", e);
                }
            }
        });

        json.writeEndArray();
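        // Flush the generator so the complete array reaches the servlet response before the stream is closed.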
        json.flush();
    } finally {
        Closeables.close(stream, false);
    }
}

From source file: gov.bnl.channelfinder.ChannelsResource.java

/**
 * GET method for retrieving an instance of Channel identified by <tt>chan</tt>.
 *
 * @param chan channel name
 * @return HTTP Response
 */
@GET
@Path("{chName: " + chNameRegex + "}")
@Produces({ "application/json" })
public Response read(@PathParam("chName") String chan) {
    audit.info("getting ch:" + chan);
    Client client = ElasticSearchClient.getSearchClient();
    String user = securityContext.getUserPrincipal() != null ? securityContext.getUserPrincipal().getName()
            : "";
    try {
        final GetResponse response = client.prepareGet("channelfinder", "channel", chan).execute().actionGet();
        Response r;
        if (response.isExists()) {
            final ObjectMapper mapper = new ObjectMapper();
            mapper.addMixIn(XmlProperty.class, OnlyXmlProperty.class);
            mapper.addMixIn(XmlTag.class, OnlyXmlTag.class);
            StreamingOutput stream = new StreamingOutput() {

                @Override
                public void write(OutputStream os) throws IOException, WebApplicationException {
                    JsonGenerator jg = mapper.getFactory().createGenerator(os, JsonEncoding.UTF8);
                    jg.writeObject(mapper.readValue(response.getSourceAsBytes(), XmlChannel.class));
                    jg.flush();
                    jg.close();
                }
            };
            r = Response.ok(stream).build();
        } else {
            r = Response.status(Response.Status.NOT_FOUND).build();
        }
        log.fine(user + "|" + uriInfo.getPath() + "|GET|OK|" + r.getStatus());
        return r;
    } catch (Exception e) {
        return handleException(user, "GET", Response.Status.INTERNAL_SERVER_ERROR, e);
    }
}

From source file: net.uncontended.precipice.reporting.registry.ToJSON.java

public String write(Summary<Result, Rejected> summary) {
    Slice<Result, Rejected>[] slices = summary.getSlices();

    Class<Result> resultClazz = summary.resultClazz;
    Class<Rejected> rejectedClazz = summary.rejectedClazz;

    StringWriter w = new StringWriter();
    try {
        JsonGenerator generator = jsonFactory.createGenerator(w);
        generator.writeStartObject();
        generator.writeObjectFieldStart("result-to-success?");
        for (Result r : resultClazz.getEnumConstants()) {
            generator.writeObjectField(r.toString(), r.isFailure());
        }
        generator.writeEndObject();
        generator.writeArrayFieldStart("rejected");
        for (Rejected r : rejectedClazz.getEnumConstants()) {
            generator.writeString(r.toString());
        }
        generator.writeEndArray();
        generator.writeArrayFieldStart("slices");
        writeSlice(generator, slices[0]);
        generator.writeEndArray();
        generator.writeEndObject();
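        // Flush buffered output into the StringWriter before it is read back via w.toString().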
        generator.flush();
    } catch (IOException e) {
        e.printStackTrace();
        return EMPTY;
    }

    return w.toString();
}

From source file: org.apache.nifi.processors.elasticsearch.PutElasticsearchHttpRecord.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER)
            .asControllerService(RecordReaderFactory.class);

    // Authentication
    final String username = context.getProperty(USERNAME).evaluateAttributeExpressions(flowFile).getValue();
    final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions(flowFile).getValue();

    OkHttpClient okHttpClient = getClient();
    final ComponentLog logger = getLogger();

    final String baseUrl = trimToEmpty(context.getProperty(ES_URL).evaluateAttributeExpressions().getValue());
    HttpUrl.Builder urlBuilder = HttpUrl.parse(baseUrl).newBuilder().addPathSegment("_bulk");

    // Find the user-added properties and set them as query parameters on the URL
    for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
        PropertyDescriptor pd = property.getKey();
        if (pd.isDynamic()) {
            if (property.getValue() != null) {
                urlBuilder = urlBuilder.addQueryParameter(pd.getName(),
                        context.getProperty(pd).evaluateAttributeExpressions().getValue());
            }
        }
    }
    final URL url = urlBuilder.build().url();

    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(index)) {
        logger.error("No value for index in for {}, transferring to failure", new Object[] { flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    String indexOp = context.getProperty(INDEX_OP).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(indexOp)) {
        logger.error("No Index operation specified for {}, transferring to failure.",
                new Object[] { flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    switch (indexOp.toLowerCase()) {
    case "index":
    case "update":
    case "upsert":
    case "delete":
        break;
    default:
        logger.error("Index operation {} not supported for {}, transferring to failure.",
                new Object[] { indexOp, flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    final String id_path = context.getProperty(ID_RECORD_PATH).evaluateAttributeExpressions(flowFile)
            .getValue();
    final RecordPath recordPath = StringUtils.isEmpty(id_path) ? null : recordPathCache.getCompiled(id_path);
    final StringBuilder sb = new StringBuilder();

    try (final InputStream in = session.read(flowFile);
            final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {

        Record record;
        while ((record = reader.nextRecord()) != null) {

            final String id;
            if (recordPath != null) {
                Optional<FieldValue> idPathValue = recordPath.evaluate(record).getSelectedFields().findFirst();
                if (!idPathValue.isPresent() || idPathValue.get().getValue() == null) {
                    throw new IdentifierNotFoundException(
                            "Identifier Record Path specified but no value was found, transferring {} to failure.");
                }
                id = idPathValue.get().getValue().toString();
            } else {
                id = null;
            }

            // The ID must be valid for all operations except "index". For that case,
            // a missing ID indicates one is to be auto-generated by Elasticsearch
            if (id == null && !indexOp.equalsIgnoreCase("index")) {
                throw new IdentifierNotFoundException(
                        "Index operation {} requires a valid identifier value from a flow file attribute, transferring to failure.");
            }

            final StringBuilder json = new StringBuilder();

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            JsonGenerator generator = factory.createJsonGenerator(out);
            writeRecord(record, record.getSchema(), generator);
            generator.flush();
            generator.close();
            json.append(out.toString());

            if (indexOp.equalsIgnoreCase("index")) {
                sb.append("{\"index\": { \"_index\": \"");
                sb.append(index);
                sb.append("\", \"_type\": \"");
                sb.append(docType);
                sb.append("\"");
                if (!StringUtils.isEmpty(id)) {
                    sb.append(", \"_id\": \"");
                    sb.append(id);
                    sb.append("\"");
                }
                sb.append("}}\n");
                sb.append(json);
                sb.append("\n");
            } else if (indexOp.equalsIgnoreCase("upsert") || indexOp.equalsIgnoreCase("update")) {
                sb.append("{\"update\": { \"_index\": \"");
                sb.append(index);
                sb.append("\", \"_type\": \"");
                sb.append(docType);
                sb.append("\", \"_id\": \"");
                sb.append(id);
                sb.append("\" }\n");
                sb.append("{\"doc\": ");
                sb.append(json);
                sb.append(", \"doc_as_upsert\": ");
                sb.append(indexOp.equalsIgnoreCase("upsert"));
                sb.append(" }\n");
            } else if (indexOp.equalsIgnoreCase("delete")) {
                sb.append("{\"delete\": { \"_index\": \"");
                sb.append(index);
                sb.append("\", \"_type\": \"");
                sb.append(docType);
                sb.append("\", \"_id\": \"");
                sb.append(id);
                sb.append("\" }\n");
            }
        }
    } catch (IdentifierNotFoundException infe) {
        logger.error(infe.getMessage(), new Object[] { flowFile });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;

    } catch (final IOException | SchemaNotFoundException | MalformedRecordException e) {
        logger.error("Could not parse incoming data", e);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    RequestBody requestBody = RequestBody.create(MediaType.parse("application/json"), sb.toString());
    final Response getResponse;
    try {
        getResponse = sendRequestToElasticsearch(okHttpClient, url, username, password, "PUT", requestBody);
    } catch (final Exception e) {
        logger.error("Routing to {} due to exception: {}", new Object[] { REL_FAILURE.getName(), e }, e);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final int statusCode = getResponse.code();

    if (isSuccess(statusCode)) {
        ResponseBody responseBody = getResponse.body();
        try {
            final byte[] bodyBytes = responseBody.bytes();

            JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
            boolean errors = responseJson.get("errors").asBoolean(false);
            // ES has no rollback, so if errors occur, log them and route the whole flow file to failure
            if (errors) {
                ArrayNode itemNodeArray = (ArrayNode) responseJson.get("items");
                if (itemNodeArray.size() > 0) {
                    // All items are returned whether they succeeded or failed, so iterate through the item array
                    // at the same time as the flow file list, logging failures accordingly
                    for (int i = itemNodeArray.size() - 1; i >= 0; i--) {
                        JsonNode itemNode = itemNodeArray.get(i);
                        int status = itemNode.findPath("status").asInt();
                        if (!isSuccess(status)) {
                            String reason = itemNode.findPath("//error/reason").asText();
                            logger.error(
                                    "Failed to insert {} into Elasticsearch due to {}, transferring to failure",
                                    new Object[] { flowFile, reason });
                        }
                    }
                }
                session.transfer(flowFile, REL_FAILURE);
            } else {
                session.transfer(flowFile, REL_SUCCESS);
                session.getProvenanceReporter().send(flowFile, url.toString());
            }

        } catch (IOException ioe) {
            // Something went wrong when parsing the response, log the error and route to failure
            logger.error("Error parsing Bulk API response: {}", new Object[] { ioe.getMessage() }, ioe);
            session.transfer(flowFile, REL_FAILURE);
            context.yield();
        }
    } else if (statusCode / 100 == 5) {
        // 5xx -> RETRY, but a server error might last a while, so yield
        logger.warn(
                "Elasticsearch returned code {} with message {}, transferring flow file to retry. This is likely a server problem, yielding...",
                new Object[] { statusCode, getResponse.message() });
        session.transfer(flowFile, REL_RETRY);
        context.yield();
    } else { // 1xx, 3xx, 4xx, etc. -> NO RETRY
        logger.warn("Elasticsearch returned code {} with message {}, transferring flow file to failure",
                new Object[] { statusCode, getResponse.message() });
        session.transfer(flowFile, REL_FAILURE);
    }
    getResponse.close();
}

From source file: com.basho.riak.client.query.MapReduce.java

/**
 * Creates the JSON string of the M/R job for submitting to the
 * {@link RawClient}
 * 
 * Uses Jackson to write out the JSON string. I'm not very happy with this
 * method, it is a candidate for change.
 * 
 * TODO re-evaluate this method, look for something smaller and more elegant.
 * 
 * @return a String of JSON
 * @throws RiakException
 *             if, for some reason, we can't create a JSON string.
 */
private String writeSpec() throws RiakException {

    final ByteArrayOutputStream out = new ByteArrayOutputStream();

    try {
        JsonGenerator jg = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        jg.setCodec(new ObjectMapper());

        jg.writeStartObject();

        jg.writeFieldName("inputs");
        writeInput(jg);

        jg.writeFieldName("query");
        jg.writeStartArray();

        writePhases(jg);

        jg.writeEndArray();
        if (timeout != null) {
            jg.writeNumberField("timeout", timeout);
        }

        jg.writeEndObject();
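        // Flush so the complete JSON document is in the ByteArrayOutputStream before calling out.toString(...).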
        jg.flush();

        return out.toString("UTF8");
    } catch (IOException e) {
        throw new RiakException(e);
    }
}