Example usage for com.fasterxml.jackson.databind JsonNode toString

List of usage examples for com.fasterxml.jackson.databind JsonNode toString

Introduction

On this page you can find example usage for com.fasterxml.jackson.databind JsonNode toString.

Prototype

public abstract String toString();
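
JsonNode.toString() serializes the node and all of its children to JSON text; for the standard node types the output is compact, single-line JSON. A minimal, self-contained sketch:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonNodeToStringExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Parse a small document into a tree, then serialize it back.
        JsonNode node = mapper.readTree("{ \"name\" : \"demo\", \"tags\" : [ 1, 2 ] }");
        System.out.println(node.toString()); // prints: {"name":"demo","tags":[1,2]}
    }
}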

Usage

From source file:org.ms123.common.workflow.converter.Simpl4FilterTaskJsonConverter.java

protected FlowElement convertJsonToElement(JsonNode elementNode, JsonNode modelNode,
        Map<String, JsonNode> shapeMap) {
    ServiceTask task = new ServiceTask();
    Map elementMap = (Map) m_ds.deserialize(elementNode.toString());
    Map<String, Object> propMap = (Map) elementMap.get("properties");

    String clazz = Simpl4BpmnJsonConverter.getFullnameForTask("TaskFilterExecutor");
    task.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_CLASS);
    task.setImplementation(clazz);

    FieldExtension field = new FieldExtension();
    field.setFieldName(VARMAPPING);
    String variablesmapping = getValue(VARMAPPING, propMap.get(VARMAPPING_PROP));
    field.setExpression(variablesmapping);
    task.getFieldExtensions().add(field);

    field = new FieldExtension();
    field.setFieldName(FILTERNAME);
    field.setStringValue(getValue(FILTERNAME, propMap.get(FILTERNAME_PROP)));
    task.getFieldExtensions().add(field);

    field = new FieldExtension();
    field.setFieldName(FILTEROBJECT);
    field.setStringValue(getValue(FILTEROBJECT, propMap.get(FILTEROBJECT_PROP)));
    task.getFieldExtensions().add(field);

    field = new FieldExtension();
    field.setFieldName(FILTERVARNAME);
    field.setStringValue(checkNull(FILTERVARNAME, propMap.get(FILTERVARNAME_PROP)));
    task.getFieldExtensions().add(field);
    return task;
}
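
The toString()/deserialize round trip above hands the Jackson tree to the project-specific m_ds datamapper to obtain plain Maps. With Jackson alone the same Map view can be had without the intermediate String; a minimal sketch, assuming only a plain ObjectMapper:

import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TreeToMap {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Convert a tree node directly to a Map, skipping the toString()/parse round trip.
    @SuppressWarnings("unchecked")
    static Map<String, Object> toMap(JsonNode elementNode) {
        return MAPPER.convertValue(elementNode, Map.class);
    }
}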

From source file:actors.DeployLogRelay.java

private void pushLog(final JsonNode node) {
    final StringBuilder builder = new StringBuilder();
    builder.append("event: log\n");
    builder.append("data: ");
    final Iterable<String> split = Splitter.on("\n").split(node.toString());
    Joiner.on("\ndata: ").appendTo(builder, split);
    builder.append("\n\n");

    _channel.push(builder.toString());
}
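
Note that toString() emits compact, single-line JSON, so the Splitter/Joiner pair normally yields a single data: line; it keeps the frame valid if a payload ever spans multiple lines, since the Server-Sent Events format requires a data: prefix on every line. A minimal sketch of the framing, with a hypothetical payload:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;

public class SseFrameExample {
    public static void main(String[] args) throws Exception {
        JsonNode node = new ObjectMapper().readTree("{\"host\":\"web-1\",\"line\":\"deploy finished\"}");
        StringBuilder builder = new StringBuilder("event: log\ndata: ");
        // Re-prefix every line of the payload with "data: ", as the SSE format requires.
        Joiner.on("\ndata: ").appendTo(builder, Splitter.on("\n").split(node.toString()));
        builder.append("\n\n");
        System.out.print(builder);
        // prints:
        // event: log
        // data: {"host":"web-1","line":"deploy finished"}
    }
}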

From source file:com.vz.onosproject.zeromqprovider.AppWebResource.java

/**
 * Installs flows to the downstream ZMQ device.
 * @param stream blob flow rule
 * @return 200 OK
 */
@POST
@Path("flows")
@Consumes(MediaType.APPLICATION_JSON)

public Response persistFlow(InputStream stream) {
    log.info("#### Pushing a flow");
    ObjectNode jsonTree = null;
    List<String> devices = controller.getAvailableDevices();
    try {
        jsonTree = (ObjectNode) mapper().readTree(stream);
        JsonNode devId = jsonTree.get("DeviceId");
        JsonNode payload = jsonTree.get("Payload");

        // Validate before dereferencing: get() returns null for missing fields.
        if (devId == null || devId.asText().isEmpty() || !devices.contains(devId.asText())) {
            throw new IllegalArgumentException(INVALID_DEVICEID);
        }
        if (payload == null || payload.toString().isEmpty()) {
            throw new IllegalArgumentException(INVALID_FLOW);
        }
        String sPayload = payload.toString();

        log.info("Device Id " + devId.asText());
        log.info("Payload text value " + payload.textValue() + ", toString " + sPayload + ", type "
                + payload.getNodeType().toString());

        DeviceId deviceId = DeviceId.deviceId(devId.asText());
        Blob blob = new Blob(sPayload.getBytes());

        store.InsertBlob(deviceId, blob);
        controller.writeToDevice(deviceId, blob);
        incrPostCount();
        log.info("#### Total num of posts : " + getNumPostRecieved());

        return Response.ok().build();
    } catch (Exception e) {
        log.error("###### ERROR " + e.getMessage(), e);
    }
    return Response.noContent().build();
}

From source file:org.opendaylight.groupbasedpolicy.jsonrpc.JsonRpcEndpoint.java

/**
 * Handle an {@link RpcMessage} response from the peer.
 *
 * @param response a fully parsed Jackson tree-model JsonNode
 * @throws NoSuchMethodException internal error
 */
public void processResult(JsonNode response) throws NoSuchMethodException {

    logger.warn("Response : {}", response.toString());
    CallContext returnCtxt = methodContext.get(response.get("id").asText());
    if (returnCtxt == null)
        return;
    RpcMessage message = messageMap.get(returnCtxt.getMethod());
    if (message != null) {
        try {
            RpcMessage handler = objectMapper.treeToValue(response, message.getClass());

            JsonNode error = response.get("error");
            if (error != null && !error.isNull()) {
                logger.error("Error : {}", error.toString());
            }

            returnCtxt.getFuture().set(handler);
        } catch (JsonProcessingException e) {
            logger.error("Unable to handle " + returnCtxt.getMethod(), e);
        }
    } else {
        throw new RuntimeException("The response to " + returnCtxt.getMethod() + "sent is unsupported");
    }
}
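
The method above logs response.toString() for diagnostics and then binds the already-parsed tree with objectMapper.treeToValue(...), avoiding a second parse of the raw text. A minimal sketch of the same pattern; the Reply class is a hypothetical stand-in for an RpcMessage subtype:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TreeBindExample {
    // Hypothetical response type standing in for an RpcMessage subclass.
    public static class Reply {
        public String id;
        public String result;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode response = mapper.readTree("{\"id\":\"42\",\"result\":\"ok\"}");
        System.out.println("Response : " + response.toString());
        // Bind the tree to a POJO without re-parsing the text.
        Reply reply = mapper.treeToValue(response, Reply.class);
        System.out.println(reply.result); // prints: ok
    }
}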

From source file:io.mesosphere.mesos.frameworks.cassandra.scheduler.api.ApiControllerTest.java

@Test
public void testRoot() throws Exception {
    final Tuple2<Integer, JsonNode> tup = fetchJson("/", false);
    assertEquals(200, tup._1.intValue());
    final JsonNode json = tup._2;
    final String baseUri = resolve("/").toString();

    final ObjectMapper mapper = new ObjectMapper();
    final List<ApiEndpoint> list = mapper.readValue(json.toString(), LIST_TYPE_REFERENCE);

    assertThat(list).isEqualTo(newArrayList(new ApiEndpoint("GET", "config", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/backup/start", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/backup/abort", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/backup/status", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/backup/last", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/cleanup/start", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/cleanup/abort", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/cleanup/status", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/cleanup/last", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/repair/start", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/repair/abort", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/repair/status", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/repair/last", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/restore/start?name=$name", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/restore/abort", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/restore/status", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/restore/last", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/rolling-restart/start", newArrayList("application/json")),
            new ApiEndpoint("POST", "cluster/rolling-restart/abort", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/rolling-restart/status", newArrayList("application/json")),
            new ApiEndpoint("GET", "cluster/rolling-restart/last", newArrayList("application/json")),
            new ApiEndpoint("GET", "node/all", newArrayList("application/json")),
            new ApiEndpoint("GET", "node/seed/all", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/stop/", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/start/", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/restart/", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/terminate/", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/replace/", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/rackdc", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/make-seed/", newArrayList("application/json")),
            new ApiEndpoint("POST", "node/{node}/make-non-seed/", newArrayList("application/json")),
            new ApiEndpoint("GET", "live-nodes", newArrayList("application/json")),
            new ApiEndpoint("GET", "live-nodes/text", newArrayList("text/plain")),
            new ApiEndpoint("GET", "live-nodes/cqlsh", newArrayList("text/x-cassandra-cqlsh")),
            new ApiEndpoint("GET", "live-nodes/nodetool", newArrayList("text/x-cassandra-nodetool")),
            new ApiEndpoint("GET", "live-nodes/stress", newArrayList("text/x-cassandra-stress")),
            new ApiEndpoint("GET", "qa/report/resources", newArrayList("application/json", "text/plain")),
            new ApiEndpoint("POST", "scale/nodes?nodeCount={nodeCount}", newArrayList("application/json")),
            new ApiEndpoint("GET", "health/process", newArrayList("application/json")),
            new ApiEndpoint("GET", "health/cluster", newArrayList("application/json")),
            new ApiEndpoint("GET", "health/cluster/report", newArrayList("application/json"))));
}

From source file:io.streamdata.SampleEventSource.java

@Override
public void run() {

    try {

        // Build an EventSource object to handle Streamdata.io Server-Sent Events for this target.
        // Streamdata.io sends two types of events:
        // 'data' event: the first event received; it provides the full data set and initializes the
        // data stream. It is triggered whenever a fresh JSON data set is pushed by Streamdata.io
        // coming from the API.
        // 'patch' event: a JSON Patch to apply to the current data set. It is triggered whenever a
        // fresh JSON patch is pushed by Streamdata.io coming from the API and has to be applied to
        // the latest data set.
        LOGGER.debug("Starting EventSource ...");
        eventSource = new EventSource(target) {
            @Override
            public void onEvent(InboundEvent inboundEvent) {
                // get event name
                String eventName = inboundEvent.getName();
                // get event data
                String eventData = new String(inboundEvent.getRawData());

                if ("data".equals(eventName)) {
                    // event name is "data": handle reception of a 'data' event
                    try {
                        // simply set local storage with this data set
                        data = jsonObjectMapper.readTree(eventData);
                        LOGGER.debug("Data received : {}\n\n", data.toString());
                    } catch (IOException e) {
                        LOGGER.error("Data received is not Json format: {}", eventData, e);
                        // closing stream in case of error
                        this.close();
                    }
                } else if ("patch".equals(eventName)) {
                    // event name is "patch": handle reception of a 'patch' event
                    try {
                        // read the patch and store it in a local variable
                        JsonNode patchNode = jsonObjectMapper.readTree(eventData);
                        LOGGER.debug("Patch received : {}", patchNode.toString());
                        // apply patch to the local storage
                        LOGGER.debug("Applying patch ...");
                        data = JsonPatch.apply(patchNode, data);
                        // data set is then updated.
                        LOGGER.debug("Data updated: {}\n\n", data.toString());
                    } catch (IOException e) {
                        LOGGER.error("Patch received is not Json format: {}.", eventData, e);
                        // closing stream in case of error
                        this.close();
                    }
                } else if ("error".equals(eventName)) {
                    LOGGER.error("An error occured: {}.", eventData);
                    // closing stream in case of error
                    this.close();
                } else {
                    // add code here for any other event type
                    // streamdata.io
                    LOGGER.debug("Unhandled event received: {}\n\n", eventData);
                }
            }
        };
    } catch (Exception e) {
        LOGGER.error("An Error occured.", e);
        close();
        System.exit(0);
    }

}
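
The patch branch above applies an RFC 6902 JSON Patch to the cached data set. A minimal round trip, assuming a zjsonpatch-style library whose JsonPatch.apply(patch, source) signature matches the call above:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.flipkart.zjsonpatch.JsonPatch;

public class PatchExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode data = mapper.readTree("{\"price\":10}");
        // RFC 6902 patch: replace the value at /price with 12.
        JsonNode patch = mapper.readTree("[{\"op\":\"replace\",\"path\":\"/price\",\"value\":12}]");
        data = JsonPatch.apply(patch, data);
        System.out.println(data.toString()); // prints: {"price":12}
    }
}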

From source file:org.dspace.app.rest.repository.MetadataSchemaRestRepository.java

@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected MetadataSchemaRest put(Context context, HttpServletRequest request, String apiCategory, String model,
        Integer id, JsonNode jsonNode) throws SQLException, AuthorizeException {

    MetadataSchemaRest metadataSchemaRest = new Gson().fromJson(jsonNode.toString(), MetadataSchemaRest.class);

    if (isBlank(metadataSchemaRest.getPrefix())) {
        throw new UnprocessableEntityException("metadata schema name cannot be blank");
    }
    if (isBlank(metadataSchemaRest.getNamespace())) {
        throw new UnprocessableEntityException("metadata schema namespace cannot be blank");
    }

    if (!Objects.equals(id, metadataSchemaRest.getId())) {
        throw new UnprocessableEntityException("ID in request doesn't match path ID");
    }

    MetadataSchema metadataSchema = metadataSchemaService.find(context, id);
    if (metadataSchema == null) {
        throw new ResourceNotFoundException("metadata schema with id: " + id + " not found");
    }

    metadataSchema.setName(metadataSchemaRest.getPrefix());
    metadataSchema.setNamespace(metadataSchemaRest.getNamespace());

    try {
        metadataSchemaService.update(context, metadataSchema);
        context.commit();
    } catch (NonUniqueMetadataException e) {
        throw new UnprocessableEntityException("metadata schema " + metadataSchemaRest.getPrefix() + "."
                + metadataSchemaRest.getNamespace() + " already exists");
    }

    return converter.fromModel(metadataSchema);
}
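
Here toString() bridges two JSON libraries: Spring hands over a Jackson tree, and Gson re-parses the compact text into the REST model. A minimal sketch of the hand-off; SchemaRest is a hypothetical stand-in for MetadataSchemaRest:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

public class JacksonToGsonBridge {
    // Hypothetical bean standing in for the Gson-mapped REST model.
    public static class SchemaRest {
        public String prefix;
        public String namespace;
    }

    public static void main(String[] args) throws Exception {
        JsonNode jsonNode = new ObjectMapper()
                .readTree("{\"prefix\":\"dc\",\"namespace\":\"http://dublincore.org/\"}");
        // toString() yields valid compact JSON that Gson can re-parse.
        SchemaRest rest = new Gson().fromJson(jsonNode.toString(), SchemaRest.class);
        System.out.println(rest.prefix); // prints: dc
    }
}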

From source file:org.n52.iceland.config.json.JsonSettingsDao.java

private void numberDecodeError(SettingType type, JsonNode node) {
    throw new ConfigurationError(String.format("Cannot decode setting to %s type: node type = %s, value = >%s<",
            type, node.getNodeType(), node.toString()));
}

From source file:com.ikanow.aleph2.v1.document_db.utils.TestJsonNodeBsonUtils.java

@Test
public void test_mapWritableWrapper() {
    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final BasicDBObject m1 = new BasicDBObject();

    m1.put("test1", true);

    final BasicDBObject m2 = new BasicDBObject();
    m2.put("nested", m1);
    m2.put("test2", "test2");

    final BasicDBList a1 = new BasicDBList();
    a1.add(4);
    a1.add(5);

    final BasicDBList a2 = new BasicDBList();
    a2.add(m1);
    a2.add(m1);

    m2.put("array", a2);
    m1.put("array", a1);

    final JsonNode j2 = JsonNodeBsonUtils.from(m2);

    assertEquals(3, j2.size());

    // Check j's contents
    assertEquals(Stream.of("nested", "test2", "array").sorted().collect(Collectors.toList()),
            Optionals.streamOf(j2.fieldNames(), false).sorted().collect(Collectors.toList()));
    assertEquals("test2", j2.get("test2").asText());

    final JsonNode j1 = j2.get("nested");
    assertEquals(2, j1.size());
    final JsonNode j1b = JsonNodeBsonUtils.from(m1);
    assertTrue("entrySet wrong: " + j1b.toString(), "{\"test1\":true,\"array\":[4,5]}".equals(j1b.toString())
            || "{\"array\":[4,5],\"test1\":true}".equals(j1b.toString())); //(tests entrySet)
    final ArrayNode an = mapper.createArrayNode();
    an.add(mapper.convertValue(4, JsonNode.class));
    an.add(mapper.convertValue(5, JsonNode.class));
    assertEquals(Arrays.asList(mapper.convertValue(true, JsonNode.class), an),
            Optionals.streamOf(((ObjectNode) j1).elements(), false).collect(Collectors.toList()));

    // OK, now test adding:

    assertEquals(2, j1.size());

    final ObjectNode o1 = (ObjectNode) j1;
    o1.put("added", "added_this");

    final ObjectNodeWrapper o1c = (ObjectNodeWrapper) o1;
    assertFalse(o1c.containsKey("not_present"));
    assertTrue(o1c.containsKey("added"));
    assertTrue(o1c.containsKey("test1"));

    assertEquals(Stream.of("test1", "array", "added").sorted().collect(Collectors.toList()),
            Optionals.streamOf(j1.fieldNames(), false).sorted().collect(Collectors.toList()));
    assertEquals(
            Arrays.asList(mapper.convertValue(true, JsonNode.class), an,
                    mapper.convertValue("added_this", JsonNode.class)),
            Optionals.streamOf(((ObjectNode) j1).elements(), false).collect(Collectors.toList()));
    assertTrue(j1.toString().contains("added_this"));
    assertTrue(j1.toString().contains("4,5"));

    assertEquals(mapper.convertValue("added_this", JsonNode.class), j1.get("added"));

    assertEquals(3, j1.size());

    // OK now test removing:

    assertEquals(null, o1.remove("not_present"));
    assertEquals(mapper.convertValue(true, JsonNode.class), o1.remove("test1"));
    assertEquals(2, o1.size());
    ObjectNode o1b = o1.remove(Arrays.asList("added", "array"));
    assertEquals(0, o1.size());
    assertEquals(0, o1b.size());

    o1.setAll(JsonNodeBsonUtils.from(m1)); // will be minus one object
    assertEquals(2, o1.size());
    assertTrue(o1c.containsValue(mapper.convertValue(true, JsonNode.class)));
    assertFalse(o1c.containsValue("banana"));

    final ObjectNodeWrapper o2 = (ObjectNodeWrapper) JsonNodeBsonUtils.from(m2);
    assertFalse(o2.isEmpty());
    assertTrue(o2.containsKey("array"));
    assertFalse(o2.containsValue("array"));
    assertTrue(o2.containsValue(mapper.convertValue("test2", JsonNode.class)));
    assertEquals(TextNode.class, o2.remove("test2").getClass());
    assertEquals(2, o2.size());
    o2.removeAll();
    assertEquals(0, o2.size());
}

From source file:io.confluent.connect.elasticsearch.internals.HttpClient.java

private Bulk constructBulk(RecordBatch batch, Callback<Response> callback) {
    Bulk.Builder builder = new Bulk.Builder();
    List<ESRequest> requests = batch.requests();
    for (ESRequest request : requests) {
        JsonNode data;
        try {
            data = objectMapper.readTree(request.getPayload());
        } catch (IOException e) {
            callback.onFailure(e);
            // Skip this request: data would be null and throw an NPE on toString() below.
            continue;
        }
        Index index = new Index.Builder(data.toString()).index(request.getIndex()).type(request.getType())
                .id(request.getId()).build();
        builder.addAction(index);
    }
    return builder.build();
}