Example usage for com.fasterxml.jackson.databind.node ArrayNode get

Introduction

On this page you can find example usage for com.fasterxml.jackson.databind.node ArrayNode get.

Prototype

public JsonNode get(int index)
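
For reference, ArrayNode.get(int index) returns the element at the given index, or null when the index is out of range. Below is a minimal, self-contained sketch of that indexed-access pattern; the class name and literal values are illustrative only and are not taken from the projects listed further down:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

public class ArrayNodeGetExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // readTree returns an ArrayNode when the input is a JSON array
        ArrayNode values = (ArrayNode) mapper.readTree("[\"a\", \"b\", \"c\"]");

        // iterate by index, as most of the examples below do
        for (int i = 0; i < values.size(); i++) {
            JsonNode element = values.get(i);
            System.out.println(element.asText());
        }

        // out-of-range index: get(int) returns null, path(int) returns a MissingNode
        System.out.println(values.get(10));                   // null
        System.out.println(values.path(10).isMissingNode());  // true
    }
}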

Usage

From source file:com.activiti.service.activiti.ProcessInstanceService.java

public List<String> getCurrentActivityInstances(ServerConfig serverConfig, String processInstanceId) {
    URIBuilder builder = clientUtil
            .createUriBuilder(MessageFormat.format(CURRENT_ACTIVITY_INSTANCE_LIST_URL, processInstanceId));
    HttpGet get = new HttpGet(clientUtil.getServerUrl(serverConfig, builder));
    JsonNode node = clientUtil.executeRequest(get, serverConfig);

    List<String> result = new ArrayList<String>();
    if (node.isArray()) {
        ArrayNode data = (ArrayNode) node;
        for (int i = 0; i < data.size(); i++) {
            if (data.get(i) != null) {
                result.add(data.get(i).asText());
            }
        }
    }
    return result;
}

From source file:com.marklogic.samplestack.database.DatabaseTransformsIT.java

@Test
public void commentPatchTransform() {
    // make a user
    contributorService.store(Utils.joeUser);

    // make sure there's no question
    operations.delete(ClientRole.SAMPLESTACK_CONTRIBUTOR, TEST_URI);

    askAndAnswer();

    // now we can comment on an answer.
    JsonNode qnaDoc = operations.getJsonDocument(ClientRole.SAMPLESTACK_CONTRIBUTOR, TEST_URI);

    ArrayNode answers = (ArrayNode) qnaDoc.get("answers");
    String answerId = answers.get(0).get("id").asText();

    ServerTransform commentTransform = new ServerTransform("comment-patch");
    commentTransform.put("postId", answerId);
    commentTransform.put("text", "text of comment on answer");
    commentTransform.put("userName", Utils.joeUser.getUserName());

    // dummy uri because this transform does an update on parent doc.
    contribManager.write(DUMMY_URI, new StringHandle(""), commentTransform);

    commentTransform = new ServerTransform("comment-patch");
    commentTransform.put("postId", TEST_URI);
    commentTransform.put("text", "text of comment on question");
    commentTransform.put("userName", Utils.joeUser.getUserName());

    // dummy uri because this transform does an update on parent doc.
    contribManager.write(DUMMY_URI, new StringHandle(""), commentTransform);

    // check comments
    qnaDoc = operations.getJsonDocument(ClientRole.SAMPLESTACK_CONTRIBUTOR, TEST_URI);

    assertEquals("doc has comment", 1, qnaDoc.get("comments").size());
    assertEquals("answer has comment", 1, qnaDoc.get("answers").get(0).get("comments").size());
    assertEquals("doc has right", "text of comment on question",
            qnaDoc.get("comments").get(0).get("text").asText());
    assertEquals("answer has right", "text of comment on answer",
            qnaDoc.get("answers").get(0).get("comments").get(0).get("text").asText());

}

From source file:com.marklogic.samplestack.database.DatabaseTransformsIT.java

@Test
public void votePatchTransform() {
    // make a user
    contributorService.store(Utils.joeUser);

    // make sure there's no question
    operations.delete(ClientRole.SAMPLESTACK_CONTRIBUTOR, TEST_URI);

    askAndAnswer();

    // now we can vote on the question and the answer.
    JsonNode qnaDoc = operations.getJsonDocument(ClientRole.SAMPLESTACK_CONTRIBUTOR, TEST_URI);

    ArrayNode answers = (ArrayNode) qnaDoc.get("answers");
    String answerId = answers.get(0).get("id").asText();
    String postId = qnaDoc.get("id").asText();

    ServerTransform voteTransform = new ServerTransform("vote-patch");
    voteTransform.put("postId", postId);
    voteTransform.put("delta", "1");
    voteTransform.put("userName", Utils.joeUser.getUserName());

    // dummy uri because this transform does an update on parent doc.
    contribManager.write(DUMMY_URI, new StringHandle(""), voteTransform);

    qnaDoc = operations.getJsonDocument(ClientRole.SAMPLESTACK_CONTRIBUTOR, TEST_URI);

    assertEquals("question has updated itemTally", "1", qnaDoc.get("itemTally").asText());
    assertEquals("question has updated score", "1", qnaDoc.get("docScore").asText());

    voteTransform = new ServerTransform("vote-patch");
    voteTransform.put("postId", answerId);
    voteTransform.put("delta", "-1");
    voteTransform.put("userName", Utils.joeUser.getUserName());

    // dummy uri because this transform does an update on parent doc.
    // this should FAIL, joe can't vote twice.
    contribManager.write(DUMMY_URI, new StringHandle(""), voteTransform);
    // check comments
    qnaDoc = operations.getJsonDocument(ClientRole.SAMPLESTACK_CONTRIBUTOR, TEST_URI);
    assertEquals("question has untouched itemTally", "1", qnaDoc.get("itemTally").asText());
    assertEquals("question has updated score", "0", qnaDoc.get("docScore").asText());
    assertEquals("answer has updated itemTally", "-1", qnaDoc.get("answers").get(0).get("itemTally").asText());

}

From source file:org.numenta.nupic.algorithms.CLAClassifierDeserializer.java

@Override
public CLAClassifier deserialize(JsonParser jp, DeserializationContext ctxt)
        throws IOException, JsonProcessingException {

    ObjectCodec oc = jp.getCodec();
    JsonNode node = oc.readTree(jp);

    CLAClassifier retVal = new CLAClassifier();
    retVal.alpha = node.get("alpha").asDouble();
    retVal.actValueAlpha = node.get("actValueAlpha").asDouble();
    retVal.learnIteration = node.get("learnIteration").asInt();
    retVal.recordNumMinusLearnIteration = node.get("recordNumMinusLearnIteration").asInt();
    retVal.maxBucketIdx = node.get("maxBucketIdx").asInt();

    String[] steps = node.get("steps").asText().split(",");
    TIntList t = new TIntArrayList();
    for (String step : steps) {
        t.add(Integer.parseInt(step));
    }
    retVal.steps = t;

    String[] tupleStrs = node.get("patternNZHistory").asText().split(";");
    Deque<Tuple> patterns = new Deque<Tuple>(tupleStrs.length);
    for (String tupleStr : tupleStrs) {
        String[] tupleParts = tupleStr.split("-");
        int iteration = Integer.parseInt(tupleParts[0]);
        String pattern = tupleParts[1].substring(1, tupleParts[1].indexOf("]")).trim();
        String[] indexes = pattern.split(",");
        int[] indices = new int[indexes.length];
        for (int i = 0; i < indices.length; i++) {
            indices[i] = Integer.parseInt(indexes[i].trim());
        }
        Tuple tup = new Tuple(iteration, indices);
        patterns.append(tup);
    }
    retVal.patternNZHistory = patterns;

    Map<Tuple, BitHistory> bitHistoryMap = new HashMap<Tuple, BitHistory>();
    String[] bithists = node.get("activeBitHistory").asText().split(";");
    for (String bh : bithists) {
        String[] parts = bh.split("-");

        String[] left = parts[0].split(",");
        Tuple iteration = new Tuple(Integer.parseInt(left[0].trim()), Integer.parseInt(left[1].trim()));

        BitHistory bitHistory = new BitHistory();
        String[] right = parts[1].split("=");
        bitHistory.id = right[0].trim();

        TDoubleList dubs = new TDoubleArrayList();
        String[] stats = right[1].substring(1, right[1].indexOf("}")).trim().split(",");
        for (int i = 0; i < stats.length; i++) {
            dubs.add(Double.parseDouble(stats[i].trim()));
        }
        bitHistory.stats = dubs;

        bitHistory.lastTotalUpdate = Integer.parseInt(right[2].trim());

        bitHistoryMap.put(iteration, bitHistory);
    }
    retVal.activeBitHistory = bitHistoryMap;

    ArrayNode jn = (ArrayNode) node.get("actualValues");
    List<Object> l = new ArrayList<Object>();
    for (int i = 0; i < jn.size(); i++) {
        JsonNode n = jn.get(i);
        try {
            double d = Double.parseDouble(n.asText().trim());
            l.add(d);
        } catch (Exception e) {
            l.add(n.asText().trim());
        }
    }
    retVal.actualValues = l;

    //Go back and set the classifier on the BitHistory objects
    for (Tuple tuple : bitHistoryMap.keySet()) {
        bitHistoryMap.get(tuple).classifier = retVal;
    }

    return retVal;
}

From source file:com.googlecode.jsonschema2pojo.FragmentResolverTest.java

@Test
public void pathCanReferToArrayContentsByIndex() {

    ObjectNode root = new ObjectMapper().createObjectNode();

    ArrayNode a = root.arrayNode();
    root.put("a", a);

    a.add(root.objectNode());
    a.add(root.objectNode());
    a.add(root.objectNode());

    assertThat(resolver.resolve(root, "#/a/0"), is(sameInstance(a.get(0))));
    assertThat(resolver.resolve(root, "#/a/1"), is(sameInstance(a.get(1))));
    assertThat(resolver.resolve(root, "#/a/2"), is(sameInstance(a.get(2))));

}

From source file:com.googlecode.jsonschema2pojo.FragmentResolverTest.java

@Test
public void pathCanReferToArrayContentsAtTheDocumentRoot() {
    ArrayNode root = new ObjectMapper().createArrayNode();

    root.add(root.objectNode());
    root.add(root.objectNode());
    root.add(root.objectNode());

    assertThat(resolver.resolve(root, "#/0"), is(sameInstance(root.get(0))));
    assertThat(resolver.resolve(root, "#/1"), is(sameInstance(root.get(1))));
    assertThat(resolver.resolve(root, "#/2"), is(sameInstance(root.get(2))));

}

From source file:org.opencredo.couchdb.inbound.CouchDbAllDocumentsMessageSource.java

public Message<URI> receive() {
    if (toBeReceived.isEmpty()) {
        URI skipUri = UriComponentsBuilder.fromUri(databaseUri).replaceQueryParam("limit", limit)
                .replaceQueryParam("skip", skip).build().toUri();
        ObjectNode response = couchDbDocumentOperations.readDocument(skipUri, ObjectNode.class);
        ArrayNode rows = (ArrayNode) response.get("rows");
        int size = rows.size();
        Assert.isTrue(size <= limit, "Retrieved more rows than limit");
        for (int i = 0; i < size; i++) {
            JsonNode node = rows.get(i);
            String id = node.get("id").textValue();
            try {
                toBeReceived.add(new URI(baseUri + "/" + id));
                skip++;
            } catch (URISyntaxException e) {
                logger.error("Error creating the URI of document from baseUri and ID", e);
                return null;
            }
        }
    }

    Map<String, String> headers = createHeaderMap(databaseUri, skip, limit);
    return prepareMessage(toBeReceived.poll(), headers);
}

From source file:br.com.ingenieux.mojo.simpledb.cmd.PutAttributesCommand.java

private Collection<ReplaceableAttribute> getAttributesFrom(ArrayNode attributesNode, boolean replaceP) {
    List<ReplaceableAttribute> attributeList = new ArrayList<ReplaceableAttribute>();

    for (int i = 0; i < attributesNode.size(); i++) {
        ObjectNode objectNode = (ObjectNode) attributesNode.get(i);

        Iterator<String> itFieldName = objectNode.fieldNames();
        while (itFieldName.hasNext()) {
            String key = itFieldName.next();
            JsonNode valueNode = objectNode.get(key);

            if (valueNode.isValueNode()) {
                attributeList.add(new ReplaceableAttribute(key, valueNode.asText(), replaceP));
            } else if (valueNode.isArray()) {
                for (int j = 0; j < valueNode.size(); j++) {
                    JsonNode scalarValueNode = valueNode.get(j);

                    attributeList.add(new ReplaceableAttribute(key, scalarValueNode.asText(), replaceP));
                }
            }
        }
    }

    return attributeList;
}

From source file:com.basho.riak.client.api.commands.itest.ITestBucketMapReduce.java

private void erlangBucketMR(String bucketType) throws InterruptedException, ExecutionException {
    initValues(bucketType);
    Namespace ns = new Namespace(bucketType, mrBucketName);
    BucketMapReduce bmr = new BucketMapReduce.Builder().withNamespace(ns)
            .withMapPhase(Function.newErlangFunction("riak_kv_mapreduce", "map_object_value"), false)
            .withReducePhase(Function.newErlangFunction("riak_kv_mapreduce", "reduce_string_to_integer"), false)
            .withReducePhase(Function.newErlangFunction("riak_kv_mapreduce", "reduce_sort"), true).build();

    MapReduce.Response response = client.execute(bmr);

    // The query should return one phase result which is a JSON array containing
    // all the values, 0 - 199
    assertEquals(200, response.getResultsFromAllPhases().size());
    ArrayNode result = response.getResultForPhase(2);
    assertEquals(200, result.size());

    assertEquals(42, result.get(42).asInt());
    assertEquals(199, result.get(199).asInt());

    resetAndEmptyBucket(ns);
}

From source file:net.maurerit.zkb.KillParser.java

public List<Kill> parse(InputStream is, boolean isXml) throws IOException {
    List<Kill> kills = new ArrayList<Kill>();

    ObjectMapper mapper;
    if (isXml) {
        mapper = new XmlMapper();
    } else {
        mapper = new ObjectMapper();
    }

    TreeNode root = mapper.readTree(is);

    if (root instanceof ArrayNode) {
        ArrayNode rootCasted = (ArrayNode) root;
        for (int idx = 0; idx < rootCasted.size(); idx++) {
            kills.add(parseKill(rootCasted.get(idx)));
        }
    }

    return kills;
}