List of usage examples for com.fasterxml.jackson.databind.node ObjectNode replace
public JsonNode replace(String fieldName, JsonNode value)
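The method sets (or overwrites) the named field on the ObjectNode and returns the previous value, or null if the field was not present. Before the real-world examples below, here is a minimal, self-contained sketch of the call in isolation; the class name and field values are illustrative only and do not come from the examples that follow.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;

public class ReplaceExample {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode node = mapper.createObjectNode();
        node.put("status", "draft");

        // replace() stores the new value and hands back the old one (null if the field was absent)
        JsonNode previous = node.replace("status", TextNode.valueOf("published"));

        System.out.println(previous); // "draft"
        System.out.println(node);     // {"status":"published"}
    }
}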
From source file:com.redhat.lightblue.util.JsonDoc.java
private static JsonNode modifyObjectNode(JsonNode parentNode, JsonNode newValue, String last, Path p) {
    JsonNode oldValue;
    if (Util.isNumber(last)) {
        throw new IllegalArgumentException(UtilConstants.ERR_INVALID_INDEXED_ACCESS + p);
    }
    ObjectNode obj = (ObjectNode) parentNode;
    if (newValue == null) {
        oldValue = obj.get(last);
        obj.remove(last);
    } else {
        oldValue = obj.replace(last, newValue);
    }
    return oldValue;
}
From source file:com.pros.jsontransform.examples.expressions.ExpressionsTest.java
@Test
public void testFieldRenaming() throws IOException, ObjectTransformerException {
    jsonSource = JunitTools.readFile(pathToJson + "/" + fileNamePrefix + "Source.json");
    jsonTransform = JunitTools.readFile(pathToJson + "/" + fileNamePrefix + "Map.json");
    jsonTarget = JunitTools.readFile(pathToJson + "/" + fileNamePrefix + "Target.json");

    ObjectTransformer transformer = new ObjectTransformer(mapper);
    String result = transformer.transform(jsonSource, jsonTransform);
    System.out.println(result);

    // need to adjust UUID as it never repeats
    JsonNode resultNode = mapper.readTree(result);
    ObjectNode target = (ObjectNode) mapper.readTree(jsonTarget);
    target.replace("id", resultNode.get("id"));

    assertTrue(resultNode.equals(target));
}
From source file:org.apache.manifoldcf.elasticsearch.MCFAuthorizerUtils.java
public static SearchRequest parseSearchRequestMCF(RestRequest request) throws MCFAuthorizerException {
    SearchRequest searchRequest;
    String username = request.param("u");
    //if(username==null) throw new MCFAuthorizerException("Username not passed.");
    if (username != null) {
        String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
        searchRequest = new SearchRequest(indices);
        boolean isTemplateRequest = request.path().endsWith("/template");
        if (request.hasContent() || request.hasParam("source")) {
            FilterBuilder authorizationFilter = buildAuthorizationFilter(username);
            FilteredQueryBuilder filteredQueryBuilder;
            ObjectMapper objectMapper = new ObjectMapper();
            ObjectNode modifiedJSON, innerJSON;
            JsonNode requestJSON;
            try {
                requestJSON = objectMapper.readTree(RestActions.getRestContent(request).toBytes());
                if (isTemplateRequest) {
                    modifiedJSON = (ObjectNode) requestJSON;
                    innerJSON = (ObjectNode) requestJSON.findValue("template");
                    filteredQueryBuilder = QueryBuilders.filteredQuery(
                            QueryBuilders.wrapperQuery(innerJSON.findValue("query").toString()),
                            authorizationFilter);
                    modifiedJSON.replace("template", innerJSON.set("query",
                            objectMapper.readTree(filteredQueryBuilder.buildAsBytes().toBytes())));
                    searchRequest.templateSource(modifiedJSON.toString());
                } else {
                    filteredQueryBuilder = QueryBuilders.filteredQuery(
                            QueryBuilders.wrapperQuery(requestJSON.findValue("query").toString()),
                            authorizationFilter);
                    modifiedJSON = (ObjectNode) requestJSON;
                    modifiedJSON.set("query",
                            objectMapper.readTree(filteredQueryBuilder.buildAsBytes().toBytes()));
                    searchRequest.source(modifiedJSON.toString());
                }
            } catch (IOException e) {
                e.printStackTrace();
                throw new MCFAuthorizerException("JSON parser error");
            }
        }
        searchRequest.extraSource(parseSearchSourceMCF(request));
        searchRequest.searchType(request.param("search_type"));
        searchRequest.queryCache(request.paramAsBoolean("query_cache", null));
        String scroll = request.param("scroll");
        if (scroll != null) {
            searchRequest.scroll(new Scroll(parseTimeValue(scroll, null)));
        }
        searchRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
        searchRequest.routing(request.param("routing"));
        searchRequest.preference(request.param("preference"));
        searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions()));
    } else {
        searchRequest = RestSearchAction.parseSearchRequest(request);
    }
    return searchRequest;
}
From source file:org.forgerock.openig.migrate.action.InlineDeclarationsAction.java
@Override
protected ObjectNode doMigrate(final RouteModel route, final ObjectNode configuration) {
    ArrayNode heap = (ArrayNode) configuration.get("heap");

    // Creates references
    for (ObjectModel source : route.getObjects()) {
        for (String pointer : source.getType().getPatterns()) {
            PathVisitor visitor = new PathVisitor(Pattern.compile(pointer));
            new NodeTraversal().traverse(source.getConfig(), visitor);
            for (PathVisitor.Match match : visitor.getMatches()) {
                JsonNode pointed = match.getNode();
                if (pointed.isArray()) {
                    int i = 0;
                    for (JsonNode item : pointed) {
                        bindArrayReference(source, route.findObject(item.asText()), match.getPointer(), i++);
                    }
                } else if (pointed.isTextual()) {
                    bindReference(source, route.findObject(pointed.asText()), match.getPointer());
                }
            }
        }
    }

    // Inline references as much as possible, starting from the leafs
    // TODO Consider Moving all candidates at once, this is probably not useful to process them step by step
    List<ObjectModel> candidates = findCandidates(route);
    while (!candidates.isEmpty()) {
        for (ObjectModel candidate : candidates) {
            Reference ref = candidate.getReferencedBy().get(0);
            if (ref.isArrayRef()) {
                ArrayNode array = (ArrayNode) ref.getSource().getConfig().at(ref.getPointer());
                array.set(ref.getIndex(), ref.getTarget().getNode());
            } else {
                // We'll just replace in place the value
                ObjectNode parent = (ObjectNode) ref.getSource().getConfig().at(parentOf(ref.getPointer()));
                parent.replace(lastSegmentOf(ref.getPointer()), ref.getTarget().getNode());
            }
            ref.getSource().getReferencesTo().remove(ref);
            ref.getTarget().getReferencedBy().remove(ref);
            ref.getTarget().markInlined();
        }
        candidates = findCandidates(route);
    }

    // Remove inlined references, Java 8 style
    Iterator<ObjectModel> iterator = route.getObjects().stream().filter(inlined()).sorted(byReverseIndex())
            .iterator();
    while (iterator.hasNext()) {
        ObjectModel next = iterator.next();
        heap.remove(next.getIndex());
    }

    return configuration;
}
From source file:org.forgerock.openig.migrate.action.HeapObjectsSimplificationAction.java
@Override
public ObjectNode migrate(final ObjectNode configuration) {
    JsonNode heap = configuration.get("heap");
    if (heap != null) {
        JsonNode objects = heap.get("objects");
        if ((objects != null) && objects.isArray()) {
            ArrayNode objectsNode = (ArrayNode) objects;
            configuration.replace("heap", objectsNode);
        }
    }
    return configuration;
}
From source file:com.marklogic.client.impl.CombinedQueryBuilderImpl.java
@SuppressWarnings("rawtypes")
private String makeJSONCombinedQuery(CombinedQueryDefinitionImpl qdef) {
    try {
        ObjectMapper mapper = new ObjectMapper().configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
                .configure(Feature.ALLOW_SINGLE_QUOTES, true);
        ObjectNode rootNode = mapper.createObjectNode();
        ObjectNode searchNode = mapper.createObjectNode();
        rootNode.replace("search", searchNode);
        if (qdef.sparql != null)
            searchNode.put("sparql", qdef.sparql);
        if (qdef.qtext != null)
            searchNode.put("qtext", qdef.qtext);
        if (qdef.options != null) {
            HandleImplementation optionsBase = HandleAccessor.as(qdef.options);
            if (Format.JSON != optionsBase.getFormat()) {
                throw new IllegalStateException("Cannot combine a JSON-format structured "
                        + "query with " + optionsBase.getFormat() + "-format options");
            }
            String json = HandleAccessor.contentAsString(qdef.options);
            JsonNode optionsNode = mapper.readTree(json);
            searchNode.replace("options", optionsNode.get("options"));
        }
        if (qdef.rawQuery != null) {
            String json = HandleAccessor.contentAsString(qdef.rawQuery.getHandle());
            JsonNode rawQueryNode = mapper.readTree(json);
            JsonNode queryNode = rawQueryNode.get("query");
            if (queryNode == null)
                queryNode = rawQueryNode.path("search").get("query");
            if (queryNode != null)
                searchNode.replace("query", queryNode);
            if (qdef.options == null) {
                JsonNode optionsNode = rawQueryNode.path("search").get("options");
                if (optionsNode != null)
                    searchNode.replace("options", optionsNode);
            }
            if (qdef.qtext == null) {
                JsonNode qtextNode = rawQueryNode.path("search").get("qtext");
                if (qtextNode != null)
                    searchNode.replace("qtext", qtextNode);
            }
            if (qdef.sparql == null) {
                JsonNode sparqlNode = rawQueryNode.path("search").get("sparql");
                if (sparqlNode != null)
                    searchNode.replace("sparql", sparqlNode);
            }
        }
        return rootNode.toString();
    } catch (Exception e) {
        throw new MarkLogicIOException(e);
    }
}
From source file:io.amient.kafka.metrics.DiscoveryTool.java
public Dashboard generateDashboard(String name, List<Broker> brokers, List<String> topics, String dataSource,
        String path, int interval_s) {
    Dashboard dash = new Dashboard(name, dataSource, path + "/" + name + ".json");

    ///////////// ROW 1 - TOPIC METRICS
    dash.newVariable("topic", true, topics.toArray(new String[topics.size()]));
    ArrayNode topicsRow = dash.newRow("TOPIC METRICS FOR `$topic`", 250, true);

    ObjectNode graphT1 = dash.newGraph(topicsRow, "Input / Sec", 5, false).put("fill", 2).put("stack", false);
    graphT1.replace("y_formats", dash.newArray("bytes", "short"));
    graphT1.set("tooltip", dash.newObject().put("value_type", "individual").put("shared", false));
    dash.newTarget(graphT1, "$tag_topic", "SELECT sum(\"OneMinuteRate\") FROM \"BytesInPerSec\" "
            + "WHERE \"name\" = '" + name + "' AND \"topic\" =~ /^$topic$/ AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s), \"topic\" fill(null)");

    ObjectNode graphT2 = dash.newGraph(topicsRow, "Failed Fetch Requests / Sec", 2, false).put("fill", 4)
            .put("stack", false);
    graphT2.set("tooltip", dash.newObject().put("value_type", "individual").put("shared", false));
    dash.newTarget(graphT2, "$tag_topic", "SELECT sum(\"OneMinuteRate\") FROM \"FailedFetchRequestsPerSec\" "
            + "WHERE \"name\" = '" + name + "' AND \"topic\" =~ /^$topic$/ AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s), \"topic\" fill(null)");

    ObjectNode graphT3 = dash.newGraph(topicsRow, "Output / Sec", 5, false).put("fill", 2).put("stack", false);
    graphT3.replace("y_formats", dash.newArray("bytes", "short"));
    graphT3.set("tooltip", dash.newObject().put("value_type", "individual").put("shared", false));
    dash.newTarget(graphT3, "$tag_topic", "SELECT sum(\"OneMinuteRate\") FROM \"BytesOutPerSec\" "
            + "WHERE \"name\" = '" + name + "' AND \"topic\" =~ /^$topic$/ AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s), \"topic\" fill(null)");

    ///////////// ROW 2 - AGGREGATED CLUSTER METRICS
    ArrayNode clusterRow = dash.newRow(String.format("CLUSTER METRICS FOR %d broker(s)", brokers.size()), 172,
            true);

    dash.newStat(clusterRow, "Controllers", 1, "SELECT sum(\"Value\") FROM \"ActiveControllerCount\" "
            + "WHERE \"group\" = 'kafka.controller' AND \"name\" = '" + name + "' AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s)")
            .put("valueFontSize", "150%");

    ObjectNode graph1 = dash.newGraph(clusterRow, "Under-Replicated Partitions", 2, false).put("bars", true);
    dash.newTarget(graph1, "$tag_service", "SELECT mean(\"Value\") FROM \"UnderReplicatedPartitions\" "
            + "WHERE \"group\" = 'kafka.server' AND \"name\" = '" + name + "' AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s), \"service\"");

    dash.newTable(clusterRow, "Partition Count", 2, "avg", "$tag_service",
            "SELECT last(\"Value\") FROM \"PartitionCount\" "
                    + "WHERE \"group\" = 'kafka.server' AND \"name\" = '" + name + "' AND $timeFilter "
                    + "GROUP BY time(" + interval_s + "s), \"service\"")
            .put("transform", "timeseries_aggregations").put("showHeader", false);

    //Total Maximum Log Flush Time
    ObjectNode graph5 = dash.newGraph(clusterRow, "Log Flush Time (98th maximum)", 2, false).put("linewidth", 1)
            .put("points", false).put("fill", 0);
    graph5.replace("y_formats", dash.newArray("ms", "short"));
    dash.get(graph5, "grid").put("threshold1", 6).put("threshold1Color", "rgba(236, 118, 21, 0.21)")
            .put("threshold2", 12).put("threshold2Color", "rgba(234, 112, 112, 0.22)");
    dash.newTarget(graph5, "$tag_service", "SELECT max(\"98thPercentile\") as \"98thPercentile\" "
            + "FROM \"LogFlushRateAndTimeMs\" "
            + "WHERE \"group\" = 'kafka.log' AND \"name\" = '" + name + "' AND $timeFilter "
            + "GROUP BY time(1m), \"service\"");

    ObjectNode graph2 = dash.newGraph(clusterRow, "Input / Sec", 2, false).put("fill", 2).put("stack", true);
    graph2.replace("y_formats", dash.newArray("bytes", "short"));
    graph2.replace("tooltip", dash.newObject().put("value_type", "individual").put("shared", true));
    dash.get(graph2, "grid").put("leftMin", 0);
    dash.newTarget(graph2, "$tag_service", "SELECT sum(\"OneMinuteRate\") FROM \"BytesInPerSec\" "
            + "WHERE \"group\" = 'kafka.server' AND \"topic\" =~ /^$topic$/ AND \"name\" = '" + name + "' "
            + "AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s), \"service\"");

    ObjectNode graph3 = dash.newGraph(clusterRow, "Output / Sec", 2, false).put("fill", 2).put("stack", true);
    graph3.replace("y_formats", dash.newArray("bytes", "short"));
    graph3.replace("tooltip", dash.newObject().put("value_type", "individual").put("shared", true));
    dash.get(graph3, "grid").put("leftMin", 0);
    dash.newTarget(graph3, "$tag_service", "SELECT sum(\"OneMinuteRate\") FROM \"BytesOutPerSec\" "
            + "WHERE \"group\" = 'kafka.server' AND \"topic\" =~ /^$topic$/ AND \"name\" = '" + name + "' "
            + "AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s), \"service\"");

    dash.newStat(clusterRow, "Requests/Sec", 1, "SELECT mean(\"OneMinuteRate\") FROM \"RequestsPerSec\" "
            + "WHERE \"group\" = 'kafka.network' AND \"name\" = '" + name + "' AND $timeFilter "
            + "GROUP BY time(" + interval_s + "s)")
            .put("decimals", 1).put("valueName", "avg").put("valueFontSize", "35%").put("format", "short")
            .replace("sparkline", dash.newObject().put("show", true).put("full", false));

    ///////////// ROW (2 + b) - BROKER-LEVEL METRICS
    for (Broker broker : brokers) {
        //extra row for each broker
        ArrayNode brokerRow = dash
                .newRow(String.format("Kafka Broker ID %s @ %s", broker.id, broker.hostPort()), 250, false);

        //Purgatory graph
        ObjectNode graph6 = dash.newGraph(brokerRow, "Num.delayed requests", 4, true);
        dash.newTarget(graph6, "$col",
                "SELECT max(\"Value\"), median(\"Value\"), min(\"Value\") FROM \"NumDelayedRequests\" "
                        + "WHERE \"name\" = 'stag-kafka-cluster' AND \"service\" = 'broker-1' AND $timeFilter "
                        + "GROUP BY time($interval) fill(null)");

        //Log Flush Time graph
        ObjectNode graph7 = dash.newGraph(brokerRow, "Log Flush Time (mean)", 4, false).put("linewidth", 1)
                .put("points", true).put("pointradius", 1).put("fill", 0);
        graph7.replace("y_formats", dash.newArray("ms", "short"));
        dash.get(graph7, "grid").put("leftLogBase", 2).put("threshold1", 100)
                .put("threshold1Color", "rgba(236, 118, 21, 0.21)").put("threshold2", 250)
                .put("threshold2Color", "rgba(234, 112, 112, 0.22)");
        dash.newTarget(graph7, "$col", "SELECT sum(\"999thPercentile\") as \"999thPercentile\" "
                + "FROM \"LogFlushRateAndTimeMs\" "
                + "WHERE \"group\" = 'kafka.log' AND \"service\" = '" + String.format("broker-%s", broker.id) + "'"
                + "AND \"name\" = '" + name + "' AND $timeFilter "
                + "GROUP BY time(30s)");
        dash.newTarget(graph7, "$col", "SELECT sum(\"99thPercentile\") as \"99thPercentile\" "
                + "FROM \"LogFlushRateAndTimeMs\" "
                + "WHERE \"group\" = 'kafka.log' AND \"service\" = '" + String.format("broker-%s", broker.id) + "'"
                + "AND \"name\" = '" + name + "' AND $timeFilter "
                + "GROUP BY time(30s)");
        dash.newTarget(graph7, "$col", "SELECT sum(\"95thPercentile\") as \"95thPercentile\" "
                + "FROM \"LogFlushRateAndTimeMs\" "
                + "WHERE \"group\" = 'kafka.log' AND \"service\" = '" + String.format("broker-%s", broker.id) + "'"
                + "AND \"name\" = '" + name + "' AND $timeFilter "
                + "GROUP BY time(30s)");

        //Combined Throughput Graph
        ObjectNode graph8 = dash.newGraph(brokerRow, "Throughput", 4, true).put("linewidth", 1).put("fill", 6)
                .put("y-axis", false);
        graph8.replace("y_formats", dash.newArray("bytes", "short"));
        graph8.set("aliasColors", dash.newObject().put("Input", "#BF1B00").put("Output", "#508642"));
        dash.newTarget(graph8, "Output", "SELECT sum(\"OneMinuteRate\") * -1 FROM \"BytesOutPerSec\" "
                + "WHERE \"name\" = 'stag-kafka-cluster' AND \"topic\" =~ /^$topic$/ "
                + "AND \"service\" = '" + String.format("broker-%s", broker.id) + "' AND $timeFilter "
                + "GROUP BY time($interval) fill(null)");
        dash.newTarget(graph8, "Input", "SELECT sum(\"OneMinuteRate\") FROM \"BytesInPerSec\" "
                + "WHERE \"name\" = 'stag-kafka-cluster' AND \"topic\" =~ /^$topic$/ "
                + "AND \"service\" = '" + String.format("broker-%s", broker.id) + "' AND $timeFilter "
                + "GROUP BY time($interval) fill(null)");
    }

    return dash;
}
From source file:com.pros.jsontransform.ObjectTransformer.java
private void transformObject(final JsonNode sourceNode, final JsonNode transformNode,
        final ObjectNode targetNode) throws ObjectTransformerException {
    ObjectNode childNode = mapper.createObjectNode();
    targetNode.replace(transformNodeFieldName, childNode);

    // visit child object
    transformNode(sourceNode, transformNode, childNode);
}
From source file:com.pros.jsontransform.ObjectTransformer.java
private void transformArray(final JsonNode sourceNode, final JsonNode transformNode,
        final ObjectNode targetNode) throws ObjectTransformerException {
    // create new array
    ArrayNode targetArray = mapper.createArrayNode();
    targetNode.replace(transformNodeFieldName, targetArray);

    processArray(sourceNode, transformNode, targetArray);
}
From source file:com.msopentech.odatajclient.testservice.utils.Commons.java
@SuppressWarnings("fallthrough")
public static JsonNode changeFormat(final ObjectNode node, final Accept target) {
    final List<String> toBeRemoved = new ArrayList<String>();
    final Map<String, JsonNode> toBeReplaced = new HashMap<String, JsonNode>();

    switch (target) {
    case JSON_NOMETA:
        // nometa + minimal
        toBeRemoved.add(JSON_ODATAMETADATA_NAME);

    case JSON:
        // minimal
        toBeRemoved.add(JSON_EDITLINK_NAME);
        toBeRemoved.add(JSON_ID_NAME);
        toBeRemoved.add(JSON_TYPE_NAME);

        final Iterator<Map.Entry<String, JsonNode>> fields = node.fields();
        while (fields.hasNext()) {
            final Map.Entry<String, JsonNode> field = fields.next();
            if (field.getKey().endsWith(JSON_MEDIA_SUFFIX)
                    || field.getKey().endsWith(JSON_NAVIGATION_SUFFIX)
                    || field.getKey().endsWith(JSON_TYPE_SUFFIX)) {
                toBeRemoved.add(field.getKey());
            } else if (field.getValue().isObject()) {
                toBeReplaced.put(field.getKey(), changeFormat((ObjectNode) field.getValue(), target));
            }
        }

    case JSON_FULLMETA:
        //ignore: no changes
        break;

    default:
        throw new UnsupportedOperationException(target.toString());
    }

    for (String field : toBeRemoved) {
        node.remove(field);
    }
    for (Map.Entry<String, JsonNode> field : toBeReplaced.entrySet()) {
        node.replace(field.getKey(), field.getValue());
    }

    return node;
}