List of usage examples for com.fasterxml.jackson.databind.node ObjectNode put
public ObjectNode put(String fieldName, String value)
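For orientation before the project examples below, here is a minimal, self-contained sketch of the call, assuming only Jackson databind 2.x on the classpath; the class name and field values are illustrative, not taken from any of the projects listed:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class ObjectNodePutExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode node = mapper.createObjectNode();
        // put(String, String) stores a textual field and returns the same
        // ObjectNode, so calls can be chained.
        node.put("status", "success").put("msg", "folder created");
        System.out.println(mapper.writeValueAsString(node));
        // prints: {"status":"success","msg":"folder created"}
    }
}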
From source file:controllers.api.v1.Dataset.java
public static Result createLogicalDatasetFolder(Long datasetId) {
    ObjectNode result = Json.newObject();
    // String username = session("user");
    Map<String, String[]> params = request().body().asFormUrlEncoded();
    // if (StringUtils.isNotBlank(username)) {
    String errorMsg = DatasetsDAO.createLogicalDatasetFolder(datasetId, params);
    if (errorMsg.startsWith("success:")) {
        result.put("status", "success");
        Integer id = Integer.parseInt(errorMsg.split(":")[1]);
        result.put("id", id);
    } else {
        result.put("status", "failed");
        result.put("msg", errorMsg);
    }
    /*
    } else {
        result.put("status", "failed");
        result.put("msg", "Authentication Required");
    }
    */
    return ok(result);
}
From source file:net.logstash.logback.encoder.LogstashEncoderV1.java
@Override
public void doEncode(ILoggingEvent event) throws IOException {
    ObjectNode eventNode = MAPPER.createObjectNode();
    eventNode.put("@timestamp", ISO_DATETIME_TIME_ZONE_FORMAT_WITH_MILLIS.format(event.getTimeStamp()));
    eventNode.put("message", event.getFormattedMessage());
    eventNode.put("tags", createTags(event));
    eventNode.put("thread_name", event.getThreadName());
    eventNode.put("logger_name", event.getLoggerName());
    eventNode.put("level", event.getLevel().toString());
    eventNode.put("level_value", event.getLevel().toInt());
    if (isIncludeCallerInfo()) {
        StackTraceElement callerData = extractCallerData(event);
        eventNode.put("caller_class_name", callerData.getClassName());
        eventNode.put("caller_method_name", callerData.getMethodName());
        eventNode.put("caller_file_name", callerData.getFileName());
        eventNode.put("caller_line_number", callerData.getLineNumber());
    }
    IThrowableProxy throwableProxy = event.getThrowableProxy();
    if (throwableProxy != null) {
        eventNode.put("stack_trace", ThrowableProxyUtil.asString(throwableProxy));
    }
    Context context = getContext();
    if (context != null) {
        addPropertiesAsFields(eventNode, context.getCopyOfPropertyMap());
    }
    addPropertiesAsFields(eventNode, event.getMDCPropertyMap());
    write(MAPPER.writeValueAsBytes(eventNode), outputStream);
    write(CoreConstants.LINE_SEPARATOR, outputStream);
    if (isImmediateFlush()) {
        outputStream.flush();
    }
}
From source file:com.rusticisoftware.tincan.InteractionComponent.java
@Override
public ObjectNode toJSONNode(TCAPIVersion version) {
    ObjectNode node = Mapper.getInstance().createObjectNode();
    if (this.id != null) {
        node.put("id", this.getId());
    }
    if (this.description != null) {
        node.put("description", this.getDescription().toJSONNode(version));
    }
    return node;
}
From source file:org.jboss.aerogear.sync.jsonmergepatch.client.JsonMergePatchClientSynchronizer.java
@Override
public void addContent(JsonNode content, final ObjectNode objectNode, final String fieldName) {
    objectNode.put(fieldName, content);
}
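Note that this example passes a JsonNode as the value. That overload of put has been deprecated since Jackson 2.4 in favor of set; a minimal sketch of the equivalent call, assuming the same objectNode, fieldName, and content variables as above:

// Non-deprecated equivalent for JsonNode values on Jackson 2.4+:
objectNode.set(fieldName, content);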
From source file:models.daos.DatasetDao.java
public static ObjectNode getDatasetDependency(JsonNode input) throws Exception {
    ObjectNode resultJson = Json.newObject();
    String cluster = DEFAULT_CLUSTER_NAME;
    String datasetUri = null;
    String dbName = null;
    String tableName = null;
    boolean isHive = false;
    boolean isDalids = false;
    if (input != null && input.isContainerNode()) {
        if (input.has(CLUSTER_NAME_KEY)) {
            cluster = input.get(CLUSTER_NAME_KEY).asText();
        }
        if (input.has(DATASET_URI_KEY)) {
            datasetUri = input.get(DATASET_URI_KEY).asText();
        }
    }
    if (StringUtils.isBlank(datasetUri)) {
        resultJson.put("return_code", 404);
        resultJson.put("message", "Wrong input format! Missing dataset uri");
        return resultJson;
    }
    Integer index = -1;
    if ((index = datasetUri.indexOf(HIVE_PREFIX_WITH_3_SLASH)) != -1) {
        isHive = true;
        String tmp = datasetUri.substring(index + HIVE_PREFIX_WITH_3_SLASH.length());
        String[] info = tmp.split("\\.|/");
        if (info != null && info.length == 2) {
            dbName = info[0];
            tableName = info[1];
        }
    } else if ((index = datasetUri.indexOf(DALIDS_PREFIX_WITH_3_SLASH)) != -1) {
        isDalids = true;
        String tmp = datasetUri.substring(index + DALIDS_PREFIX_WITH_3_SLASH.length());
        String[] info = tmp.split("\\.|/");
        if (info != null && info.length == 2) {
            dbName = info[0];
            tableName = info[1];
        }
    } else if ((index = datasetUri.indexOf(HIVE_PREFIX_WITH_2_SLASH)) != -1) {
        isHive = true;
        String tmp = datasetUri.substring(index + HIVE_PREFIX_WITH_2_SLASH.length());
        String[] info = tmp.split("\\.|/");
        if (info != null && info.length == 3) {
            cluster = info[0];
            dbName = info[1];
            tableName = info[2];
        }
    } else if ((index = datasetUri.indexOf(DALIDS_PREFIX_WITH_2_SLASH)) != -1) {
        isDalids = true;
        String tmp = datasetUri.substring(index + DALIDS_PREFIX_WITH_2_SLASH.length());
        String[] info = tmp.split("\\.|/");
        if (info != null && info.length == 3) {
            cluster = info[0];
            dbName = info[1];
            tableName = info[2];
        }
    } else if (datasetUri.indexOf('.') != -1) {
        index = datasetUri.indexOf(':');
        String tmp = datasetUri;
        if (index != -1) {
            cluster = datasetUri.substring(0, index);
            tmp = datasetUri.substring(index + 1);
        }
        String[] info = tmp.split("\\.|/");
        if (info != null && info.length == 2) {
            dbName = info[0];
            tableName = info[1];
        }
    }
    if (StringUtils.isBlank(cluster) || StringUtils.isBlank(dbName) || StringUtils.isBlank(tableName)) {
        resultJson.put("return_code", 404);
        resultJson.put("message", "Wrong input format! Missing dataset uri");
        return resultJson;
    }
    String sqlQuery = null;
    List<Map<String, Object>> rows = null;
    if (isHive) {
        rows = JdbcUtil.wherehowsJdbcTemplate.queryForList(GET_DATASET_ID_IN_MAP_TABLE_WITH_TYPE_AND_CLUSTER,
                "/" + dbName + "/" + tableName, "hive", cluster);
    } else if (isDalids) {
        rows = JdbcUtil.wherehowsJdbcTemplate.queryForList(GET_DATASET_ID_IN_MAP_TABLE_WITH_TYPE_AND_CLUSTER,
                "/" + dbName + "/" + tableName, "dalids", cluster);
    } else {
        rows = JdbcUtil.wherehowsJdbcTemplate.queryForList(GET_DATASET_ID_IN_MAP_TABLE_WITH_CLUSTER,
                "/" + dbName + "/" + tableName, cluster);
    }
    Long datasetId = 0L;
    String urn = null;
    String datasetType = null;
    String deploymentTier = null;
    String dataCenter = null;
    String serverCluster = null;
    if (rows != null && rows.size() > 0) {
        for (Map row : rows) {
            datasetId = (Long) row.get("dataset_id");
            urn = (String) row.get("urn");
            datasetType = (String) row.get("dataset_type");
            if (datasetType.equalsIgnoreCase("hive")) {
                isHive = true;
            } else if (datasetType.equalsIgnoreCase("dalids")) {
                isDalids = true;
            }
            deploymentTier = (String) row.get("deployment_tier");
            dataCenter = (String) row.get("data_center");
            serverCluster = (String) row.get("server_cluster");
            break;
        }
    } else {
        resultJson.put("return_code", 200);
        resultJson.put("message", "Dependency information is not available.");
        return resultJson;
    }
    List<DatasetDependencyRecord> depends = new ArrayList<DatasetDependencyRecord>();
    getDatasetDependencies(datasetId, "", 1, depends);
    int leafLevelDependencyCount = 0;
    if (depends.size() > 0) {
        for (DatasetDependencyRecord d : depends) {
            if (d.next_level_dependency_count == 0) {
                leafLevelDependencyCount++;
            }
        }
    }
    StringBuilder inputUri = new StringBuilder("");
    if (isHive) {
        inputUri.append("hive://");
    } else if (isDalids) {
        inputUri.append("dalids://");
    }
    inputUri.append(cluster + "/" + dbName + "/" + tableName);
    resultJson.put("return_code", 200);
    resultJson.put("deployment_tier", deploymentTier);
    resultJson.put("data_center", dataCenter);
    resultJson.put("cluster", StringUtils.isNotBlank(serverCluster) ? serverCluster : cluster);
    resultJson.put("dataset_type", datasetType);
    resultJson.put("database_name", dbName);
    resultJson.put("table_name", tableName);
    resultJson.put("urn", urn);
    resultJson.put("dataset_id", datasetId);
    resultJson.put("input_uri", inputUri.toString());
    resultJson.set("dependencies", Json.toJson(depends));
    resultJson.put("leaf_level_dependency_count", leafLevelDependencyCount);
    return resultJson;
}
From source file:org.apache.taverna.activities.rest.ui.servicedescription.GenericRESTTemplateService.java
@Override
public Configuration getActivityConfiguration() {
    Configuration configuration = new Configuration();
    configuration.setType(ACTIVITY_TYPE.resolve("#Config"));
    ObjectNode json = (ObjectNode) configuration.getJson();
    ObjectNode requestNode = json.objectNode();
    requestNode.put("httpMethod", RESTActivity.HTTP_METHOD.GET.name());
    requestNode.put("absoluteURITemplate", "http://www.uniprot.org/uniprot/{id}.xml");
    ArrayNode headersNode = requestNode.arrayNode();
    headersNode.addObject().put("header", "Accept").put("value", "application/xml");
    headersNode.addObject().put("header", "Content-Type").put("value", "application/xml");
    requestNode.set("headers", headersNode);
    json.set("request", requestNode);
    json.put("outgoingDataFormat", RESTActivity.DATA_FORMAT.String.name());
    json.put("showRedirectionOutputPort", false);
    json.put("showActualURLPort", false);
    json.put("showResponseHeadersPort", false);
    json.put("escapeParameters", true);
    return configuration;
}
From source file:org.lendingclub.mercator.bind.model.ResourceRecordSet.java
public ObjectNode toJson() {
    ObjectNode recordNode = mapper.createObjectNode();
    recordNode.put("name", name).put("ttl", ttl).put("class", classType).put("type", type);
    ObjectNode rData = mapper.createObjectNode();
    Set<String> keys = recordData.keySet();
    for (String key : keys) {
        rData.put(key, recordData.get(key).toString());
    }
    recordNode.put("rData", rData);
    return recordNode;
}
From source file:org.pentaho.metaverse.api.Namespace.java
@Override
public INamespace getSiblingNamespace(String name, String type) {
    if (namespace != null) {
        try {
            JsonNode jsonObject = objectMapper.readTree(namespace);
            if (jsonObject.isObject()) {
                ObjectNode object = (ObjectNode) jsonObject;
                object.put(DictionaryConst.PROPERTY_NAME, name);
                object.put(DictionaryConst.PROPERTY_TYPE, type);
            }
            return new Namespace(objectMapper.writeValueAsString(jsonObject));
        } catch (Exception e) {
            return null;
        }
    }
    return null;
}
From source file:org.opendaylight.sfc.sbrest.json.SfstExporterFactory.java
@Override
public String exportJson(DataObject dataObject) {
    String ret = null;
    if (dataObject instanceof ServiceFunctionSchedulerType) {
        ServiceFunctionSchedulerType sfst = (ServiceFunctionSchedulerType) dataObject;
        ArrayNode sfstArray = mapper.createArrayNode();
        ObjectNode sfstNode = mapper.createObjectNode();
        sfstNode.put(_NAME, sfst.getName());
        sfstNode.put(_ENABLED, sfst.isEnabled());
        if (sfst.getType() != null) {
            sfstNode.put(_TYPE,
                    SERVICE_FUNCTION_SCHEDULE_TYPE_PREFIX + sfst.getType().getSimpleName().toLowerCase());
        }
        sfstArray.add(sfstNode);
        try {
            Object sfstObject = mapper.treeToValue(sfstArray, Object.class);
            ret = mapper.writeValueAsString(sfstObject);
            ret = "{\"" + _SERVICE_FUNCTION_SCHEDULE_TYPE + "\":" + ret + "}";
            LOG.debug("Created Service Function Schedule Type JSON: {}", ret);
        } catch (JsonProcessingException e) {
            LOG.error("Error during creation of JSON for Service Function Schedule Type {}", sfst.getName());
        }
    } else {
        throw new IllegalArgumentException("Argument is not an instance of ServiceFunctionSchedulerType");
    }
    return ret;
}
From source file:org.onosproject.segmentrouting.web.TunnelCodec.java
@Override
public ObjectNode encode(Tunnel tunnel, CodecContext context) {
    final ObjectNode result = context.mapper().createObjectNode().put(TUNNEL_ID, tunnel.id());
    result.put(GROUP_ID, tunnel.groupId());
    final ArrayNode jsonLabelIds = result.putArray(LABEL_PATH);
    tunnel.labelIds().forEach(label -> jsonLabelIds.add(label.intValue()));
    return result;
}