Example usage for com.fasterxml.jackson.databind.node ObjectNode toString

Introduction

On this page you can find example usages of com.fasterxml.jackson.databind.node.ObjectNode.toString().

Prototype

public String toString() 
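
A minimal, self-contained sketch of what toString() produces for an ObjectNode (the class name and field values below are illustrative, not taken from the examples that follow):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class ObjectNodeToStringDemo {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();

        // Build a small JSON object tree
        ObjectNode node = mapper.createObjectNode();
        node.put("name", "demo");
        node.put("count", 3);

        // toString() serializes the node and its children as compact JSON
        System.out.println(node.toString()); // {"name":"demo","count":3}
    }
}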

Usage

From source file:com.glaf.dts.web.rest.MxSchedulerResource.java

@GET
@POST
@Path("/list")
@ResponseBody
@Produces({ MediaType.APPLICATION_OCTET_STREAM })
public byte[] list(@Context HttpServletRequest request) {
    List<Scheduler> list = sysSchedulerService.getSchedulers(Constants.DTS_TASK_TYPE);
    ObjectNode responseJSON = new ObjectMapper().createObjectNode();

    ArrayNode schedulersJSON = new ObjectMapper().createArrayNode();
    responseJSON.set("schedulers", schedulersJSON);

    for (Scheduler scheduler : list) {
        ObjectNode schedulerJSON = new ObjectMapper().createObjectNode();
        schedulerJSON.put("taskId", scheduler.getTaskId());
        schedulerJSON.put("taskName", scheduler.getTaskName());
        schedulerJSON.put("title", scheduler.getTitle());
        schedulerJSON.put("content", scheduler.getContent());
        schedulerJSON.put("locked", scheduler.getLocked());
        schedulerJSON.put("expression", scheduler.getExpression());
        schedulerJSON.put("jobClass", scheduler.getJobClass());
        schedulerJSON.put("priority", scheduler.getPriority());
        schedulerJSON.put("repeatCount", scheduler.getRepeatCount());
        schedulerJSON.put("repeatInterval", scheduler.getRepeatInterval());
        schedulerJSON.put("startDelay", scheduler.getStartDelay());
        schedulerJSON.put("threadSize", scheduler.getThreadSize());
        schedulerJSON.put("createBy", scheduler.getCreateBy());
        schedulerJSON.put("createDate", DateUtils.getDateTime(scheduler.getCreateDate()));
        schedulerJSON.put("startDate", DateUtils.getDateTime(scheduler.getStartDate()));
        schedulerJSON.put("endDate", DateUtils.getDateTime(scheduler.getEndDate()));
        schedulersJSON.add(schedulerJSON);
    }

    try {
        return responseJSON.toString().getBytes("UTF-8");
    } catch (IOException e) {
        return responseJSON.toString().getBytes();
    }
}

From source file:com.glaf.report.web.rest.MxReportTaskResource.java

@GET
@POST
@Path("/list")
@ResponseBody
@Produces({ MediaType.APPLICATION_OCTET_STREAM })
public byte[] list(@Context HttpServletRequest request) {
    Map<String, Object> params = RequestUtils.getParameterMap(request);
    ReportTaskQuery query = new ReportTaskQuery();
    Tools.populate(query, params);

    String gridType = ParamUtils.getString(params, "gridType");
    if (gridType == null) {
        gridType = "easyui";
    }
    int start = 0;
    int limit = 10;
    String orderName = null;
    String order = null;
    if ("easyui".equals(gridType)) {
        int pageNo = ParamUtils.getInt(params, "page");
        limit = ParamUtils.getInt(params, "rows");
        start = (pageNo - 1) * limit;
        orderName = ParamUtils.getString(params, "sort");
        order = ParamUtils.getString(params, "order");
    } else if ("extjs".equals(gridType)) {
        start = ParamUtils.getInt(params, "start");
        limit = ParamUtils.getInt(params, "limit");
        orderName = ParamUtils.getString(params, "sort");
        order = ParamUtils.getString(params, "dir");
    } else if ("yui".equals(gridType)) {
        start = ParamUtils.getInt(params, "startIndex");
        limit = ParamUtils.getInt(params, "results");
        orderName = ParamUtils.getString(params, "sort");
        order = ParamUtils.getString(params, "dir");
    }

    if (start < 0) {
        start = 0;
    }

    if (limit <= 0) {
        limit = Paging.DEFAULT_PAGE_SIZE;
    }

    ObjectNode responseJSON = new ObjectMapper().createObjectNode();
    int total = reportTaskService.getReportTaskCountByQueryCriteria(query);
    if (total > 0) {
        responseJSON.put("total", total);
        responseJSON.put("totalCount", total);
        responseJSON.put("totalRecords", total);
        responseJSON.put("start", start);
        responseJSON.put("startIndex", start);
        responseJSON.put("limit", limit);
        responseJSON.put("pageSize", limit);

        if (StringUtils.isNotEmpty(orderName)) {
            query.setSortOrder(orderName);
            if (StringUtils.equals(order, "desc")) {
                query.setSortOrder("desc");
            }
        }

        List<ReportTask> list = reportTaskService.getReportTasksByQueryCriteria(start, limit, query);

        if (list != null && !list.isEmpty()) {
            ArrayNode rowsJSON = new ObjectMapper().createArrayNode();
            if ("yui".equals(gridType)) {
                responseJSON.set("records", rowsJSON);
            } else {
                responseJSON.set("rows", rowsJSON);
            }

            for (ReportTask reportTask : list) {
                ObjectNode node = reportTask.toObjectNode();
                node.put("sortNo", ++start);
                node.put("startIndex", start);
                rowsJSON.add(node);
            }
        }
    }
    try {
        return responseJSON.toString().getBytes("UTF-8");
    } catch (IOException e) {
        return responseJSON.toString().getBytes();
    }
}

From source file:gov.osti.services.Metadata.java

/**
 * Attempt to send this Metadata information to the indexing service configured.
 * If no service is configured, do nothing.
 *
 * @param em the related EntityManager
 * @param md the Metadata to send
 */
private static void sendToIndex(EntityManager em, DOECodeMetadata md) {
    // if indexing is not configured, skip this step
    if ("".equals(INDEX_URL))
        return;

    // set some reasonable default timeouts
    RequestConfig rc = RequestConfig.custom().setSocketTimeout(60000).setConnectTimeout(60000)
            .setConnectionRequestTimeout(60000).build();
    // create an HTTP client to request through
    CloseableHttpClient hc = HttpClientBuilder.create().setDefaultRequestConfig(rc).build();
    try {
        // do not index DOE CODE New/Previous DOI related identifiers if Approved without a Release Date
        DOECodeMetadata indexableMd = removeNonIndexableRi(em, md);

        // construct a POST submission to the indexer service
        HttpPost post = new HttpPost(INDEX_URL);
        post.setHeader("Content-Type", "application/json");
        post.setHeader("Accept", "application/json");
        // add JSON String to index for later display/search
        ObjectNode node = (ObjectNode) index_mapper.valueToTree(indexableMd);
        node.put("json", indexableMd.toJson().toString());
        post.setEntity(new StringEntity(node.toString(), "UTF-8"));

        HttpResponse response = hc.execute(post);

        if (HttpStatus.SC_OK != response.getStatusLine().getStatusCode()) {
            log.warn("Indexing Error occurred for ID=" + md.getCodeId());
            log.warn("Message: " + EntityUtils.toString(response.getEntity()));
        }
    } catch (IOException e) {
        log.warn("Indexing Error: " + e.getMessage() + " ID=" + md.getCodeId());
    } finally {
        try {
            if (null != hc)
                hc.close();
        } catch (IOException e) {
            log.warn("Index Close Error: " + e.getMessage());
        }
    }
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchIndexUtils.java

@Test
public void test_columnarMapping_integrated() throws JsonProcessingException, IOException {
    final String both = Resources.toString(
            Resources
                    .getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/full_mapping_test.json"),
            Charsets.UTF_8);
    final JsonNode both_json = _mapper.readTree(both);

    final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class).with(
            DataBucketBean::data_schema,
            BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::columnar_schema,
                    BeanTemplateUtils.build(DataSchemaBean.ColumnarSchemaBean.class)
                            .with("field_include_list",
                                    Arrays.asList("column_only_enabled", "@timestamp", "@version"))
                            .with("field_exclude_list", Arrays.asList("column_only_disabled"))
                            .with("field_type_include_list", Arrays.asList("string"))
                            .with("field_type_exclude_list", Arrays.asList("number"))
                            .with("field_include_pattern_list", Arrays.asList("test*", "column_only_enabled2*"))
                            .with("field_exclude_pattern_list",
                                    Arrays.asList("*noindex", "column_only_disabled2*"))
                            .done().get())
                    .done().get())
            .done().get();

    final String expected = Resources.toString(
            Resources.getResource(
                    "com/ikanow/aleph2/search_service/elasticsearch/utils/mapping_test_results.json"),
            Charsets.UTF_8);
    final JsonNode expected_json = _mapper.readTree(expected);

    // 1) Default
    {
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups = ElasticsearchIndexUtils
                .parseDefaultMapping(both_json, Optional.empty(), Optional.empty(), Optional.empty(),
                        _config.search_technology_override(), _mapper);

        //DEBUG
        //         System.out.println("(Field lookups = " + field_lookups + ")");
        //         System.out.println("(Analyzed default = " + _config.columnar_technology_override().default_field_data_analyzed() + ")");
        //         System.out.println("(NotAnalyzed default = " + _config.columnar_technology_override().default_field_data_notanalyzed() + ")");

        final XContentBuilder test_result = ElasticsearchIndexUtils.getColumnarMapping(test_bucket,
                Optional.empty(), field_lookups,
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_analyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_analyzed(),
                        JsonNode.class),
                Optional.empty(), _config.search_technology_override(), _mapper, "_default_");

        final ObjectNode expected_remove_search_settings = ((ObjectNode) expected_json.get("mappings")
                .get("_default_")).remove(Arrays.asList("_meta", "_all", "_source"));
        assertEquals(expected_remove_search_settings.toString(), test_result.bytes().toUtf8());

        // 1b) While we're here, just test that the temporal service doesn't change the XContent

        final XContentBuilder test_result_1b_1 = ElasticsearchIndexUtils.getTemporalMapping(test_bucket,
                Optional.of(test_result));

        assertEquals(test_result_1b_1.bytes().toUtf8(), test_result.bytes().toUtf8());

        // Slightly more complex: add a non-null temporal mapping (it's ignored for mapping purposes and used elsewhere)

        final DataBucketBean test_bucket_temporal = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.build(DataSchemaBean.class)
                                .with(DataSchemaBean::temporal_schema,
                                        BeanTemplateUtils.build(DataSchemaBean.TemporalSchemaBean.class)
                                                .with("grouping_time_period", "1w").done().get())
                                .done().get())
                .done().get();

        final XContentBuilder test_result_1b_2 = ElasticsearchIndexUtils
                .getTemporalMapping(test_bucket_temporal, Optional.of(test_result));

        assertEquals(test_result_1b_2.bytes().toUtf8(), test_result.bytes().toUtf8());

        // 1c) Check that it throws if there's a duplicate key

        // (It no longer throws on duplicate keys; it just ignores the later ones)

        //         final DataBucketBean test_bucket_error = BeanTemplateUtils.build(DataBucketBean.class)
        //               .with(DataBucketBean::data_schema, 
        //                     BeanTemplateUtils.build(DataSchemaBean.class)
        //                        .with(DataSchemaBean::columnar_schema,
        //                              BeanTemplateUtils.build(DataSchemaBean.ColumnarSchemaBean.class)
        //                                 .with("field_include_list", Arrays.asList("column_only_enabled", "@timestamp", "@version"))
        //                                 .with("field_exclude_list", Arrays.asList("column_only_enabled"))
        //                                 .with("field_type_include_list", Arrays.asList("string"))
        //                                 .with("field_type_exclude_list", Arrays.asList("number"))
        //                                 .with("field_include_pattern_list", Arrays.asList("test*", "column_only_enabled*"))
        //                                 .with("field_exclude_pattern_list", Arrays.asList("*noindex", "column_only_disabled*"))
        //                              .done().get()
        //                        )
        //                     .done().get()
        //                     )
        //               .done().get();
        //   
        //
        //         try {
        //            ElasticsearchIndexUtils.getColumnarMapping(
        //                  test_bucket_error, Optional.empty(), field_lookups, 
        //                  _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_notanalyzed(), JsonNode.class),
        //                  _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_analyzed(), JsonNode.class), 
        //                  _mapper.convertValue(_config.columnar_technology_override().default_field_data_notanalyzed(), JsonNode.class),
        //                  _mapper.convertValue(_config.columnar_technology_override().default_field_data_analyzed(), JsonNode.class), 
        //               _mapper);
        //            
        //            fail("Should have thrown exception");
        //         }
        //         catch (Exception e) {} // expected, carry on

    }

    // 1d) Check if doc schema are enabled
    {
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups = ElasticsearchIndexUtils
                .parseDefaultMapping(both_json, Optional.empty(), Optional.empty(), Optional.empty(),
                        _config.search_technology_override(), _mapper);

        final XContentBuilder test_result = ElasticsearchIndexUtils.getColumnarMapping(test_bucket,
                Optional.empty(), field_lookups,
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_analyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_analyzed(),
                        JsonNode.class),
                Optional.of(_mapper.convertValue(_config.document_schema_override(), JsonNode.class)),
                _config.search_technology_override(), _mapper, "_default_");

        assertTrue("Should contain the annotation logic: " + test_result.string(),
                test_result.string().contains("\"__a\":{\"properties\":{"));
    }

    // 2) Types instead of "_defaults_"

    // 2a) type exists

    {
        final String test_type = Resources.toString(
                Resources.getResource(
                        "com/ikanow/aleph2/search_service/elasticsearch/utils/full_mapping_test_type.json"),
                Charsets.UTF_8);
        final JsonNode test_type_json = _mapper.readTree(test_type);

        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups = ElasticsearchIndexUtils
                .parseDefaultMapping(test_type_json, Optional.of("type_test"), Optional.empty(),
                        Optional.empty(), _config.search_technology_override(), _mapper);

        final XContentBuilder test_result = ElasticsearchIndexUtils.getColumnarMapping(test_bucket,
                Optional.of(XContentFactory.jsonBuilder().startObject()), field_lookups,
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_analyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_analyzed(),
                        JsonNode.class),
                Optional.empty(), _config.search_technology_override(), _mapper, "_default_");

        assertEquals(expected_json.get("mappings").get("_default_").toString(), test_result.bytes().toUtf8());
    }

    // 2b) type doesn't exist, should fall back to _default_

    {
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups = ElasticsearchIndexUtils
                .parseDefaultMapping(both_json, Optional.of("no_such_type"), Optional.empty(), Optional.empty(),
                        _config.search_technology_override(), _mapper);

        final XContentBuilder test_result = ElasticsearchIndexUtils.getColumnarMapping(test_bucket,
                Optional.of(XContentFactory.jsonBuilder().startObject()), field_lookups,
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().enabled_field_data_analyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_notanalyzed(),
                        JsonNode.class),
                _mapper.convertValue(_config.columnar_technology_override().default_field_data_analyzed(),
                        JsonNode.class),
                Optional.empty(), _config.search_technology_override(), _mapper, "_default_");

        assertEquals(expected_json.get("mappings").get("_default_").toString(), test_result.bytes().toUtf8());
    }
}

From source file:com.marklogic.jena.functionaltests.ConnectedRESTQA.java

public static void addElementRangeIndexTemporalAxis(String dbName, String axisName, String namespaceStart,
        String localnameStart, String namespaceEnd, String localnameEnd) throws Exception {
    /**
     * {
     *   "axis-name": "eri-json-system",
     *   "axis-start": {
     *     "element-reference": {
     *       "namespace-uri": "",
     *       "localname": "eri-system-start",
     *       "scalar-type": "dateTime"
     *     }
     *   },
     *   "axis-end": {
     *     "element-reference": {
     *       "namespace-uri": "",
     *       "localname": "eri-system-end",
     *       "scalar-type": "dateTime"
     *     }
     *   }
     * }
     */
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode rootNode = mapper.createObjectNode();

    rootNode.put("axis-name", axisName);

    // Set axis start
    ObjectNode axisStart = mapper.createObjectNode();
    ObjectNode elementReferenceStart = mapper.createObjectNode();
    elementReferenceStart.put("namespace-uri", namespaceStart);
    elementReferenceStart.put("localname", localnameStart);
    elementReferenceStart.put("scalar-type", "dateTime");

    axisStart.set("element-reference", elementReferenceStart);
    rootNode.set("axis-start", axisStart);

    // Set axis end
    ObjectNode axisEnd = mapper.createObjectNode();
    ObjectNode elementReferenceEnd = mapper.createObjectNode();
    elementReferenceEnd.put("namespace-uri", namespaceStart);
    elementReferenceEnd.put("localname", localnameEnd);
    elementReferenceEnd.put("scalar-type", "dateTime");

    axisEnd.set("element-reference", elementReferenceEnd);
    rootNode.set("axis-end", axisEnd);

    System.out.println(rootNode.toString());

    DefaultHttpClient client = new DefaultHttpClient();
    client.getCredentialsProvider().setCredentials(new AuthScope("localhost", 8002),
            new UsernamePasswordCredentials("admin", "admin"));

    HttpPost post = new HttpPost(
            "http://localhost:8002/manage/v2/databases/" + dbName + "/temporal/axes?format=json");

    post.addHeader("Content-type", "application/json");
    post.addHeader("accept", "application/json");
    post.setEntity(new StringEntity(rootNode.toString()));

    HttpResponse response = client.execute(post);
    HttpEntity respEntity = response.getEntity();
    if (response.getStatusLine().getStatusCode() == 400) {
        HttpEntity entity = response.getEntity();
        String responseString = EntityUtils.toString(entity, "UTF-8");
        System.out.println(responseString);
    } else if (respEntity != null) {
        // EntityUtils to get the response content
        String content = EntityUtils.toString(respEntity);
        System.out.println(content);

        System.out.println("Temporal axis: " + axisName + " created");
        System.out.println("==============================================================");
    } else {
        System.out.println("No Proper Response");
    }
}

From source file:com.glaf.report.web.rest.MxReportFileResource.java

@GET
@POST
@Path("/list")
@ResponseBody
@Produces({ MediaType.APPLICATION_OCTET_STREAM })
public byte[] list(@Context HttpServletRequest request) {
    Map<String, Object> params = RequestUtils.getParameterMap(request);
    ReportFileQuery query = new ReportFileQuery();
    Tools.populate(query, params);

    String gridType = ParamUtils.getString(params, "gridType");
    if (gridType == null) {
        gridType = "easyui";
    }
    int start = 0;
    int limit = 10;
    String orderName = null;
    String order = null;
    if ("easyui".equals(gridType)) {
        int pageNo = ParamUtils.getInt(params, "page");
        limit = ParamUtils.getInt(params, "rows");
        start = (pageNo - 1) * limit;
        orderName = ParamUtils.getString(params, "sort");
        order = ParamUtils.getString(params, "order");
    } else if ("extjs".equals(gridType)) {
        start = ParamUtils.getInt(params, "start");
        limit = ParamUtils.getInt(params, "limit");
        orderName = ParamUtils.getString(params, "sort");
        order = ParamUtils.getString(params, "dir");
    } else if ("yui".equals(gridType)) {
        start = ParamUtils.getInt(params, "startIndex");
        limit = ParamUtils.getInt(params, "results");
        orderName = ParamUtils.getString(params, "sort");
        order = ParamUtils.getString(params, "dir");
    }

    if (start < 0) {
        start = 0;
    }

    if (limit <= 0) {
        limit = Paging.DEFAULT_PAGE_SIZE;
    }

    ObjectNode responseJSON = new ObjectMapper().createObjectNode();
    int total = reportFileService.getReportFileCountByQueryCriteria(query);
    if (total > 0) {
        responseJSON.put("total", total);
        responseJSON.put("totalCount", total);
        responseJSON.put("totalRecords", total);
        responseJSON.put("start", start);
        responseJSON.put("startIndex", start);
        responseJSON.put("limit", limit);
        responseJSON.put("pageSize", limit);

        if (StringUtils.isNotEmpty(orderName)) {
            query.setSortOrder(orderName);
            if (StringUtils.equals(order, "desc")) {
                query.setSortOrder("desc");
            }
        }

        // Map<String, UserProfile> userMap =
        // MxIdentityFactory.getUserProfileMap();
        List<ReportFile> list = reportFileService.getReportFilesByQueryCriteria(start, limit, query);

        if (list != null && !list.isEmpty()) {
            ArrayNode rowsJSON = new ObjectMapper().createArrayNode();
            if ("yui".equals(gridType)) {
                responseJSON.set("records", rowsJSON);
            } else {
                responseJSON.set("rows", rowsJSON);
            }

            for (ReportFile reportFile : list) {
                ObjectNode node = reportFile.toObjectNode();
                node.put("sortNo", ++start);
                node.put("startIndex", start);
                rowsJSON.add(node);
            }

        }
    }
    try {
        return responseJSON.toString().getBytes("UTF-8");
    } catch (IOException e) {
        return responseJSON.toString().getBytes();
    }
}

From source file:com.googlecode.jsonrpc4j.JsonRpcHttpAsyncClient.java

/**
 * Writes a request.
 * 
 * @param methodName
 *            the method name
 * @param arguments
 *            the arguments
 * @param httpRequest
 *            the HTTP request to write the entity to
 * @throws IOException
 *             on error
 */
private void writeRequest(String methodName, Object arguments, HttpRequest httpRequest) throws IOException {

    // create the request
    ObjectNode request = mapper.createObjectNode();

    request.put("id", nextId.getAndIncrement());

    // add protocol and method
    request.put("jsonrpc", JSON_RPC_VERSION);
    request.put("method", methodName);

    // object array args
    if (arguments != null && arguments.getClass().isArray()) {
        Object[] args = Object[].class.cast(arguments);
        if (args.length > 0) {
            request.put("params", mapper.valueToTree(Object[].class.cast(arguments)));
        }

        // collection args
    } else if (arguments != null && Collection.class.isInstance(arguments)) {
        if (!Collection.class.cast(arguments).isEmpty()) {
            request.put("params", mapper.valueToTree(arguments));
        }

        // map args
    } else if (arguments != null && Map.class.isInstance(arguments)) {
        if (!Map.class.cast(arguments).isEmpty()) {
            request.put("params", mapper.valueToTree(arguments));
        }

        // other args
    } else if (arguments != null) {
        request.put("params", mapper.valueToTree(arguments));
    }

    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.log(Level.FINE, "JSON-PRC Request: " + request.toString());
    }

    ByteArrayOutputStream baos = new ByteArrayOutputStream(512);
    mapper.writeValue(baos, request);

    HttpEntityEnclosingRequest entityRequest = (HttpEntityEnclosingRequest) httpRequest;

    HttpEntity entity;
    if (entityRequest.getFirstHeader("Content-Type") == null) {
        // Set default content type if none is set.
        entity = new ByteArrayEntity(baos.toByteArray(), ContentType.APPLICATION_JSON);
    } else {
        entity = new ByteArrayEntity(baos.toByteArray());
    }

    entityRequest.setEntity(entity);
}

From source file:com.yzmy.jsonrpc4j.JsonRpcHttpAsyncClient.java

/**
 * Writes a request.
 * 
 * @param methodName
 *            the method name
 * @param arguments
 *            the arguments
 * @param httpRequest
 *            the HTTP request to write the entity to
 * @throws IOException
 *             on error
 */
private void writeRequest(String methodName, Object arguments, HttpRequest httpRequest) throws IOException {

    // create the request
    ObjectNode request = mapper.createObjectNode();

    request.put("id", nextId.getAndIncrement());

    // add protocol and method
    request.put("jsonrpc", JSON_RPC_VERSION);
    request.put("method", methodName);

    // object array args
    if (arguments != null && arguments.getClass().isArray()) {
        Object[] args = Object[].class.cast(arguments);
        if (args.length > 0) {
            request.set("params", mapper.valueToTree(Object[].class.cast(arguments)));
        }

        // collection args
    } else if (arguments != null && Collection.class.isInstance(arguments)) {
        if (!Collection.class.cast(arguments).isEmpty()) {
            request.set("params", mapper.valueToTree(arguments));
        }

        // map args
    } else if (arguments != null && Map.class.isInstance(arguments)) {
        if (!Map.class.cast(arguments).isEmpty()) {
            request.set("params", mapper.valueToTree(arguments));
        }

        // other args
    } else if (arguments != null) {
        request.set("params", mapper.valueToTree(arguments));
    }

    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.log(Level.FINE, "JSON-RPC Request: " + request.toString());
    }

    ByteArrayOutputStream baos = new ByteArrayOutputStream(512);
    mapper.writeValue(baos, request);

    HttpEntityEnclosingRequest entityRequest = (HttpEntityEnclosingRequest) httpRequest;

    HttpEntity entity;
    if (entityRequest.getFirstHeader("Content-Type") == null) {
        // Set default content type if none is set.
        entity = new ByteArrayEntity(baos.toByteArray(), ContentType.APPLICATION_JSON);
    } else {
        entity = new ByteArrayEntity(baos.toByteArray());
    }

    entityRequest.setEntity(entity);
}

From source file:com.glaf.activiti.web.springmvc.ActivitiTreeController.java

@ResponseBody
@RequestMapping("/processInstances")
public byte[] processInstances(HttpServletRequest request) {
    ObjectNode responseJSON = new ObjectMapper().createObjectNode();
    ArrayNode arrayJSON = new ObjectMapper().createArrayNode();
    Map<String, Object> params = RequestUtils.getParameterMap(request);
    logger.debug("params:" + params);
    int pageNo = ParamUtils.getInt(params, "page");
    int limit = ParamUtils.getInt(params, "rows");
    if (limit <= 0) {
        limit = 15;
    }
    if (pageNo <= 0) {
        pageNo = 1;
    }
    int start = (pageNo - 1) * limit;
    long total = activitiProcessQueryService.getProcessInstanceCount(params);
    responseJSON.put("start", start);
    responseJSON.put("limit", limit);
    if (total > 0) {
        responseJSON.put("total", total);
        List<ProcessInstance> list = activitiProcessQueryService.getProcessInstances(start, limit, params);
        if (list != null && !list.isEmpty()) {
            for (ProcessInstance processInstance : list) {
                ObjectNode row = new ObjectMapper().createObjectNode();
                row.put("sortNo", ++start);
                row.put("id", processInstance.getId());
                row.put("processInstanceId", processInstance.getId());
                row.put("businessKey", processInstance.getBusinessKey());
                row.put("isEnded", processInstance.isEnded());
                row.put("processDefinitionId", processInstance.getProcessDefinitionId());
                arrayJSON.add(row);
            }
            responseJSON.set("rows", arrayJSON);
        }
    } else {
        responseJSON.set("rows", arrayJSON);
    }
    try {
        // logger.debug(responseJSON.toString());
        return responseJSON.toString().getBytes("UTF-8");
    } catch (IOException e) {
        return responseJSON.toString().getBytes();
    }
}

From source file:io.gs2.stamina.Gs2StaminaClient.java

/**
 * Updates a stamina pool.
 *
 * @param request the update request
 * @return the update result
 */

public UpdateStaminaPoolResult updateStaminaPool(UpdateStaminaPoolRequest request) {

    ObjectNode body = JsonNodeFactory.instance.objectNode().put("serviceClass", request.getServiceClass())
            .put("increaseInterval", request.getIncreaseInterval());
    if (request.getDescription() != null)
        body.put("description", request.getDescription());
    if (request.getConsumeStaminaTriggerScript() != null)
        body.put("consumeStaminaTriggerScript", request.getConsumeStaminaTriggerScript());
    if (request.getConsumeStaminaDoneTriggerScript() != null)
        body.put("consumeStaminaDoneTriggerScript", request.getConsumeStaminaDoneTriggerScript());
    if (request.getAddStaminaTriggerScript() != null)
        body.put("addStaminaTriggerScript", request.getAddStaminaTriggerScript());
    if (request.getAddStaminaDoneTriggerScript() != null)
        body.put("addStaminaDoneTriggerScript", request.getAddStaminaDoneTriggerScript());
    if (request.getGetMaxStaminaTriggerScript() != null)
        body.put("getMaxStaminaTriggerScript", request.getGetMaxStaminaTriggerScript());
    HttpPut put = createHttpPut(
            Gs2Constant.ENDPOINT_HOST + "/staminaPool/"
                    + (request.getStaminaPoolName() == null || request.getStaminaPoolName().equals("") ? "null"
                            : request.getStaminaPoolName())
                    + "",
            credential, ENDPOINT, UpdateStaminaPoolRequest.Constant.MODULE,
            UpdateStaminaPoolRequest.Constant.FUNCTION, body.toString());
    if (request.getRequestId() != null) {
        put.setHeader("X-GS2-REQUEST-ID", request.getRequestId());
    }

    return doRequest(put, UpdateStaminaPoolResult.class);

}