Example usage for com.fasterxml.jackson.databind.node ArrayNode size

List of usage examples for com.fasterxml.jackson.databind.node ArrayNode size

Introduction

On this page you can find usage examples for com.fasterxml.jackson.databind.node.ArrayNode.size().

Prototype

public int size() 
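
For quick orientation before the longer examples below, here is a minimal, self-contained sketch (not taken from any of the source files listed on this page) showing the common pattern they share: check isArray(), cast to ArrayNode, and use size() as the loop bound. The class name is illustrative only.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

public class ArrayNodeSizeExample {

    public static void main(String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper();

        // Parse a JSON document whose root is an array.
        final JsonNode root = mapper.readTree("[\"a\", \"b\", \"c\"]");

        // Confirm the node really is an array before casting, as the examples below do.
        if (!root.isArray()) {
            throw new IllegalStateException("Expected an array, got: " + root.getNodeType());
        }
        final ArrayNode array = (ArrayNode) root;

        // size() returns the number of direct child elements (3 here).
        System.out.println("size = " + array.size());

        // size() is the usual loop bound for index-based iteration.
        for (int i = 0; i < array.size(); i++) {
            System.out.println(i + " -> " + array.get(i).asText());
        }
    }
}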

Usage

From source file:com.alliander.osgp.shared.usermanagement.KeycloakClient.java

/**
 * Performs a user lookup by username.
 * <p>
 * This method assumes an existing unique username is provided, so exactly
 * one user will be found in the lookup.
 *
 * @param username
 *            an existing Keycloak username for the configured realm.
 * @return the user ID for the user with the given username.
 * @throws KeycloakClientException
 *             if retrieving a single user ID for the given username does
 *             not succeed.
 */
public String getUserId(final String username) throws KeycloakClientException {

    LOGGER.info("Retrieving Keycloak user ID for user '{}' and realm '{}'.", username, this.realm);

    final WebClient getUserIdWebClient = this.getWebClientInstance().path(this.usersPath).query("username",
            username);

    Response response = this.withBearerToken(getUserIdWebClient).get();

    JsonNode jsonNode;
    try {
        jsonNode = this.getJsonResponseBody(response);
    } catch (final KeycloakBearerException e) {
        LOGGER.debug("It looks like the bearer token expired, retry API call to the user lookup.", e);
        this.refreshToken();
        response = this.withBearerToken(getUserIdWebClient).get();
        jsonNode = this.getJsonResponseBody(response);
    }

    if (!jsonNode.isArray()) {
        throw new KeycloakClientException(
                "Expected array result from Keycloak API user lookup, got: " + jsonNode.getNodeType().name());
    }
    final ArrayNode jsonArray = (ArrayNode) jsonNode;

    if (jsonArray.size() != 1) {
        throw new KeycloakClientException("Expected 1 array result from Keycloak API user lookup for username '"
                + username + "', got: " + jsonArray.size());
    }

    final JsonNode userRepresentation = jsonArray.get(0);

    final JsonNode userId = userRepresentation.get("id");

    if (userId == null || !userId.isTextual()) {
        throw new KeycloakClientException(
                "Keycloak API user representation does not contain a JSON text field 'id'.");
    }

    return userId.textValue();
}

From source file:fr.gouv.vitam.mdbes.QueryBench.java

/**
  * { expression, $depth : exactdepth, $relativedepth : +/- depth, vary : [] },
  * $depth and $relativedepth being optional (mutually exclusive),
  * vary being optional
  *
  * @param command
  * @param level
  * @throws InvalidParseOperationException
  */
private void analyzeVary(final JsonNode command, int level) throws InvalidParseOperationException {
    if (command == null) {
        throw new InvalidParseOperationException("Not correctly parsed");
    }
    // check vary
    List<TypeField> fields = new ArrayList<>();
    if (command.has(VARY)) {
        final ArrayNode jvary = (ArrayNode) ((ObjectNode) command).remove(VARY);
        for (int i = 0; i < jvary.size(); i++) {
            JsonNode node = jvary.get(i);
            TypeField tf = getField(node, level);
            if (tf != null) {
                fields.add(tf);
            }
        }
    }
    levelFields.add(fields);
    levelRequests.add(command);
}

From source file:org.apache.hadoop.gateway.filter.rewrite.impl.json.JsonFilterReader.java

private void processValueString() throws IOException {
    Level child;
    Level parent;
    String value = null;
    parent = stack.peek();
    if (parent.isArray()) {
        ArrayNode array = (ArrayNode) parent.node;
        array.add(parser.getText());
        if (bufferingLevel == null) {
            value = filterStreamValue(parent);
            array.set(array.size() - 1, new TextNode(value));
        } else {
            array.removeAll();
        }
    } else {
        child = stack.pop();
        parent = stack.peek();
        ((ObjectNode) parent.node).put(child.field, parser.getText());
        if (bufferingLevel == null) {
            child.node = parent.node; // Populate the JsonNode of the child for filtering.
            value = filterStreamValue(child);
        }
    }
    if (bufferingLevel == null) {
        if (parent.node.isArray()) {
            ((ArrayNode) parent.node).removeAll();
        } else {
            ((ObjectNode) parent.node).removeAll();
        }
        generator.writeString(value);
    }
}

From source file:com.alliander.osgp.shared.usermanagement.KeycloakClient.java

/**
 * Looks up the sessions for the user with the given user ID. If a session
 * exists that has the login client in its clients collection, the session
 * ID for this session is returned. If no such session is returned, this
 * method returns {@code null}.
 * <p>
 * The user ID can be looked up by username by calling
 * {@link #getUserId(String)}.
 *
 * @param userId
 *            an existing Keycloak user ID for the configured realm.
 *
 * @return the session ID for a login session for the user with the given ID
 *         with the configured login client and realm, or {@code null} if
 *         such a session is not found.
 * @throws KeycloakClientException
 *             in case of errors while obtaining the session ID from
 *             Keycloak.
 */
public String getUserSessionId(final String userId) throws KeycloakClientException {
    LOGGER.info("Retrieving Keycloak user session for user ID '{}' with client '{}' for realm '{}'.", userId,
            this.loginClient, this.realm);

    final WebClient getUserSessionIdWebClient = this.getWebClientInstance()
            .path(this.userSessionsPath.replace(PATH_ELEMENT_USER_ID, userId));

    Response response = this.withBearerToken(getUserSessionIdWebClient).get();
    JsonNode jsonNode;
    try {
        jsonNode = this.getJsonResponseBody(response);
    } catch (final KeycloakBearerException e) {
        LOGGER.debug("It looks like the bearer token expired, retry API call to get the session ID.", e);
        this.refreshToken();
        response = this.withBearerToken(getUserSessionIdWebClient).get();
        jsonNode = this.getJsonResponseBody(response);
    }

    final ArrayNode jsonArray = this.asArrayNode(jsonNode, "from Keycloak API user lookup");

    if (jsonArray.size() == 0) {
        /*
         * No sessions in Keycloak for the given user. This would be a
         * normal situation when the application login is based only on a
         * Mellon session still active, when there is no single logout at
         * the end of a Keycloak session.
         */
        LOGGER.info("No active Keycloak sessions for user ID '{}' for realm '{}'.", userId, this.realm);
        return null;
    }

    final String sessionId = this.determineSessionIdWithLoginClient(jsonArray);

    if (sessionId == null) {
        LOGGER.info("No active Keycloak sessions for user ID '{}' with client '{}' for realm '{}'.", userId,
                this.loginClient, this.realm);
    }

    return sessionId;
}

From source file:org.talend.dataprep.api.service.DataSetAPITest.java

@Test
public void testLookupActionsActions() throws Exception {
    // given
    final String firstDataSetId = createDataset("dataset/dataset.csv", "testDataset", "text/csv");
    final String dataSetId = createDataset("dataset/dataset_cars.csv", "cars", "text/csv");
    final String thirdDataSetId = createDataset("dataset/dataset.csv", "third", "text/csv");

    List<String> expectedIds = Arrays.asList(firstDataSetId, thirdDataSetId);

    // when
    final String actions = when().get("/api/datasets/{id}/actions", dataSetId).asString();

    // then
    final JsonNode jsonNode = mapper.readTree(actions);
    // response is an array
    assertTrue("json not an array:" + actions, jsonNode.isArray());
    Assertions.assertThat(jsonNode.isArray()).isTrue();
    // an array of 2 entries
    ArrayNode lookups = (ArrayNode) jsonNode;
    assertThat(lookups.size(), is(2));

    // let's check the url of the possible lookups
    for (int i = 0; i < lookups.size(); i++) {
        final JsonNode lookup = lookups.get(i);
        final ArrayNode parameters = (ArrayNode) lookup.get("parameters");
        for (int j = 0; j < parameters.size(); j++) {
            final JsonNode parameter = parameters.get(j);
            if (StringUtils.equals(parameter.get("name").asText(), "url")) {
                final String url = parameter.get("default").asText();
                // the url id must be known
                assertThat(expectedIds.stream().filter(url::contains).count(), is(1L));
            }
        }
    }
}

From source file:org.apache.nifi.processors.elasticsearch.PutElasticsearchHttpRecord.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER)
            .asControllerService(RecordReaderFactory.class);

    // Authentication
    final String username = context.getProperty(USERNAME).evaluateAttributeExpressions(flowFile).getValue();
    final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions(flowFile).getValue();

    OkHttpClient okHttpClient = getClient();
    final ComponentLog logger = getLogger();

    final String baseUrl = trimToEmpty(context.getProperty(ES_URL).evaluateAttributeExpressions().getValue());
    HttpUrl.Builder urlBuilder = HttpUrl.parse(baseUrl).newBuilder().addPathSegment("_bulk");

    // Find the user-added properties and set them as query parameters on the URL
    for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
        PropertyDescriptor pd = property.getKey();
        if (pd.isDynamic()) {
            if (property.getValue() != null) {
                urlBuilder = urlBuilder.addQueryParameter(pd.getName(),
                        context.getProperty(pd).evaluateAttributeExpressions().getValue());
            }
        }
    }
    final URL url = urlBuilder.build().url();

    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(index)) {
        logger.error("No value for index in for {}, transferring to failure", new Object[] { flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    String indexOp = context.getProperty(INDEX_OP).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(indexOp)) {
        logger.error("No Index operation specified for {}, transferring to failure.",
                new Object[] { flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    switch (indexOp.toLowerCase()) {
    case "index":
    case "update":
    case "upsert":
    case "delete":
        break;
    default:
        logger.error("Index operation {} not supported for {}, transferring to failure.",
                new Object[] { indexOp, flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    final String id_path = context.getProperty(ID_RECORD_PATH).evaluateAttributeExpressions(flowFile)
            .getValue();
    final RecordPath recordPath = StringUtils.isEmpty(id_path) ? null : recordPathCache.getCompiled(id_path);
    final StringBuilder sb = new StringBuilder();

    try (final InputStream in = session.read(flowFile);
            final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {

        Record record;
        while ((record = reader.nextRecord()) != null) {

            final String id;
            if (recordPath != null) {
                Optional<FieldValue> idPathValue = recordPath.evaluate(record).getSelectedFields().findFirst();
                if (!idPathValue.isPresent() || idPathValue.get().getValue() == null) {
                    throw new IdentifierNotFoundException(
                            "Identifier Record Path specified but no value was found, transferring {} to failure.");
                }
                id = idPathValue.get().getValue().toString();
            } else {
                id = null;
            }

            // The ID must be valid for all operations except "index". For that case,
            // a missing ID indicates one is to be auto-generated by Elasticsearch
            if (id == null && !indexOp.equalsIgnoreCase("index")) {
                throw new IdentifierNotFoundException(
                        "Index operation {} requires a valid identifier value from a flow file attribute, transferring to failure.");
            }

            final StringBuilder json = new StringBuilder();

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            JsonGenerator generator = factory.createJsonGenerator(out);
            writeRecord(record, record.getSchema(), generator);
            generator.flush();
            generator.close();
            json.append(out.toString());

            if (indexOp.equalsIgnoreCase("index")) {
                sb.append("{\"index\": { \"_index\": \"");
                sb.append(index);
                sb.append("\", \"_type\": \"");
                sb.append(docType);
                sb.append("\"");
                if (!StringUtils.isEmpty(id)) {
                    sb.append(", \"_id\": \"");
                    sb.append(id);
                    sb.append("\"");
                }
                sb.append("}}\n");
                sb.append(json);
                sb.append("\n");
            } else if (indexOp.equalsIgnoreCase("upsert") || indexOp.equalsIgnoreCase("update")) {
                sb.append("{\"update\": { \"_index\": \"");
                sb.append(index);
                sb.append("\", \"_type\": \"");
                sb.append(docType);
                sb.append("\", \"_id\": \"");
                sb.append(id);
                sb.append("\" }\n");
                sb.append("{\"doc\": ");
                sb.append(json);
                sb.append(", \"doc_as_upsert\": ");
                sb.append(indexOp.equalsIgnoreCase("upsert"));
                sb.append(" }\n");
            } else if (indexOp.equalsIgnoreCase("delete")) {
                sb.append("{\"delete\": { \"_index\": \"");
                sb.append(index);
                sb.append("\", \"_type\": \"");
                sb.append(docType);
                sb.append("\", \"_id\": \"");
                sb.append(id);
                sb.append("\" }\n");
            }
        }
    } catch (IdentifierNotFoundException infe) {
        logger.error(infe.getMessage(), new Object[] { flowFile });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;

    } catch (final IOException | SchemaNotFoundException | MalformedRecordException e) {
        logger.error("Could not parse incoming data", e);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    RequestBody requestBody = RequestBody.create(MediaType.parse("application/json"), sb.toString());
    final Response getResponse;
    try {
        getResponse = sendRequestToElasticsearch(okHttpClient, url, username, password, "PUT", requestBody);
    } catch (final Exception e) {
        logger.error("Routing to {} due to exception: {}", new Object[] { REL_FAILURE.getName(), e }, e);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final int statusCode = getResponse.code();

    if (isSuccess(statusCode)) {
        ResponseBody responseBody = getResponse.body();
        try {
            final byte[] bodyBytes = responseBody.bytes();

            JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
            boolean errors = responseJson.get("errors").asBoolean(false);
            // ES has no rollback, so if errors occur, log them and route the whole flow file to failure
            if (errors) {
                ArrayNode itemNodeArray = (ArrayNode) responseJson.get("items");
                if (itemNodeArray.size() > 0) {
                    // All items are returned whether they succeeded or failed, so iterate through the item array
                    // at the same time as the flow file list, logging failures accordingly
                    for (int i = itemNodeArray.size() - 1; i >= 0; i--) {
                        JsonNode itemNode = itemNodeArray.get(i);
                        int status = itemNode.findPath("status").asInt();
                        if (!isSuccess(status)) {
                            String reason = itemNode.findPath("//error/reason").asText();
                            logger.error(
                                    "Failed to insert {} into Elasticsearch due to {}, transferring to failure",
                                    new Object[] { flowFile, reason });
                        }
                    }
                }
                session.transfer(flowFile, REL_FAILURE);
            } else {
                session.transfer(flowFile, REL_SUCCESS);
                session.getProvenanceReporter().send(flowFile, url.toString());
            }

        } catch (IOException ioe) {
            // Something went wrong when parsing the response, log the error and route to failure
            logger.error("Error parsing Bulk API response: {}", new Object[] { ioe.getMessage() }, ioe);
            session.transfer(flowFile, REL_FAILURE);
            context.yield();
        }
    } else if (statusCode / 100 == 5) {
        // 5xx -> RETRY, but a server error might last a while, so yield
        logger.warn(
                "Elasticsearch returned code {} with message {}, transferring flow file to retry. This is likely a server problem, yielding...",
                new Object[] { statusCode, getResponse.message() });
        session.transfer(flowFile, REL_RETRY);
        context.yield();
    } else { // 1xx, 3xx, 4xx, etc. -> NO RETRY
        logger.warn("Elasticsearch returned code {} with message {}, transferring flow file to failure",
                new Object[] { statusCode, getResponse.message() });
        session.transfer(flowFile, REL_FAILURE);
    }
    getResponse.close();
}

From source file:UploadTest.java

@Test
public void jackson_test() {
    try {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode root = mapper.readTree(new File("/home/raul/test/frl%3A6376984/6376984.json"));

        JsonNode curValue = root.at("/hasPart");
        System.out.println("curValue: " + curValue);
        if (curValue.isArray()) {
            ArrayNode hasPartArray = (ArrayNode) curValue;
            for (int i = 0; i < hasPartArray.size(); i++) {
                JsonNode curPart = getFirst((ObjectNode) hasPartArray.get(i));
                System.out.println("curPart: " + curPart);
                System.out.println("Found " + curPart);
            }

        } else {
            throw new RuntimeException(
                    "Unexpected type " + curValue.getNodeType() + " found: " + curValue.asText());
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:org.apache.flink.yarn.YARNSessionCapacitySchedulerITCase.java

/**
 * Test TaskManager failure and also if the vcores are set correctly (see issue FLINK-2213).
 */
@Test(timeout = 100000) // timeout after 100 seconds
public void testTaskManagerFailure() {
    LOG.info("Starting testTaskManagerFailure()");
    Runner runner = startWithArgs(
            new String[] { "-j", flinkUberjar.getAbsolutePath(), "-t", flinkLibFolder.getAbsolutePath(), "-n",
                    "1", "-jm", "768", "-tm", "1024", "-s", "3", // set the slots 3 to check if the vCores are set properly!
                    "-nm", "customName", "-Dfancy-configuration-value=veryFancy",
                    "-Dyarn.maximum-failed-containers=3", "-D" + ConfigConstants.YARN_VCORES + "=2" },
            "Number of connected TaskManagers changed to 1. Slots available: 3", RunTypes.YARN_SESSION);

    Assert.assertEquals(2, getRunningContainers());

    // ------------------------ Test if JobManager web interface is accessible -------

    YarnClient yc = null;
    try {
        yc = YarnClient.createYarnClient();
        yc.init(yarnConfiguration);
        yc.start();

        List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
        Assert.assertEquals(1, apps.size()); // Only one running
        ApplicationReport app = apps.get(0);
        Assert.assertEquals("customName", app.getName());
        String url = app.getTrackingUrl();
        if (!url.endsWith("/")) {
            url += "/";
        }
        if (!url.startsWith("http://")) {
            url = "http://" + url;
        }
        LOG.info("Got application URL from YARN {}", url);

        String response = TestBaseUtils.getFromHTTP(url + "taskmanagers/");

        JsonNode parsedTMs = new ObjectMapper().readTree(response);
        ArrayNode taskManagers = (ArrayNode) parsedTMs.get("taskmanagers");
        Assert.assertNotNull(taskManagers);
        Assert.assertEquals(1, taskManagers.size());
        Assert.assertEquals(3, taskManagers.get(0).get("slotsNumber").asInt());

        // get the configuration from webinterface & check if the dynamic properties from YARN show up there.
        String jsonConfig = TestBaseUtils.getFromHTTP(url + "jobmanager/config");
        Map<String, String> parsedConfig = WebMonitorUtils.fromKeyValueJsonArray(jsonConfig);

        Assert.assertEquals("veryFancy", parsedConfig.get("fancy-configuration-value"));
        Assert.assertEquals("3", parsedConfig.get("yarn.maximum-failed-containers"));
        Assert.assertEquals("2", parsedConfig.get(ConfigConstants.YARN_VCORES));

        // -------------- FLINK-1902: check if jobmanager hostname/port are shown in web interface
        // first, get the hostname/port
        String oC = outContent.toString();
        Pattern p = Pattern.compile("Flink JobManager is now running on ([a-zA-Z0-9.-]+):([0-9]+)");
        Matcher matches = p.matcher(oC);
        String hostname = null;
        String port = null;
        while (matches.find()) {
            hostname = matches.group(1).toLowerCase();
            port = matches.group(2);
        }
        LOG.info("Extracted hostname:port: {} {}", hostname, port);

        Assert.assertEquals("unable to find hostname in " + jsonConfig, hostname,
                parsedConfig.get(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY));
        Assert.assertEquals("unable to find port in " + jsonConfig, port,
                parsedConfig.get(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY));

        // test logfile access
        String logs = TestBaseUtils.getFromHTTP(url + "jobmanager/log");
        Assert.assertTrue(logs.contains("Starting YARN ApplicationMaster"));
        Assert.assertTrue(logs.contains("Starting JobManager"));
        Assert.assertTrue(logs.contains("Starting JobManager Web Frontend"));
    } catch (Throwable e) {
        LOG.warn("Error while running test", e);
        Assert.fail(e.getMessage());
    }

    // ------------------------ Kill container with TaskManager and check if vcores are set correctly -------

    // find container id of taskManager:
    ContainerId taskManagerContainer = null;
    NodeManager nodeManager = null;
    UserGroupInformation remoteUgi = null;
    NMTokenIdentifier nmIdent = null;
    try {
        remoteUgi = UserGroupInformation.getCurrentUser();
    } catch (IOException e) {
        LOG.warn("Unable to get curr user", e);
        Assert.fail();
    }
    for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
        NodeManager nm = yarnCluster.getNodeManager(nmId);
        ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers();
        for (Map.Entry<ContainerId, Container> entry : containers.entrySet()) {
            String command = Joiner.on(" ").join(entry.getValue().getLaunchContext().getCommands());
            if (command.contains(YarnTaskManager.class.getSimpleName())) {
                taskManagerContainer = entry.getKey();
                nodeManager = nm;
                nmIdent = new NMTokenIdentifier(taskManagerContainer.getApplicationAttemptId(), null, "", 0);
                // allow myself to do stuff with the container
                // remoteUgi.addCredentials(entry.getValue().getCredentials());
                remoteUgi.addTokenIdentifier(nmIdent);
            }
        }
        sleep(500);
    }

    Assert.assertNotNull("Unable to find container with TaskManager", taskManagerContainer);
    Assert.assertNotNull("Illegal state", nodeManager);

    yc.stop();

    List<ContainerId> toStop = new LinkedList<ContainerId>();
    toStop.add(taskManagerContainer);
    StopContainersRequest scr = StopContainersRequest.newInstance(toStop);

    try {
        nodeManager.getNMContext().getContainerManager().stopContainers(scr);
    } catch (Throwable e) {
        LOG.warn("Error stopping container", e);
        Assert.fail("Error stopping container: " + e.getMessage());
    }

    // stateful termination check:
    // wait until we saw a container being killed and AFTERWARDS a new one launched
    boolean ok = false;
    do {
        LOG.debug("Waiting for correct order of events. Output: {}", errContent.toString());

        String o = errContent.toString();
        int killedOff = o.indexOf("Container killed by the ApplicationMaster");
        if (killedOff != -1) {
            o = o.substring(killedOff);
            ok = o.indexOf("Launching TaskManager") > 0;
        }
        sleep(1000);
    } while (!ok);

    // send "stop" command to command line interface
    runner.sendStop();
    // wait for the thread to stop
    try {
        runner.join(1000);
    } catch (InterruptedException e) {
        LOG.warn("Interrupted while stopping runner", e);
    }
    LOG.warn("stopped");

    // ----------- Send output to logger
    System.setOut(originalStdout);
    System.setErr(originalStderr);
    String oC = outContent.toString();
    String eC = errContent.toString();
    LOG.info("Sending stdout content through logger: \n\n{}\n\n", oC);
    LOG.info("Sending stderr content through logger: \n\n{}\n\n", eC);

    // ------ Check if everything happened correctly
    Assert.assertTrue("Expect to see failed container", eC.contains("New messages from the YARN cluster"));

    Assert.assertTrue("Expect to see failed container",
            eC.contains("Container killed by the ApplicationMaster"));

    Assert.assertTrue("Expect to see new container started",
            eC.contains("Launching TaskManager") && eC.contains("on host"));

    // cleanup auth for the subsequent tests.
    remoteUgi.getTokenIdentifiers().remove(nmIdent);

    LOG.info("Finished testTaskManagerFailure()");
}

From source file:com.heliosapm.mws.server.net.json.JSONRequest.java

/**
 * Creates a new JSONRequest
 * @param channel The channel that the request came in on. Ignored if null 
 * @param tCode the type code of the request
 * @param rid The client supplied request ID
 * @param rerid The client supplied in-reference-to request ID
 * @param serviceName The service name requested
 * @param opName The op name requested
 * @param request The original request
 */
protected JSONRequest(Channel channel, String tCode, long rid, long rerid, String serviceName, String opName,
        JsonNode request) {
    this.channel = channel;
    this.tCode = tCode;
    this.requestId = rid;
    this.inReferenceToRequestId = rerid;
    this.serviceName = serviceName;
    this.opName = opName;
    this.request = request;
    JsonNode argNode = request.get("args");
    if (argNode != null) {
        if (argNode instanceof ArrayNode) {
            ArrayNode an = (ArrayNode) argNode;
            for (int i = 0; i < an.size(); i++) {
                arguments.put("" + i, an.get(i));
            }
        } else if (argNode instanceof ObjectNode) {
            ObjectNode on = (ObjectNode) argNode;
            for (Iterator<String> siter = on.fieldNames(); siter.hasNext();) {
                String fieldName = siter.next();
                arguments.put(fieldName, on.get(fieldName));
            }
        }
    }
}

From source file:ru.histone.deparser.Deparser.java

protected String processFor(ArrayNode ast) {
    ArrayNode var = (ArrayNode) ast.get(1);
    ArrayNode collection = (ArrayNode) ast.get(2);
    ArrayNode ifStatements = (ArrayNode) ast.get(3).get(0);
    ArrayNode elseStatements = (ast.get(3).size() > 1) ? (ArrayNode) ast.get(3).get(1) : null;

    StringBuilder result = new StringBuilder();

    String v1 = var.get(0).asText();
    String v2 = (var.size() > 1) ? var.get(1).asText() : null;

    String collectionProcessed = processAstNode(collection);
    result.append(ind() + "for (" + v1 + "," + v2 + " in " + collectionProcessed + ") {\n");

    indent();
    for (JsonNode ifStatement : ifStatements) {
        String s = processAstNode(ifStatement);
        if (s != null)
            result.append(s);
    }
    unindent();

    if (elseStatements != null) {
        result.append(ind() + "} else {\n");
        for (JsonNode elseStatement : elseStatements) {
            String s = processAstNode(elseStatement);
            if (s != null)
                result.append(s);
        }
        result.append(ind() + "}\n");
    }

    return result.toString();
}