List of usage examples for org.apache.hadoop.yarn.api.protocolrecords GetClusterNodesResponse getNodeReports
@Public @Stable public abstract List<NodeReport> getNodeReports();
NodeReport for all nodes in the cluster. From source file: com.datatorrent.stram.StramMiniClusterTest.java
License:Apache License
/**
 * End-to-end mini-cluster test: queries the RM for node reports, builds a small
 * generator -> module1 -> module2 DAG from properties, submits it via StramClient,
 * and asserts that the application runs to successful completion and shuts down.
 */
@Test
public void testSetupShutdown() throws Exception {
  // Ask the mini cluster's resource manager for all registered nodes and log their state.
  GetClusterNodesRequest request = Records.newRecord(GetClusterNodesRequest.class);
  ClientRMService clientRMService = yarnCluster.getResourceManager().getClientRMService();
  GetClusterNodesResponse response = clientRMService.getClusterNodes(request);
  List<NodeReport> nodeReports = response.getNodeReports();
  LOG.info("{}", nodeReports);
  for (NodeReport nr : nodeReports) {
    LOG.info("Node: {}", nr.getNodeId());
    LOG.info("Total memory: {}", nr.getCapability());
    LOG.info("Used memory: {}", nr.getUsed());
    LOG.info("Number containers: {}", nr.getNumContainers());
  }

  // Locate the jars that must ship with the YARN application.
  String appMasterJar = JarFinder.getJar(StreamingAppMaster.class);
  LOG.info("appmaster jar: " + appMasterJar);
  String testJar = JarFinder.getJar(StramMiniClusterTest.class);
  LOG.info("testJar: " + testJar);

  // create test application
  Properties dagProps = new Properties();

  // input module (ensure shutdown works while windows are generated)
  dagProps.put(StreamingApplication.DT_PREFIX + "operator.numGen.classname", TestGeneratorInputOperator.class.getName());
  dagProps.put(StreamingApplication.DT_PREFIX + "operator.numGen.maxTuples", "1");

  // fake output adapter - to be ignored when determine shutdown
  //props.put(DAGContext.DT_PREFIX + "stream.output.classname", HDFSOutputStream.class.getName());
  //props.put(DAGContext.DT_PREFIX + "stream.output.inputNode", "module2");
  //props.put(DAGContext.DT_PREFIX + "stream.output.filepath", "miniclustertest-testSetupShutdown.out");

  // Two generic operators chained after the generator: numGen -> module1 -> module2.
  dagProps.put(StreamingApplication.DT_PREFIX + "operator.module1.classname", GenericTestOperator.class.getName());
  dagProps.put(StreamingApplication.DT_PREFIX + "operator.module2.classname", GenericTestOperator.class.getName());
  dagProps.put(StreamingApplication.DT_PREFIX + "stream.fromNumGen.source", "numGen.outport");
  dagProps.put(StreamingApplication.DT_PREFIX + "stream.fromNumGen.sinks", "module1.inport1");
  dagProps.put(StreamingApplication.DT_PREFIX + "stream.n1n2.source", "module1.outport1");
  dagProps.put(StreamingApplication.DT_PREFIX + "stream.n1n2.sinks", "module2.inport1");

  // Keep resource demands small so the mini cluster can satisfy them.
  dagProps.setProperty(StreamingApplication.DT_PREFIX + LogicalPlan.MASTER_MEMORY_MB.getName(), "128");
  dagProps.setProperty(StreamingApplication.DT_PREFIX + LogicalPlan.CONTAINER_JVM_OPTIONS.getName(), "-Dlog4j.properties=custom_log4j.properties");
  dagProps.setProperty(StreamingApplication.DT_PREFIX + "operator.*." + OperatorContext.MEMORY_MB.getName(), "64");
  dagProps.setProperty(StreamingApplication.DT_PREFIX + "operator.*." + OperatorContext.VCORES.getName(), "1");
  dagProps.setProperty(StreamingApplication.DT_PREFIX + "operator.*.port.*." + Context.PortContext.BUFFER_MEMORY_MB.getName(), "32");
  dagProps.setProperty(StreamingApplication.DT_PREFIX + LogicalPlan.DEBUG.getName(), "true");
  //dagProps.setProperty(StreamingApplication.DT_PREFIX + LogicalPlan.CONTAINERS_MAX_COUNT.getName(), "2");
  LOG.info("dag properties: {}", dagProps);

  LOG.info("Initializing Client");
  LogicalPlanConfiguration tb = new LogicalPlanConfiguration(conf);
  tb.addFromProperties(dagProps, null);
  LogicalPlan dag = createDAG(tb);
  Configuration yarnConf = new Configuration(yarnCluster.getConfig());
  StramClient client = new StramClient(yarnConf, dag);
  try {
    client.start();
    if (StringUtils.isBlank(System.getenv("JAVA_HOME"))) {
      client.javaCmd = "java"; // JAVA_HOME not set in the yarn mini cluster
    }
    LOG.info("Running client");
    client.startApplication();
    // Blocks until the submitted application terminates; true means it finished successfully.
    boolean result = client.monitorApplication();
    LOG.info("Client run completed. Result=" + result);
    Assert.assertTrue(result);
  } finally {
    client.stop();
  }
}
From source file: org.deeplearning4j.iterativereduce.runtime.yarn.ResourceManagerHandler.java
License:Apache License
/**
 * Fetches a report for every node known to the resource manager.
 *
 * @return the node reports returned by the RM
 * @throws YarnRemoteException if the RM call fails
 * @throws IllegalArgumentException if no RM connection has been established yet
 */
public List<NodeReport> getClusterNodes() throws YarnRemoteException {
    if (clientResourceManager == null) {
        throw new IllegalArgumentException("Can't get report without connecting first!");
    }
    GetClusterNodesRequest nodesRequest = Records.newRecord(GetClusterNodesRequest.class);
    GetClusterNodesResponse nodesResponse = clientResourceManager.getClusterNodes(nodesRequest);
    return nodesResponse.getNodeReports();
}
From source file: org.huahinframework.manager.rest.service.ApplicationService.java
License:Apache License
@Path("/cluster") @GET//from w w w . j ava 2 s . co m @Produces(MediaType.APPLICATION_JSON) public JSONObject getCluster() { JSONObject jsonObject = new JSONObject(); try { GetClusterMetricsRequest metricsRequest = recordFactory .newRecordInstance(GetClusterMetricsRequest.class); GetClusterMetricsResponse metricsResponse = applicationsManager.getClusterMetrics(metricsRequest); jsonObject.put(Response.NUM_NODE_MANAGERS, metricsResponse.getClusterMetrics().getNumNodeManagers()); GetClusterNodesRequest nodeRequest = recordFactory.newRecordInstance(GetClusterNodesRequest.class); GetClusterNodesResponse nodeResponse = applicationsManager.getClusterNodes(nodeRequest); List<JSONObject> reports = new ArrayList<JSONObject>(); for (NodeReport report : nodeResponse.getNodeReports()) { JSONObject nr = new JSONObject(); nr.put(Response.HTTP_ADDRESS, report.getHttpAddress()); nr.put(Response.NUM_CONTAINERS, report.getNumContainers()); nr.put(Response.RACK_NAME, report.getRackName()); nr.put(Response.CAPABILITY, report.getCapability().getMemory()); nr.put(Response.HEALTH_REPORT, report.getNodeHealthStatus().getHealthReport()); nr.put(Response.IS_NODE_HEALTHY, report.getNodeHealthStatus().getIsNodeHealthy()); nr.put(Response.LAST_HEALTH_REPORT_TIME, new Date(report.getNodeHealthStatus().getLastHealthReportTime())); nr.put(Response.NODE_ID, report.getNodeId()); nr.put(Response.NODE_STATE, report.getNodeState()); nr.put(Response.NODE_STATE, report.getNodeState()); nr.put(Response.USED, report.getUsed()); reports.add(nr); } jsonObject.put(Response.NODES, reports); } catch (Exception e) { e.printStackTrace(); log.error(e); Map<String, String> status = new HashMap<String, String>(); status.put(Response.STATUS, e.getMessage()); jsonObject = new JSONObject(status); } return jsonObject; }