Example usage for org.apache.hadoop.yarn.api.protocolrecords GetApplicationsResponse getApplicationList

List of usage examples for org.apache.hadoop.yarn.api.protocolrecords GetApplicationsResponse getApplicationList

Introduction

In this page you can find the example usage for org.apache.hadoop.yarn.api.protocolrecords GetApplicationsResponse getApplicationList.

Prototype

@Public
@Stable
public abstract List<ApplicationReport> getApplicationList();

Source Link

Document

Get ApplicationReport for applications.

Usage

From source file:org.apache.hive.service.server.KillQueryImpl.java

License:Apache License

/**
 * Finds the YARN applications that are child jobs of a query, identified by the given tag.
 * Only applications owned by the current user are queried ({@code ApplicationsRequestScope.OWN});
 * non-admin callers are further restricted to applications tagged with their own user id.
 *
 * @param conf configuration used to locate and connect to the ResourceManager
 * @param tag  the application tag that marks child jobs of the query
 * @return the set of matching ApplicationIds; empty if none were found
 * @throws IOException   if the RM RPC fails
 * @throws YarnException if YARN rejects or fails the request
 */
public static Set<ApplicationId> getChildYarnJobs(Configuration conf, String tag)
        throws IOException, YarnException {
    Set<ApplicationId> childYarnJobs = new HashSet<>();
    GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
    gar.setScope(ApplicationsRequestScope.OWN);
    gar.setApplicationTags(Collections.singleton(tag));

    ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(conf, ApplicationClientProtocol.class);
    GetApplicationsResponse apps = proxy.getApplications(gar);
    List<ApplicationReport> appsList = apps.getApplicationList();
    for (ApplicationReport appReport : appsList) {
        // Admins see every tagged application; other users only those tagged with their user id.
        if (isAdmin() || appReport.getApplicationTags()
                .contains(QueryState.USERID_TAG + "=" + SessionState.get().getUserName())) {
            childYarnJobs.add(appReport.getApplicationId());
        }
    }

    if (childYarnJobs.isEmpty()) {
        LOG.info("No child applications found");
    } else {
        LOG.info("Found child YARN applications: " + StringUtils.join(childYarnJobs, ","));
    }

    return childYarnJobs;
}

From source file:org.apache.oozie.action.hadoop.LauncherMainHadoopUtils.java

License:Apache License

/**
 * Finds the YARN applications launched as children of this Oozie action, identified by the
 * tag stored under {@code CHILD_MAPREDUCE_JOB_TAGS} and started after the Oozie job launch time.
 *
 * @param actionConf the action configuration holding the child-job tag and RM connection info
 * @return the set of child ApplicationIds; empty if the tag property is missing or nothing matched
 * @throws RuntimeException if the launch-time system property is absent/unparsable, or the RM query fails
 */
private static Set<ApplicationId> getChildYarnJobs(Configuration actionConf) {
    System.out.println("Fetching child yarn jobs");
    Set<ApplicationId> childYarnJobs = new HashSet<>();
    String tag = actionConf.get(CHILD_MAPREDUCE_JOB_TAGS);
    if (tag == null) {
        // println (not print) so the warning is not glued to the next log line
        System.out.println("Could not find Yarn tags property " + CHILD_MAPREDUCE_JOB_TAGS);
        return childYarnJobs;
    }
    System.out.println("tag id : " + tag);
    long startTime = 0L;
    try {
        startTime = Long.parseLong((System.getProperty(OOZIE_JOB_LAUNCH_TIME)));
    } catch (NumberFormatException nfe) {
        throw new RuntimeException("Could not find Oozie job launch time", nfe);
    }

    GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
    gar.setScope(ApplicationsRequestScope.OWN);
    gar.setApplicationTags(Collections.singleton(tag));
    long endTime = System.currentTimeMillis();
    if (startTime > endTime) {
        System.out.println(
                "WARNING: Clock skew between the Oozie server host and this host detected.  Please fix this.  "
                        + "Attempting to work around...");
        // We don't know which one is wrong (relative to the RM), so to be safe, let's assume they're both wrong and add an
        // offset in both directions
        long diff = 2 * (startTime - endTime);
        startTime = startTime - diff;
        endTime = endTime + diff;
    }
    gar.setStartRange(startTime, endTime);
    try {
        ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(actionConf,
                ApplicationClientProtocol.class);
        GetApplicationsResponse apps = proxy.getApplications(gar);
        List<ApplicationReport> appsList = apps.getApplicationList();
        for (ApplicationReport appReport : appsList) {
            childYarnJobs.add(appReport.getApplicationId());
        }
    } catch (IOException | YarnException e) {
        // Both failure modes get the same treatment; multi-catch avoids the duplicated body.
        throw new RuntimeException("Exception occurred while finding child jobs", e);
    }

    System.out.println("Child yarn jobs are found - " + StringUtils.join(childYarnJobs, ","));
    return childYarnJobs;
}

From source file:org.apache.zeppelin.integration.FlinkIntegrationTest.java

License:Apache License

@Test
public void testLocalMode() throws IOException, YarnException, InterpreterException {
    // Point the flink interpreter at a local (non-YARN) execution environment.
    InterpreterSetting flinkSetting = interpreterSettingManager.getInterpreterSettingByName("flink");
    flinkSetting.setProperty("FLINK_HOME", flinkHome);
    flinkSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath());

    testInterpreterBasics();

    // Local mode must not submit anything to YARN: expect zero RUNNING applications.
    GetApplicationsRequest runningAppsRequest = GetApplicationsRequest
            .newInstance(EnumSet.of(YarnApplicationState.RUNNING));
    GetApplicationsResponse runningApps = hadoopCluster.getYarnCluster().getResourceManager()
            .getClientRMService().getApplications(runningAppsRequest);
    assertEquals(0, runningApps.getApplicationList().size());

    interpreterSettingManager.close();
}

From source file:org.apache.zeppelin.integration.FlinkIntegrationTest.java

License:Apache License

// NOTE(review): this test method was missing the @Test annotation that every sibling
// test in this file carries, so the framework would silently skip it. Added.
@Test
public void testYarnMode() throws IOException, InterpreterException, YarnException {
    // Configure the flink interpreter to submit to the mini YARN cluster.
    InterpreterSetting flinkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("flink");
    flinkInterpreterSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath());
    flinkInterpreterSetting.setProperty("FLINK_HOME", flinkHome);
    flinkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath());
    flinkInterpreterSetting.setProperty("flink.execution.mode", "YARN");
    testInterpreterBasics();

    // YARN mode should have submitted exactly one application that is still RUNNING.
    GetApplicationsRequest request = GetApplicationsRequest
            .newInstance(EnumSet.of(YarnApplicationState.RUNNING));
    GetApplicationsResponse response = hadoopCluster.getYarnCluster().getResourceManager().getClientRMService()
            .getApplications(request);
    assertEquals(1, response.getApplicationList().size());

    interpreterSettingManager.close();
}

From source file:org.apache.zeppelin.integration.SparkIntegrationTest.java

License:Apache License

@Test
public void testLocalMode() throws IOException, YarnException, InterpreterException, XmlPullParserException {
    // Configure the spark interpreter for local execution with a minimal feature set.
    InterpreterSetting sparkSetting = interpreterSettingManager.getInterpreterSettingByName("spark");
    sparkSetting.setProperty("master", "local[*]");
    sparkSetting.setProperty("SPARK_HOME", sparkHome);
    sparkSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath());
    sparkSetting.setProperty("zeppelin.spark.useHiveContext", "false");
    sparkSetting.setProperty("zeppelin.pyspark.useIPython", "false");
    sparkSetting.setProperty("zeppelin.spark.scala.color", "false");
    sparkSetting.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

    testInterpreterBasics();

    // Local mode must not touch YARN: expect zero RUNNING applications.
    GetApplicationsRequest runningAppsRequest = GetApplicationsRequest
            .newInstance(EnumSet.of(YarnApplicationState.RUNNING));
    GetApplicationsResponse runningApps = hadoopCluster.getYarnCluster().getResourceManager()
            .getClientRMService().getApplications(runningAppsRequest);
    assertEquals(0, runningApps.getApplicationList().size());

    interpreterSettingManager.close();
}

From source file:org.apache.zeppelin.integration.SparkIntegrationTest.java

License:Apache License

@Test
public void testYarnClientMode()
        throws IOException, YarnException, InterruptedException, InterpreterException, XmlPullParserException {
    // Configure the spark interpreter for yarn-client submission against the mini cluster.
    InterpreterSetting sparkSetting = interpreterSettingManager.getInterpreterSettingByName("spark");
    sparkSetting.setProperty("master", "yarn-client");
    sparkSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath());
    sparkSetting.setProperty("SPARK_HOME", sparkHome);
    sparkSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath());
    sparkSetting.setProperty("zeppelin.spark.useHiveContext", "false");
    sparkSetting.setProperty("zeppelin.pyspark.useIPython", "false");
    sparkSetting.setProperty("PYSPARK_PYTHON", getPythonExec());
    sparkSetting.setProperty("spark.driver.memory", "512m");
    sparkSetting.setProperty("zeppelin.spark.scala.color", "false");
    sparkSetting.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

    testInterpreterBasics();

    // yarn-client mode should have exactly one RUNNING application on the cluster.
    GetApplicationsRequest runningAppsRequest = GetApplicationsRequest
            .newInstance(EnumSet.of(YarnApplicationState.RUNNING));
    GetApplicationsResponse runningApps = hadoopCluster.getYarnCluster().getResourceManager()
            .getClientRMService().getApplications(runningAppsRequest);
    assertEquals(1, runningApps.getApplicationList().size());

    interpreterSettingManager.close();

    waitForYarnAppCompleted(30 * 1000);
}

From source file:org.apache.zeppelin.integration.SparkIntegrationTest.java

License:Apache License

/**
 * Polls the ResourceManager until no RUNNING YARN applications remain, or the timeout elapses.
 * Fails the test if applications are still running when the timeout expires.
 *
 * @param timeout maximum time to wait, in milliseconds
 * @throws YarnException if querying the ResourceManager fails
 */
private void waitForYarnAppCompleted(int timeout) throws YarnException {
    long start = System.currentTimeMillis();
    boolean yarnAppCompleted = false;
    while ((System.currentTimeMillis() - start) < timeout) {
        GetApplicationsRequest request = GetApplicationsRequest
                .newInstance(EnumSet.of(YarnApplicationState.RUNNING));
        GetApplicationsResponse response = hadoopCluster.getYarnCluster().getResourceManager()
                .getClientRMService().getApplications(request);
        if (response.getApplicationList().isEmpty()) {
            yarnAppCompleted = true;
            break;
        }
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Restore the interrupt status (was previously swallowed with printStackTrace)
            // and stop waiting so the interruption propagates via the assertion below.
            Thread.currentThread().interrupt();
            break;
        }
    }
    assertTrue("Yarn app is not completed in " + timeout + " milliseconds.", yarnAppCompleted);
}

From source file:org.apache.zeppelin.integration.SparkIntegrationTest.java

License:Apache License

@Test
public void testYarnClusterMode()
        throws IOException, YarnException, InterruptedException, InterpreterException, XmlPullParserException {
    // Configure the spark interpreter for yarn-cluster submission against the mini cluster.
    InterpreterSetting sparkSetting = interpreterSettingManager.getInterpreterSettingByName("spark");
    sparkSetting.setProperty("master", "yarn-cluster");
    sparkSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath());
    sparkSetting.setProperty("SPARK_HOME", sparkHome);
    sparkSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath());
    sparkSetting.setProperty("zeppelin.spark.useHiveContext", "false");
    sparkSetting.setProperty("zeppelin.pyspark.useIPython", "false");
    sparkSetting.setProperty("PYSPARK_PYTHON", getPythonExec());
    sparkSetting.setProperty("spark.driver.memory", "512m");
    sparkSetting.setProperty("zeppelin.spark.scala.color", "false");
    sparkSetting.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

    testInterpreterBasics();

    // yarn-cluster mode should have exactly one RUNNING application on the cluster.
    GetApplicationsRequest runningAppsRequest = GetApplicationsRequest
            .newInstance(EnumSet.of(YarnApplicationState.RUNNING));
    GetApplicationsResponse runningApps = hadoopCluster.getYarnCluster().getResourceManager()
            .getClientRMService().getApplications(runningAppsRequest);
    assertEquals(1, runningApps.getApplicationList().size());

    interpreterSettingManager.close();

    waitForYarnAppCompleted(30 * 1000);
}

From source file:org.springframework.yarn.client.ClientRmTemplate.java

License:Apache License

/**
 * Lists the applications known to the ResourceManager via the client RPC template.
 *
 * @return application reports for the applications visible to this client
 */
@Override
public List<ApplicationReport> listApplications() {
    YarnRpcCallback<List<ApplicationReport>, ApplicationClientProtocol> callback =
            new YarnRpcCallback<List<ApplicationReport>, ApplicationClientProtocol>() {
                @Override
                public List<ApplicationReport> doInYarn(ApplicationClientProtocol proxy)
                        throws YarnException, IOException {
                    return proxy.getApplications(Records.newRecord(GetApplicationsRequest.class))
                            .getApplicationList();
                }
            };
    return execute(callback);
}

From source file:org.springframework.yarn.client.ClientRmTemplateTests.java

License:Apache License

@Test
public void testExecuteCallback() {
    // Exercise the template's execute() path with a callback that lists all applications.
    YarnRpcCallback<List<ApplicationReport>, ApplicationClientProtocol> listAllApps =
            new YarnRpcCallback<List<ApplicationReport>, ApplicationClientProtocol>() {
                @Override
                public List<ApplicationReport> doInYarn(ApplicationClientProtocol proxy)
                        throws YarnException, IOException {
                    return proxy.getApplications(Records.newRecord(GetApplicationsRequest.class))
                            .getApplicationList();
                }
            };
    List<ApplicationReport> applications = template.execute(listAllApps);
    assertNotNull(applications);
}