Example usage for org.apache.spark.launcher SparkAppHandle getAppId


Introduction

This page shows example usages of org.apache.spark.launcher.SparkAppHandle#getAppId().

Prototype

String getAppId();

Document

Returns the application ID, or null if not yet known.
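
A minimal sketch of how this method is typically used (not taken from the examples below): launch an application with SparkLauncher, then wait until getAppId() stops returning null. The jar path, main class, and the 60-attempt bound are placeholder assumptions.

import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

public class GetAppIdSketch {
    public static void main(String[] args) throws Exception {
        SparkAppHandle handle = new SparkLauncher()
                .setAppResource("/path/to/app.jar")   // placeholder application jar
                .setMainClass("com.example.MyApp")    // placeholder main class
                .setMaster("local[*]")
                .startApplication();

        // getAppId() returns null until Spark has assigned an application ID,
        // so poll with a bound instead of blocking forever.
        int attempts = 0;
        while (handle.getAppId() == null && attempts++ < 60) {
            Thread.sleep(1000);
        }
        System.out.println("applicationId = " + handle.getAppId());
    }
}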

Usage

From source file: com.ebay.logstream.runner.spark.SparkPipelineRunner.java

License: Apache License

@Override
public Map<String, Object> run(Pipeline pipeline) {
    Map<String, Object> result = new HashMap<>();
    Map<String, String> env = Maps.newHashMap();
    env.put("SPARK_PRINT_LAUNCH_COMMAND", "1");
    SparkLauncher launcher = new SparkLauncher(env);
    launcher.setAppResource(pipeline.getContext().getPipelineJarPath());
    launcher.setAppName(pipeline.getContext().getPipelineName());
    launcher.setMainClass(SparkPipelineRunner.class.getCanonicalName());
    launcher.setSparkHome(pipeline.getContext().getConfig().getString(SPARK_HOME_KEY));
    launcher.setJavaHome(pipeline.getContext().getConfig().getString(JAVA_HOME));
    //set app args
    launcher.addAppArgs(pipeline.getContext().getPipeline());
    launcher.addAppArgs(pipeline.getContext().getPipelineName());
    launcher.addAppArgs(pipeline.getContext().getDeployMode().toString());
    launcher.addAppArgs(pipeline.getContext().getInputParallelism() + "");
    launcher.addAppArgs(pipeline.getContext().getFilterParallelism() + "");
    launcher.addAppArgs(pipeline.getContext().getOutputParallelism() + "");
    // Workaround for finding the driver PID later: pass a unique UUID as an
    // app argument so the driver process can be located via ps.
    String uuid = UUID.randomUUID().toString();
    launcher.addAppArgs(uuid);
    launcher.setVerbose(true);
    launcher.addSparkArg("--verbose");
    if (pipeline.getContext().getDeployMode() == LogStormConstants.DeployMode.LOCAL) {
        launcher.setMaster("local[*]");
    } else {
        launcher.setMaster(pipeline.getContext().getConfig().getString(SPARK_MASTER_KEY));
    }

    try {
        SparkAppHandle handle = launcher.startApplication();
        while (handle.getAppId() == null) {
            Thread.sleep(1000);
        }
        result.put("applicationId", handle.getAppId());
        LOG.info("generate spark applicationId " + handle.getAppId());
        //get driver pid
        String cmd = "ps -ef | grep " + uuid + " | grep -v grep | awk '{print $2}'";
        LOG.info("cmd {}", cmd);
        Process process = Runtime.getRuntime().exec(new String[] { "/bin/sh", "-c", cmd });
        // Wait for the shell command to complete before reading its output.
        try {
            process.waitFor();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            LOG.warn("interrupted while waiting for the driver pid lookup: ", e);
        }
        InputStream inputStream = process.getInputStream();
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
        String pid;
        while ((pid = bufferedReader.readLine()) != null) {
            result.put("driverPid", pid);
            System.out.println(pid);
        }
        bufferedReader.close();
    } catch (Exception e) {
        LOG.error("failed to start as a spark application, ", e);
    }

    return result;
}
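
A note on the example above: it busy-waits on getAppId() in a one-second polling loop, then shells out to ps and greps for the UUID passed as an app argument to recover the driver PID. Registering a SparkAppHandle.Listener with startApplication(), as the next example does, avoids the polling loop; the ps-based PID lookup only makes sense when the driver runs on the local machine (local or client deploy mode).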

From source file: com.streamsets.datacollector.pipeline.executor.spark.yarn.YarnAppLauncher.java

License: Apache License

@Override
public Optional<String> launchApp(Record record) throws ApplicationLaunchFailureException, ELEvalException {

    SparkLauncher launcher = getLauncher();

    if (yarnConfigs.language == Language.JVM) {
        launcher.setMainClass(yarnConfigs.mainClass);
    }

    launcher.setAppResource(yarnConfigs.appResource).setAppName(yarnConfigs.appName).setMaster(YARN)
            .setDeployMode(yarnConfigs.deployMode.getLabel().toLowerCase()).setVerbose(yarnConfigs.verbose);

    if (yarnConfigs.dynamicAllocation) {
        launcher.setConf("spark.dynamicAllocation.enabled", "true");
        launcher.setConf("spark.shuffle.service.enabled", "true");
        launcher.setConf("spark.dynamicAllocation.minExecutors", String.valueOf(yarnConfigs.minExecutors));
        launcher.setConf("spark.dynamicAllocation.maxExecutors", String.valueOf(yarnConfigs.maxExecutors));
    } else {
        launcher.setConf("spark.dynamicAllocation.enabled", "false");
        launcher.addSparkArg("--num-executors", String.valueOf(yarnConfigs.numExecutors));
    }

    launcher.addSparkArg("--executor-memory", yarnConfigs.executorMemory);
    launcher.addSparkArg("--driver-memory", yarnConfigs.driverMemory);

    if (yarnConfigs.deployMode == DeployMode.CLUSTER && yarnConfigs.waitForCompletion) {
        launcher.setConf("spark.yarn.submit.waitAppCompletion", "true");
    }

    // Default is empty string, so pass only non-empty ones.
    yarnConfigs.noValueArgs.forEach((String arg) -> applyConfIfPresent(arg, launcher::addSparkArg));
    yarnConfigs.args.forEach((String k, String v) -> applyConfIfPresent(k, v, launcher::addSparkArg));

    // For files, no need of removing empty strings, since we verify the file exists in init itself.
    yarnConfigs.additionalFiles.forEach(launcher::addFile);
    yarnConfigs.additionalJars.forEach(launcher::addJar);
    yarnConfigs.pyFiles.forEach(launcher::addPyFile);

    launcher.addAppArgs(getNonEmptyArgs(yarnConfigs.evaluateArgsELs(record)));

    applyConfIfPresent(configs.javaHome, launcher::setJavaHome);
    applyConfIfPresent("spark.yarn.principal", configs.credentialsConfigBean.principal, launcher::setConf);
    applyConfIfPresent("spark.yarn.keytab", configs.credentialsConfigBean.keytab, launcher::setConf);
    applyConfIfPresent("--proxy-user", yarnConfigs.proxyUser, launcher::addSparkArg);
    applyConfIfPresent(configs.sparkHome, launcher::setSparkHome);

    timeout = yarnConfigs.waitTimeout;

    try {
        final SparkAppHandle handle = launcher.startApplication(new AppListener());
        return Optional.ofNullable(handle.getAppId());
    } catch (IOException ex) {
        latch.countDown();
        throw new ApplicationLaunchFailureException(ex);
    } catch (Throwable ex) { // NOSONAR
        latch.countDown();
        throw ex;
    }
}
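
The AppListener passed to startApplication() above, and the latch it refers to, are not shown in this snippet. A plausible sketch, assuming AppListener is a nested class that counts down the enclosing latch once the application reaches a terminal state (the actual StreamSets implementation may differ):

private class AppListener implements SparkAppHandle.Listener {

    @Override
    public void stateChanged(SparkAppHandle handle) {
        // Release threads blocked on the enclosing latch once Spark reports
        // a terminal state (FINISHED, FAILED, KILLED, ...).
        if (handle.getState().isFinal()) {
            latch.countDown();
        }
    }

    @Override
    public void infoChanged(SparkAppHandle handle) {
        // Invoked when handle information such as the application ID changes;
        // nothing to do in this sketch.
    }
}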