Example usage for org.apache.commons.exec DefaultExecutor DefaultExecutor

List of usage examples for org.apache.commons.exec DefaultExecutor DefaultExecutor

Introduction

On this page you can find example usage for org.apache.commons.exec DefaultExecutor DefaultExecutor.

Prototype

public DefaultExecutor() 

Document

Default constructor that creates a default PumpStreamHandler and sets the working directory of the subprocess to the current working directory.
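
Before the real-world examples below, a minimal self-contained sketch of this constructor on its own. It uses only the commons-exec API documented above; the `echo` command is an arbitrary stand-in for any executable on the PATH (on Windows, `echo` is a shell builtin, so substitute a real executable).

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.PumpStreamHandler;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class DefaultExecutorSketch {
    public static void main(String[] args) throws IOException {
        // The no-arg constructor wires up a default PumpStreamHandler and runs
        // the subprocess in the JVM's current working directory.
        DefaultExecutor executor = new DefaultExecutor();

        // Optionally capture stdout/stderr instead of using the default streams.
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        executor.setStreamHandler(new PumpStreamHandler(output));

        // Optionally kill the subprocess if it runs longer than 10 seconds.
        executor.setWatchdog(new ExecuteWatchdog(10_000));

        // `echo` is only an illustration; any executable on the PATH works.
        CommandLine cmd = CommandLine.parse("echo");
        cmd.addArgument("hello");

        // Blocks until the process exits; throws ExecuteException on a non-zero exit value.
        int exitValue = executor.execute(cmd);
        System.out.println("exit=" + exitValue + ", output=" + output);
    }
}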

Usage

From source file:org.apache.zeppelin.python.IPythonInterpreter.java

private void launchIPythonKernel(int ipythonPort) throws IOException {
    LOGGER.info("Launching IPython Kernel at port: " + ipythonPort);
    // copy the python scripts to a temp directory, then launch ipython kernel in that folder
    File pythonWorkDir = Files.createTempDirectory("zeppelin_ipython").toFile();
    String[] ipythonScripts = { "ipython_server.py", "ipython_pb2.py", "ipython_pb2_grpc.py" };
    for (String ipythonScript : ipythonScripts) {
        URL url = getClass().getClassLoader().getResource("grpc/python" + "/" + ipythonScript);
        FileUtils.copyURLToFile(url, new File(pythonWorkDir, ipythonScript));
    }

    CommandLine cmd = CommandLine.parse(pythonExecutable);
    cmd.addArgument(pythonWorkDir.getAbsolutePath() + "/ipython_server.py");
    cmd.addArgument(ipythonPort + "");
    DefaultExecutor executor = new DefaultExecutor();
    ProcessLogOutputStream processOutput = new ProcessLogOutputStream(LOGGER);
    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchDog);

    if (useBuiltinPy4j) {
        //TODO(zjffdu) don't do hard code on py4j here
        File py4jDestFile = new File(pythonWorkDir, "py4j-src-0.10.7.zip");
        FileUtils.copyURLToFile(getClass().getClassLoader().getResource("python/py4j-src-0.10.7.zip"),
                py4jDestFile);
        if (additionalPythonPath != null) {
            // put the py4j at the end, because additionalPythonPath may already contain py4j.
            // e.g. PySparkInterpreter
            additionalPythonPath = additionalPythonPath + ":" + py4jDestFile.getAbsolutePath();
        } else {
            additionalPythonPath = py4jDestFile.getAbsolutePath();
        }
    }

    Map<String, String> envs = setupIPythonEnv();
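    // The three-argument execute() is asynchronous: it returns immediately and
    // reports completion through the ExecuteResultHandler passed as the last
    // argument (here, "this").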
    executor.execute(cmd, envs, this);

    // wait until IPython kernel is started or timeout
    long startTime = System.currentTimeMillis();
    while (true) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            LOGGER.error("Interrupted by something", e);
        }

        try {
            StatusResponse response = ipythonClient.status(StatusRequest.newBuilder().build());
            if (response.getStatus() == IPythonStatus.RUNNING) {
                LOGGER.info("IPython Kernel is Running");
                break;
            } else {
                LOGGER.info("Wait for IPython Kernel to be started");
            }
        } catch (Exception e) {
            // ignore the exception, because it may happen when the grpc server has not started yet
            LOGGER.info("Waiting for IPython Kernel to start");
        }

        if ((System.currentTimeMillis() - startTime) > ipythonLaunchTimeout) {
            throw new IOException(
                    "Fail to launch IPython Kernel in " + ipythonLaunchTimeout / 1000 + " seconds");
        }
    }
}

From source file:org.apache.zeppelin.python.PythonInterpreter.java

private void createGatewayServerAndStartScript() throws IOException {
    // start gateway server in JVM side
    int port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    // use the FQDN as the server address instead of 127.0.0.1 so that python process in docker
    // container can also connect to this gateway server.
    String serverAddress = PythonUtils.getLocalIP(properties);
    String secret = PythonUtils.createSecret(256);
    this.gatewayServer = PythonUtils.createGatewayServer(this, serverAddress, port, secret, usePy4jAuth);
    gatewayServer.start();

    // launch python process to connect to the gateway server in JVM side
    createPythonScript();
    String pythonExec = getPythonExec();
    CommandLine cmd = CommandLine.parse(pythonExec);
    if (!pythonExec.endsWith(".py")) {
        // PythonDockerInterpreter set pythonExec with script
        cmd.addArgument(pythonWorkDir + "/zeppelin_python.py", false);
    }
    cmd.addArgument(serverAddress, false);
    cmd.addArgument(Integer.toString(port), false);

    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(LOGGER);
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    Map<String, String> env = setupPythonEnv();
    if (usePy4jAuth) {
        env.put("PY4J_GATEWAY_SECRET", secret);
    }
    LOGGER.info("Launching Python Process Command: " + cmd.getExecutable() + " "
            + StringUtils.join(cmd.getArguments(), " "));
    executor.execute(cmd, env, this);
    pythonScriptRunning.set(true);
}

From source file:org.apache.zeppelin.shell.security.ShellSecurityImpl.java

public static void createSecureConfiguration(Properties properties, String shell) {

    String authType = properties.getProperty("zeppelin.shell.auth.type").trim().toUpperCase();

    switch (authType) {
    case "KERBEROS":
        CommandLine cmdLine = CommandLine.parse(shell);
        cmdLine.addArgument("-c", false);
        String kinitCommand = String.format("kinit -k -t %s %s",
                properties.getProperty("zeppelin.shell.keytab.location"),
                properties.getProperty("zeppelin.shell.principal"));
        cmdLine.addArgument(kinitCommand, false);
        DefaultExecutor executor = new DefaultExecutor();

        try {
            int exitVal = executor.execute(cmdLine);
        } catch (Exception e) {
            LOGGER.error("Unable to run kinit for zeppelin user " + kinitCommand, e);
            throw new InterpreterException(e);
        }
    }
}

From source file:org.apache.zeppelin.shell.ShellInterpreter.java

@Override
public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
    logger.debug("Run shell command '" + cmd + "'");
    long start = System.currentTimeMillis();
    CommandLine cmdLine = CommandLine.parse("bash");
    cmdLine.addArgument("-c", false);
    cmdLine.addArgument(cmd, false);
    DefaultExecutor executor = new DefaultExecutor();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    executor.setStreamHandler(new PumpStreamHandler(outputStream));

    executor.setWatchdog(new ExecuteWatchdog(commandTimeOut));
    try {
        int exitValue = executor.execute(cmdLine);
        return new InterpreterResult(InterpreterResult.Code.SUCCESS, outputStream.toString());
    } catch (ExecuteException e) {
        logger.error("Can not run " + cmd, e);
        return new InterpreterResult(Code.ERROR, e.getMessage());
    } catch (IOException e) {
        logger.error("Can not run " + cmd, e);
        return new InterpreterResult(Code.ERROR, e.getMessage());
    }
}

From source file:org.apache.zeppelin.spark.PySparkInterpreter.java

private void createGatewayServerAndStartScript() {
    // create python script
    createPythonScript();

    port = findRandomOpenPortOnAllLocalInterfaces();

    gatewayServer = new GatewayServer(this, port);
    gatewayServer.start();

    // Run python shell
    CommandLine cmd = CommandLine.parse(getProperty("zeppelin.pyspark.python"));
    cmd.addArgument(scriptPath, false);
    cmd.addArgument(Integer.toString(port), false);
    cmd.addArgument(Integer.toString(getSparkInterpreter().getSparkVersion().toNumber()), false);
    executor = new DefaultExecutor();
    outputStream = new ByteArrayOutputStream();
    PipedOutputStream ps = new PipedOutputStream();
    in = null;
    try {
        in = new PipedInputStream(ps);
    } catch (IOException e1) {
        throw new InterpreterException(e1);
    }
    ins = new BufferedWriter(new OutputStreamWriter(ps));

    input = new ByteArrayOutputStream();

    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

    try {
        Map env = EnvironmentUtils.getProcEnvironment();

        executor.execute(cmd, env, this);
        pythonscriptRunning = true;
    } catch (IOException e) {
        throw new InterpreterException(e);
    }

    try {
        input.write("import sys, getopt\n".getBytes());
        ins.flush();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
}

From source file:org.apache.zeppelin.spark.SparkRInterpreter.java

@Override
public void open() {
    // create R script
    createRScript();

    int backendTimeout = Integer.parseInt(System.getenv().getOrDefault("SPARKR_BACKEND_TIMEOUT", "120"));

    // Launch a SparkR backend server for the R process to connect to; this will let it see our
    // Java system properties etc.
    ZeppelinRBackend sparkRBackend = new ZeppelinRBackend();

    Semaphore initialized = new Semaphore(0);
    Thread sparkRBackendThread = new Thread("SparkR backend") {
        @Override
        public void run() {
            sparkRBackendPort = sparkRBackend.init();
            initialized.release();
            sparkRBackend.run();
        }
    };

    sparkRBackendThread.start();

    // Wait for RBackend initialization to finish
    try {
        if (initialized.tryAcquire(backendTimeout, TimeUnit.SECONDS)) {
            // Launch R
            CommandLine cmd = CommandLine.parse(getProperty("zeppelin.sparkr.r"));
            cmd.addArgument(scriptPath, false);
            cmd.addArgument("--no-save", false);
            //      cmd.addArgument(getJavaSparkContext().version(), false);
            executor = new DefaultExecutor();
            outputStream = new ByteArrayOutputStream();
            PipedOutputStream ps = new PipedOutputStream();
            in = null;
            try {
                in = new PipedInputStream(ps);
            } catch (IOException e1) {
                throw new InterpreterException(e1);
            }
            ins = new BufferedWriter(new OutputStreamWriter(ps));

            input = new ByteArrayOutputStream();

            PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
            executor.setStreamHandler(streamHandler);
            executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

            Map env = EnvironmentUtils.getProcEnvironment();

            String sparkRInterpreterObjId = sparkRBackend.put(this);
            String uberdataContextObjId = sparkRBackend.put(getUberdataContext());
            env.put("R_PROFILE_USER", scriptPath);
            env.put("SPARK_HOME", getSparkHome());
            env.put("EXISTING_SPARKR_BACKEND_PORT", String.valueOf(sparkRBackendPort));
            env.put("SPARKR_INTERPRETER_ID", sparkRInterpreterObjId);
            env.put("UBERDATA_CONTEXT_ID", uberdataContextObjId);
            logger.info("executing {} {}", env, cmd.toString());
            executor.execute(cmd, env, this);
            logger.info("executed");
            rScriptRunning = true;

        } else {
            System.err.println("SparkR backend did not initialize in " + backendTimeout + " seconds");
            System.exit(-1);
        }
    } catch (InterruptedException e) {
        throw new InterpreterException(e);
    } catch (IOException e) {
        throw new InterpreterException(e);
    }

}

From source file:org.apache.zeppelin.spark.ZeppelinR.java

/**
 * Start R repl.
 * @throws IOException
 */
public void open() throws IOException {
    createRScript();

    zeppelinR.put(hashCode(), this);

    CommandLine cmd = CommandLine.parse(rCmdPath);
    cmd.addArgument("--no-save");
    cmd.addArgument("--no-restore");
    cmd.addArgument("-f");
    cmd.addArgument(scriptPath);
    cmd.addArgument("--args");
    cmd.addArgument(Integer.toString(hashCode()));
    cmd.addArgument(Integer.toString(port));
    cmd.addArgument(libPath);
    cmd.addArgument(Integer.toString(sparkVersion.toNumber()));

    // dump out the R command to facilitate manually running it, e.g. for fault diagnosis purposes
    logger.debug(cmd.toString());

    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(logger);

    input = new PipedOutputStream();
    PipedInputStream in = new PipedInputStream(input);

    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    executor.setStreamHandler(streamHandler);
    Map env = EnvironmentUtils.getProcEnvironment();

    initialOutput = new InterpreterOutput(null);
    outputStream.setInterpreterOutput(initialOutput);
    executor.execute(cmd, env, this);
    rScriptRunning = true;

    // flush output
    eval("cat('')");
}

From source file:org.apache.zeppelin.submarine.job.thread.JobRunThread.java

public void run() {
    boolean tryLock = lockRunning.tryLock();
    if (false == tryLock) {
        LOGGER.warn("Can not get JobRunThread lockRunning!");
        return;
    }

    SubmarineUI submarineUI = submarineJob.getSubmarineUI();
    try {
        InterpreterContext intpContext = submarineJob.getIntpContext();
        String noteId = intpContext.getNoteId();
        String userName = intpContext.getAuthenticationInfo().getUser();
        String jobName = SubmarineUtils.getJobName(userName, noteId);

        if (true == running.get()) {
            String message = String.format("Job %s already running.", jobName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        Properties properties = submarineJob.getProperties();
        HdfsClient hdfsClient = submarineJob.getHdfsClient();
        File pythonWorkDir = submarineJob.getPythonWorkDir();

        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE);

        String algorithmPath = properties.getProperty(SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, "");
        if (!algorithmPath.startsWith("hdfs://")) {
            String message = "Algorithm file upload HDFS path, " + "Must be `hdfs://` prefix. now setting "
                    + algorithmPath;
            submarineUI.outputLog("Configuration error", message);
            return;
        }

        List<ParagraphInfo> paragraphInfos = intpContext.getIntpEventClient().getParagraphList(userName,
                noteId);
        String outputMsg = hdfsClient.saveParagraphToFiles(noteId, paragraphInfos,
                pythonWorkDir == null ? "" : pythonWorkDir.getAbsolutePath(), properties);
        if (!StringUtils.isEmpty(outputMsg)) {
            submarineUI.outputLog("Save algorithm file", outputMsg);
        }

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, true);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_JOBRUN_TF_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);
        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }

        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
                .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the submarine interpreter container run on yarn
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
            env.put("ZEPPELIN_FORCE_STOP", "true");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);
        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d", jobName,
                        exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("JOR RUN COMPLETE", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_FINISHED);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                        jobName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("JOR RUN FAILED", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(jobName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(jobName);
        }

        if (!jobState.containsKey("state")) {
            String message = String.format("JOB %s was not submitted to YARN!", jobName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
            submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}

From source file:org.apache.zeppelin.submarine.job.thread.TensorboardRunThread.java

public void run() {
    SubmarineUI submarineUI = submarineJob.getSubmarineUI();

    boolean tryLock = lockRunning.tryLock();

    try {
        Properties properties = submarineJob.getProperties();
        String tensorboardName = SubmarineUtils.getTensorboardName(submarineJob.getUserName());
        if (true == running.get()) {
            String message = String.format("tensorboard %s already running.", tensorboardName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, false);
        // update jobName -> tensorboardName
        jinjaParams.put(SubmarineConstants.JOB_NAME, tensorboardName);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_TENSORBOARD_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);
        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }
        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
                .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the container
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);

        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d", tensorboardName,
                        exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("TENSORBOARD RUN COMPLETE", message);
                cmdLineRunning.set(false);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                        tensorboardName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("TENSORBOARD RUN FAILED", message);
                cmdLineRunning.set(false);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(tensorboardName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(tensorboardName);
        }

        if (!jobState.containsKey("state")) {
            String message = String.format("tensorboard %s was not submitted to YARN!", tensorboardName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}

From source file:org.apache.zeppelin.submarine.SubmarineJobTest.java

@Test
public void defaultExecutorTest() throws IOException {
    DefaultExecutor executor = new DefaultExecutor();
    CommandLine cmdLine = CommandLine.parse(shell);

    URL urlTemplate = Resources.getResource(DEFAULT_EXECUTOR_TEST);

    cmdLine.addArgument(urlTemplate.getFile(), false);

    Map<String, String> env = new HashMap<>();
    env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
    env.put("EVN", "test");

    AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
    StringBuffer sbLogOutput = new StringBuffer();
    executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
        @Override
        protected void processLine(String line, int level) {
            //LOGGER.info(line);
            sbLogOutput.append(line + "\n");
        }
    }));

    executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
        @Override
        public void onProcessComplete(int exitValue) {
            cmdLineRunning.set(false);
        }

        @Override
        public void onProcessFailed(ExecuteException e) {
            cmdLineRunning.set(false);
            LOGGER.error(e.getMessage());
        }
    });
    int loopCount = 100;
    while ((loopCount-- > 0) && cmdLineRunning.get()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    LOGGER.info(sbLogOutput.toString());
}