Example usage for org.apache.commons.exec PumpStreamHandler PumpStreamHandler

Introduction

On this page you can find example usages of the org.apache.commons.exec PumpStreamHandler constructor that takes a single OutputStream.

Prototype

public PumpStreamHandler(final OutputStream outAndErr) 

Document

Construct a new PumpStreamHandler.
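
This single-argument constructor pumps both the standard output and the standard error of the child process into the one OutputStream you pass in. For orientation, here is a minimal, self-contained sketch of the pattern the examples below share; the "echo" command is only a placeholder assumption for illustration.

import java.io.ByteArrayOutputStream;

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;

public class PumpStreamHandlerBasics {
    public static void main(String[] args) throws Exception {
        // One buffer receives both stdout and stderr of the child process.
        ByteArrayOutputStream output = new ByteArrayOutputStream();

        CommandLine cmdLine = new CommandLine("echo"); // placeholder command
        cmdLine.addArgument("hello");

        DefaultExecutor executor = new DefaultExecutor();
        executor.setStreamHandler(new PumpStreamHandler(output));

        // execute() blocks until the process exits and returns its exit value.
        int exitValue = executor.execute(cmdLine);
        System.out.println("exit=" + exitValue + ", output=" + output.toString("UTF-8"));
    }
}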

Usage

From source file:org.apache.tika.parser.geo.topic.GeoParser.java

public HashMap<String, ArrayList<String>> searchGeoNames(ArrayList<String> locationNameEntities)
        throws ExecuteException, IOException {
    CommandLine cmdLine = new CommandLine("lucene-geo-gazetteer");
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    cmdLine.addArgument("-s");
    for (String name : locationNameEntities) {
        cmdLine.addArgument(name);
    }

    LOG.fine("Executing: " + cmdLine);
    DefaultExecutor exec = new DefaultExecutor();
    exec.setExitValue(0);
    ExecuteWatchdog watchdog = new ExecuteWatchdog(60000);
    exec.setWatchdog(watchdog);
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    exec.setStreamHandler(streamHandler);
    int exitValue = exec.execute(cmdLine, EnvironmentUtils.getProcEnvironment());
    String outputJson = outputStream.toString("UTF-8");
    JSONArray json = (JSONArray) JSONValue.parse(outputJson);

    HashMap<String, ArrayList<String>> returnHash = new HashMap<String, ArrayList<String>>();
    for (int i = 0; i < json.size(); i++) {
        JSONObject obj = (JSONObject) json.get(i);
        for (Object key : obj.keySet()) {
            String theKey = (String) key;
            JSONArray vals = (JSONArray) obj.get(theKey);
            ArrayList<String> stringVals = new ArrayList<String>(vals.size());
            for (int j = 0; j < vals.size(); j++) {
                String val = (String) vals.get(j);
                stringVals.add(val);
            }

            returnHash.put(theKey, stringVals);
        }
    }

    return returnHash;

}
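
Because the method above uses the single-argument constructor, anything the gazetteer prints to stderr would land in the same buffer as the JSON. A hedged variant (not part of the original Tika code) uses the two-argument overload PumpStreamHandler(out, err) to keep the streams separate; it reuses the cmdLine, exec and LOG variables from the method above.

ByteArrayOutputStream stdout = new ByteArrayOutputStream();
ByteArrayOutputStream stderr = new ByteArrayOutputStream();
exec.setStreamHandler(new PumpStreamHandler(stdout, stderr));

int exitValue = exec.execute(cmdLine, EnvironmentUtils.getProcEnvironment());
String outputJson = stdout.toString("UTF-8");               // parse only clean stdout
LOG.fine("gazetteer stderr: " + stderr.toString("UTF-8"));  // inspect warnings separately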

From source file:org.apache.tika.parser.ocr.TesseractOCRParser.java

/**
 * This method is used to process the image to an OCR-friendly format.
 * @param streamingObject input image to be processed
 * @param config TesseractOCRConfig instance used to get the ImageMagick properties
 * @throws IOException if an input error occurred
 * @throws TikaException if the process timed out
 */
private void processImage(File streamingObject, TesseractOCRConfig config) throws IOException, TikaException {

    // fetch rotation script from resources
    InputStream in = getClass().getResourceAsStream("rotation.py");
    TemporaryResources tmp = new TemporaryResources();
    File rotationScript = tmp.createTemporaryFile();
    Files.copy(in, rotationScript.toPath(), StandardCopyOption.REPLACE_EXISTING);

    String cmd = "python " + rotationScript.getAbsolutePath() + " -f " + streamingObject.getAbsolutePath();
    String angle = "0";

    DefaultExecutor executor = new DefaultExecutor();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    executor.setStreamHandler(streamHandler);

    // determine the angle of rotation required to make the text horizontal
    CommandLine cmdLine = CommandLine.parse(cmd);
    if (hasPython()) {
        try {
            executor.execute(cmdLine);
            angle = outputStream.toString("UTF-8").trim();
        } catch (Exception e) {

        }
    }

    // process the image - parameter values can be set in TesseractOCRConfig.properties
    String line = "convert -density " + config.getDensity() + " -depth " + config.getDepth() + " -colorspace "
            + config.getColorspace() + " -filter " + config.getFilter() + " -resize " + config.getResize()
            + "% -rotate " + angle + " " + streamingObject.getAbsolutePath() + " "
            + streamingObject.getAbsolutePath();
    cmdLine = CommandLine.parse(line);
    try {
        executor.execute(cmdLine);
    } catch (Exception e) {

    }

    tmp.close();
}
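
Note that this example reuses the same executor and PumpStreamHandler twice: the buffer first captures the rotation angle printed by the Python script (read back with outputStream.toString("UTF-8").trim()), and is then left in place for the ImageMagick convert command, whose output goes into the same buffer but is never read.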

From source file:org.apache.tika.parser.pot.PooledTimeSeriesParser.java

private String computePoT(File input) throws IOException, TikaException {

    CommandLine cmdLine = new CommandLine("pooled-time-series");
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    cmdLine.addArgument("-f");
    cmdLine.addArgument(input.getAbsolutePath());
    LOG.trace("Executing: {}", cmdLine);
    DefaultExecutor exec = new DefaultExecutor();
    exec.setExitValue(0);
    ExecuteWatchdog watchdog = new ExecuteWatchdog(60000);
    exec.setWatchdog(watchdog);
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    exec.setStreamHandler(streamHandler);
    int exitValue = exec.execute(cmdLine, EnvironmentUtils.getProcEnvironment());
    return outputStream.toString("UTF-8");

}

From source file:org.apache.zeppelin.interpreter.remote.RemoteInterpreterManagedProcess.java

@Override
public void start(String userName, Boolean isUserImpersonate) {
    // start server process
    try {
        port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    } catch (IOException e1) {
        throw new InterpreterException(e1);
    }

    CommandLine cmdLine = CommandLine.parse(interpreterRunner);
    cmdLine.addArgument("-d", false);
    cmdLine.addArgument(interpreterDir, false);
    cmdLine.addArgument("-p", false);
    cmdLine.addArgument(Integer.toString(port), false);
    if (isUserImpersonate && !userName.equals("anonymous")) {
        cmdLine.addArgument("-u", false);
        cmdLine.addArgument(userName, false);
    }
    cmdLine.addArgument("-l", false);
    cmdLine.addArgument(localRepoDir, false);

    executor = new DefaultExecutor();

    ByteArrayOutputStream cmdOut = new ByteArrayOutputStream();
    ProcessLogOutputStream processOutput = new ProcessLogOutputStream(logger);
    processOutput.setOutputStream(cmdOut);

    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchdog);

    try {
        Map procEnv = EnvironmentUtils.getProcEnvironment();
        procEnv.putAll(env);

        logger.info("Run interpreter process {}", cmdLine);
        executor.execute(cmdLine, procEnv, this);
        running = true;
    } catch (IOException e) {
        running = false;
        throw new InterpreterException(e);
    }

    long startTime = System.currentTimeMillis();
    while (System.currentTimeMillis() - startTime < getConnectTimeout()) {
        if (!running) {
            try {
                cmdOut.flush();
            } catch (IOException e) {
                // nothing to do
            }
            throw new InterpreterException(new String(cmdOut.toByteArray()));
        }

        try {
            if (RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port)) {
                break;
            } else {
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    logger.error("Exception in RemoteInterpreterProcess while synchronized reference "
                            + "Thread.sleep", e);
                }
            }
        } catch (Exception e) {
            if (logger.isDebugEnabled()) {
                logger.debug("Remote interpreter not yet accessible at localhost:" + port);
            }
        }
    }
    processOutput.setOutputStream(null);
}
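
Here PumpStreamHandler wraps a ProcessLogOutputStream, a Zeppelin subclass of LogOutputStream that logs process output and, while an OutputStream is set, also copies it into cmdOut. Because execute() is given an ExecuteResultHandler (this), the call returns immediately; the method then polls the port until the remote interpreter is reachable and finally detaches the buffer with setOutputStream(null).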

From source file:org.apache.zeppelin.python.IPythonInterpreter.java

private void launchIPythonKernel(int ipythonPort) throws IOException {
    LOGGER.info("Launching IPython Kernel at port: " + ipythonPort);
    // copy the python scripts to a temp directory, then launch ipython kernel in that folder
    File pythonWorkDir = Files.createTempDirectory("zeppelin_ipython").toFile();
    String[] ipythonScripts = { "ipython_server.py", "ipython_pb2.py", "ipython_pb2_grpc.py" };
    for (String ipythonScript : ipythonScripts) {
        URL url = getClass().getClassLoader().getResource("grpc/python" + "/" + ipythonScript);
        FileUtils.copyURLToFile(url, new File(pythonWorkDir, ipythonScript));
    }

    CommandLine cmd = CommandLine.parse(pythonExecutable);
    cmd.addArgument(pythonWorkDir.getAbsolutePath() + "/ipython_server.py");
    cmd.addArgument(ipythonPort + "");
    DefaultExecutor executor = new DefaultExecutor();
    ProcessLogOutputStream processOutput = new ProcessLogOutputStream(LOGGER);
    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchDog);

    if (useBuiltinPy4j) {
        //TODO(zjffdu) don't do hard code on py4j here
        File py4jDestFile = new File(pythonWorkDir, "py4j-src-0.10.7.zip");
        FileUtils.copyURLToFile(getClass().getClassLoader().getResource("python/py4j-src-0.10.7.zip"),
                py4jDestFile);
        if (additionalPythonPath != null) {
            // put the py4j at the end, because additionalPythonPath may already contain py4j.
            // e.g. PySparkInterpreter
            additionalPythonPath = additionalPythonPath + ":" + py4jDestFile.getAbsolutePath();
        } else {
            additionalPythonPath = py4jDestFile.getAbsolutePath();
        }
    }

    Map<String, String> envs = setupIPythonEnv();
    executor.execute(cmd, envs, this);

    // wait until IPython kernel is started or timeout
    long startTime = System.currentTimeMillis();
    while (true) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            LOGGER.error("Interrupted by something", e);
        }

        try {
            StatusResponse response = ipythonClient.status(StatusRequest.newBuilder().build());
            if (response.getStatus() == IPythonStatus.RUNNING) {
                LOGGER.info("IPython Kernel is Running");
                break;
            } else {
                LOGGER.info("Wait for IPython Kernel to be started");
            }
        } catch (Exception e) {
            // ignore the exception, because is may happen when grpc server has not started yet.
            LOGGER.info("Wait for IPython Kernel to be started");
        }

        if ((System.currentTimeMillis() - startTime) > ipythonLaunchTimeout) {
            throw new IOException(
                    "Fail to launch IPython Kernel in " + ipythonLaunchTimeout / 1000 + " seconds");
        }
    }
}

From source file:org.apache.zeppelin.python.PythonInterpreter.java

private void createGatewayServerAndStartScript() throws IOException {
    // start gateway server in JVM side
    int port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    // use the FQDN as the server address instead of 127.0.0.1 so that python process in docker
    // container can also connect to this gateway server.
    String serverAddress = PythonUtils.getLocalIP(properties);
    String secret = PythonUtils.createSecret(256);
    this.gatewayServer = PythonUtils.createGatewayServer(this, serverAddress, port, secret, usePy4jAuth);
    gatewayServer.start();

    // launch python process to connect to the gateway server in JVM side
    createPythonScript();
    String pythonExec = getPythonExec();
    CommandLine cmd = CommandLine.parse(pythonExec);
    if (!pythonExec.endsWith(".py")) {
        // PythonDockerInterpreter set pythonExec with script
        cmd.addArgument(pythonWorkDir + "/zeppelin_python.py", false);
    }
    cmd.addArgument(serverAddress, false);
    cmd.addArgument(Integer.toString(port), false);

    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(LOGGER);
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    Map<String, String> env = setupPythonEnv();
    if (usePy4jAuth) {
        env.put("PY4J_GATEWAY_SECRET", secret);
    }
    LOGGER.info("Launching Python Process Command: " + cmd.getExecutable() + " "
            + StringUtils.join(cmd.getArguments(), " "));
    executor.execute(cmd, env, this);
    pythonScriptRunning.set(true);
}

From source file:org.apache.zeppelin.shell.ShellInterpreter.java

@Override
public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
    logger.debug("Run shell command '" + cmd + "'");
    long start = System.currentTimeMillis();
    CommandLine cmdLine = CommandLine.parse("bash");
    cmdLine.addArgument("-c", false);
    cmdLine.addArgument(cmd, false);
    DefaultExecutor executor = new DefaultExecutor();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    executor.setStreamHandler(new PumpStreamHandler(outputStream));

    executor.setWatchdog(new ExecuteWatchdog(commandTimeOut));
    try {
        int exitValue = executor.execute(cmdLine);
        return new InterpreterResult(InterpreterResult.Code.SUCCESS, outputStream.toString());
    } catch (ExecuteException e) {
        logger.error("Can not run " + cmd, e);
        return new InterpreterResult(Code.ERROR, e.getMessage());
    } catch (IOException e) {
        logger.error("Can not run " + cmd, e);
        return new InterpreterResult(Code.ERROR, e.getMessage());
    }
}
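
PumpStreamHandler can also feed the child process's standard input: commons-exec provides a three-argument overload PumpStreamHandler(out, err, input), which this interpreter does not use. A minimal sketch of that variant, assuming a command that reads stdin (such as wc -l) is available on the PATH and the usual java.io / java.nio imports:

ByteArrayInputStream stdin = new ByteArrayInputStream("one\ntwo\n".getBytes(StandardCharsets.UTF_8));
ByteArrayOutputStream stdout = new ByteArrayOutputStream();
ByteArrayOutputStream stderr = new ByteArrayOutputStream();

DefaultExecutor executor = new DefaultExecutor();
// The third argument is pumped into the child process's stdin.
executor.setStreamHandler(new PumpStreamHandler(stdout, stderr, stdin));
executor.execute(CommandLine.parse("wc -l"));   // placeholder command, assumed available
// stdout now holds "2" because two lines were piped into the process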

From source file:org.apache.zeppelin.submarine.job.thread.JobRunThread.java

public void run() {
    boolean tryLock = lockRunning.tryLock();
    if (false == tryLock) {
        LOGGER.warn("Can not get JobRunThread lockRunning!");
        return;
    }

    SubmarineUI submarineUI = submarineJob.getSubmarineUI();
    try {
        InterpreterContext intpContext = submarineJob.getIntpContext();
        String noteId = intpContext.getNoteId();
        String userName = intpContext.getAuthenticationInfo().getUser();
        String jobName = SubmarineUtils.getJobName(userName, noteId);

        if (true == running.get()) {
            String message = String.format("Job %s already running.", jobName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        Properties properties = submarineJob.getProperties();
        HdfsClient hdfsClient = submarineJob.getHdfsClient();
        File pythonWorkDir = submarineJob.getPythonWorkDir();

        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE);

        String algorithmPath = properties.getProperty(SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, "");
        if (!algorithmPath.startsWith("hdfs://")) {
            String message = "Algorithm file upload HDFS path, " + "Must be `hdfs://` prefix. now setting "
                    + algorithmPath;
            submarineUI.outputLog("Configuration error", message);
            return;
        }

        List<ParagraphInfo> paragraphInfos = intpContext.getIntpEventClient().getParagraphList(userName,
                noteId);
        String outputMsg = hdfsClient.saveParagraphToFiles(noteId, paragraphInfos,
                pythonWorkDir == null ? "" : pythonWorkDir.getAbsolutePath(), properties);
        if (!StringUtils.isEmpty(outputMsg)) {
            submarineUI.outputLog("Save algorithm file", outputMsg);
        }

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, true);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_JOBRUN_TF_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);
        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }

        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
                .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the submarine interpreter container run on yarn
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
            env.put("ZEPPELIN_FORCE_STOP", "true");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);
        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d", jobName,
                        exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("JOR RUN COMPLETE", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_FINISHED);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                        jobName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("JOR RUN FAILED", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(jobName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(jobName);
        }

        if (!jobState.containsKey("state")) {
            String message = String.format("JOB %s was not submitted to YARN!", jobName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
            submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}
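
Both this thread and the TensorboardRunThread below wire PumpStreamHandler to an anonymous LogOutputStream whose processLine callback appends every non-empty line to a StringBuffer, and they run the command asynchronously through a DefaultExecuteResultHandler while the following loop polls cmdLineRunning and the watchdog.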

From source file:org.apache.zeppelin.submarine.job.thread.TensorboardRunThread.java

public void run() {
    SubmarineUI submarineUI = submarineJob.getSubmarineUI();

    boolean tryLock = lockRunning.tryLock();

    try {
        Properties properties = submarineJob.getProperties();
        String tensorboardName = SubmarineUtils.getTensorboardName(submarineJob.getUserName());
        if (true == running.get()) {
            String message = String.format("tensorboard %s already running.", tensorboardName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, false);
        // update jobName -> tensorboardName
        jinjaParams.put(SubmarineConstants.JOB_NAME, tensorboardName);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_TENSORBOARD_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);
        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }
        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
                .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the container
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);

        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d", tensorboardName,
                        exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("TENSORBOARD RUN COMPLETE", message);
                cmdLineRunning.set(false);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                        tensorboardName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("TENSORBOARD RUN FAILED", message);
                cmdLineRunning.set(false);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(tensorboardName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(tensorboardName);
        }

        if (!jobState.containsKey("state")) {
            String message = String.format("tensorboard %s was not submitted to YARN!", tensorboardName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}

From source file:org.apache.zeppelin.submarine.SubmarineJobTest.java

@Test
public void defaultExecutorTest() throws IOException {
    DefaultExecutor executor = new DefaultExecutor();
    CommandLine cmdLine = CommandLine.parse(shell);

    URL urlTemplate = Resources.getResource(DEFAULT_EXECUTOR_TEST);

    cmdLine.addArgument(urlTemplate.getFile(), false);

    Map<String, String> env = new HashMap<>();
    env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
    env.put("EVN", "test");

    AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
    StringBuffer sbLogOutput = new StringBuffer();
    executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
        @Override
        protected void processLine(String line, int level) {
            //LOGGER.info(line);
            sbLogOutput.append(line + "\n");
        }
    }));

    executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
        @Override
        public void onProcessComplete(int exitValue) {
            cmdLineRunning.set(false);
        }

        @Override
        public void onProcessFailed(ExecuteException e) {
            cmdLineRunning.set(false);
            LOGGER.error(e.getMessage());
        }
    });
    int loopCount = 100;
    while ((loopCount-- > 0) && cmdLineRunning.get()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    LOGGER.info(sbLogOutput.toString());
}