Example usage for org.apache.commons.exec DefaultExecutor execute

List of usage examples for org.apache.commons.exec DefaultExecutor execute

Introduction

This page collects example usages of org.apache.commons.exec DefaultExecutor execute, taken from open-source projects.

Prototype

public void execute(final CommandLine command, final Map<String, String> environment,
        final ExecuteResultHandler handler) throws ExecuteException, IOException 
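
A minimal sketch of this asynchronous overload (the echo command, environment map, and handler names are illustrative assumptions, not taken from any of the projects listed below):

// Build a simple command; "echo" is just a placeholder executable.
CommandLine cmd = new CommandLine("echo");
cmd.addArgument("hello");

// Copy the current environment; passing null instead makes the child inherit it.
Map<String, String> env = new HashMap<>(System.getenv());

DefaultExecutor executor = new DefaultExecutor();
DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();

// This overload returns immediately; the handler is notified when the process exits.
executor.execute(cmd, env, handler);
handler.waitFor();
int exitValue = handler.getExitValue();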

Usage

From source file:com.tupilabs.pbs.PBS.java

/**
 * Executes a PBS command.
 *
 * @param cmdLine command
 * @param environment env vars
 * @param out output stream
 * @param err err stream
 * @return execute handler
 * @throws ExecuteException if there is an error executing a command
 * @throws IOException in case of an IO problem
 */
static DefaultExecuteResultHandler execute(CommandLine cmdLine, Map<String, String> environment,
        OutputStream out, OutputStream err) throws ExecuteException, IOException {
    DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
    ExecuteStreamHandler streamHandler = new PumpStreamHandler(out, err);
    DefaultExecutor executor = new DefaultExecutor();
    executor.setExitValue(0);
    executor.setStreamHandler(streamHandler);
    if (environment != null) {
        executor.execute(cmdLine, environment, resultHandler);
    } else {
        executor.execute(cmdLine, resultHandler);
    }
    return resultHandler;
}
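
A hypothetical call to this helper might look like the following (the qstat command and in-memory output streams are assumptions for illustration, not part of the PBS source):

ByteArrayOutputStream out = new ByteArrayOutputStream();
ByteArrayOutputStream err = new ByteArrayOutputStream();
// Passing null for the environment lets the child process inherit the parent's.
DefaultExecuteResultHandler handler = execute(CommandLine.parse("qstat"), null, out, err);
handler.waitFor();
System.out.println("exit=" + handler.getExitValue() + ", output=" + out.toString());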

From source file:org.apache.geode.example.utils.ShellUtil.java

public DefaultExecuteResultHandler execute(CommandLine cmdLine, long timeout, Map environment, File dir)
        throws IOException {
    ExecutorTemplate exampleTestExecutor = new ExecutorTemplate(timeout, dir).invoke();
    DefaultExecutor executor = exampleTestExecutor.getExecutor();
    DefaultExecuteResultHandler resultHandler = exampleTestExecutor.getResultHandler();
    executor.execute(cmdLine, environment, resultHandler);

    return resultHandler;

}

From source file:org.apache.geode.example.utils.ShellUtil.java

public DefaultExecuteResultHandler execute(String fileName, long timeout, Map environment, File dir)
        throws IOException {
    ExecutorTemplate exampleTestExecutor = new ExecutorTemplate(timeout, dir).invoke();
    DefaultExecutor executor = exampleTestExecutor.getExecutor();
    DefaultExecuteResultHandler resultHandler = exampleTestExecutor.getResultHandler();
    executor.execute(parseCommandLine(fileName), environment, resultHandler);

    return resultHandler;
}

From source file:org.apache.zeppelin.python.IPythonInterpreter.java

private void launchIPythonKernel(int ipythonPort) throws IOException {
    LOGGER.info("Launching IPython Kernel at port: " + ipythonPort);
    // copy the python scripts to a temp directory, then launch ipython kernel in that folder
    File pythonWorkDir = Files.createTempDirectory("zeppelin_ipython").toFile();
    String[] ipythonScripts = { "ipython_server.py", "ipython_pb2.py", "ipython_pb2_grpc.py" };
    for (String ipythonScript : ipythonScripts) {
        URL url = getClass().getClassLoader().getResource("grpc/python" + "/" + ipythonScript);
        FileUtils.copyURLToFile(url, new File(pythonWorkDir, ipythonScript));
    }

    CommandLine cmd = CommandLine.parse(pythonExecutable);
    cmd.addArgument(pythonWorkDir.getAbsolutePath() + "/ipython_server.py");
    cmd.addArgument(ipythonPort + "");
    DefaultExecutor executor = new DefaultExecutor();
    ProcessLogOutputStream processOutput = new ProcessLogOutputStream(LOGGER);
    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchDog);

    if (useBuiltinPy4j) {
        //TODO(zjffdu) don't do hard code on py4j here
        File py4jDestFile = new File(pythonWorkDir, "py4j-src-0.10.7.zip");
        FileUtils.copyURLToFile(getClass().getClassLoader().getResource("python/py4j-src-0.10.7.zip"),
                py4jDestFile);
        if (additionalPythonPath != null) {
            // put the py4j at the end, because additionalPythonPath may already contain py4j.
            // e.g. PySparkInterpreter
            additionalPythonPath = additionalPythonPath + ":" + py4jDestFile.getAbsolutePath();
        } else {
            additionalPythonPath = py4jDestFile.getAbsolutePath();
        }
    }

    Map<String, String> envs = setupIPythonEnv();
    executor.execute(cmd, envs, this);

    // wait until IPython kernel is started or timeout
    long startTime = System.currentTimeMillis();
    while (true) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            LOGGER.error("Interrupted by something", e);
        }

        try {
            StatusResponse response = ipythonClient.status(StatusRequest.newBuilder().build());
            if (response.getStatus() == IPythonStatus.RUNNING) {
                LOGGER.info("IPython Kernel is Running");
                break;
            } else {
                LOGGER.info("Wait for IPython Kernel to be started");
            }
        } catch (Exception e) {
            // ignore the exception, because it may happen when the grpc server has not started yet.
            LOGGER.info("Wait for IPython Kernel to be started");
        }

        if ((System.currentTimeMillis() - startTime) > ipythonLaunchTimeout) {
            throw new IOException(
                    "Fail to launch IPython Kernel in " + ipythonLaunchTimeout / 1000 + " seconds");
        }
    }
}

From source file:org.apache.zeppelin.submarine.job.thread.JobRunThread.java

public void run() {
    boolean tryLock = lockRunning.tryLock();
    if (false == tryLock) {
        LOGGER.warn("Can not get JobRunThread lockRunning!");
        return;
    }

    SubmarineUI submarineUI = submarineJob.getSubmarineUI();
    try {
        InterpreterContext intpContext = submarineJob.getIntpContext();
        String noteId = intpContext.getNoteId();
        String userName = intpContext.getAuthenticationInfo().getUser();
        String jobName = SubmarineUtils.getJobName(userName, noteId);

        if (true == running.get()) {
            String message = String.format("Job %s already running.", jobName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        Properties properties = submarineJob.getProperties();
        HdfsClient hdfsClient = submarineJob.getHdfsClient();
        File pythonWorkDir = submarineJob.getPythonWorkDir();

        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE);

        String algorithmPath = properties.getProperty(SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, "");
        if (!algorithmPath.startsWith("hdfs://")) {
            String message = "Algorithm file upload HDFS path, " + "Must be `hdfs://` prefix. now setting "
                    + algorithmPath;
            submarineUI.outputLog("Configuration error", message);
            return;
        }

        List<ParagraphInfo> paragraphInfos = intpContext.getIntpEventClient().getParagraphList(userName,
                noteId);
        String outputMsg = hdfsClient.saveParagraphToFiles(noteId, paragraphInfos,
                pythonWorkDir == null ? "" : pythonWorkDir.getAbsolutePath(), properties);
        if (!StringUtils.isEmpty(outputMsg)) {
            submarineUI.outputLog("Save algorithm file", outputMsg);
        }

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, true);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_JOBRUN_TF_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);
        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }

        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
                .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the submarine interpreter container run on yarn
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
            env.put("ZEPPELIN_FORCE_STOP", "true");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);
        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d", jobName,
                        exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("JOR RUN COMPLETE", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_FINISHED);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                        jobName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("JOR RUN FAILED", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(jobName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(jobName);
        }

        if (!jobState.containsKey("state")) {
            String message = String.format("JOB %s was not submitted to YARN!", jobName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
            submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}

From source file:org.apache.zeppelin.submarine.job.thread.TensorboardRunThread.java

public void run() {
    SubmarineUI submarineUI = submarineJob.getSubmarineUI();

    boolean tryLock = lockRunning.tryLock();

    try {
        Properties properties = submarineJob.getProperties();
        String tensorboardName = SubmarineUtils.getTensorboardName(submarineJob.getUserName());
        if (true == running.get()) {
            String message = String.format("tensorboard %s already running.", tensorboardName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, false);
        // update jobName -> tensorboardName
        jinjaParams.put(SubmarineConstants.JOB_NAME, tensorboardName);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_TENSORBOARD_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);
        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }
        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
                .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the container
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);

        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d", tensorboardName,
                        exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("TENSORBOARD RUN COMPLETE", message);
                cmdLineRunning.set(false);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                        tensorboardName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("TENSORBOARD RUN FAILED", message);
                cmdLineRunning.set(false);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(tensorboardName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(tensorboardName);
        }

        if (!jobState.containsKey("state")) {
            String message = String.format("tensorboard %s was not submitted to YARN!", tensorboardName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}

From source file:org.apache.zeppelin.submarine.SubmarineJobTest.java

@Test
public void defaultExecutorTest() throws IOException {
    DefaultExecutor executor = new DefaultExecutor();
    CommandLine cmdLine = CommandLine.parse(shell);

    URL urlTemplate = Resources.getResource(DEFAULT_EXECUTOR_TEST);

    cmdLine.addArgument(urlTemplate.getFile(), false);

    Map<String, String> env = new HashMap<>();
    env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
    env.put("EVN", "test");

    AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
    StringBuffer sbLogOutput = new StringBuffer();
    executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
        @Override
        protected void processLine(String line, int level) {
            //LOGGER.info(line);
            sbLogOutput.append(line + "\n");
        }
    }));

    executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
        @Override
        public void onProcessComplete(int exitValue) {
            cmdLineRunning.set(false);
        }

        @Override
        public void onProcessFailed(ExecuteException e) {
            cmdLineRunning.set(false);
            LOGGER.error(e.getMessage());
        }
    });
    int loopCount = 100;
    while ((loopCount-- > 0) && cmdLineRunning.get()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    LOGGER.info(sbLogOutput.toString());
}

From source file:org.apache.zeppelin.util.ProcessLauncher.java

public void launch() {
    DefaultExecutor executor = new DefaultExecutor();
    this.processOutput = new ProcessLogOutputStream();
    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    this.watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchdog);
    try {
        executor.execute(commandLine, envs, this);
        transition(State.LAUNCHED);
        LOGGER.info("Process is launched: {}", commandLine);
    } catch (IOException e) {
        this.processOutput.stopCatchLaunchOutput();
        LOGGER.error("Fail to launch process: " + commandLine, e);
        transition(State.TERMINATED);
        errorMessage = e.getMessage();
    }
}

From source file:org.excalibur.service.compute.executor.Worker.java

public void execute(final Application application, ExecuteResultHandler executeResultHandler)
        throws ExecuteException, IOException {
    final String commandLine = application.getExecutableCommandLine();

    DefaultExecutor executor = new DefaultExecutor();
    CommandLine command = new CommandLine("/bin/sh");
    command.addArgument("-c", false);
    command.addArgument(commandLine, false);
    executor.execute(command, System.getenv(), executeResultHandler);

    LOG.debug("Launched the execution of task: [{}], uuid: [{}]", commandLine, application.getId());
}

From source file:org.jberet.support.io.OsCommandBatchlet.java

/**
 * {@inheritDoc}
 * <p>
 * This method runs the OS command.
 * If the command completes successfully, its process exit code is returned.
 * If there is an exception while running the OS command, which may be
 * caused by timeout, the command being stopped, or other errors, the process
 * exit code is set as the step exit status, and the exception is thrown.
 *
 * @return the OS command process exit code
 *
 * @throws Exception upon errors
 */
@Override
public String process() throws Exception {
    final DefaultExecutor executor = new DefaultExecutor();
    final CommandLine commandLineObj;
    if (commandLine != null) {
        commandLineObj = CommandLine.parse(commandLine);
    } else {
        if (commandArray == null) {
            throw SupportMessages.MESSAGES.invalidReaderWriterProperty(null, null, "commandArray");
        } else if (commandArray.isEmpty()) {
            throw SupportMessages.MESSAGES.invalidReaderWriterProperty(null, commandArray.toString(),
                    "commandArray");
        }
        commandLineObj = new CommandLine(commandArray.get(0));
        final int len = commandArray.size();
        if (len > 1) {
            for (int i = 1; i < len; i++) {
                commandLineObj.addArgument(commandArray.get(i));
            }
        }
    }

    if (workingDir != null) {
        executor.setWorkingDirectory(workingDir);
    }
    if (streamHandler != null) {
        executor.setStreamHandler((ExecuteStreamHandler) streamHandler.newInstance());
    }

    SupportLogger.LOGGER.runCommand(commandLineObj.getExecutable(),
            Arrays.toString(commandLineObj.getArguments()), executor.getWorkingDirectory().getAbsolutePath());

    if (commandOkExitValues != null) {
        executor.setExitValues(commandOkExitValues);
    }

    watchdog = new ExecuteWatchdog(
            timeoutSeconds > 0 ? timeoutSeconds * 1000 : ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchdog);

    executor.setProcessDestroyer(new ShutdownHookProcessDestroyer());
    final DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
    executor.execute(commandLineObj, environment, resultHandler);
    resultHandler.waitFor();

    final ExecuteException exception = resultHandler.getException();
    if (exception != null) {
        stepContext.setExitStatus(String.valueOf(resultHandler.getExitValue()));
        if (!isStopped) {
            throw exception;
        } else {
            SupportLogger.LOGGER.warn("", exception);
        }
    }
    return String.valueOf(resultHandler.getExitValue());
}