List of usage examples for org.apache.commons.exec CommandLine addArgument
public CommandLine addArgument(final String argument, final boolean handleQuoting)
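The boolean flag controls whether Commons Exec quotes the argument when the process is launched: with handleQuoting set to true, an argument containing spaces is wrapped in quotes; with false, it is passed through verbatim. A minimal, self-contained sketch (executable and path are illustrative):

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;

public class AddArgumentSketch {
    public static void main(String[] args) throws Exception {
        CommandLine cmd = new CommandLine("ls");
        // handleQuoting = false: the argument is passed to the process exactly as given
        cmd.addArgument("-l", false);
        // handleQuoting = true: Commons Exec quotes the argument because it contains a space
        cmd.addArgument("/tmp/some dir", true);
        new DefaultExecutor().execute(cmd);
    }
}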
From source file:org.apache.zeppelin.shell.ShellInterpreter.java
@Override
public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
    logger.debug("Run shell command '" + cmd + "'");
    long start = System.currentTimeMillis();
    CommandLine cmdLine = CommandLine.parse("bash");
    cmdLine.addArgument("-c", false);
    cmdLine.addArgument(cmd, false);
    DefaultExecutor executor = new DefaultExecutor();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    executor.setStreamHandler(new PumpStreamHandler(outputStream));
    executor.setWatchdog(new ExecuteWatchdog(commandTimeOut));
    try {
        int exitValue = executor.execute(cmdLine);
        return new InterpreterResult(InterpreterResult.Code.SUCCESS, outputStream.toString());
    } catch (ExecuteException e) {
        logger.error("Can not run " + cmd, e);
        return new InterpreterResult(Code.ERROR, e.getMessage());
    } catch (IOException e) {
        logger.error("Can not run " + cmd, e);
        return new InterpreterResult(Code.ERROR, e.getMessage());
    }
}
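The interpreter above hands the entire user command to bash -c as a single argument with quoting disabled, so bash, not Commons Exec, interprets any quotes inside the command. A stripped-down sketch of just that launch pattern (assuming bash is on the PATH; the command and timeout are illustrative):

import java.io.ByteArrayOutputStream;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.PumpStreamHandler;

public class BashDashCSketch {
    public static void main(String[] args) throws Exception {
        String userCommand = "echo \"hello world\" | wc -w";   // illustrative pipeline
        CommandLine cmdLine = new CommandLine("bash");
        cmdLine.addArgument("-c", false);
        // quoting disabled: the whole pipeline string reaches bash as one untouched argument
        cmdLine.addArgument(userCommand, false);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        DefaultExecutor executor = new DefaultExecutor();
        executor.setStreamHandler(new PumpStreamHandler(out));
        executor.setWatchdog(new ExecuteWatchdog(60000));      // 60 s timeout, illustrative
        executor.execute(cmdLine);
        System.out.print(out);
    }
}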
From source file:org.apache.zeppelin.spark.PySparkInterpreter.java
private void createGatewayServerAndStartScript() {
    // create python script
    createPythonScript();

    port = findRandomOpenPortOnAllLocalInterfaces();

    gatewayServer = new GatewayServer(this, port);
    gatewayServer.start();

    // Run python shell
    CommandLine cmd = CommandLine.parse(getProperty("zeppelin.pyspark.python"));
    cmd.addArgument(scriptPath, false);
    cmd.addArgument(Integer.toString(port), false);
    cmd.addArgument(Integer.toString(getSparkInterpreter().getSparkVersion().toNumber()), false);
    executor = new DefaultExecutor();
    outputStream = new ByteArrayOutputStream();
    PipedOutputStream ps = new PipedOutputStream();
    in = null;
    try {
        in = new PipedInputStream(ps);
    } catch (IOException e1) {
        throw new InterpreterException(e1);
    }
    ins = new BufferedWriter(new OutputStreamWriter(ps));

    input = new ByteArrayOutputStream();

    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

    try {
        Map env = EnvironmentUtils.getProcEnvironment();
        executor.execute(cmd, env, this);
        pythonscriptRunning = true;
    } catch (IOException e) {
        throw new InterpreterException(e);
    }

    try {
        input.write("import sys, getopt\n".getBytes());
        ins.flush();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
}
From source file:org.apache.zeppelin.spark.SparkRInterpreter.java
@Override
public void open() {
    // create R script
    createRScript();

    int backendTimeout = Integer.parseInt(System.getenv().getOrDefault("SPARKR_BACKEND_TIMEOUT", "120"));

    // Launch a SparkR backend server for the R process to connect to; this will let it see our
    // Java system properties etc.
    ZeppelinRBackend sparkRBackend = new ZeppelinRBackend();

    Semaphore initialized = new Semaphore(0);
    Thread sparkRBackendThread = new Thread("SparkR backend") {
        @Override
        public void run() {
            sparkRBackendPort = sparkRBackend.init();
            initialized.release();
            sparkRBackend.run();
        }
    };
    sparkRBackendThread.start();

    // Wait for RBackend initialization to finish
    try {
        if (initialized.tryAcquire(backendTimeout, TimeUnit.SECONDS)) {
            // Launch R
            CommandLine cmd = CommandLine.parse(getProperty("zeppelin.sparkr.r"));
            cmd.addArgument(scriptPath, false);
            cmd.addArgument("--no-save", false);
            // cmd.addArgument(getJavaSparkContext().version(), false);
            executor = new DefaultExecutor();
            outputStream = new ByteArrayOutputStream();
            PipedOutputStream ps = new PipedOutputStream();
            in = null;
            try {
                in = new PipedInputStream(ps);
            } catch (IOException e1) {
                throw new InterpreterException(e1);
            }
            ins = new BufferedWriter(new OutputStreamWriter(ps));

            input = new ByteArrayOutputStream();

            PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
            executor.setStreamHandler(streamHandler);
            executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

            Map env = EnvironmentUtils.getProcEnvironment();
            String sparkRInterpreterObjId = sparkRBackend.put(this);
            String uberdataContextObjId = sparkRBackend.put(getUberdataContext());
            env.put("R_PROFILE_USER", scriptPath);
            env.put("SPARK_HOME", getSparkHome());
            env.put("EXISTING_SPARKR_BACKEND_PORT", String.valueOf(sparkRBackendPort));
            env.put("SPARKR_INTERPRETER_ID", sparkRInterpreterObjId);
            env.put("UBERDATA_CONTEXT_ID", uberdataContextObjId);
            logger.info("executing {} {}", env, cmd.toString());
            executor.execute(cmd, env, this);
            logger.info("executed");
            rScriptRunning = true;
        } else {
            System.err.println("SparkR backend did not initialize in " + backendTimeout + " seconds");
            System.exit(-1);
        }
    } catch (InterruptedException e) {
        new InterpreterException((e));
    } catch (IOException e) {
        new InterpreterException((e));
    }
}
From source file:org.apache.zeppelin.submarine.job.thread.JobRunThread.java
public void run() {
    boolean tryLock = lockRunning.tryLock();
    if (false == tryLock) {
        LOGGER.warn("Can not get JobRunThread lockRunning!");
        return;
    }

    SubmarineUI submarineUI = submarineJob.getSubmarineUI();
    try {
        InterpreterContext intpContext = submarineJob.getIntpContext();
        String noteId = intpContext.getNoteId();
        String userName = intpContext.getAuthenticationInfo().getUser();
        String jobName = SubmarineUtils.getJobName(userName, noteId);

        if (true == running.get()) {
            String message = String.format("Job %s already running.", jobName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        Properties properties = submarineJob.getProperties();
        HdfsClient hdfsClient = submarineJob.getHdfsClient();
        File pythonWorkDir = submarineJob.getPythonWorkDir();

        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE);

        String algorithmPath = properties.getProperty(SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, "");
        if (!algorithmPath.startsWith("hdfs://")) {
            String message = "Algorithm file upload HDFS path, "
                + "Must be `hdfs://` prefix. now setting " + algorithmPath;
            submarineUI.outputLog("Configuration error", message);
            return;
        }

        List<ParagraphInfo> paragraphInfos = intpContext.getIntpEventClient().getParagraphList(userName, noteId);
        String outputMsg = hdfsClient.saveParagraphToFiles(noteId, paragraphInfos,
            pythonWorkDir == null ? "" : pythonWorkDir.getAbsolutePath(), properties);
        if (!StringUtils.isEmpty(outputMsg)) {
            submarineUI.outputLog("Save algorithm file", outputMsg);
        }

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, true);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_JOBRUN_TF_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);

        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }

        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
            .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the submarine interpreter container run on yarn
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
            env.put("ZEPPELIN_FORCE_STOP", "true");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);

        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d", jobName, exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("JOR RUN COMPLETE", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_FINISHED);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                    jobName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("JOR RUN FAILED", message);
                cmdLineRunning.set(false);
                submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(jobName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(jobName);
        }

        if (!jobState.containsKey("state")) {
            String message = String.format("JOB %s was not submitted to YARN!", jobName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
            submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}
From source file:org.apache.zeppelin.submarine.job.thread.TensorboardRunThread.java
public void run() {
    SubmarineUI submarineUI = submarineJob.getSubmarineUI();

    boolean tryLock = lockRunning.tryLock();

    try {
        Properties properties = submarineJob.getProperties();
        String tensorboardName = SubmarineUtils.getTensorboardName(submarineJob.getUserName());
        if (true == running.get()) {
            String message = String.format("tensorboard %s already running.", tensorboardName);
            submarineUI.outputLog("WARN", message);
            LOGGER.warn(message);
            return;
        }
        running.set(true);

        HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams(properties, submarineJob, false);

        // update jobName -> tensorboardName
        jinjaParams.put(SubmarineConstants.JOB_NAME, tensorboardName);

        URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_TENSORBOARD_JINJA);
        String template = Resources.toString(urlTemplate, Charsets.UTF_8);
        Jinjava jinjava = new Jinjava();
        String submarineCmd = jinjava.render(template, jinjaParams);
        // If the first line is a newline, delete the newline
        int firstLineIsNewline = submarineCmd.indexOf("\n");
        if (firstLineIsNewline == 0) {
            submarineCmd = submarineCmd.replaceFirst("\n", "");
        }
        StringBuffer sbLogs = new StringBuffer(submarineCmd);
        submarineUI.outputLog("Submarine submit command", sbLogs.toString());

        long timeout = Long
            .valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, SubmarineJob.defaultTimeout));
        CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell);
        cmdLine.addArgument(submarineCmd, false);
        DefaultExecutor executor = new DefaultExecutor();
        ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchDog);
        StringBuffer sbLogOutput = new StringBuffer();
        executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
            @Override
            protected void processLine(String line, int level) {
                line = line.trim();
                if (!StringUtils.isEmpty(line)) {
                    sbLogOutput.append(line + "\n");
                }
            }
        }));

        if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) {
            executor.setWorkingDirectory(new File(System.getProperty("user.home")));
        }

        Map<String, String> env = new HashMap<>();
        String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE);
        if (StringUtils.equals(launchMode, "yarn")) {
            // Set environment variables in the container
            String javaHome, hadoopHome, hadoopConf;
            javaHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME);
            hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME);
            hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR);
            env.put("JAVA_HOME", javaHome);
            env.put("HADOOP_HOME", hadoopHome);
            env.put("HADOOP_HDFS_HOME", hadoopHome);
            env.put("HADOOP_CONF_DIR", hadoopConf);
            env.put("YARN_CONF_DIR", hadoopConf);
            env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
        }

        LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd);

        AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
        executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                String message = String.format("jobName %s ProcessComplete exit value is : %d",
                    tensorboardName, exitValue);
                LOGGER.info(message);
                submarineUI.outputLog("TENSORBOARD RUN COMPLETE", message);
                cmdLineRunning.set(false);
            }

            @Override
            public void onProcessFailed(ExecuteException e) {
                String message = String.format("jobName %s ProcessFailed exit value is : %d, exception is : %s",
                    tensorboardName, e.getExitValue(), e.getMessage());
                LOGGER.error(message);
                submarineUI.outputLog("TENSORBOARD RUN FAILED", message);
                cmdLineRunning.set(false);
            }
        });
        int loopCount = 100;
        while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) {
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.destroyProcess();
            Thread.sleep(1000);
        }
        if (watchDog.isWatching()) {
            watchDog.killedProcess();
        }

        // Check if it has been submitted to YARN
        Map<String, Object> jobState = submarineJob.getJobStateByYarn(tensorboardName);
        loopCount = 50;
        while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) {
            Thread.sleep(3000);
            jobState = submarineJob.getJobStateByYarn(tensorboardName);
        }
        if (!jobState.containsKey("state")) {
            String message = String.format("tensorboard %s was not submitted to YARN!", tensorboardName);
            LOGGER.error(message);
            submarineUI.outputLog("JOR RUN FAILED", message);
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        submarineUI.outputLog("Exception", e.getMessage());
    } finally {
        running.set(false);
        lockRunning.unlock();
    }
}
From source file:org.apache.zeppelin.submarine.SubmarineJobTest.java
@Test
public void defaultExecutorTest() throws IOException {
    DefaultExecutor executor = new DefaultExecutor();

    CommandLine cmdLine = CommandLine.parse(shell);
    URL urlTemplate = Resources.getResource(DEFAULT_EXECUTOR_TEST);
    cmdLine.addArgument(urlTemplate.getFile(), false);

    Map<String, String> env = new HashMap<>();
    env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`");
    env.put("EVN", "test");

    AtomicBoolean cmdLineRunning = new AtomicBoolean(true);
    StringBuffer sbLogOutput = new StringBuffer();
    executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() {
        @Override
        protected void processLine(String line, int level) {
            //LOGGER.info(line);
            sbLogOutput.append(line + "\n");
        }
    }));

    executor.execute(cmdLine, env, new DefaultExecuteResultHandler() {
        @Override
        public void onProcessComplete(int exitValue) {
            cmdLineRunning.set(false);
        }

        @Override
        public void onProcessFailed(ExecuteException e) {
            cmdLineRunning.set(false);
            LOGGER.error(e.getMessage());
        }
    });

    int loopCount = 100;
    while ((loopCount-- > 0) && cmdLineRunning.get()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    LOGGER.info(sbLogOutput.toString());
}
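The test above polls an AtomicBoolean that the result handler flips on completion. Commons Exec's DefaultExecuteResultHandler also exposes waitFor(), so a simpler variant of the same asynchronous pattern could look like this (sh and the script path are illustrative):

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;

public class WaitForSketch {
    public static void main(String[] args) throws Exception {
        CommandLine cmdLine = new CommandLine("sh");
        cmdLine.addArgument("/tmp/test.sh", false);   // illustrative script path

        DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();
        new DefaultExecutor().execute(cmdLine, handler);

        // Block until the asynchronous process finishes instead of polling a flag
        handler.waitFor();
        System.out.println("exit value: " + handler.getExitValue());
    }
}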
From source file:org.apache.zeppelin.submarine.SubmarineShellInterpreter.java
public void createSecureConfiguration() throws InterpreterException {
    Properties properties = getProperties();
    CommandLine cmdLine = CommandLine.parse(shell);
    cmdLine.addArgument("-c", false);
    String kinitCommand = String.format("kinit -k -t %s %s",
        properties.getProperty(SUBMARINE_HADOOP_KEYTAB),
        properties.getProperty(SUBMARINE_HADOOP_PRINCIPAL));
    cmdLine.addArgument(kinitCommand, false);
    DefaultExecutor executor = new DefaultExecutor();

    try {
        executor.execute(cmdLine);
    } catch (Exception e) {
        LOGGER.error("Unable to run kinit for zeppelin user " + kinitCommand, e);
        throw new InterpreterException(e);
    }
}
From source file:org.codehaus.mojo.antlr.AbstractAntlrMojo.java
protected void performGeneration(GenerationPlan plan, Artifact antlrArtifact) throws MojoExecutionException {
    if (!plan.getGenerationDirectory().getParentFile().exists()) {
        plan.getGenerationDirectory().getParentFile().mkdirs();
    }

    // ----------------------------------------------------------------------
    // Wrap arguments
    // Note: grammar file should be last
    // ----------------------------------------------------------------------

    List arguments = new LinkedList();
    addArgIf(arguments, debug, "-debug");
    addArgIf(arguments, diagnostic, "-diagnostic");
    addArgIf(arguments, trace, "-trace");
    addArgIf(arguments, traceParser, "-traceParser");
    addArgIf(arguments, traceLexer, "-traceLexer");
    addArgIf(arguments, traceTreeParser, "-traceTreeParser");

    addArgs(arguments);

    arguments.add("-o");
    arguments.add(plan.getGenerationDirectory().getPath());

    if (plan.getCollectedSuperGrammarIds().size() > 0) {
        arguments.add("-glib");
        StringBuffer buffer = new StringBuffer();
        Iterator ids = plan.getCollectedSuperGrammarIds().iterator();
        while (ids.hasNext()) {
            buffer.append(new File(sourceDirectory, (String) ids.next()));
            if (ids.hasNext()) {
                buffer.append(';');
            }
        }
        arguments.add(buffer.toString());
    }

    arguments.add(plan.getSource().getPath());

    String[] args = (String[]) arguments.toArray(new String[arguments.size()]);

    if (plan.getImportVocabTokenTypesDirectory() != null
            && !plan.getImportVocabTokenTypesDirectory().equals(plan.getGenerationDirectory())) {
        // we need to spawn a new process to properly set up PWD
        CommandLine commandLine = new CommandLine("java");
        commandLine.addArgument("-classpath", false);
        commandLine.addArgument(generateClasspathForProcessSpawning(antlrArtifact), true);
        commandLine.addArgument("antlr.Tool", false);
        commandLine.addArguments(args, true);
        DefaultExecutor executor = new DefaultExecutor();
        executor.setWorkingDirectory(plan.getImportVocabTokenTypesDirectory());
        try {
            executor.execute(commandLine);
        } catch (IOException e) {
            getLog().warn("Error spawning process to execute antlr tool : " + e.getMessage());
        }
        return;
    }

    // ----------------------------------------------------------------------
    // Call Antlr
    // ----------------------------------------------------------------------

    if (getLog().isDebugEnabled()) {
        getLog().debug("antlr args=\n" + StringUtils.join(args, "\n"));
    }

    boolean failedSetManager = false;
    SecurityManager oldSm = null;
    try {
        oldSm = System.getSecurityManager();
        System.setSecurityManager(NoExitSecurityManager.INSTANCE);
    } catch (SecurityException ex) {
        // ANTLR-12
        oldSm = null;
        failedSetManager = true;
        // ignore, in embedded environment the security manager can already be set.
        // in such a case assume the exit call is handled properly..
        getLog().warn("Cannot set custom SecurityManager. "
            + "Antlr's call to System.exit() can cause application shutdown "
            + "if not handled by the current SecurityManager.");
    }

    String originalUserDir = null;
    if (plan.getImportVocabTokenTypesDirectory() != null) {
        originalUserDir = System.getProperty("user.dir");
        System.setProperty("user.dir", plan.getImportVocabTokenTypesDirectory().getPath());
    }

    PrintStream oldErr = System.err;

    OutputStream errOS = new StringOutputStream();
    PrintStream err = new PrintStream(errOS);
    System.setErr(err);

    try {
        executeAntlrInIsolatedClassLoader((String[]) arguments.toArray(new String[0]), antlrArtifact);
    } catch (SecurityException e) {
        if (e.getMessage().equals("exitVM-0")
                || e.getClass().getName().equals("org.netbeans.core.execution.ExitSecurityException")) // netbeans IDE Sec Manager.
        {
            // ANTLR-12
            // now basically every secutiry manager could set different message, how to handle in generic way?
            // probably only by external execution
            // / in case of NetBeans SecurityManager, it's not possible to distinguish exit codes, rather swallow
            // than fail.
            getLog().debug(e);
        } else {
            throw new MojoExecutionException(
                "Antlr execution failed: " + e.getMessage() + "\n Error output:\n" + errOS, e);
        }
    } finally {
        if (originalUserDir != null) {
            System.setProperty("user.dir", originalUserDir);
        }
        if (!failedSetManager) {
            System.setSecurityManager(oldSm);
        }
        System.setErr(oldErr);
        System.err.println(errOS.toString());
    }
}
From source file:org.eclipse.ecf.python.AbstractPythonLauncher.java
@Override
public void launch(String[] args, OutputStream output) throws Exception {
    synchronized (this.launchLock) {
        if (isLaunched())
            throw new IllegalStateException("Already started");
        this.shuttingDown = false;
        if (enabled) {
            String pythonLaunchCommand = createPythonLaunchCommand();
            if (pythonLaunchCommand == null)
                throw new NullPointerException("pythonLaunchCommand must not be null");
            logger.debug("pythonLaunchCommand=" + pythonLaunchCommand);
            this.executor = createExecutor();
            if (this.pythonWorkingDirectory != null)
                this.executor.setWorkingDirectory(pythonWorkingDirectory);
            if (output == null) {
                output = new LogOutputStream() {
                    @Override
                    protected void processLine(String line, int level) {
                        logger.debug("PYTHON: " + line);
                    }
                };
            }
            executor.setStreamHandler(new PumpStreamHandler(output));
            this.executor.setProcessDestroyer(new PythonProcessDestroyer());
            ExecuteResultHandler executeHandler = new DefaultExecuteResultHandler() {
                @Override
                public void onProcessComplete(int exitValue) {
                    logger.debug("PYTHON EXIT=" + exitValue);
                }

                @Override
                public void onProcessFailed(ExecuteException e) {
                    if (!shuttingDown)
                        logger.debug("PYTHON EXCEPTION", e);
                }
            };
            CommandLine commandLine = new CommandLine(pythonExec).addArgument(PYTHON_LAUNCH_COMMAND_OPTION);
            commandLine.addArgument(pythonLaunchCommand, true);
            List<String> argsList = (args == null) ? Collections.emptyList() : Arrays.asList(args);
            if (this.javaPort != null && !argsList.contains(JAVA_PORT_OPTION)) {
                commandLine.addArgument(JAVA_PORT_OPTION);
                commandLine.addArgument(String.valueOf(this.javaPort));
            }
            if (this.pythonPort != null && !argsList.contains(PYTHON_PORT_OPTION)) {
                commandLine.addArgument(PYTHON_PORT_OPTION);
                commandLine.addArgument(String.valueOf(this.pythonPort));
            }
            if (args != null)
                commandLine.addArguments(args);
            logger.debug("PythonLauncher.launch: " + commandLine);
            try {
                executor.execute(commandLine, executeHandler);
            } catch (Exception e) {
                this.executor = null;
                throw e;
            }
        } else
            logger.debug("PythonLauncher DISABLED. Python process must be started manually");
    }
}
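Note the mixed use of the flag above: the launch-command value is added with handleQuoting set to true so it survives embedded spaces, while fixed option tokens use the single-argument overload or handleQuoting false. A small sketch of that distinction (executable, path, and port are illustrative; printing the command line only shows how the arguments will be rendered):

import org.apache.commons.exec.CommandLine;

public class QuotingPerArgumentSketch {
    public static void main(String[] args) {
        CommandLine cmd = new CommandLine("python");
        // handleQuoting = true: the path is quoted at launch time because it contains a space
        cmd.addArgument("/opt/my app/launcher.py", true);
        cmd.addArgument("--java-port", false);
        cmd.addArgument("25333", false);
        // inspect the rendered command line
        System.out.println(cmd);
    }
}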
From source file:org.eclipse.smarthome.io.net.exec.ExecUtil.java
/**
 * <p>
 * Executes <code>commandLine</code>. Sometimes (especially observed on
 * MacOS) the commandLine isn't executed properly. In that cases another
 * exec-method is to be used. To accomplish this please use the special
 * delimiter '<code>@@</code>'. If <code>commandLine</code> contains this
 * delimiter it is split into a String[] array and the special exec-method
 * is used.
 * </p>
 * <p>
 * A possible {@link IOException} gets logged but no further processing is
 * done.
 * </p>
 *
 * @param commandLine
 *            the command line to execute
 * @param timeout
 *            timeout for execution in milliseconds
 * @return response data from executed command line
 */
public static String executeCommandLineAndWaitResponse(String commandLine, int timeout) {
    String retval = null;

    CommandLine cmdLine = null;

    if (commandLine.contains(CMD_LINE_DELIMITER)) {
        String[] cmdArray = commandLine.split(CMD_LINE_DELIMITER);
        cmdLine = new CommandLine(cmdArray[0]);
        for (int i = 1; i < cmdArray.length; i++) {
            cmdLine.addArgument(cmdArray[i], false);
        }
    } else {
        cmdLine = CommandLine.parse(commandLine);
    }

    DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();

    ExecuteWatchdog watchdog = new ExecuteWatchdog(timeout);
    Executor executor = new DefaultExecutor();

    ByteArrayOutputStream stdout = new ByteArrayOutputStream();
    PumpStreamHandler streamHandler = new PumpStreamHandler(stdout);

    executor.setExitValue(1);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(watchdog);

    try {
        executor.execute(cmdLine, resultHandler);
        logger.debug("executed commandLine '{}'", commandLine);
    } catch (ExecuteException e) {
        logger.error("couldn't execute commandLine '" + commandLine + "'", e);
    } catch (IOException e) {
        logger.error("couldn't execute commandLine '" + commandLine + "'", e);
    }

    // some time later the result handler callback was invoked so we
    // can safely request the exit code
    try {
        resultHandler.waitFor();
        int exitCode = resultHandler.getExitValue();
        retval = StringUtils.chomp(stdout.toString());
        logger.debug("exit code '{}', result '{}'", exitCode, retval);
    } catch (InterruptedException e) {
        logger.error("Timeout occured when executing commandLine '" + commandLine + "'", e);
    }

    return retval;
}
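A short usage sketch of the '@@' convention described in the javadoc above, which keeps an argument containing spaces intact instead of letting CommandLine.parse split it on whitespace (commands, paths, and timeouts are illustrative):

// An argument with a space stays one token when the '@@' delimiter form is used.
String listing = ExecUtil.executeCommandLineAndWaitResponse("ls@@-l@@/tmp/some dir", 5000);

// Without the delimiter the string is tokenized on whitespace by CommandLine.parse.
String kernel = ExecUtil.executeCommandLineAndWaitResponse("uname -a", 5000);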