List of usage examples for org.apache.commons.exec DefaultExecutor (constructor)
public DefaultExecutor()
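Before the per-project examples, here is a minimal self-contained sketch of the pattern most of them share: build a CommandLine, configure a DefaultExecutor (expected exit value, watchdog timeout, stream handler), then call execute(). The command ("echo"), its argument, and the 60-second timeout are illustrative placeholders and are not taken from any of the examples below.

import java.io.ByteArrayOutputStream;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.PumpStreamHandler;

public class DefaultExecutorSketch {
    public static void main(String[] args) throws Exception {
        // Build the command; addArgument handles quoting for individual arguments.
        CommandLine cmd = new CommandLine("echo");
        cmd.addArgument("hello");

        DefaultExecutor executor = new DefaultExecutor();
        executor.setExitValue(0);                              // any other exit code is treated as failure
        executor.setWatchdog(new ExecuteWatchdog(60_000));     // kill the process after 60 seconds
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        executor.setStreamHandler(new PumpStreamHandler(out)); // capture stdout/stderr

        // Blocks until the process exits; throws ExecuteException on a non-zero exit code.
        int exitValue = executor.execute(cmd);
        System.out.println("exit=" + exitValue + " output=" + out);
    }
}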
From source file:net.mymam.fileprocessor.VideoFileGenerator.java
private Path generateFile(String cmdLineTemplate, Path in, Path outDir, String outFile, int maxWidth,
        int maxHeight) throws FileProcessingFailedException {
    try {
        Path out = Paths.get(outDir.toString(), outFile);
        // TODO: Make sure that weird file names cannot be used to inject shell scripts, like '"; rm -r *;'
        String cmdLine = cmdLineTemplate.replace("$INPUT_FILE", "\"" + in.toString() + "\"")
                .replace("$OUTPUT_FILE", "\"" + out.toString() + "\"")
                .replace("$MAX_WIDTH", Integer.toString(maxWidth))
                .replace("$MAX_HEIGHT", Integer.toString(maxHeight));
        CommandLine cmd = CommandLine.parse(cmdLine);
        System.out.println("Executing " + cmd); // TODO: Use logging.
        new DefaultExecutor().execute(cmd);
        return out;
    } catch (Throwable t) {
        throw new FileProcessingFailedException(t);
    }
}
From source file:io.takari.maven.testing.executor.ForkedLauncher.java
public int run(String[] cliArgs, Map<String, String> envVars, File multiModuleProjectDirectory,
        File workingDirectory, File logFile) throws IOException, LauncherException {
    String javaHome;
    if (envVars == null || envVars.get("JAVA_HOME") == null) {
        javaHome = System.getProperty("java.home");
    } else {
        javaHome = envVars.get("JAVA_HOME");
    }

    File executable = new File(javaHome, Os.isFamily(Os.FAMILY_WINDOWS) ? "bin/javaw.exe" : "bin/java");

    CommandLine cli = new CommandLine(executable);
    cli.addArgument("-classpath").addArgument(classworldsJar.getAbsolutePath());
    cli.addArgument("-Dclassworlds.conf=" + new File(mavenHome, "bin/m2.conf").getAbsolutePath());
    cli.addArgument("-Dmaven.home=" + mavenHome.getAbsolutePath());
    cli.addArgument("-Dmaven.multiModuleProjectDirectory=" + multiModuleProjectDirectory.getAbsolutePath());
    cli.addArgument("org.codehaus.plexus.classworlds.launcher.Launcher");
    cli.addArguments(args.toArray(new String[args.size()]));
    if (extensions != null && !extensions.isEmpty()) {
        cli.addArgument("-Dmaven.ext.class.path=" + toPath(extensions));
    }
    cli.addArguments(cliArgs);

    Map<String, String> env = new HashMap<>();
    if (mavenHome != null) {
        env.put("M2_HOME", mavenHome.getAbsolutePath());
    }
    if (envVars != null) {
        env.putAll(envVars);
    }
    if (envVars == null || envVars.get("JAVA_HOME") == null) {
        env.put("JAVA_HOME", System.getProperty("java.home"));
    }

    DefaultExecutor executor = new DefaultExecutor();
    executor.setProcessDestroyer(new ShutdownHookProcessDestroyer());
    executor.setWorkingDirectory(workingDirectory.getAbsoluteFile());

    try (OutputStream log = new FileOutputStream(logFile)) {
        PrintStream out = new PrintStream(log);
        out.format("Maven Executor implementation: %s\n", getClass().getName());
        out.format("Maven home: %s\n", mavenHome);
        out.format("Build work directory: %s\n", workingDirectory);
        out.format("Environment: %s\n", env);
        out.format("Command line: %s\n\n", cli.toString());
        out.flush();

        PumpStreamHandler streamHandler = new PumpStreamHandler(log);
        executor.setStreamHandler(streamHandler);
        return executor.execute(cli, env); // throws ExecuteException if the process return code != 0
    } catch (ExecuteException e) {
        throw new LauncherException("Failed to run Maven: " + e.getMessage() + "\n" + cli, e);
    }
}
From source file:io.vertx.config.vault.utils.VaultProcess.java
public boolean run(String args) {
    String cli = executable.getAbsolutePath() + " " + args;
    System.out.println(">> " + cli);
    CommandLine parse = CommandLine.parse(cli);
    DefaultExecutor executor = new DefaultExecutor();
    executor.setExitValue(0);
    try {
        return executor.execute(parse) == 0;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
From source file:edu.kit.dama.dataworkflow.impl.LocalExecutionHandler.java
/**
 * Execute the user application. This method will start a new process running
 * the prepared user application locally. The method will return as soon as
 * the application has terminated. An asynchronous monitoring task may check
 * whether the process is still running via
 * {@link #getTaskStatus(edu.kit.dama.mdm.dataworkflow.DataWorkflowTask)}.
 * This method will check the runningIndicator file '.RUNNING', which only
 * exists as long as the application is running.
 *
 * @param pTask The task whose application should be executed.
 *
 * @throws DataWorkflowProcessingException If either the startup or the
 * processing fails for any reason, or if the user application returns an exit
 * code != 0.
 */
@Override
public void startUserApplication(DataWorkflowTask pTask) throws DataWorkflowProcessingException {
    // simply start the process...monitoring will be connected later
    File runningIndicator = getRunningIndicator(pTask);
    FileOutputStream fout = null;
    FileOutputStream ferr = null;
    File executablePath;
    try {
        executablePath = DataWorkflowHelper.getTaskMainExecutable(pTask);
        File executionBasePath = DataWorkflowHelper.getExecutionBasePath(pTask);
        File workingDirectory = DataWorkflowHelper.getTaskWorkingDirectory(executionBasePath);
        File tempDirectory = DataWorkflowHelper.getTaskTempDirectory(executionBasePath);
        File inputDirectory = DataWorkflowHelper.getTaskInputDirectory(executionBasePath);
        File outputDirectory = DataWorkflowHelper.getTaskOutputDirectory(executionBasePath);
        if (!executablePath.canExecute()) {
            LOGGER.debug("Executable at location {} seems not to be executable. Taking care of this...", executablePath);
            if (executablePath.setExecutable(true)) {
                LOGGER.debug("Executable was successfully set to be executable.");
            } else {
                LOGGER.warn("Failed to set executable to be executable. Trying to continue.");
            }
        }
        String cmdLineString = executablePath.getAbsolutePath() + " "
                + pTask.getConfiguration().getApplicationArguments() + " " + pTask.getApplicationArguments();
        LOGGER.debug("Building up command array from string '{}'", cmdLineString);
        CommandLine cmdLine = CommandLine.parse(cmdLineString);
        DefaultExecutor executor = new DefaultExecutor();
        executor.setExitValue(0);
        Map<String, String> env = new HashMap<>();
        env.put("WORKING_DIR", workingDirectory.getAbsolutePath());
        env.put("TEMP_DIR", tempDirectory.getAbsolutePath());
        env.put("INPUT_DIR", inputDirectory.getAbsolutePath());
        env.put("OUTPUT_DIR", outputDirectory.getAbsolutePath());
        fout = new FileOutputStream(new File(tempDirectory, "stdout.log"));
        ferr = new FileOutputStream(new File(tempDirectory, "stderr.log"));
        LOGGER.debug("Setting stream handler for stdout and stderr.");
        executor.setStreamHandler(new PumpStreamHandler(fout, ferr));
        LOGGER.debug("Creating .RUNNING file for monitoring.");
        FileUtils.touch(runningIndicator);
        LOGGER.debug("Executing process.");
        int exitCode = executor.execute(cmdLine);
        if (exitCode != 0) {
            throw new DataWorkflowProcessingException(
                    "Execution returned exit code " + exitCode + ". See logfiles for details.");
        } else {
            LOGGER.debug("Process successfully finished with exit code {}", exitCode);
        }
    } catch (IOException | UnsupportedOperatingSystemException e) {
        throw new DataWorkflowProcessingException("Failed to start executable for task " + pTask.getId(), e);
    } finally {
        LOGGER.debug("Removing running indicator file {}", runningIndicator);
        FileUtils.deleteQuietly(runningIndicator);
        if (fout != null) {
            try {
                fout.close();
            } catch (IOException ex) {
            }
        }
        if (ferr != null) {
            try {
                ferr.close();
            } catch (IOException ex) {
            }
        }
    }
}
From source file:com.thinkbiganalytics.spark.shell.MultiUserProcessManager.java
/**
 * Calls kinit to request a new Kerberos ticket if the previous one is about to expire.
 */
private void refreshKerberosTicket() {
    // Determine if a new ticket is needed
    if (kerberos == null || kerberos.getInitInterval() <= 0
            || kerberosNextInit > DateTimeUtils.currentTimeMillis()) {
        return;
    }

    // Build executor
    final Executor executor = new DefaultExecutor();

    final ShutdownHookProcessDestroyer processDestroyer = new ShutdownHookProcessDestroyer();
    executor.setProcessDestroyer(processDestroyer);

    final Logger outputLogger = LoggerFactory.getLogger(getClass().getName() + ".kinit");
    final LoggerOutputStream outputStream = new LoggerOutputStream(outputLogger);
    final PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    executor.setStreamHandler(streamHandler);

    final ExecuteWatchdog watchdog = new ExecuteWatchdog(TimeUnit.SECONDS.toMillis(kerberos.getInitTimeout()));
    executor.setWatchdog(watchdog);

    // Run kinit to acquire a new ticket
    final CommandLine command = new CommandLine("kinit").addArgument("-kt")
            .addArgument(kerberos.getKeytabLocation()).addArgument(kerberos.getKerberosPrincipal());
    log.debug("Acquiring a new Kerberos ticket with command: {}", command);

    int exitCode;
    try {
        exitCode = executor.execute(command);
    } catch (final IOException e) {
        log.error("Failed to execute kinit", e);
        exitCode = -1;
    }

    // Record next time to acquire ticket
    if (!executor.isFailure(exitCode)) {
        kerberosNextInit = DateTimeUtils.currentTimeMillis()
                + TimeUnit.SECONDS.toMillis(kerberos.getInitInterval());
    } else {
        if (watchdog.killedProcess()) {
            log.error("Failed to acquire a Kerberos ticket within the allotted time: {}",
                    kerberos.getInitTimeout());
        } else {
            log.error("Kinit exited with non-zero status: {}", exitCode);
        }
        kerberosNextInit = DateTimeUtils.currentTimeMillis()
                + TimeUnit.SECONDS.toMillis(kerberos.getRetryInterval());
        throw new IllegalStateException("Failed to acquire a Kerberos ticket");
    }
}
From source file:modules.GeneralNativeCommandModule.java
protected KeyValueResult extractNative(String command, String options, Path path)
        throws NativeExecutionException {
    if (command == null || command.equals("")) {
        System.err.println("command null at GeneralNativeCommandModule.extractNative()");
        return null;
    }

    CommandLine commandLine = new CommandLine(command);
    if (options != null && !options.equals("")) {
        String[] args = options.split(" ");
        commandLine.addArguments(args);
    }
    if (path != null) {
        commandLine.addArgument(path.toAbsolutePath().toString(), false);
    }

    DefaultExecutor executor = new DefaultExecutor();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    executor.setStreamHandler(streamHandler);

    GeneralExecutableModuleConfig generalExecutableModuleConfig = getConfig();
    executor.setWatchdog(new ExecuteWatchdog(generalExecutableModuleConfig.timeout));
    if (getConfig().workingDirectory != null && getConfig().workingDirectory.exists()) {
        executor.setWorkingDirectory(getConfig().workingDirectory);
    }

    try {
        executor.execute(commandLine);
    } catch (ExecuteException xs) {
        NativeExecutionException n = new NativeExecutionException();
        n.initCause(xs);
        if (path != null) {
            n.path = path.toAbsolutePath().toString();
        }
        n.executionResult = outputStream.toString();
        n.exitCode = xs.getExitValue();
        throw n;
    } catch (IOException xs) {
        NativeExecutionException n = new NativeExecutionException();
        n.initCause(xs);
        if (path != null) {
            n.path = path.toAbsolutePath().toString();
        }
        n.executionResult = outputStream.toString();
        throw n;
    }

    KeyValueResult t = new KeyValueResult("GeneralNativeCommandResults");
    t.add("fullOutput", outputStream.toString().trim());
    return t;
}
From source file:com.vmware.bdd.service.impl.NodeLdapUserMgmtConfService.java
private void transferFile(String srcFilePath, String ip, String targetFilePath) {
    CommandLine cmdLine = new CommandLine("scp").addArgument(srcFilePath)
            .addArgument(ip + ":" + targetFilePath);

    DefaultExecutor executor = new DefaultExecutor();
    executor.setStreamHandler(new PumpStreamHandler(
            new ExecOutputLogger(LOGGER, false), // output logger
            new ExecOutputLogger(LOGGER, true))  // error logger
    );
    executor.setWatchdog(new ExecuteWatchdog(1000l * 120l));

    try {
        int exitVal = executor.execute(cmdLine);
        if (exitVal != 0) {
            throw new RuntimeException("CFG_LDAP_FAIL", null);
        }
    } catch (IOException e) {
        throw new RuntimeException("CFG_LDAP_FAIL", e);
    }
}
From source file:com.boundlessgeo.wps.grass.GrassProcesses.java
@DescribeProcess(title = "GRASS Version", description = "Retrieve the version of GRASS used for computation")
@DescribeResult(description = "Version")
public static String version() {
    if (EXEC == null) {
        return "unavailable";
    }
    CommandLine cmd = new CommandLine(EXEC);
    cmd.addArgument("-v");

    DefaultExecutor executor = new DefaultExecutor();
    executor.setExitValue(0);
    ExecuteWatchdog watchdog = new ExecuteWatchdog(60000);
    executor.setWatchdog(watchdog);
    try {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        executor.setStreamHandler(new PumpStreamHandler(outputStream));
        LOGGER.info("exec: " + cmd.toString());
        int exitValue = executor.execute(cmd);
        return outputStream.toString();
    } catch (ExecuteException huh) {
        return "exit code: " + huh.getExitValue() + " (" + huh.getMessage() + ")";
    } catch (IOException e) {
        return "unavailable: " + e.getClass().getSimpleName() + ":" + e.getMessage();
    }
}
From source file:com.github.seqware.queryengine.tutorial.PosterSGE.java
/**
 * <p>benchmark.</p>
 *
 * @throws java.io.IOException if any.
 */
public void benchmark() throws IOException {
    if (args.length != 3) {
        System.err.println(args.length + " arguments found");
        System.out.println(PosterSGE.class.getSimpleName()
                + " <outputKeyValueFile> <input file dir> <simultaneous jobs>");
        System.exit(-1);
    }
    File outputFile = Utility.checkOutput(args[0]);

    // check if reference has been properly created
    Reference reference = SWQEFactory.getQueryInterface().getLatestAtomByRowKey("hg_19", Reference.class);
    if (reference == null) {
        SGID refID = ReferenceCreator.mainMethod(new String[] { HG_19 });
        reference = SWQEFactory.getQueryInterface().getAtomBySGID(Reference.class, refID);
    }

    // record reference, starting disk space
    keyValues.put("referenceID", reference.getSGID().getRowKey());
    recordSpace("start");
    Utility.writeKeyValueFile(outputFile, keyValues);

    // create new FeatureSet id and pass it onto our children
    CreateUpdateManager manager = SWQEFactory.getModelManager();
    FeatureSet initialFeatureSet = manager.buildFeatureSet().setReference(reference).build();
    manager.flush();

    int count = 0;
    // go through all input files
    File fileDirectory = new File(args[1]);
    File[] listFiles = fileDirectory.listFiles();

    // record start and finish time
    Date startDate = new Date();
    keyValues.put(count + "-start-date-long", Long.toString(startDate.getTime()));
    keyValues.put(count + "-start-date-human", startDate.toString());

    // submit all jobs in parallel via SGE
    StringBuilder jobNames = new StringBuilder();
    for (File inputFile : listFiles) {
        // run without unnecessary parameters
        String cargs = "-w VCFVariantImportWorker -i " + inputFile.getAbsolutePath() + " -b "
                + String.valueOf(BENCHMARKING_BATCH_SIZE) + " -f " + initialFeatureSet.getSGID().getRowKey()
                + " -r " + reference.getSGID().getRowKey();
        String command = "java -Xmx2048m -classpath " + System.getProperty("user.dir")
                + "/seqware-queryengine-0.12.0-full.jar com.github.seqware.queryengine.system.importers.SOFeatureImporter";
        command = command + " " + cargs;
        command = "qsub -q long -l h_vmem=3G -cwd -N dyuen-" + inputFile.getName() + " -b y " + command;
        jobNames.append("dyuen-").append(inputFile.getName()).append(",");
        System.out.println("Running: " + command);
        CommandLine cmdLine = CommandLine.parse(command);
        DefaultExecutor executor = new DefaultExecutor();
        int exitValue = executor.execute(cmdLine);
    }
    String jobs = jobNames.toString().substring(0, jobNames.length() - 1);

    // submit a job that just waits on all the preceding jobs for synchronization
    String command = "java -Xmx1024m -version";
    command = "qsub -cwd -N dyuen-wait -hold_jid " + jobs + " -b y -sync y " + command;
    System.out.println("Running wait: " + command);
    CommandLine cmdLine = CommandLine.parse(command);
    DefaultExecutor executor = new DefaultExecutor();
    executor.setExitValues(null);
    int exitValue = executor.execute(cmdLine);

    FeatureSet fSet = SWQEFactory.getQueryInterface().getLatestAtomBySGID(initialFeatureSet.getSGID(),
            FeatureSet.class);
    keyValues.put(count + "-featuresSet-id", fSet.getSGID().getRowKey());
    keyValues.put(count + "-featuresSet-id-timestamp",
            Long.toString(fSet.getSGID().getBackendTimestamp().getTime()));

    // // runs count query, touches everything but does not write
    // keyValues.put(count + "-start-count-date-long", Long.toString(System.currentTimeMillis()));
    // long fsetcount = fSet.getCount();
    // keyValues.put(count + "-features-loaded", Long.toString(fsetcount));
    // keyValues.put(count + "-end-count-date-long", Long.toString(System.currentTimeMillis()));

    Date endDate = new Date();
    keyValues.put(count + "-end-date-long", Long.toString(endDate.getTime()));
    keyValues.put(count + "-end-date-human", endDate.toString());
    recordSpace(String.valueOf(count));
    Utility.writeKeyValueFile(outputFile, keyValues);
    count++;
}
From source file:com.tascape.qa.th.android.comm.Adb.java
private static void loadSerialProductMap() {
    SERIAL_PRODUCT.clear();
    String serials = SystemConfiguration.getInstance().getProperty(SYSPROP_SERIALS);
    if (null != serials) {
        LOG.info("Use specified devices from system property {}={}", SYSPROP_SERIALS, serials);
        Lists.newArrayList(serials.split(",")).forEach(s -> SERIAL_PRODUCT.put(s, "na"));
    } else {
        CommandLine cmdLine = new CommandLine(ADB);
        cmdLine.addArgument("devices");
        cmdLine.addArgument("-l");
        LOG.debug("{}", cmdLine.toString());

        List<String> output = new ArrayList<>();
        Executor executor = new DefaultExecutor();
        executor.setStreamHandler(new ESH(output));
        try {
            if (executor.execute(cmdLine) != 0) {
                throw new RuntimeException(cmdLine + " failed");
            }
        } catch (IOException ex) {
            throw new RuntimeException(cmdLine + " failed", ex);
        }

        output.stream().map(line -> StringUtils.split(line, " ", 3))
                .filter(ss -> ss.length == 3 && ss[1].equals("device")).forEach(ss -> {
                    LOG.info("device {} -> {}", ss[0], ss[2]);
                    SERIAL_PRODUCT.put(ss[0], ss[2]);
                });
    }
    if (SERIAL_PRODUCT.isEmpty()) {
        throw new RuntimeException("No device detected.");
    }
    SERIALS.addAll(SERIAL_PRODUCT.keySet());
}