List of usage examples for org.apache.commons.exec DefaultExecutor setWatchdog
public void setWatchdog(final ExecuteWatchdog watchDog)
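Before the project-specific examples below, here is a minimal, self-contained sketch of the typical setWatchdog pattern: create an ExecuteWatchdog with a timeout, attach it to the DefaultExecutor, and use killedProcess() afterwards to tell a timeout kill apart from an ordinary failure. The "sleep 10" command and the 5-second timeout are illustrative placeholders, not part of any example below.

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.PumpStreamHandler;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class WatchdogSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical command used only for illustration.
        CommandLine cmdLine = CommandLine.parse("sleep 10");

        DefaultExecutor executor = new DefaultExecutor();

        // Kill the child process if it runs longer than 5 seconds.
        ExecuteWatchdog watchdog = new ExecuteWatchdog(5 * 1000);
        executor.setWatchdog(watchdog);

        // Capture the child's stdout/stderr.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        executor.setStreamHandler(new PumpStreamHandler(out));

        try {
            int exitValue = executor.execute(cmdLine);
            System.out.println("Exit value: " + exitValue);
        } catch (IOException e) {
            // killedProcess() reports whether the watchdog terminated the process.
            if (watchdog.killedProcess()) {
                System.err.println("Process timed out and was killed by the watchdog");
            } else {
                throw e;
            }
        }
        System.out.println(out.toString());
    }
}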
From source file:be.tarsos.transcoder.ffmpeg.FFMPEGExecutor.java
/**
 * Executes the ffmpeg process with the previously given arguments.
 *
 * @return The standard output of the child process.
 *
 * @throws IOException
 *             If the process call fails.
 */
public String execute() throws IOException {
    CommandLine cmdLine = new CommandLine(ffmpegExecutablePath);
    int fileNumber = 0;
    Map<String, File> map = new HashMap<String, File>();
    for (int i = 0; i < args.size(); i++) {
        final String arg = args.get(i);
        final Boolean isFile = argIsFile.get(i);
        if (isFile) {
            String key = "file" + fileNumber;
            map.put(key, new File(arg));
            cmdLine.addArgument("'${" + key + "}'", false);
            fileNumber++;
        } else {
            cmdLine.addArgument(arg);
        }
    }
    cmdLine.setSubstitutionMap(map);
    LOG.fine("Execute: " + cmdLine);

    DefaultExecutor executor = new DefaultExecutor();
    // 5 minutes wait
    ExecuteWatchdog watchdog = new ExecuteWatchdog(60 * 1000 * 5);
    executor.setWatchdog(watchdog);

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    executor.setStreamHandler(new PumpStreamHandler(out));

    int[] exitValues = { 0, 1 };
    executor.setExitValues(exitValues);
    executor.execute(cmdLine);
    return out.toString();
}
From source file:com.boulmier.machinelearning.jobexecutor.job.Job.java
public void start() throws IOException {
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    final DefaultExecutor exec = new DefaultExecutor();
    final ExecuteWatchdog wd = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    final PumpStreamHandler output = new PumpStreamHandler(out);
    final DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();

    exec.setWatchdog(wd);
    exec.setStreamHandler(output);
    exec.execute(cl, handler);
    JobExecutor.logger.info("Running job " + jobid);

    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                handler.waitFor();
                Computer.ComputeProperties properties = Computer.ComputeProperties.buildFromRequest(req);
                new SenderComputer(new StorageComputer(out.toString(), properties)).compute();
                JobExecutor.logger.info("Job complete " + jobid);
            } catch (InterruptedException ex) {
                exec.getWatchdog().destroyProcess();
                JobExecutor.logger.error(
                        "Job (" + jobid + ") has been destroyed due to internal error " + ex.getMessage());
            }
        }
    }).start();
}
From source file:com.k42b3.aletheia.filter.request.Process.java
public void exec(Request request) {
    String cmd = getConfig().getProperty("cmd");

    try {
        logger.info("Execute: " + cmd);

        CommandLine commandLine = CommandLine.parse(cmd);
        ExecuteWatchdog watchdog = new ExecuteWatchdog(timeout);
        DefaultExecutor executor = new DefaultExecutor();

        this.baos = new ByteArrayOutputStream();
        this.baosErr = new ByteArrayOutputStream();
        this.bais = new ByteArrayInputStream(request.getContent().getBytes());

        executor.setStreamHandler(new PumpStreamHandler(this.baos, this.baosErr, this.bais));
        executor.setWatchdog(watchdog);
        executor.execute(commandLine);

        logger.info("Output: " + this.baos.toString());

        request.setContent(this.baos.toString());
    } catch (Exception e) {
        logger.warning(e.getMessage());
    }
}
From source file:com.walmart.gatling.commons.ReportExecutor.java
private void runJob(Master.GenerateReport job) {
    TaskEvent taskEvent = job.reportJob.taskEvent;

    CommandLine cmdLine = new CommandLine(agentConfig.getJob().getCommand());
    Map<String, Object> map = new HashMap<>();
    map.put("path", new File(agentConfig.getJob().getJobArtifact(taskEvent.getJobName())));
    cmdLine.addArgument("${path}");
    // parameters come from the task event
    for (Pair<String, String> pair : taskEvent.getParameters()) {
        cmdLine.addArgument(pair.getValue());
    }
    String dir = agentConfig.getJob().getLogDirectory() + "reports/" + job.reportJob.trackingId + "/";
    cmdLine.addArgument(dir);
    cmdLine.setSubstitutionMap(map);

    DefaultExecutor executor = new DefaultExecutor();
    executor.setExitValues(agentConfig.getJob().getExitValues());
    ExecuteWatchdog watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchdog);
    executor.setWorkingDirectory(new File(agentConfig.getJob().getPath()));

    FileOutputStream outFile = null;
    FileOutputStream errorFile = null;
    try {
        List<String> resultFiles = new ArrayList<>(job.results.size());
        // download all result files
        /*int i = 0;
        for (Worker.Result result : job.results) {
            String destFile = dir + i++ + ".log";
            resultFiles.add(destFile);
            DownloadFile.downloadFile(result.metrics, destFile);
        }*/
        AtomicInteger index = new AtomicInteger();
        job.results.parallelStream().forEach(result -> {
            String destFile = dir + index.incrementAndGet() + ".log";
            resultFiles.add(destFile);
            DownloadFile.downloadFile(result.metrics, destFile);
        });

        String outPath = agentConfig.getJob().getOutPath(taskEvent.getJobName(), job.reportJob.trackingId);
        String errPath = agentConfig.getJob().getErrorPath(taskEvent.getJobName(), job.reportJob.trackingId);
        // create the std and err files
        outFile = FileUtils.openOutputStream(new File(outPath));
        errorFile = FileUtils.openOutputStream(new File(errPath));

        PumpStreamHandler psh = new PumpStreamHandler(new ExecLogHandler(outFile), new ExecLogHandler(errorFile));
        executor.setStreamHandler(psh);
        System.out.println(cmdLine);
        int exitResult = executor.execute(cmdLine);

        ReportResult result;
        if (executor.isFailure(exitResult)) {
            result = new ReportResult(dir, job.reportJob, false);
            log.info("Report Executor Failed, result: " + job.toString());
        } else {
            result = new ReportResult(job.reportJob.getHtml(), job.reportJob, true);
            log.info("Report Executor Completed, result: " + result.toString());
        }
        for (String resultFile : resultFiles) {
            FileUtils.deleteQuietly(new File(resultFile));
        }
        getSender().tell(result, getSelf());
    } catch (IOException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(outFile);
        IOUtils.closeQuietly(errorFile);
    }
}
From source file:de.akquinet.innovation.play.maven.Play2PackageMojo.java
private void packageApplication() throws MojoExecutionException {
    String line = getPlay2().getAbsolutePath();
    CommandLine cmdLine = CommandLine.parse(line);
    cmdLine.addArgument("package");

    DefaultExecutor executor = new DefaultExecutor();
    if (timeout > 0) {
        ExecuteWatchdog watchdog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchdog);
    }

    executor.setWorkingDirectory(project.getBasedir());
    executor.setExitValue(0);
    try {
        executor.execute(cmdLine, getEnvironment());
    } catch (IOException e) {
        throw new MojoExecutionException("Error during packaging", e);
    }
}
From source file:de.akquinet.innovation.play.maven.Play2PackageMojo.java
private void packageDistribution() throws MojoExecutionException {
    String line = getPlay2().getAbsolutePath();
    CommandLine cmdLine = CommandLine.parse(line);
    cmdLine.addArgument("dist");

    DefaultExecutor executor = new DefaultExecutor();
    if (timeout > 0) {
        ExecuteWatchdog watchdog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchdog);
    }

    executor.setWorkingDirectory(project.getBasedir());
    executor.setExitValue(0);
    try {
        executor.execute(cmdLine, getEnvironment());
    } catch (IOException e) {
        throw new MojoExecutionException("Error during distribution creation", e);
    }
}
From source file:com.walmart.gatling.commons.ScriptExecutor.java
private void runCancelJob(Master.Job message) {
    if (getAbortStatus(message.abortUrl, message.trackingId)) {
        CommandLine cmdLine = new CommandLine("/bin/bash");
        cmdLine.addArgument(agentConfig.getJob().getJobArtifact("cancel"));
        cmdLine.addArgument(message.jobId);

        DefaultExecutor killExecutor = new DefaultExecutor();
        ExecuteWatchdog watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
        killExecutor.setWatchdog(watchdog);
        try {
            log.info("Cancel command: {}", cmdLine);
            killExecutor.execute(cmdLine);
        } catch (IOException e) {
            log.error(e, "Error cancelling job");
        }
    }
}
From source file:net.ladenthin.snowman.imager.run.streamer.Streamer.java
@Override
public void run() {
    final CImager conf = ConfigurationSingleton.ConfigurationSingleton.getImager();

    for (;;) {
        if (WatchdogSingleton.WatchdogSingleton.getWatchdog().getKillFlag() == true) {
            LOGGER.trace("killFlag == true");
            return;
        }

        final CommandLine cmdLine = new CommandLine(streamer);
        // final CommandLine cmdLine = new CommandLine("sleep");
        // cmdLine.addArgument("200");
        cmdLine.addArgument("-c");
        cmdLine.addArgument(conf.getStreamer().getDevice());
        cmdLine.addArgument("-t");
        cmdLine.addArgument(
                String.valueOf(conf.getStreamer().getFramesPerSecond() * conf.getStreamer().getRecordTime()));
        cmdLine.addArgument("-r");
        cmdLine.addArgument(String.valueOf(conf.getStreamer().getFramesPerSecond()));
        cmdLine.addArgument("-s");
        cmdLine.addArgument(conf.getStreamer().getResolutionX() + "x" + conf.getStreamer().getResolutionY());
        cmdLine.addArgument("-o");
        cmdLine.addArgument(conf.getStreamer().getPath() + File.separator
                + conf.getSnowmanServer().getCameraname() + "_"
                + (long) (System.currentTimeMillis() / 1000) + "_"
                + conf.getStreamer().getFramesPerSecond() + "_00000000.jpeg");

        LOGGER.trace("cmdLine: {}", cmdLine);

        // 10 seconds should be more than enough
        final long safetyTimeWindow = 10000;

        final DefaultExecutor executor = new DefaultExecutor();
        final long timeout = 1000 * (conf.getStreamer().getRecordTime() + safetyTimeWindow);
        // final long timeout = 5000;
        LOGGER.trace("timeout: {}", timeout);
        final ExecuteWatchdog watchdog = new ExecuteWatchdog(timeout);
        executor.setWatchdog(watchdog);

        try {
            LOGGER.debug("start process");
            final int exitValue = executor.execute(cmdLine);
            LOGGER.debug("process executed");
            LOGGER.trace("exitValue: {}", exitValue);
        } catch (IOException e) {
            if (watchdog.killedProcess()) {
                LOGGER.warn("Process was killed on purpose by the watchdog");
            } else {
                LOGGER.error("Process exited with an error.");
                Imager.waitALittleBit(5000);
            }
        }
        LOGGER.trace("loop end");
    }
}
From source file:edu.stolaf.cs.wmrserver.testjob.TestJobTask.java
public TestJobResult call() throws IOException {
    // Create the result object
    TestJobResult result = new TestJobResult();

    // Map
    CappedInputStream mapInput = null;
    try {
        // List the input files and open a stream
        FileSystem fs = _inputPath.getFileSystem(_conf);
        FileStatus[] files = JobServiceHandler.listInputFiles(fs, _inputPath);
        AggregateInputStream aggregateInput = new AggregateInputStream(fs, files);
        mapInput = new CappedInputStream(aggregateInput, _inputCap);

        // Run the mapper
        result.setMapResult(runTransform(_id, _mapperFile, _packageDir, mapInput));
    } finally {
        IOUtils.closeQuietly(mapInput);
    }

    // Return if mapper failed or did not produce output
    if (result.getMapResult().getExitCode() != 0 || result.getMapResult().getOutputFile() == null)
        return result;

    // Sort
    // While this seems (and is) inefficient for computers, this is
    // actually probably the shortest way to write this code since
    // vanilla Java does not provide an equivalent of sort -n.
    // If you want to write it in Java, use java.util.TreeSet.
    File intermediateFile = null;
    FileOutputStream intermediateOutput = null;
    try {
        // Create and open temporary file for sorted intermediate output
        intermediateFile = File.createTempFile("job-" + Long.toString(_id), "-intermediate", _tempDir);
        intermediateOutput = new FileOutputStream(intermediateFile);

        // Run the sort
        CommandLine sortCommand = new CommandLine("sort");
        //sortCommand.addArgument("--field-separator=\t");
        if (_numericSort)
            sortCommand.addArgument("-n");
        sortCommand.addArgument(result.getMapResult().getOutputFile().getCanonicalPath(), false);

        DefaultExecutor exec = new DefaultExecutor();
        ExecuteWatchdog dog = new ExecuteWatchdog(EXECUTABLE_TIMEOUT);
        PumpStreamHandler pump = new PumpStreamHandler(intermediateOutput);
        exec.setWatchdog(dog);
        exec.setStreamHandler(pump);

        try {
            exec.execute(sortCommand);
        } catch (ExecuteException ex) {
            throw new IOException("Sort process failed while running test jobs", ex);
        }
    } finally {
        IOUtils.closeQuietly(intermediateOutput);
    }

    // Reduce
    FileInputStream reduceInput = null;
    try {
        // Open the intermediate file for reading
        reduceInput = new FileInputStream(intermediateFile);

        // Run the reducer
        result.setReduceResult(runTransform(_id, _reducerFile, _packageDir, reduceInput));
    } finally {
        IOUtils.closeQuietly(reduceInput);

        // Delete intermediate file
        intermediateFile.delete();
    }

    return result;
}
From source file:io.vertx.config.vault.utils.VaultProcess.java
public void runAndProcess(String command, Consumer<String> processor) {
    String cli = executable.getAbsolutePath() + " " + command;
    System.out.println(">> " + cli);
    CommandLine parse = CommandLine.parse(cli);

    DefaultExecutor executor = new DefaultExecutor();
    PumpStreamHandler pump = new PumpStreamHandler(new VaultOutputStream().addExtractor(processor), System.err);
    ExecuteWatchdog watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchDog);
    executor.setStreamHandler(pump);

    try {
        executor.execute(parse);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}