Example usage for org.apache.commons.exec PumpStreamHandler PumpStreamHandler

Introduction

On this page you can find example usages of the org.apache.commons.exec PumpStreamHandler constructor PumpStreamHandler(OutputStream out, OutputStream err).

Prototype

public PumpStreamHandler(final OutputStream out, final OutputStream err) 

Document

Construct a new PumpStreamHandler that pumps the sub-process's standard output to out and its standard error to err.
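
For orientation, here is a minimal, self-contained sketch (not taken from the examples below) of wiring this constructor into a DefaultExecutor; the "echo hello" command, the class name, and the use of in-memory buffers are illustrative assumptions.

import java.io.ByteArrayOutputStream;

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;

public class PumpStreamHandlerDemo {
    public static void main(String[] args) throws Exception {
        //capture the child process's stdout and stderr in separate in-memory buffers
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ByteArrayOutputStream err = new ByteArrayOutputStream();

        DefaultExecutor executor = new DefaultExecutor();
        //PumpStreamHandler(out, err): the process's stdout is pumped to 'out', its stderr to 'err'
        executor.setStreamHandler(new PumpStreamHandler(out, err));

        //"echo hello" is only an illustrative command; replace it with the program you need to run
        int exitValue = executor.execute(CommandLine.parse("echo hello"));

        System.out.println("exit=" + exitValue);
        System.out.println("stdout: " + out);
        System.out.println("stderr: " + err);
    }
}

Passing System.out and System.err instead of buffers, as several of the examples below do, simply forwards the child process's output to the parent's console.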

Usage

From source file:com.vmware.bdd.service.impl.NodeLdapUserMgmtConfService.java

private void transferFile(String srcFilePath, String ip, String targetFilePath) {
    CommandLine cmdLine = new CommandLine("scp").addArgument(srcFilePath)
            .addArgument(ip + ":" + targetFilePath);

    DefaultExecutor executor = new DefaultExecutor();

    executor.setStreamHandler(new PumpStreamHandler(new ExecOutputLogger(LOGGER, false), //output logger
            new ExecOutputLogger(LOGGER, true)) //error logger
    );

    executor.setWatchdog(new ExecuteWatchdog(1000l * 120l));

    try {
        int exitVal = executor.execute(cmdLine);
        if (exitVal != 0) {
            throw new RuntimeException("CFG_LDAP_FAIL", null);
        }
    } catch (IOException e) {
        throw new RuntimeException("CFG_LDAP_FAIL", e);
    }
}

From source file:ch.ivyteam.ivy.maven.engine.EngineControl.java

private PumpStreamHandler createEngineLogStreamForwarder(Consumer<String> logLineHandler)
        throws FileNotFoundException {
    OutputStream output = getEngineLogTarget();
    OutputStream engineLogStream = new LineOrientedOutputStreamRedirector(output) {
        @Override
        protected void processLine(byte[] b) throws IOException {
            super.processLine(b); // write file log
            String line = new String(b);
            context.log.debug("engine: " + line);
            if (logLineHandler != null) {
                logLineHandler.accept(line);
            }
        }
    };
    PumpStreamHandler streamHandler = new PumpStreamHandler(engineLogStream, System.err) {
        @Override
        public void stop() throws IOException {
            super.stop();
            engineLogStream.close(); // we opened the stream - we're responsible to close it!
        }
    };
    return streamHandler;
}

From source file:edu.kit.dama.dataworkflow.impl.LocalExecutionHandler.java

/**
 * Execute the user application. This method will start a new process running
 * the prepared user application locally. The method will return as soon as
 * the application has terminated. An asynchronous monitoring task may check
 * whether the process is still running or not via
 * {@link #getTaskStatus(edu.kit.dama.mdm.dataworkflow.DataWorkflowTask)}.
 * This method will check the runningIndicator file '.RUNNING', which only
 * exists as long as the application is running.
 *
 * @param pTask The task whose application should be executed.
 *
 * @throws DataWorkflowProcessingException If either the startup or the
 * processing fails for any reason, or if the user application returns an exit
 * code != 0.
 */
@Override
public void startUserApplication(DataWorkflowTask pTask) throws DataWorkflowProcessingException {
    //simply start the process...monitoring will be connected later
    File runningIndicator = getRunningIndicator(pTask);
    FileOutputStream fout = null;
    FileOutputStream ferr = null;
    File executablePath;

    try {
        executablePath = DataWorkflowHelper.getTaskMainExecutable(pTask);
        File executionBasePath = DataWorkflowHelper.getExecutionBasePath(pTask);
        File workingDirectory = DataWorkflowHelper.getTaskWorkingDirectory(executionBasePath);
        File tempDirectory = DataWorkflowHelper.getTaskTempDirectory(executionBasePath);
        File inputDirectory = DataWorkflowHelper.getTaskInputDirectory(executionBasePath);
        File outputDirectory = DataWorkflowHelper.getTaskOutputDirectory(executionBasePath);

        if (!executablePath.canExecute()) {
            LOGGER.debug("Executable at location {} seems not to be executable. Taking care of this...");
            if (executablePath.setExecutable(true)) {
                LOGGER.debug("Executable was successfully set to be executable.");
            } else {
                LOGGER.warn("Failed to set executable to be executable. Trying to continue.");
            }
        }

        String cmdLineString = executablePath.getAbsolutePath() + " "
                + pTask.getConfiguration().getApplicationArguments() + " " + pTask.getApplicationArguments();
        LOGGER.debug("Building up command array from string '{}'", cmdLineString);

        CommandLine cmdLine = CommandLine.parse(cmdLineString);
        DefaultExecutor executor = new DefaultExecutor();
        executor.setExitValue(0);
        Map<String, String> env = new HashMap<>();
        env.put("WORKING_DIR", workingDirectory.getAbsolutePath());
        env.put("TEMP_DIR", tempDirectory.getAbsolutePath());
        env.put("INPUT_DIR", inputDirectory.getAbsolutePath());
        env.put("OUTPUT_DIR", outputDirectory.getAbsolutePath());

        fout = new FileOutputStream(new File(tempDirectory, "stdout.log"));
        ferr = new FileOutputStream(new File(tempDirectory, "stderr.log"));
        LOGGER.debug("Setting stream handler for stdout and stderr.");
        executor.setStreamHandler(new PumpStreamHandler(fout, ferr));
        LOGGER.debug("Creating .RUNNING file for monitoring.");
        FileUtils.touch(runningIndicator);
        LOGGER.debug("Executing process.");
        int exitCode = executor.execute(cmdLine);
        if (exitCode != 0) {
            throw new DataWorkflowProcessingException(
                    "Execution returned exit code " + exitCode + ". See logfiles for details.");
        } else {
            LOGGER.debug("Process successfully finished with exit code {}", exitCode);
        }
    } catch (IOException | UnsupportedOperatingSystemException e) {
        throw new DataWorkflowProcessingException("Failed to start executable for task " + pTask.getId(), e);
    } finally {
        LOGGER.debug("Removing running indicator file {}", runningIndicator);
        FileUtils.deleteQuietly(runningIndicator);
        if (fout != null) {
            try {
                fout.close();
            } catch (IOException ex) {
            }
        }

        if (ferr != null) {
            try {
                ferr.close();
            } catch (IOException ex) {
            }
        }
    }
}

From source file:com.netflix.genie.web.tasks.job.JobCompletionService.java

/**
 * Constructor.
 *
 * @param jobSearchService         An implementation of the job search service.
 * @param jobPersistenceService    An implementation of the job persistence service.
 * @param genieFileTransferService An implementation of the Genie File Transfer service.
 * @param genieWorkingDir          The working directory where all job directories are created.
 * @param mailServiceImpl          An implementation of the mail service.
 * @param registry                 The metrics registry to use
 * @param jobsProperties           The properties relating to running jobs
 * @param retryTemplate            Retry template for retrying remote calls
 * @throws GenieException if there is a problem
 */
@Autowired
public JobCompletionService(final JobPersistenceService jobPersistenceService,
        final JobSearchService jobSearchService, final GenieFileTransferService genieFileTransferService,
        final Resource genieWorkingDir, final MailService mailServiceImpl, final Registry registry,
        final JobsProperties jobsProperties,
        @Qualifier("genieRetryTemplate") @NotNull final RetryTemplate retryTemplate) throws GenieException {
    this.jobPersistenceService = jobPersistenceService;
    this.jobSearchService = jobSearchService;
    this.genieFileTransferService = genieFileTransferService;
    this.mailServiceImpl = mailServiceImpl;
    this.deleteArchiveFile = jobsProperties.getCleanup().isDeleteArchiveFile();
    this.deleteDependencies = jobsProperties.getCleanup().isDeleteDependencies();
    this.runAsUserEnabled = jobsProperties.getUsers().isRunAsUserEnabled();

    this.executor = new DefaultExecutor();
    this.executor.setStreamHandler(new PumpStreamHandler(null, null));

    try {
        this.baseWorkingDir = genieWorkingDir.getFile();
    } catch (IOException gse) {
        throw new GenieServerException("Could not load the base path from resource");
    }

    // Set up the metrics
    this.registry = registry;
    this.jobCompletionId = registry.createId("genie.jobs.completion.timer");
    this.emailSuccessRate = registry.counter("genie.jobs.email.success.rate");
    this.emailFailureRate = registry.counter("genie.jobs.email.failure.rate");
    this.archivalFailureRate = registry.counter("genie.jobs.archivalFailure.rate");
    this.doneFileProcessingFailureRate = registry.counter("genie.jobs.doneFileProcessingFailure.rate");
    this.finalStatusUpdateFailureRate = registry.counter("genie.jobs.finalStatusUpdateFailure.rate");
    this.processGroupCleanupFailureRate = registry.counter("genie.jobs.processGroupCleanupFailure.rate");
    this.archiveFileDeletionFailure = registry.counter("genie.jobs.archiveFileDeletionFailure.rate");
    this.deleteDependenciesFailure = registry.counter("genie.jobs.deleteDependenciesFailure.rate");
    // Retry template
    this.retryTemplate = retryTemplate;
}

From source file:edu.odu.cs.cs350.yellow1.jar.ExecuteJar.java

/**
 * {@inheritDoc}
 * <br>Run all tests in the test suite on the mutant, capturing the output created.
 * If the execution of the mutant with a test exits successfully, compare the standard output generated by<br>
 * the mutant; if it differs, stop running tests and return {@link ExecutionResults}.
 * <br>If the JVM exits with an error, the mutant is treated as not killed and more tests are run.
 * @return {@link ExecutionResults}
 */
@Override
public ExecutionResults call() throws Exception {
    //create new Executor for monitoring mutation running
    executor = new DefaultExecutor();
    //get a MessageDigest Instance for use in comparing outputs
    MessageDigest mDigest = MessageDigest.getInstance("MD5");
    //get file object for gold file
    File f = new File(pathToGold);
    //get the hash value for the gold file
    goldHash = mDigest.digest(FileUtils.readFileToByteArray(f));
    //reset the MessageDigest
    mDigest.reset();
    int testCount = 0;
    //Create a new ExecuteWatchdog with timeout at 10 seconds
    wDog = new ExecuteWatchdog(10000);
    executor.setWatchdog(wDog);
    //loop through the tests till empty
    while (!tests.isEmpty()) {
        //get the next test
        File test = tests.poll();//poll removes the test from the queue
        //prepare the captured output files
        String testName = test.getName();
        testName = testName.toUpperCase(Locale.getDefault()).substring(0, testName.indexOf("."));
        String outName = jarName + "_" + testName + "_out.txt";
        String errOutName = jarName + "_" + testName + "_err.txt";
        //create file objects to be written to 
        File standardOut = new File(pathToOutputDir + File.separator + outName);
        File standardErr = new File(pathToOutputDir + File.separator + errOutName);
        //file streams create the files for me
        try {
            log = new FileOutputStream(standardOut);

            err = new FileOutputStream(standardErr);
        } catch (FileNotFoundException e1) {
            logger.error("log or err file not found for jar " + jarName, e1.getMessage());
        }
        //create new stream handler for each execution
        streamHandler = new PumpStreamHandler(/* standard out */log, /* error out */err);
        executor.setStreamHandler(streamHandler);
        //construct the executable command
        CommandLine args = new CommandLine(pathToJVM);
        args.addArgument("-jar");
        args.addArgument(jar.getAbsolutePath());
        args.addArgument(test.getAbsolutePath());
        //new process destroyer per execution
        ShutDownSpawnedJVMProcess killJVM = new ShutDownSpawnedJVMProcess("java -jar " + jarName, 10000);
        killJVM.setWaitOnShutdown(true);
        executor.setProcessDestroyer(killJVM);
        success = false;

        try {
            streamHandler.start();
            int result = executor.execute(args);
            logger.info(jarName + " Sucess with val=[" + result + "] for test[" + testName + "]");
            success = true;
        } catch (ExecuteException ee) {
            logger.error(jarName + " Execute exception " + ee.getMessage() + " with val=[" + ee.getExitValue()
                    + "] for test[" + testName + "]");
        } catch (IOException e) {
            logger.error(jarName + " IOExecption " + e.getMessage());

        } finally {
            //PumpStreamHandler does not guarantee that the streams are closed, so close them manually
            //to release the locks the file streams hold on the created output files.
            //If the stream handler already closed them, this will throw an exception, which we ignore.
            try {
                streamHandler.stop();
                //log.flush();
                log.close();
                //err.flush();
                err.close();
            } catch (IOException e) {
                logger.error(e.getMessage());
                //ignore, nothing we can do here
            }

        }

        //if the spawned process exited with a success value,
        //check the hash of the output file and delete the empty error file;
        //if the hash is different the mutant was killed, otherwise test more.
        //If the spawned process exited with an error value, delete the empty standard out file and test more.
        if (success) {
            ++numOfSucesses;
            standardErr.delete();
            outFiles.add(standardOut);
            if (!Arrays.equals(goldHash, mDigest.digest(FileUtils.readFileToByteArray(standardOut)))) {
                testMore = false;
                logger.debug("Different hashes for jar [" + jarName + "] for test [" + testName + "]");
            } else {
                logger.debug("Same hashes for jar [" + jarName + "] for test [" + testName + "]");
            }
            mDigest.reset();
        } else {
            ++numOfFailurs;
            standardOut.delete();
            errFiles.add(standardErr);
            this.didNotExecute.add(test);
        }
        ++testCount;
        //the mutant was killed so stop testing 
        if (!testMore) {
            testMore = false;
            killed = true;
            testNumKilledME = testCount;
            break;
        }

    }

    jar.delete();
    return new ExecutionResults(numOfSucesses, numOfFailurs, testNumKilledME, jarName, killed, killedMutant,
            outFiles, errFiles);

}

From source file:de.torstenwalter.maven.plugins.SQLPlusMojo.java

private void runScriptWithSqlPlus(File file, Map environment)
        throws MojoExecutionException, MojoFailureException {
    checkFileIsReadable(file);

    CommandLine commandLine = new CommandLine(sqlplus);
    // log on only once; without this SQL*Plus would prompt for
    // credentials if the given ones are not correct
    commandLine.addArgument("-L");
    StringTokenizer stringTokenizer = new StringTokenizer(getConnectionIdentifier());
    while (stringTokenizer.hasMoreTokens()) {
        commandLine.addArgument(stringTokenizer.nextToken());
    }
    commandLine.addArgument("@" + file.getName());
    if (arguments != null) {
        for (Object argument : arguments) {
            if (argument == null) {
                throw new MojoExecutionException("Misconfigured argument, value is null. "
                        + "Set the argument to an empty value if this is the required behaviour.");
            } else {
                commandLine.addArgument(argument.toString());
            }
        }
    }

    getLog().info("Executing command line: " + obfuscateCredentials(commandLine.toString(), getCredentials()));

    Executor exec = new DefaultExecutor();
    exec.setWorkingDirectory(file.getParentFile());
    exec.setStreamHandler(new PumpStreamHandler(System.out, System.err));
    try {
        exec.execute(commandLine, environment);
    } catch (ExecuteException e) {
        throw new MojoExecutionException("program exited with exitCode: " + e.getExitValue());
    } catch (IOException e) {
        throw new MojoExecutionException("Command execution failed.", e);
    }
}

From source file:it.drwolf.ridire.utility.RIDIREReTagger.java

public String retagFile(File f) throws ExecuteException, IOException {
    // Map<String, File> map = new HashMap<String, File>();
    String fileIN = f.getAbsolutePath();
    String fileOut = f.getAbsolutePath() + ".iso";
    String posOld = f.getAbsolutePath() + ".iso.pos";
    String posNew = f.getAbsolutePath() + ".pos";
    // first convert from utf8 to iso8859-1
    CommandLine commandLine = CommandLine.parse("iconv");
    commandLine.addArgument("-c").addArgument("-s").addArgument("-f").addArgument("utf8").addArgument("-t")
            .addArgument("iso8859-1//TRANSLIT").addArgument("-o").addArgument(fileOut, false)
            .addArgument(fileIN, false);
    DefaultExecutor executor = new DefaultExecutor();
    ExecuteWatchdog watchdog = new ExecuteWatchdog(RIDIREReTagger.TREETAGGER_TIMEOUT);
    executor.setWatchdog(watchdog);
    int exitValue = executor.execute(commandLine);
    if (exitValue == 0) {
        // tag using latin1 and Baroni's tagset
        commandLine = CommandLine.parse(this.treeTaggerBin);
        commandLine.addArgument(fileOut, false);
        executor = new DefaultExecutor();
        executor.setExitValue(0);
        watchdog = new ExecuteWatchdog(RIDIREReTagger.TREETAGGER_TIMEOUT);
        executor.setWatchdog(watchdog);
        TreeTaggerLog treeTaggerLog = new TreeTaggerLog();
        PumpStreamHandler executeStreamHandler = new PumpStreamHandler(treeTaggerLog, null);
        executor.setStreamHandler(executeStreamHandler);
        int exitValue2 = executor.execute(commandLine);
        if (exitValue2 == 0) {
            // FileUtils.deleteQuietly(new File(fileOut));
            File posTagFile = new File(posOld);
            FileUtils.writeLines(posTagFile, treeTaggerLog.getLines());
        }
        // reconvert to utf8
        commandLine = CommandLine.parse("iconv");
        commandLine.addArgument("-s").addArgument("-f").addArgument("iso8859-1").addArgument("-t")
                .addArgument("utf8//TRANSLIT").addArgument("-o").addArgument(posNew, false)
                .addArgument(posOld, false);
        executor = new DefaultExecutor();
        watchdog = new ExecuteWatchdog(RIDIREReTagger.TREETAGGER_TIMEOUT);
        executor.setWatchdog(watchdog);
        int exitValue3 = executor.execute(commandLine);
        if (exitValue3 == 0) {
            // FileUtils.deleteQuietly(new File(f.getPath() + ".iso.pos"));
            return new File(posNew).getCanonicalPath();
        }
    }
    return null;
}

From source file:com.jredrain.startup.AgentProcessor.java

@Override
public Response execute(final Request request) throws TException {
    if (!this.password.equalsIgnoreCase(request.getPassword())) {
        return errorPasswordResponse(request);
    }

    String command = request.getParams().get("command") + EXITCODE_SCRIPT;

    String pid = request.getParams().get("pid");
    //timeout in minutes (0 means no timeout)
    Long timeout = CommonUtils.toLong(request.getParams().get("timeout"), 0L);

    boolean timeoutFlag = timeout > 0;

    logger.info("[redrain]:execute:{},pid:{}", command, pid);

    File shellFile = CommandUtils.createShellFile(command, pid);

    Integer exitValue;

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

    final Response response = Response.response(request);

    final ExecuteWatchdog watchdog = new ExecuteWatchdog(Integer.MAX_VALUE);

    final Timer timer = new Timer();

    DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();

    try {
        CommandLine commandLine = CommandLine.parse("/bin/bash +x " + shellFile.getAbsolutePath());
        final DefaultExecutor executor = new DefaultExecutor();

        ExecuteStreamHandler stream = new PumpStreamHandler(outputStream, outputStream);
        executor.setStreamHandler(stream);
        response.setStartTime(new Date().getTime());
        //an exit value of 0 means the shell script finished successfully
        executor.setExitValue(0);

        if (timeoutFlag) {
            //attach the watchdog so the process can be killed when it times out
            executor.setWatchdog(watchdog);
            //schedule a task that kills the process once the timeout expires
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    //if the process is still running when the timeout fires, kill it
                    if (watchdog.isWatching()) {
                        /**
                         * watchdog.destroyProcess() could also be used to kill the process:
                         * watchdog.destroyProcess();
                         */
                        timer.cancel();
                        watchdog.stop();
                        //call kill to terminate the job
                        request.setAction(Action.KILL);
                        try {
                            kill(request);
                            response.setExitCode(RedRain.StatusCode.TIME_OUT.getValue());
                        } catch (TException e) {
                            e.printStackTrace();
                        }

                    }
                }
            }, timeout * 60 * 1000);

            //cancel the timer as soon as the process completes or fails
            resultHandler = new DefaultExecuteResultHandler() {
                @Override
                public void onProcessComplete(int exitValue) {
                    super.onProcessComplete(exitValue);
                    timer.cancel();
                }

                @Override
                public void onProcessFailed(ExecuteException e) {
                    super.onProcessFailed(e);
                    timer.cancel();
                }
            };
        }

        executor.execute(commandLine, resultHandler);

        resultHandler.waitFor();

    } catch (Exception e) {
        if (e instanceof ExecuteException) {
            exitValue = ((ExecuteException) e).getExitValue();
        } else {
            exitValue = RedRain.StatusCode.ERROR_EXEC.getValue();
        }
        if (RedRain.StatusCode.KILL.getValue().equals(exitValue)) {
            if (timeoutFlag) {
                timer.cancel();
                watchdog.stop();
            }
            logger.info("[redrain]:job has be killed!at pid :{}", request.getParams().get("pid"));
        } else {
            logger.info("[redrain]:job execute error:{}", e.getCause().getMessage());
        }
    } finally {

        exitValue = resultHandler.getExitValue();

        if (CommonUtils.notEmpty(outputStream.toByteArray())) {
            try {
                outputStream.flush();
                String text = outputStream.toString();
                if (notEmpty(text)) {
                    try {
                        text = text.replaceAll(String.format(REPLACE_REX, shellFile.getAbsolutePath()), "");
                        response.setMessage(text.substring(0, text.lastIndexOf(EXITCODE_KEY)));
                        exitValue = Integer.parseInt(text
                                .substring(text.lastIndexOf(EXITCODE_KEY) + EXITCODE_KEY.length() + 1).trim());
                    } catch (IndexOutOfBoundsException e) {
                        response.setMessage(text);
                    }
                }
                outputStream.close();
            } catch (Exception e) {
                logger.error("[redrain]:error:{}", e);
            }
        }

        if (RedRain.StatusCode.TIME_OUT.getValue() == response.getExitCode()) {
            response.setSuccess(false).end();
        } else {
            response.setExitCode(exitValue)
                    .setSuccess(response.getExitCode() == RedRain.StatusCode.SUCCESS_EXIT.getValue()).end();
        }

        if (shellFile != null) {
            shellFile.delete(); //clean up the generated shell script file
        }
    }
    logger.info("[redrain]:execute result:{}", response.toString());
    watchdog.stop();

    return response;
}

From source file:com.github.stephenc.mongodb.maven.StartMongoMojo.java

public void execute() throws MojoExecutionException, MojoFailureException {
    if (skip) {
        getLog().info("Skipping mongodb: mongodb.skip==true");
        return;
    }
    if (installation == null) {
        getLog().info("Using mongod from PATH");
    } else {
        getLog().info("Using mongod installed in " + installation);
    }
    getLog().info("Using database root of " + databaseRoot);
    final Logger mongoLogger = Logger.getLogger("com.mongodb");
    Level mongoLevel = mongoLogger.getLevel();
    try {
        mongoLogger.setLevel(Level.SEVERE);
        MongoOptions opts = new MongoOptions();
        opts.autoConnectRetry = false;
        opts.connectionsPerHost = 1;
        opts.connectTimeout = 50;
        opts.socketTimeout = 50;
        Mongo instance;
        try {
            instance = new Mongo(new ServerAddress("localhost", port), opts);
            List<String> databaseNames = instance.getDatabaseNames();
            throw new MojoExecutionException("Port " + port
                    + " is already running a MongoDb instance with the following databases " + databaseNames);
        } catch (MongoException.Network e) {
            // fine... no instance running
        } catch (MongoException e) {
            throw new MojoExecutionException("Port " + port + " is already running a MongoDb instance");
        } catch (UnknownHostException e) {
            // ignore... localhost is always known!
        }
    } finally {
        mongoLogger.setLevel(mongoLevel);
    }

    CommandLine commandLine = null;
    if (installation != null && installation.isDirectory()) {
        File bin = new File(installation, "bin");
        File exe = new File(bin, Os.isFamily(Os.FAMILY_WINDOWS) ? "mongod.exe" : "mongod");
        if (exe.isFile()) {
            commandLine = new CommandLine(exe);
        } else {
            throw new MojoExecutionException("Could not find mongo executables in specified installation: "
                    + installation + " expected to find " + exe + " but it does not exist.");
        }
    }
    if (commandLine == null) {
        commandLine = new CommandLine(Os.isFamily(Os.FAMILY_WINDOWS) ? "mongod.exe" : "mongod");
    }
    if (databaseRoot.isFile()) {
        throw new MojoExecutionException("Database root " + databaseRoot + " is a file and not a directory");
    }
    if (databaseRoot.isDirectory() && cleanDatabaseRoot) {
        getLog().info("Cleaning database root directory: " + databaseRoot);
        try {
            FileUtils.deleteDirectory(databaseRoot);
        } catch (IOException e) {
            throw new MojoExecutionException("Could not clean database root directory " + databaseRoot, e);
        }
    }
    if (!databaseRoot.isDirectory()) {
        getLog().debug("Creating database root directory: " + databaseRoot);
        if (!databaseRoot.mkdirs()) {
            throw new MojoExecutionException("Could not create database root directory " + databaseRoot);
        }
    }

    if (!verbose) {
        commandLine.addArgument("--quiet");
    }

    commandLine.addArgument("--logpath");
    commandLine.addArgument(logPath.getAbsolutePath());
    if (logAppend) {
        commandLine.addArgument("--logappend");
    }

    commandLine.addArgument(auth ? "--auth" : "--noauth");

    commandLine.addArgument("--port");
    commandLine.addArgument(Integer.toString(port));

    commandLine.addArgument("--dbpath");
    commandLine.addArgument(databaseRoot.getAbsolutePath());

    if (additionalArguments != null) {
        for (String aa : additionalArguments) {
            commandLine.addArgument(aa);
        }
    }

    Executor exec = new DefaultExecutor();
    DefaultExecuteResultHandler execHandler = new DefaultExecuteResultHandler();
    exec.setWorkingDirectory(databaseRoot);
    ProcessObserver processObserver = new ProcessObserver(new ShutdownHookProcessDestroyer());
    exec.setProcessDestroyer(processObserver);

    LogOutputStream stdout = new MavenLogOutputStream(getLog());
    LogOutputStream stderr = new MavenLogOutputStream(getLog());

    getLog().info("Executing command line: " + commandLine);
    exec.setStreamHandler(new PumpStreamHandler(stdout, stderr));
    try {
        exec.execute(commandLine, execHandler);
        getLog().info("Waiting for MongoDB to start...");
        long timeout = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(120);
        mongoLevel = mongoLogger.getLevel();
        try {
            mongoLogger.setLevel(Level.SEVERE);
            while (System.currentTimeMillis() < timeout && !execHandler.hasResult()) {
                MongoOptions opts = new MongoOptions();
                opts.autoConnectRetry = false;
                opts.connectionsPerHost = 1;
                opts.connectTimeout = 250;
                opts.socketTimeout = 250;
                Mongo instance;
                try {
                    instance = new Mongo(new ServerAddress("localhost", port), opts);
                    List<String> databaseNames = instance.getDatabaseNames();
                    getLog().info("MongoDb started.");
                    getLog().info("Databases: " + databaseNames);
                } catch (MongoException.Network e) {
                    // ignore, wait and try again
                    try {
                        Thread.sleep(50);
                    } catch (InterruptedException e1) {
                        // ignore
                    }
                    continue;
                } catch (MongoException e) {
                    getLog().info("MongoDb started.");
                    getLog().info("Unable to list databases due to " + e.getMessage());
                }
                break;
            }
        } finally {
            mongoLogger.setLevel(mongoLevel);
        }
        if (execHandler.hasResult()) {
            ExecuteException exception = execHandler.getException();
            if (exception != null) {
                throw new MojoFailureException(exception.getMessage(), exception);
            }
            throw new MojoFailureException(
                    "Command " + commandLine + " exited with exit code " + execHandler.getExitValue());
        }
        Map pluginContext = session.getPluginContext(getPluginDescriptor(), project);
        pluginContext.put(ProcessObserver.class.getName() + ":" + Integer.toString(port), processObserver);
    } catch (IOException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }

}

From source file:io.vertx.config.vault.utils.VaultProcess.java

public void runAndProcess(String command, Consumer<String> processor) {
    String cli = executable.getAbsolutePath() + " " + command;
    System.out.println(">> " + cli);
    CommandLine parse = CommandLine.parse(cli);
    DefaultExecutor executor = new DefaultExecutor();
    PumpStreamHandler pump = new PumpStreamHandler(new VaultOutputStream().addExtractor(processor), System.err);

    ExecuteWatchdog watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchDog);
    executor.setStreamHandler(pump);
    try {
        executor.execute(parse);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}