Example usage for java.lang.ProcessBuilder.redirectOutput

Introduction

This page collects example usages of java.lang.ProcessBuilder.redirectOutput drawn from open-source projects.

Prototype

public ProcessBuilder redirectOutput(File file) 

Document

Sets this process builder's standard output destination to a file. This is equivalent to redirectOutput(Redirect.to(file)), which truncates the file before writing; to append instead, use the redirectOutput(Redirect) overload with Redirect.appendTo(file), as most of the examples below do.
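
Before the project examples, here is a minimal, self-contained sketch of both overloads; the echo command and the out.log file name are placeholders chosen for illustration.

import java.io.File;
import java.io.IOException;

public class RedirectOutputExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        File log = new File("out.log"); // placeholder file name

        // redirectOutput(File) truncates the file and writes the child's stdout to it.
        ProcessBuilder pb = new ProcessBuilder("echo", "hello"); // placeholder command
        pb.redirectOutput(log);
        pb.start().waitFor();

        // redirectOutput(Redirect) can append instead of truncating.
        pb = new ProcessBuilder("echo", "world");
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(log));
        pb.start().waitFor(); // out.log now contains "hello" then "world"
    }
}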

Usage

From source file: monasca.api.integration.docker.ITInfluxDBTest.java

private void runAPI() throws Exception {

    if (!isPortFree(8070)) {
        throw new Exception("port 8070 is not free. Unable to start instance" + " of monasca api");
    }

    String latestShadedJarFileName = getLatestShadedJarFileName();
    System.out.println("Running " + latestShadedJarFileName);

    ProcessBuilder pb = new ProcessBuilder("java", "-cp", "./target/" + latestShadedJarFileName,
            "monasca.api.MonApiApplication", "server", "src/test/resources/mon-api-config.yml");
    File log = new File("mon-api-integration-test.log");
    pb.redirectErrorStream(true);
    pb.redirectOutput(ProcessBuilder.Redirect.appendTo(log));
    apiProcess = pb.start();

    System.out.println("Started " + latestShadedJarFileName);

    waitForPortReady("localhost", 8070);
}
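
In this example redirectErrorStream(true) merges the child's stderr into its stdout, so the single Redirect.appendTo(log) destination captures both streams in mon-api-integration-test.log.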

From source file: org.opencms.workplace.tools.git.CmsGitCheckin.java

/**
 * Runs the shell script for committing and optionally pushing the changes in the module.
 * @return exit code of the script.
 */
private int runCommitScript() {

    if (m_checkout && !m_fetchAndResetBeforeImport) {
        m_logStream.println("Skipping script....");
        return 0;
    }
    try {
        m_logStream.flush();
        String commandParam;
        if (m_resetRemoteHead) {
            commandParam = resetRemoteHeadScriptCommand();
        } else if (m_resetHead) {
            commandParam = resetHeadScriptCommand();
        } else if (m_checkout) {
            commandParam = checkoutScriptCommand();
        } else {
            commandParam = checkinScriptCommand();
        }
        String[] cmd = { "bash", "-c", commandParam };
        m_logStream.println("Calling the script as follows:");
        m_logStream.println();
        m_logStream.println(cmd[0] + " " + cmd[1] + " " + cmd[2]);
        ProcessBuilder builder = new ProcessBuilder(cmd);
        m_logStream.close();
        m_logStream = null;
        Redirect redirect = Redirect.appendTo(new File(DEFAULT_LOGFILE_PATH));
        builder.redirectOutput(redirect);
        builder.redirectError(redirect);
        Process scriptProcess = builder.start();
        int exitCode = scriptProcess.waitFor();
        scriptProcess.getOutputStream().close();
        m_logStream = new PrintStream(new FileOutputStream(DEFAULT_LOGFILE_PATH, true));
        return exitCode;
    } catch (InterruptedException | IOException e) {
        e.printStackTrace(m_logStream);
        return -1;
    }

}
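
Here one Redirect.appendTo instance is passed to both redirectOutput and redirectError, so the script's stdout and stderr land in the same log file. Appending matters because the method closes its own PrintStream on that file before the script runs and reopens it in append mode afterwards.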

From source file: ldbc.snb.datagen.generator.LDBCDatagen.java

public int runGenerateJob(Configuration conf) throws Exception {

    String hadoopPrefix = conf.get("ldbc.snb.datagen.serializer.hadoopDir");
    FileSystem fs = FileSystem.get(conf);
    ArrayList<Float> percentages = new ArrayList<Float>();
    percentages.add(0.45f);
    percentages.add(0.45f);
    percentages.add(0.1f);

    long start = System.currentTimeMillis();
    printProgress("Starting: Person generation");
    long startPerson = System.currentTimeMillis();
    HadoopPersonGenerator personGenerator = new HadoopPersonGenerator(conf);
    personGenerator.run(hadoopPrefix + "/persons", "ldbc.snb.datagen.hadoop.UniversityKeySetter");
    long endPerson = System.currentTimeMillis();

    printProgress("Creating university location correlated edges");
    long startUniversity = System.currentTimeMillis();
    HadoopKnowsGenerator knowsGenerator = new HadoopKnowsGenerator(conf,
            "ldbc.snb.datagen.hadoop.UniversityKeySetter", "ldbc.snb.datagen.hadoop.RandomKeySetter",
            percentages, 0, conf.get("ldbc.snb.datagen.generator.knowsGenerator"));

    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/universityEdges");
    long endUniversity = System.currentTimeMillis();

    printProgress("Creating main interest correlated edges");
    long startInterest = System.currentTimeMillis();

    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.InterestKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 1,
            conf.get("ldbc.snb.datagen.generator.knowsGenerator"));

    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/interestEdges");
    long endInterest = System.currentTimeMillis();

    printProgress("Creating random correlated edges");
    long startRandom = System.currentTimeMillis();

    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.RandomKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 2,
            "ldbc.snb.datagen.generator.RandomKnowsGenerator");

    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/randomEdges");
    long endRandom = System.currentTimeMillis();

    fs.delete(new Path(DatagenParams.hadoopDir + "/persons"), true);
    printProgress("Merging the different edge files");
    ArrayList<String> edgeFileNames = new ArrayList<String>();
    edgeFileNames.add(hadoopPrefix + "/universityEdges");
    edgeFileNames.add(hadoopPrefix + "/interestEdges");
    edgeFileNames.add(hadoopPrefix + "/randomEdges");
    long startMerge = System.currentTimeMillis();
    HadoopMergeFriendshipFiles merger = new HadoopMergeFriendshipFiles(conf,
            "ldbc.snb.datagen.hadoop.RandomKeySetter");
    merger.run(hadoopPrefix + "/mergedPersons", edgeFileNames);
    long endMerge = System.currentTimeMillis();

    printProgress("Serializing persons");
    long startPersonSerializing = System.currentTimeMillis();
    if (!conf.getBoolean("ldbc.snb.datagen.serializer.persons.sort", false)) {
        HadoopPersonSerializer serializer = new HadoopPersonSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    } else {
        HadoopPersonSortAndSerializer serializer = new HadoopPersonSortAndSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    }
    long endPersonSerializing = System.currentTimeMillis();

    long startPersonActivity = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.generator.activity", true)) {
        printProgress("Generating and serializing person activity");
        HadoopPersonActivityGenerator activityGenerator = new HadoopPersonActivityGenerator(conf);
        activityGenerator.run(hadoopPrefix + "/mergedPersons");

        int numThreads = DatagenParams.numThreads;
        int blockSize = DatagenParams.blockSize;
        int numBlocks = (int) Math.ceil(DatagenParams.numPersons / (double) blockSize);

        for (int i = 0; i < numThreads; ++i) {
            if (i < numBlocks) {
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "personFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "activityFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m0friendList" + i + ".csv"),
                        new Path("./"));
            }
        }
    }
    long endPersonActivity = System.currentTimeMillis();

    long startSortingUpdateStreams = System.currentTimeMillis();

    if (conf.getBoolean("ldbc.snb.datagen.serializer.updateStreams", false)) {

        printProgress("Sorting update streams ");

        List<String> personStreamsFileNames = new ArrayList<String>();
        List<String> forumStreamsFileNames = new ArrayList<String>();
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            int numPartitions = conf.getInt("ldbc.snb.datagen.serializer.numUpdatePartitions", 1);
            for (int j = 0; j < numPartitions; ++j) {
                personStreamsFileNames
                        .add(DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + "_" + j);
                if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                    forumStreamsFileNames
                            .add(DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + "_" + j);
                }
            }
        }
        HadoopUpdateStreamSorterAndSerializer updateSorterAndSerializer = new HadoopUpdateStreamSorterAndSerializer(
                conf);
        updateSorterAndSerializer.run(personStreamsFileNames, "person");
        updateSorterAndSerializer.run(forumStreamsFileNames, "forum");
        for (String file : personStreamsFileNames) {
            fs.delete(new Path(file), true);
        }

        for (String file : forumStreamsFileNames) {
            fs.delete(new Path(file), true);
        }

        long minDate = Long.MAX_VALUE;
        long maxDate = Long.MIN_VALUE;
        long count = 0;
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            Path propertiesFile = new Path(
                    DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + ".properties");
            FSDataInputStream file = fs.open(propertiesFile);
            Properties properties = new Properties();
            properties.load(file);
            long aux;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
            minDate = aux < minDate ? aux : minDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
            maxDate = aux > maxDate ? aux : maxDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
            count += aux;
            file.close();
            fs.delete(propertiesFile, true);

            if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                propertiesFile = new Path(
                        DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + ".properties");
                file = fs.open(propertiesFile);
                properties = new Properties();
                properties.load(file);
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
                minDate = aux < minDate ? aux : minDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
                maxDate = aux > maxDate ? aux : maxDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
                count += aux;
                file.close();
                fs.delete(propertiesFile, true);
            }
        }

        OutputStream output = fs
                .create(new Path(DatagenParams.socialNetworkDir + "/updateStream" + ".properties"), true);
        output.write(new String("ldbc.snb.interactive.gct_delta_duration:" + DatagenParams.deltaTime + "\n")
                .getBytes());
        output.write(
                new String("ldbc.snb.interactive.min_write_event_start_time:" + minDate + "\n").getBytes());
        output.write(
                new String("ldbc.snb.interactive.max_write_event_start_time:" + maxDate + "\n").getBytes());
        output.write(new String("ldbc.snb.interactive.update_interleave:" + (maxDate - minDate) / count + "\n")
                .getBytes());
        output.write(new String("ldbc.snb.interactive.num_events:" + count).getBytes());
        output.close();
    }

    long endSortingUpdateStreams = System.currentTimeMillis();

    printProgress("Serializing invariant schema ");
    long startInvariantSerializing = System.currentTimeMillis();
    HadoopInvariantSerializer invariantSerializer = new HadoopInvariantSerializer(conf);
    invariantSerializer.run();
    long endInvariantSerializing = System.currentTimeMillis();

    long end = System.currentTimeMillis();

    System.out.println(((end - start) / 1000) + " total seconds");
    System.out.println("Person generation time: " + ((endPerson - startPerson) / 1000));
    System.out.println(
            "University correlated edge generation time: " + ((endUniversity - startUniversity) / 1000));
    System.out.println("Interest correlated edge generation time: " + ((endInterest - startInterest) / 1000));
    System.out.println("Random correlated edge generation time: " + ((endRandom - startRandom) / 1000));
    System.out.println("Edges merge time: " + ((endMerge - startMerge) / 1000));
    System.out
            .println("Person serialization time: " + ((endPersonSerializing - startPersonSerializing) / 1000));
    System.out.println("Person activity generation and serialization time: "
            + ((endPersonActivity - startPersonActivity) / 1000));
    System.out.println(
            "Sorting update streams time: " + ((endSortingUpdateStreams - startSortingUpdateStreams) / 1000));
    System.out.println("Invariant schema serialization time: "
            + ((endInvariantSerializing - startInvariantSerializing) / 1000));
    System.out.println("Total Execution time: " + ((end - start) / 1000));

    if (conf.getBoolean("ldbc.snb.datagen.parametergenerator.parameters", false)
            && conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
        System.out.println("Running Parameter Generation");
        System.out.println("Generating Interactive Parameters");
        ProcessBuilder pb = new ProcessBuilder("mkdir", "-p",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        Process p = pb.start();
        p.waitFor();

        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparams.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logInteractive = new File("parameters_interactive.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logInteractive));
        p = pb.start();
        p.waitFor();

        System.out.println("Generating BI Parameters");
        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparamsbi.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logBi = new File("parameters_bi.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logBi));
        p = pb.start();
        p.waitFor();
        System.out.println("Finished Parameter Generation");
    }
    return 0;
}
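
Note the pattern repeated for each parameter-generator run at the end: redirectErrorStream(true) plus ProcessBuilder.Redirect.appendTo sends everything the Python script prints, including errors, to its own log file (parameters_interactive.log or parameters_bi.log).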

From source file: com.aurel.track.admin.customize.category.report.execute.ReportBeansToLaTeXConverter.java

/**
 * Runs the LaTeX command on the given file, appending process output to log files in the working directory.
 * @param workDir the working directory for the LaTeX run
 * @param latexFile the LaTeX source file to compile
 * @param nrOfRuns the number of compilation passes
 * @return the exit value of the LaTeX process, or -99 if no LaTeX command is configured
 */
protected int runPdflatex(File workDir, File latexFile, int nrOfRuns) {

    if (latexCmd == null) {
        return -99;
    }

    int exitValue = 0;

    try {

        String[] cmd = new String[] { latexCmd, "--halt-on-error", "-output-directory=" + workDir,
                latexFile.getAbsolutePath() };

        String texpath = new File((new File(latexCmd)).getParent()).getAbsolutePath();

        ProcessBuilder latexProcessBuilder = new ProcessBuilder(cmd);
        latexProcessBuilder.directory(workDir);
        Map<String, String> env = latexProcessBuilder.environment();
        String path = env.get("PATH");
        if (path != null) {
            path = texpath + ":" + path;
            env.put("PATH", path);
        }

        File stdoutlog = new File(workDir + File.separator + "stdout.log");
        latexProcessBuilder.redirectOutput(Redirect.appendTo(stdoutlog));

        File stderrlog = new File(workDir + File.separator + "stderr.log");
        latexProcessBuilder.redirectError(Redirect.appendTo(stderrlog));

        ProcessExecutor latexProcessExecutor = new ProcessExecutor(latexProcessBuilder);

        Thread executionThread = new Thread(latexProcessExecutor);

        long timeout = 20000;

        LOGGER.debug("Run xelatex thread started!");

        long startTime = System.currentTimeMillis();

        executionThread.start();

        int imod = 0;
        while (executionThread.isAlive()) {
            ++imod;
            if (imod % 5 == 0) {
                LOGGER.debug("Run xelatex thread is alive");
            }

            if (((System.currentTimeMillis() - startTime) > timeout) && executionThread.isAlive()) {
                executionThread.interrupt();

                LOGGER.debug("Run xelatex thread interrupted!");

                latexProcessExecutor.killProcess();
            }
            Thread.sleep(100);
        }

        LOGGER.debug("Run xelatex done!");

        exitValue = latexProcessExecutor.getExitValue();

        try {
            Thread.sleep(1000);
        } catch (Exception ex) {
            LOGGER.error(ExceptionUtils.getStackTrace(ex), ex);
        }
    } catch (Exception ex) {
        LOGGER.error(ExceptionUtils.getStackTrace(ex), ex);
    }

    return exitValue;
}
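
Unlike the previous examples, this one keeps the two streams separate, appending stdout to stdout.log and stderr to stderr.log in the working directory. The process runs on a helper thread so a hung LaTeX run can be interrupted and killed once the 20-second timeout elapses.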

From source file: org.apache.nifi.processors.standard.ExecuteStreamCommand.java

@Override
public void onTrigger(ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile inputFlowFile = session.get();
    if (null == inputFlowFile) {
        return;
    }

    final ArrayList<String> args = new ArrayList<>();
    final boolean putToAttribute = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).isSet();
    final Integer attributeSize = context.getProperty(PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
    final String attributeName = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).getValue();

    final String executeCommand = context.getProperty(EXECUTION_COMMAND)
            .evaluateAttributeExpressions(inputFlowFile).getValue();
    args.add(executeCommand);
    final String commandArguments = context.getProperty(EXECUTION_ARGUMENTS)
            .evaluateAttributeExpressions(inputFlowFile).getValue();
    final boolean ignoreStdin = Boolean.parseBoolean(context.getProperty(IGNORE_STDIN).getValue());
    if (!StringUtils.isBlank(commandArguments)) {
        for (String arg : ArgumentUtils.splitArgs(commandArguments,
                context.getProperty(ARG_DELIMITER).getValue().charAt(0))) {
            args.add(arg);
        }
    }
    final String workingDir = context.getProperty(WORKING_DIR).evaluateAttributeExpressions(inputFlowFile)
            .getValue();

    final ProcessBuilder builder = new ProcessBuilder();

    logger.debug("Executing and waiting for command {} with arguments {}",
            new Object[] { executeCommand, commandArguments });
    File dir = null;
    if (!StringUtils.isBlank(workingDir)) {
        dir = new File(workingDir);
        if (!dir.exists() && !dir.mkdirs()) {
            logger.warn("Failed to create working directory {}, using current working directory {}",
                    new Object[] { workingDir, System.getProperty("user.dir") });
        }
    }
    final Map<String, String> environment = new HashMap<>();
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        if (entry.getKey().isDynamic()) {
            environment.put(entry.getKey().getName(), entry.getValue());
        }
    }
    builder.environment().putAll(environment);
    builder.command(args);
    builder.directory(dir);
    builder.redirectInput(Redirect.PIPE);
    builder.redirectOutput(Redirect.PIPE);
    final Process process;
    try {
        process = builder.start();
    } catch (IOException e) {
        logger.error("Could not create external process to run command", e);
        throw new ProcessException(e);
    }
    try (final OutputStream pos = process.getOutputStream();
            final InputStream pis = process.getInputStream();
            final InputStream pes = process.getErrorStream();
            final BufferedInputStream bis = new BufferedInputStream(pis);
            final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(pes))) {
        int exitCode = -1;
        final BufferedOutputStream bos = new BufferedOutputStream(pos);
        FlowFile outputFlowFile = putToAttribute ? inputFlowFile : session.create(inputFlowFile);

        ProcessStreamWriterCallback callback = new ProcessStreamWriterCallback(ignoreStdin, bos, bis, logger,
                attributeName, session, outputFlowFile, process, putToAttribute, attributeSize);
        session.read(inputFlowFile, callback);

        outputFlowFile = callback.outputFlowFile;
        if (putToAttribute) {
            outputFlowFile = session.putAttribute(outputFlowFile, attributeName,
                    new String(callback.outputBuffer, 0, callback.size));
        }

        exitCode = callback.exitCode;
        logger.debug("Execution complete for command: {}.  Exited with code: {}",
                new Object[] { executeCommand, exitCode });

        Map<String, String> attributes = new HashMap<>();

        final StringBuilder strBldr = new StringBuilder();
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                strBldr.append(line).append("\n");
            }
        } catch (IOException e) {
            strBldr.append("Unknown...could not read Process's Std Error");
        }
        int length = strBldr.length() > 4000 ? 4000 : strBldr.length();
        attributes.put("execution.error", strBldr.substring(0, length));

        final Relationship outputFlowFileRelationship = putToAttribute ? ORIGINAL_RELATIONSHIP
                : OUTPUT_STREAM_RELATIONSHIP;
        if (exitCode == 0) {
            logger.info("Transferring flow file {} to {}",
                    new Object[] { outputFlowFile, outputFlowFileRelationship.getName() });
        } else {
            logger.error("Transferring flow file {} to {}. Executable command {} ended in an error: {}",
                    new Object[] { outputFlowFile, outputFlowFileRelationship.getName(), executeCommand,
                            strBldr.toString() });
        }

        attributes.put("execution.status", Integer.toString(exitCode));
        attributes.put("execution.command", executeCommand);
        attributes.put("execution.command.args", commandArguments);
        outputFlowFile = session.putAllAttributes(outputFlowFile, attributes);

        // This transfer routes the FlowFile that received the stream output to its destined relationship.
        // In the event the stream output was put into an attribute of the original FlowFile, it is transferred here.
        session.transfer(outputFlowFile, outputFlowFileRelationship);

        if (!putToAttribute) {
            logger.info("Transferring flow file {} to original", new Object[] { inputFlowFile });
            inputFlowFile = session.putAllAttributes(inputFlowFile, attributes);
            session.transfer(inputFlowFile, ORIGINAL_RELATIONSHIP);
        }

    } catch (final IOException ex) {
        // could not close Process related streams
        logger.warn("Problem terminating Process {}", new Object[] { process }, ex);
    } finally {
        process.destroy(); // last ditch effort to clean up that process.
    }
}
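
This processor relies on Redirect.PIPE, which is the default for both streams, so that FlowFile content can be streamed through the child process; the explicit redirectInput/redirectOutput calls just make that choice visible. With piped output the parent must keep draining the child's streams, otherwise a full pipe buffer can block the child. A minimal sketch of that pattern (the ls command is a placeholder):

import java.io.BufferedReader;
import java.io.InputStreamReader;

public class PipeDrainExample {
    public static void main(String[] args) throws Exception {
        ProcessBuilder pb = new ProcessBuilder("ls", "-l"); // placeholder Unix command
        pb.redirectErrorStream(true); // merge stderr into stdout so one reader suffices
        Process p = pb.start();       // both streams default to Redirect.PIPE
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = r.readLine()) != null) {
                System.out.println(line); // drain continuously to avoid blocking the child
            }
        }
        System.out.println("exit code: " + p.waitFor());
    }
}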

From source file: org.apache.sling.maven.slingstart.run.LauncherCallable.java

private ProcessDescription start(final File jar) throws Exception {
    final ProcessDescription cfg = new ProcessDescription(this.configuration.getId(),
            this.configuration.getFolder());

    final ProcessBuilder builder = new ProcessBuilder();
    final List<String> args = new ArrayList<String>();

    args.add("java");
    add(args, this.configuration.getVmOpts());
    add(args, this.configuration.getVmDebugOpts(this.environment.getDebug()));

    args.add("-cp");
    args.add("bin");
    args.add(Main.class.getName());
    // first three arguments: jar, listener port, verbose
    args.add(jar.getPath());
    args.add(String.valueOf(cfg.getControlListener().getPort()));
    args.add("true");

    // from here on launchpad properties
    add(args, this.configuration.getOpts());

    final String contextPath = this.configuration.getContextPath();
    if (contextPath != null && contextPath.length() > 0 && !contextPath.equals("/")) {
        args.add("-r");
        args.add(contextPath);
    }

    if (this.configuration.getPort() != null) {
        args.add("-p");
        args.add(this.configuration.getPort());
    }

    if (this.configuration.getControlPort() != null) {
        args.add("-j");
        args.add(this.configuration.getControlPort());
    }
    if (this.configuration.getRunmode() != null && this.configuration.getRunmode().length() > 0) {
        args.add("-Dsling.run.modes=" + this.configuration.getRunmode());
    }
    if (!this.environment.isShutdownOnExit()) {
        args.add("start");
    }

    builder.command(args.toArray(new String[args.size()]));
    builder.directory(this.configuration.getFolder());
    builder.redirectErrorStream(true);
    builder.redirectOutput(Redirect.INHERIT);
    builder.redirectError(Redirect.INHERIT);

    logger.info("Starting Launchpad " + this.configuration.getId() + "...");
    logger.debug("Launchpad cmd: " + builder.command());
    logger.debug("Launchpad dir: " + builder.directory());

    try {
        cfg.setProcess(builder.start());
    } catch (final IOException e) {
        if (cfg.getProcess() != null) {
            cfg.getProcess().destroy();
            cfg.setProcess(null);
        }
        throw new Exception("Could not start the Launchpad", e);
    }

    return cfg;
}
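
Redirect.INHERIT forwards the Launchpad's output straight to the console of the parent Maven process. Since redirectErrorStream(true) already merges stderr into stdout, the following redirectError(Redirect.INHERIT) call is effectively redundant: when redirectErrorStream is true, the error-redirect destination is ignored.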

From source file: io.snappydata.hydra.cluster.SnappyTest.java

public void executeProcess(ProcessBuilder pb, File logFile) {
    Process p = null;
    try {
        pb.redirectErrorStream(true);
        pb.redirectError(ProcessBuilder.Redirect.PIPE);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logFile));
        p = pb.start();
        assert pb.redirectInput() == ProcessBuilder.Redirect.PIPE;
        assert pb.redirectOutput().file() == logFile;
        assert p.getInputStream().read() == -1;
        int rc = p.waitFor();
        if (rc == 0) {
            Log.getLogWriter().info("Executed successfully");
        } else {
            Log.getLogWriter().info("Failed with exit code: " + rc);
        }
    } catch (IOException e) {
        throw new TestException(
                "Exception occurred while starting the process:" + pb + "\nError Message:" + e.getMessage());
    } catch (InterruptedException e) {
        throw new TestException("Exception occurred while waiting for the process execution:" + p
                + "\nError Message:" + e.getMessage());
    }
}
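
The assertions here document the builder's state after configuration: redirectInput() still reports the default Redirect.PIPE, redirectOutput().file() returns the target log file, and because stdout goes to a file, the child's getInputStream() is a null stream whose read() immediately returns -1.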

From source file: com.netflix.genie.agent.execution.services.impl.LaunchJobServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void launchProcess(final File jobDirectory, final Map<String, String> environmentVariablesMap,
        final List<String> commandLine, final boolean interactive) throws JobLaunchException {

    if (!launched.compareAndSet(false, true)) {
        throw new IllegalStateException("Job already launched");
    }

    final ProcessBuilder processBuilder = new ProcessBuilder();

    // Validate job running directory
    if (jobDirectory == null) {
        throw new JobLaunchException("Job directory is null");
    } else if (!jobDirectory.exists()) {
        throw new JobLaunchException("Job directory does not exist: " + jobDirectory);
    } else if (!jobDirectory.isDirectory()) {
        throw new JobLaunchException("Job directory is not a directory: " + jobDirectory);
    } else if (!jobDirectory.canWrite()) {
        throw new JobLaunchException("Job directory is not writable: " + jobDirectory);
    }

    final Map<String, String> currentEnvironmentVariables = processBuilder.environment();

    if (environmentVariablesMap == null) {
        throw new JobLaunchException("Job environment variables map is null");
    }

    // Merge job environment variables into process inherited environment
    environmentVariablesMap.forEach((key, value) -> {
        final String replacedValue = currentEnvironmentVariables.put(key, value);
        if (StringUtils.isBlank(replacedValue)) {
            log.debug("Added job environment variable: {}={}", key, value);
        } else if (!replacedValue.equals(value)) {
            log.debug("Set job environment variable: {}={} (previous value: {})", key, value, replacedValue);
        }
    });

    // Validate arguments
    if (commandLine == null) {
        throw new JobLaunchException("Job command-line arguments is null");
    } else if (commandLine.isEmpty()) {
        throw new JobLaunchException("Job command-line arguments are empty");
    }

    // Configure arguments
    log.info("Job command-line: {}", Arrays.toString(commandLine.toArray()));

    final List<String> expandedCommandLine;
    try {
        expandedCommandLine = expandCommandLineVariables(commandLine,
                Collections.unmodifiableMap(currentEnvironmentVariables));
    } catch (final EnvUtils.VariableSubstitutionException e) {
        throw new JobLaunchException("Job command-line arguments variables could not be expanded");
    }

    if (!commandLine.equals(expandedCommandLine)) {
        log.info("Job command-line with variables expanded: {}",
                Arrays.toString(expandedCommandLine.toArray()));
    }

    processBuilder.command(expandedCommandLine);

    if (interactive) {
        processBuilder.inheritIO();
    } else {
        processBuilder.redirectError(PathUtils.jobStdErrPath(jobDirectory).toFile());
        processBuilder.redirectOutput(PathUtils.jobStdOutPath(jobDirectory).toFile());
    }

    if (killed.get()) {
        log.info("Job aborted, skipping launch");
    } else {
        log.info("Launching job");
        try {
            processReference.set(processBuilder.start());
        } catch (final IOException | SecurityException e) {
            throw new JobLaunchException("Failed to launch job: ", e);
        }
        log.info("Process launched (pid: {})", getPid(processReference.get()));
    }
}
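
The interactive flag selects between inheritIO(), which attaches the child to the agent's own console, and per-stream file redirection for batch jobs. Because the redirectOutput(File) and redirectError(File) overloads truncate their targets, each launch starts with fresh stdout and stderr files in the job directory.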

From source file: io.snappydata.hydra.cluster.SnappyTest.java

protected void recordSnappyProcessIDinNukeRun(String pName) {
    Process pr = null;
    try {
        String command;
        if (pName.equals("Master"))
            command = "ps ax | grep -w " + pName + " | grep -v grep | awk '{print $1}'";
        else
            command = "jps | grep " + pName + " | awk '{print $1}'";
        hd = TestConfig.getInstance().getMasterDescription().getVmDescription().getHostDescription();
        ProcessBuilder pb = new ProcessBuilder("/bin/bash", "-c", command);
        File log = new File(".");
        pb.redirectErrorStream(true);
        String dest = log.getCanonicalPath() + File.separator + "PIDs.log";
        File logFile = new File(dest);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logFile));
        pr = pb.start();
        pr.waitFor();
        FileInputStream fis = new FileInputStream(logFile);
        BufferedReader br = new BufferedReader(new InputStreamReader(fis));
        String str = null;
        while ((str = br.readLine()) != null) {
            int pid = Integer.parseInt(str);
            try {
                if (pids.contains(pid)) {
                    Log.getLogWriter().info("Pid is already recorded with Master" + pid);
                } else {
                    pids.add(pid);
                    RemoteTestModule.Master.recordPID(hd, pid);
                    SnappyBB.getBB().getSharedMap().put("pid" + "_" + pName + "_" + str, str);
                }
            } catch (RemoteException e) {
                String s = "Unable to access master to record PID: " + pid;
                throw new HydraRuntimeException(s, e);
            }
            Log.getLogWriter().info("pid value successfully recorded with Master");
        }
        br.close();
    } catch (IOException e) {
        String s = "Problem while starting the process : " + pr;
        throw new TestException(s, e);
    } catch (InterruptedException e) {
        String s = "Exception occurred while waiting for the process execution : " + pr;
        throw new TestException(s, e);
    }
}
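
This method illustrates a common way to capture a command's output without reading its InputStream: redirect stdout (with stderr merged in) to a file via Redirect.appendTo, wait for the process to exit, then read the file back, here parsing PIDs out of PIDs.log.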

From source file: io.snappydata.hydra.cluster.SnappyTest.java

/**
 * Task (ENDTASK) for cleaning up snappy processes, since they are not stopped by Hydra when a test fails.
 */
public static void HydraTask_cleanUpSnappyProcessesOnFailure() {
    Process pr = null;
    ProcessBuilder pb = null;
    File logFile = null, log = null, nukeRunOutput = null;
    try {
        List<String> pidList = new ArrayList();
        HostDescription hd = TestConfig.getInstance().getMasterDescription().getVmDescription()
                .getHostDescription();
        pidList = snappyTest.getPidList();
        log = new File(".");
        String nukerun = log.getCanonicalPath() + File.separator + "snappyNukeRun.sh";
        logFile = new File(nukerun);
        String nukeRunOutputString = log.getCanonicalPath() + File.separator + "nukeRunOutput.log";
        nukeRunOutput = new File(nukeRunOutputString);
        FileWriter fw = new FileWriter(logFile.getAbsoluteFile(), true);
        BufferedWriter bw = new BufferedWriter(fw);
        for (String pidString : pidList) {
            int pid = Integer.parseInt(pidString);
            bw.write("/bin/kill -KILL " + pid);
            bw.newLine();
            try {
                RemoteTestModule.Master.removePID(hd, pid);
            } catch (RemoteException e) {
                String s = "Failed to remove PID from nukerun script: " + pid;
                throw new HydraRuntimeException(s, e);
            }
        }
        bw.close();
        fw.close();
        logFile.setExecutable(true);
        pb = new ProcessBuilder(nukerun);
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(nukeRunOutput));
        pr = pb.start();
        pr.waitFor();
    } catch (IOException e) {
        throw new TestException("IOException occurred while retriving logFile path " + log + "\nError Message:"
                + e.getMessage());
    } catch (InterruptedException e) {
        String s = "Exception occurred while waiting for the process execution : " + pr;
        throw new TestException(s, e);
    }
}