Example usage for java.lang.ProcessBuilder.environment()

Introduction

This page collects usage examples for java.lang.ProcessBuilder.environment() from open source projects.

Prototype

public Map<String,String> environment()

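environment() returns the builder's map of environment variables for the processes it will start; the map is initialized from the current process's environment and is mutable, so entries can be added, changed, or removed before calling start(). Before the project examples below, here is a minimal, self-contained sketch of typical use. The command and variable names are illustrative, and printenv assumes a Unix-like system:

import java.io.IOException;
import java.util.Map;

public class EnvironmentDemo {
    public static void main(String[] args) throws IOException, InterruptedException {
        ProcessBuilder builder = new ProcessBuilder("printenv", "GREETING");

        // environment() returns the builder's mutable environment map,
        // initialized from the current process's environment.
        Map<String, String> env = builder.environment();
        env.put("GREETING", "hello");      // visible only to children of this builder
        env.remove("SOME_UNWANTED_VAR");   // removing entries is allowed too

        builder.inheritIO();
        int exit = builder.start().waitFor();
        System.out.println("printenv exited with " + exit);
    }
}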

Usage

From source file:interactivespaces.util.process.BaseNativeApplicationRunner.java

/**
 * Attempt the run.
 *
 * @param firstTime
 *          {@code true} if this is the first attempt
 *
 * @return the process that was created
 *
 * @throws InteractiveSpacesException
 *           if the process could not be started on the first attempt
 */
private Process attemptRun(boolean firstTime) throws InteractiveSpacesException {
    try {
        ProcessBuilder builder = new ProcessBuilder(commandLine);

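        // environment() returns the builder's mutable copy of the parent
        // environment; clearing it starts the child with an empty environment.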
        Map<String, String> processEnvironment = builder.environment();
        if (cleanEnvironment) {
            processEnvironment.clear();
        }
        modifyEnvironment(processEnvironment, environment);

        builder.directory(executableFolder);

        log.info(String.format("Starting up native code in folder %s", executableFolder.getAbsolutePath()));

        return builder.start();
    } catch (Exception e) {
        // Placed here so we can get the exception when thrown.
        if (firstTime) {
            runnerState.set(NativeApplicationRunnerState.STARTUP_FAILED);
            handleApplicationStartupFailed();

            throw new InteractiveSpacesException("Can't start up native application " + appName, e);
        }

        return null;
    }
}

From source file:org.apache.accumulo.miniclusterImpl.MiniAccumuloClusterImpl.java

@SuppressFBWarnings(value = "COMMAND_INJECTION", justification = "mini runs in the same security context as user providing the args")
private ProcessInfo _exec(Class<?> clazz, List<String> extraJvmOpts, String... args) throws IOException {
    String javaHome = System.getProperty("java.home");
    String javaBin = javaHome + File.separator + "bin" + File.separator + "java";
    String classpath = getClasspath();

    String className = clazz.getName();

    ArrayList<String> argList = new ArrayList<>();
    argList.addAll(Arrays.asList(javaBin, "-Dproc=" + clazz.getSimpleName(), "-cp", classpath));
    argList.addAll(extraJvmOpts);
    for (Entry<String, String> sysProp : config.getSystemProperties().entrySet()) {
        argList.add(String.format("-D%s=%s", sysProp.getKey(), sysProp.getValue()));
    }
    // @formatter:off
    argList.addAll(Arrays.asList("-XX:+UseConcMarkSweepGC", "-XX:CMSInitiatingOccupancyFraction=75",
            "-Dapple.awt.UIElement=true", "-Djava.net.preferIPv4Stack=true", "-XX:+PerfDisableSharedMem",
            "-XX:+AlwaysPreTouch", Main.class.getName(), className));
    // @formatter:on
    argList.addAll(Arrays.asList(args));

    ProcessBuilder builder = new ProcessBuilder(argList);

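    // Repeated environment() calls return the same mutable map, so these
    // puts accumulate into the environment of the process started below.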
    builder.environment().put("ACCUMULO_HOME", config.getDir().getAbsolutePath());
    builder.environment().put("ACCUMULO_LOG_DIR", config.getLogDir().getAbsolutePath());
    builder.environment().put("ACCUMULO_CLIENT_CONF_PATH", config.getClientConfFile().getAbsolutePath());
    String ldLibraryPath = Joiner.on(File.pathSeparator).join(config.getNativeLibPaths());
    builder.environment().put("LD_LIBRARY_PATH", ldLibraryPath);
    builder.environment().put("DYLD_LIBRARY_PATH", ldLibraryPath);

    // if we're running under accumulo.start, we forward these env vars
    String env = System.getenv("HADOOP_HOME");
    if (env != null)
        builder.environment().put("HADOOP_HOME", env);
    env = System.getenv("ZOOKEEPER_HOME");
    if (env != null)
        builder.environment().put("ZOOKEEPER_HOME", env);
    builder.environment().put("ACCUMULO_CONF_DIR", config.getConfDir().getAbsolutePath());
    if (config.getHadoopConfDir() != null)
        builder.environment().put("HADOOP_CONF_DIR", config.getHadoopConfDir().getAbsolutePath());

    log.debug("Starting MiniAccumuloCluster process with class: " + clazz.getSimpleName() + "\n, jvmOpts: "
            + extraJvmOpts + "\n, classpath: " + classpath + "\n, args: " + argList + "\n, environment: "
            + builder.environment());

    int hashcode = builder.hashCode();

    File stdOut = new File(config.getLogDir(), clazz.getSimpleName() + "_" + hashcode + ".out");
    File stdErr = new File(config.getLogDir(), clazz.getSimpleName() + "_" + hashcode + ".err");

    Process process = builder.redirectError(stdErr).redirectOutput(stdOut).start();

    cleanup.add(process);

    return new ProcessInfo(process, stdOut);
}

From source file:com.ikanow.infinit.e.application.handlers.polls.LogstashTestRequestPollHandler.java

@Override
public void performPoll() {

    if (null == LOGSTASH_DIRECTORY) { // (static memory not yet initialized)
        try {
            Thread.sleep(1000); // (extend the sleep time a bit)
        } catch (Exception e) {
        }
        return;
    }

    // 1] Check - does logstash exist on this server:

    File logstashBinary = new File(LOGSTASH_BINARY);
    if (!logstashBinary.canExecute()) {
        try {
            Thread.sleep(10000); // (extend the sleep time a bit)
        } catch (Exception e) {
        }
        return;
    }

    // 2] (Unlike harvester, _don't_ grab an application token, you can run this on as many servers as you want)

    // 3] Setup

    if (null == _logHarvesterQ) {
        _logHarvesterQ = new MongoQueue(DbManager.getIngest().getLogHarvesterQ().getDB().getName(),
                DbManager.getIngest().getLogHarvesterQ().getName());
    }
    if (null == _testOutputTemplate) {
        try {
            File testOutputTemplate = new File(LOGSTASH_TEST_OUTPUT_TEMPLATE);
            InputStream inStream = null;
            try {
                inStream = new FileInputStream(testOutputTemplate);
                _testOutputTemplate = IOUtils.toString(inStream);
            } catch (Exception e) {// abandon ship!
                return;
            } finally {
                inStream.close();
            }
        } catch (Exception e) {// abandon ship!

            //DEBUG
            //e.printStackTrace();

            return;
        }
    } //TESTED

    // 4] Check if any new requests have been made:

    BasicDBObject queueQuery = new BasicDBObject("logstash", new BasicDBObject(DbManager.exists_, true));
    DBObject nextElement = _logHarvesterQ.pop(queueQuery);
    while (nextElement != null) {
        //DEBUG
        //System.out.println("FOUND: " + nextElement.toString());

        TestLogstashExtractorPojo testInfo = TestLogstashExtractorPojo.fromDb(nextElement,
                TestLogstashExtractorPojo.class);
        if ((null == testInfo.maxDocs) || (null == testInfo.logstash.config) || (null == testInfo.isAdmin)
                || (null == testInfo.sourceKey)) {
            TestLogstashExtractorPojo testErr = new TestLogstashExtractorPojo();
            testErr._id = testInfo._id;
            testErr.error = "Internal Logic Error. Missing one of: maxDocs, isAdmin, sourceKey, logstash.config";
            _logHarvesterQ.push(testErr.toDb());

            return;
        } //TESTED

        // Validate/transform the configuration:
        StringBuffer errMessage = new StringBuffer();
        String logstashConfig = LogstashConfigUtils.validateLogstashInput(testInfo.sourceKey,
                testInfo.logstash.config, errMessage, testInfo.isAdmin);
        if (null == logstashConfig) { // Validation error...
            TestLogstashExtractorPojo testErr = new TestLogstashExtractorPojo();
            testErr._id = testInfo._id;
            testErr.error = "Validation error: " + errMessage.toString();
            _logHarvesterQ.push(testErr.toDb());

            return;
        } //TESTED

        // Replacement for #LOGSTASH{host} - currently only replacement supported (+ #IKANOW{} in main code)
        try {
            logstashConfig = logstashConfig.replace("#LOGSTASH{host}",
                    java.net.InetAddress.getLocalHost().getHostName());
        } catch (Exception e) {
            logstashConfig = logstashConfig.replace("#LOGSTASH{host}", "localhost.localdomain");
        }
        //TESTED

        String outputConf = _testOutputTemplate.replace("_XXX_COLLECTION_XXX_", testInfo._id.toString()); //TESTED
        String sinceDbPath = LOGSTASH_WD + ".sincedb_" + testInfo._id.toString();
        String conf = logstashConfig.replace("_XXX_DOTSINCEDB_XXX_", sinceDbPath)
                + outputConf.replace("_XXX_SOURCEKEY_XXX_", testInfo.sourceKey);

        boolean allWorked = false;
        Process logstashProcess = null;
        try {
            // 1] Create the process

            ArrayList<String> args = new ArrayList<String>(4);
            args.addAll(Arrays.asList(LOGSTASH_BINARY, "-e", conf));
            if (0 == testInfo.maxDocs) {
                args.add("-t"); // test mode, much faster
            } //TESTED

            if ((null != testInfo.logstash.testDebugOutput) && testInfo.logstash.testDebugOutput) {
                args.add("--debug");
            } else {
                args.add("--verbose");
            }
            ProcessBuilder logstashProcessBuilder = new ProcessBuilder(args);
            logstashProcessBuilder = logstashProcessBuilder.directory(new File(LOGSTASH_WD))
                    .redirectErrorStream(true);
            logstashProcessBuilder.environment().put("JAVA_OPTS", "");

            //DEBUG
            //System.out.println("STARTING: " + ArrayUtils.toString(logstashProcessBuilder.command().toArray()));

            // 2] Kick off the process
            logstashProcess = logstashProcessBuilder.start();
            StringWriter outputAndError = new StringWriter();
            OutputCollector outAndErrorStream = new OutputCollector(logstashProcess.getInputStream(),
                    new PrintWriter(outputAndError));
            outAndErrorStream.start();
            final int toWait_s = 240;

            boolean exited = false;

            // 3] Check the output collection for records

            int errorVal = 0;
            long priorCount = 0L;
            int priorLogCount = 0;

            int timeOfLastLoggingChange = 0;
            int timeOfLastDocCountChange = 0;

            String reasonForExit = "";

            int inactivityTimeout_s = 10; // (default)
            if (null != testInfo.logstash.testInactivityTimeout_secs) {
                inactivityTimeout_s = testInfo.logstash.testInactivityTimeout_secs;
            }
            for (int i = 0; i < toWait_s; i += 5) {
                try {
                    Thread.sleep(5000);
                } catch (Exception e) {
                }

                long count = DbManager.getCollection("ingest", testInfo._id.toString()).count();

                // 3.1] Do we have all the records (or is the number staying static)

                //DEBUG
                //System.out.println("FOUND: " + count + " VS " + priorCount + " , " + priorPriorCount);

                // 3.1a] All done?

                if ((count >= testInfo.maxDocs) && (count > 0)) {
                    allWorked = true;
                    break;
                } //TESTED               

                // 3.1b] If not, has anything changed?

                if (priorCount != count) {
                    timeOfLastDocCountChange = i;
                }
                if (priorLogCount != outAndErrorStream.getLines()) {
                    timeOfLastLoggingChange = i;
                }

                // 3.1c] Check for inactivity 

                if ((timeOfLastDocCountChange > 0) && (i - timeOfLastDocCountChange) >= inactivityTimeout_s) {
                    // Delay between events: treat as success
                    allWorked = true;
                    break;
                } //TESTED

                if ((0 == count) && outAndErrorStream.getPipelineStarted() && ((timeOfLastLoggingChange > 0)
                        && (i - timeOfLastLoggingChange) >= inactivityTimeout_s)) {
                    // Delay between log messages after pipeline started, no documents, treat as failure

                    //DEBUG
                    //System.out.println("LOG LINES! " + i + " NUM = " + outAndErrorStream.getLines());

                    errorVal = 1;
                    reasonForExit = "No records received and logging inactive.\n";
                    break;
                } //TESTED               

                // 3.2] Has the process exited unexpectedly?

                try {
                    errorVal = logstashProcess.exitValue();
                    reasonForExit = "Logstash process exited with error: " + errorVal + ".\n";
                    exited = true;

                    //DEBUG
                    //System.out.println("GOT EXIT VALUE: " + errorVal);
                    break;

                } //TESTED
                catch (Exception e) {
                } // that's OK we're just still going is all...

                priorCount = count;
                priorLogCount = outAndErrorStream.getLines();

            } //(end loop while waiting for job to complete)            

            // 4] If the process is still running then kill it

            if (!exited) {
                //DEBUG
                //System.out.println("EXITED WITHOUT FINISHING");

                logstashProcess.destroy();
            } //TESTED

            // 5] Things to do when the job is done: (worked or not)
            //    Send a message to the harvester

            outAndErrorStream.join(); // (if we're here then must have closed the process, wait for it to die)

            TestLogstashExtractorPojo testErr = new TestLogstashExtractorPojo();
            testErr._id = testInfo._id;
            if ((testInfo.maxDocs > 0) || (0 != errorVal)) {
                testErr.error = reasonForExit + outputAndError.toString();
                // (note this is capped at well below the BSON limit in the thread below)
            } else { // maxDocs==0 (ie pre-publish test) AND no error returned
                testErr.error = null;
            }
            _logHarvesterQ.push(testErr.toDb());
            //TESTED            
        } catch (Exception e) {
            //DEBUG
            //e.printStackTrace();            

            TestLogstashExtractorPojo testErr = new TestLogstashExtractorPojo();
            testErr._id = testInfo._id;
            testErr.error = "Internal Logic Error: " + e.getMessage();
            _logHarvesterQ.push(testErr.toDb());

        } //TOTEST
        finally {
            // If we created a sincedb path then remove it:
            try {
                new File(sinceDbPath).delete();
            } catch (Exception e) {
            } // (don't care if it fails)

            if (!allWorked) { // (otherwise up to the harvester to remove these)
                try {
                    DbManager.getCollection("ingest", testInfo._id.toString()).drop();
                } catch (Exception e) {
                } // doesn't matter if this errors
            }
            try {
                // Really really want to make sure the process isn't running
                if (null != logstashProcess) {
                    logstashProcess.destroy();
                }
            } catch (Exception e) {
            } catch (Error ee) {
            }
        } //TESTED

        // (If we actually processed an element, then try again immediately)
        nextElement = _logHarvesterQ.pop(queueQuery);
    }
}

From source file:edu.stolaf.cs.wmrserver.streaming.PipeMapRed.java

public void configure(JobConf job) {
    try {
        String argv = getPipeCommand(job);

        joinDelay_ = job.getLong("stream.joindelay.milli", 0);

        job_ = job;
        fs_ = FileSystem.get(job_);

        nonZeroExitIsFailure_ = job_.getBoolean("stream.non.zero.exit.is.failure", true);

        doPipe_ = getDoPipe();
        if (!doPipe_)
            return;

        setStreamJobDetails(job);

        String[] argvSplit = splitArgs(argv);
        String prog = argvSplit[0];
        File currentDir = new File(".").getAbsoluteFile();
        if (new File(prog).isAbsolute()) {
            // we don't own it. Hope it is executable
        } else {
            // Try to find executable in unpacked job JAR and make absolute if
            // present. Otherwise, leave it as relative to be resolved against PATH
            File jarDir = new File(job.getJar()).getParentFile();
            File progFile = new File(jarDir, argvSplit[0]);
            if (progFile.isFile()) {
                progFile.setExecutable(true);
                argvSplit[0] = progFile.getAbsolutePath();
            }
        }

        logprintln("PipeMapRed exec " + Arrays.asList(argvSplit));
        Hashtable<String, String> childEnv = new Hashtable<String, String>();
        addJobConfToEnvironment(job_, childEnv);
        addEnvironment(childEnv, job_.get("stream.addenvironment"));
        // add TMPDIR environment variable with the value of java.io.tmpdir
        envPut(childEnv, "TMPDIR", System.getProperty("java.io.tmpdir"));

        // Start the process
        ProcessBuilder builder = new ProcessBuilder(argvSplit);
        // The process' environment initially inherits all vars from the parent --
        // only setting those we add/override
        builder.environment().putAll(childEnv);
        // Set the working directory to the job jars directory
        // This is a bad idea... fix this.
        builder.directory(new File(job.getJar()).getParentFile());
        sim = builder.start();

        clientOut_ = new DataOutputStream(new BufferedOutputStream(sim.getOutputStream(), BUFFER_SIZE));
        clientIn_ = new DataInputStream(new BufferedInputStream(sim.getInputStream(), BUFFER_SIZE));
        clientErr_ = new DataInputStream(new BufferedInputStream(sim.getErrorStream()));
        startTime_ = System.currentTimeMillis();

        errThread_ = new MRErrorThread();
        errThread_.start();
    } catch (Exception e) {
        logStackTrace(e);
        LOG.error("configuration exception", e);
        throw new RuntimeException("configuration exception", e);
    }
}

From source file:com.netflix.genie.server.jobmanager.impl.JobManagerImpl.java

/**
 * Set/initialize environment variables for this job.
 *
 * @param processBuilder The process builder to use
 * @throws GenieException if there is any error in initialization
 */
protected void setupCommonProcess(final ProcessBuilder processBuilder) throws GenieException {
    LOG.info("called");

    //Get the directory to stage all the work out of
    final String baseUserWorkingDir = this.getBaseUserWorkingDirectory();

    //Save the base user working directory
    processBuilder.environment().put("BASE_USER_WORKING_DIR", baseUserWorkingDir);

    //Set the process working directory
    processBuilder.directory(this.createWorkingDirectory(baseUserWorkingDir));

    //Copy any attachments from the job.
    this.copyAttachments();

    LOG.info("Setting job working dir, conf dir and jar dir");
    // set up env vars for the current job, conf, and jar directories
    processBuilder.environment().put("CURRENT_JOB_WORKING_DIR", this.jobDir);
    processBuilder.environment().put("CURRENT_JOB_CONF_DIR", this.jobDir + "/conf");
    processBuilder.environment().put("CURRENT_JOB_JAR_DIR", this.jobDir + "/jars");

    if (this.job.getFileDependencies() != null && !this.job.getFileDependencies().isEmpty()) {
        processBuilder.environment().put("CURRENT_JOB_FILE_DEPENDENCIES",
                StringUtils.replaceChars(this.job.getFileDependencies(), ',', ' '));
    }

    // set the cluster related conf files
    processBuilder.environment().put("S3_CLUSTER_CONF_FILES",
            convertCollectionToString(this.cluster.getConfigs()));

    this.setCommandAndApplicationForJob(processBuilder);

    if (StringUtils.isNotBlank(this.job.getEnvPropFile())) {
        processBuilder.environment().put("JOB_ENV_FILE", this.job.getEnvPropFile());
    }

    // this is for the generic joblauncher.sh to use when creating the
    // username on the machine, if needed
    processBuilder.environment().put("USER_NAME", this.job.getUser());

    processBuilder.environment().put("GROUP_NAME", this.getGroupName());

    // set the java home
    final String javaHome = ConfigurationManager.getConfigInstance()
            .getString("com.netflix.genie.server.java.home");
    if (StringUtils.isNotBlank(javaHome)) {
        processBuilder.environment().put("JAVA_HOME", javaHome);
    }

    // Set an ARN if one is available for role assumption with S3
    final String arn = ConfigurationManager.getConfigInstance()
            .getString("com.netflix.genie.server.aws.iam.arn");
    if (StringUtils.isNotBlank(arn)) {
        processBuilder.environment().put("ARN", arn);
    }

    // set the genie home
    final String genieHome = ConfigurationManager.getConfigInstance()
            .getString("com.netflix.genie.server.sys.home");
    if (StringUtils.isBlank(genieHome)) {
        final String msg = "Property com.netflix.genie.server.sys.home is not set correctly";
        LOG.error(msg);
        throw new GenieServerException(msg);
    }
    processBuilder.environment().put("XS_SYSTEM_HOME", genieHome);

    // set the archive location
    // unless user has explicitly requested for it to be disabled
    if (!this.job.isDisableLogArchival()) {
        final String s3ArchiveLocation = ConfigurationManager.getConfigInstance()
                .getString("com.netflix.genie.server.s3.archive.location");
        if (StringUtils.isNotBlank(s3ArchiveLocation)) {
            processBuilder.environment().put("S3_ARCHIVE_LOCATION", s3ArchiveLocation);
        }
    }
}

From source file:org.apache.hadoop.streaming.MultiPipeMapRed.java

public void configure(JobConf job) {
    try {
        ArrayList<String> argvs = getPipeCommands(job);
        this.dirKeys_ = getDirKeys(job);
        joinDelay_ = job.getLong("stream.joindelay.milli", 0);

        job_ = job;
        fs_ = FileSystem.get(job_);

        nonZeroExitIsFailure_ = job_.getBoolean("stream.non.zero.exit.is.failure", true);

        doPipe_ = getDoPipe();
        if (!doPipe_)
            return;

        setStreamJobDetails(job);

        for (int i = 0; i < argvs.size(); i++) {
            String argv = argvs.get(i);

            String[] argvSplit = splitArgs(argv);
            String prog = argvSplit[0];
            File currentDir = new File(".").getAbsoluteFile();
            if (new File(prog).isAbsolute()) {
                // we don't own it. Hope it is executable
            } else {
                FileUtil.chmod(new File(currentDir, prog).toString(), "a+x");
            }

            //
            // argvSplit[0]:
            // An absolute path should be a preexisting valid path on all
            // TaskTrackers
            // A relative path is converted into an absolute pathname by looking
            // up the PATH env variable. If it still fails, look it up in the
            // tasktracker's local working directory
            //
            if (!new File(argvSplit[0]).isAbsolute()) {
                PathFinder finder = new PathFinder("PATH");
                finder.prependPathComponent(currentDir.toString());
                File f = finder.getAbsolutePath(argvSplit[0]);
                if (f != null) {
                    argvSplit[0] = f.getAbsolutePath();
                }
                f = null;
            }
            logprintln("MultiPipeMapRed exec " + Arrays.asList(argvSplit));
            Environment childEnv = (Environment) StreamUtil.env().clone();
            addJobConfToEnvironment(job_, childEnv);
            addEnvironment(childEnv, job_.get("stream.addenvironment"));
            // add TMPDIR environment variable with the value of java.io.tmpdir
            envPut(childEnv, "TMPDIR", System.getProperty("java.io.tmpdir"));

            // Start the process
            ProcessBuilder builder = new ProcessBuilder(argvSplit);
            builder.environment().putAll(childEnv.toMap());
            sim.add(builder.start());

            clientOut_.add(
                    new DataOutputStream(new BufferedOutputStream(sim.get(i).getOutputStream(), BUFFER_SIZE)));
            clientIn_.add(
                    new DataInputStream(new BufferedInputStream(sim.get(i).getInputStream(), BUFFER_SIZE)));
            clientErr_.add(new DataInputStream(new BufferedInputStream(sim.get(i).getErrorStream())));

            errThread_.add(new MRErrorThread(clientErr_.get(i)));
            errThread_.get(i).start();
        }

        startTime_ = System.currentTimeMillis();

    } catch (Exception e) {
        logStackTrace(e);
        LOG.error("configuration exception", e);
        throw new RuntimeException("configuration exception", e);
    }
}

From source file:org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl.java

private Process _exec(Class<?> clazz, List<String> extraJvmOpts, String... args) throws IOException {
    String javaHome = System.getProperty("java.home");
    String javaBin = javaHome + File.separator + "bin" + File.separator + "java";
    String classpath = getClasspath();

    String className = clazz.getName();

    ArrayList<String> argList = new ArrayList<String>();
    argList.addAll(Arrays.asList(javaBin, "-Dproc=" + clazz.getSimpleName(), "-cp", classpath));
    argList.addAll(extraJvmOpts);
    for (Entry<String, String> sysProp : config.getSystemProperties().entrySet()) {
        argList.add(String.format("-D%s=%s", sysProp.getKey(), sysProp.getValue()));
    }
    // @formatter:off
    argList.addAll(Arrays.asList("-XX:+UseConcMarkSweepGC", "-XX:CMSInitiatingOccupancyFraction=75",
            "-Dapple.awt.UIElement=true", "-Djava.net.preferIPv4Stack=true", "-XX:+PerfDisableSharedMem",
            "-XX:+AlwaysPreTouch", Main.class.getName(), className));
    // @formatter:on
    argList.addAll(Arrays.asList(args));

    ProcessBuilder builder = new ProcessBuilder(argList);

    builder.environment().put("ACCUMULO_HOME", config.getDir().getAbsolutePath());
    builder.environment().put("ACCUMULO_LOG_DIR", config.getLogDir().getAbsolutePath());
    builder.environment().put("ACCUMULO_CLIENT_CONF_PATH", config.getClientConfFile().getAbsolutePath());
    String ldLibraryPath = Joiner.on(File.pathSeparator).join(config.getNativeLibPaths());
    builder.environment().put("LD_LIBRARY_PATH", ldLibraryPath);
    builder.environment().put("DYLD_LIBRARY_PATH", ldLibraryPath);

    // if we're running under accumulo.start, we forward these env vars
    String env = System.getenv("HADOOP_PREFIX");
    if (env != null)
        builder.environment().put("HADOOP_PREFIX", env);
    env = System.getenv("ZOOKEEPER_HOME");
    if (env != null)
        builder.environment().put("ZOOKEEPER_HOME", env);
    builder.environment().put("ACCUMULO_CONF_DIR", config.getConfDir().getAbsolutePath());
    // hadoop-2.2 puts error messages in the logs if this is not set
    builder.environment().put("HADOOP_HOME", config.getDir().getAbsolutePath());
    if (config.getHadoopConfDir() != null)
        builder.environment().put("HADOOP_CONF_DIR", config.getHadoopConfDir().getAbsolutePath());

    Process process = builder.start();

    LogWriter lw;
    lw = new LogWriter(process.getErrorStream(),
            new File(config.getLogDir(), clazz.getSimpleName() + "_" + process.hashCode() + ".err"));
    logWriters.add(lw);
    lw.start();
    lw = new LogWriter(process.getInputStream(),
            new File(config.getLogDir(), clazz.getSimpleName() + "_" + process.hashCode() + ".out"));
    logWriters.add(lw);
    lw.start();

    return process;
}

From source file:org.apache.hadoop.hive.llap.cli.LlapServiceDriver.java

private int runPackagePy(String[] args, Path tmpDir, Path scriptParent, String version, String outputDir)
        throws IOException, InterruptedException {
    Path scriptPath = new Path(new Path(scriptParent, "slider"), "package.py");
    List<String> scriptArgs = new ArrayList<>(args.length + 7);
    scriptArgs.add("python");
    scriptArgs.add(scriptPath.toString());
    scriptArgs.add("--input");
    scriptArgs.add(tmpDir.toString());
    scriptArgs.add("--output");
    scriptArgs.add(outputDir);
    scriptArgs.add("--javaChild");
    for (String arg : args) {
        scriptArgs.add(arg);
    }
    LOG.debug("Calling package.py via: " + scriptArgs);
    ProcessBuilder builder = new ProcessBuilder(scriptArgs);
    builder.redirectError(ProcessBuilder.Redirect.INHERIT);
    builder.redirectOutput(ProcessBuilder.Redirect.INHERIT);
    builder.environment().put("HIVE_VERSION", version);
    return builder.start().waitFor();
}

From source file:gov.pnnl.goss.gridappsd.simulation.SimulationManagerImpl.java

/**
 * This method is called by Process Manager to start a simulation
 * @param simulationId
 * @param simulationFile
 */
@Override
public void startSimulation(int simulationId, File simulationFile, SimulationConfig simulationConfig) {

    try {
        logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                new Date().getTime(), "Starting simulation " + simulationId, LogLevel.INFO,
                ProcessStatus.STARTING, true), GridAppsDConstants.username);
    } catch (Exception e2) {
        log.warn("Error while reporting status " + e2.getMessage());
    }

    Thread thread = new Thread(new Runnable() {

        @Override
        public void run() {

            Process gridlabdProcess = null;
            Process fncsProcess = null;
            Process fncsBridgeProcess = null;
            Process vvoAppProcess = null;
            InitializedTracker isInitialized = new InitializedTracker();
            try {

                File defaultLogDir = simulationFile.getParentFile();

                //Start FNCS
                //TODO, verify no errors on this
                String broker_location = "tcp://*:5570";
                if (simulationConfig != null && simulationConfig.model_creation_config != null
                        && simulationConfig.model_creation_config.schedule_name != null
                        && simulationConfig.model_creation_config.schedule_name.trim().length() > 0) {
                    broker_location = "tcp://" + simulationConfig.getSimulation_broker_location() + ":"
                            + String.valueOf(simulationConfig.getSimulation_broker_port());
                    File serviceDir = serviceManager.getServiceConfigDirectory();
                    //copy zipload_schedule.player file
                    try {
                        RunCommandLine.runCommand("cp " + serviceDir.getAbsolutePath() + File.separator + "etc"
                                + File.separator + "zipload_schedule.player "
                                + simulationFile.getParentFile().getAbsolutePath() + File.separator
                                + simulationConfig.model_creation_config.schedule_name + ".player");
                    } catch (Exception e) {
                        log.warn("Could not copy player file to working directory");
                    }
                }

                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(), "Calling " + getPath(GridAppsDConstants.FNCS_PATH) + " 2",
                        LogLevel.INFO, ProcessStatus.STARTING, true), GridAppsDConstants.username);

                ProcessBuilder fncsBuilder = new ProcessBuilder(getPath(GridAppsDConstants.FNCS_PATH), "2");
                fncsBuilder.redirectErrorStream(true);
                fncsBuilder.redirectOutput(
                        new File(defaultLogDir.getAbsolutePath() + File.separator + "fncs.log"));
                Map<String, String> fncsEnvironment = fncsBuilder.environment();
                fncsEnvironment.put("FNCS_BROKER", broker_location);
                fncsProcess = fncsBuilder.start();
                // Watch the process
                watch(fncsProcess, "FNCS");
                //TODO: check if FNCS is started correctly and send publish simulation status accordingly

                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(), "FNCS Co-Simulator started", LogLevel.INFO, ProcessStatus.RUNNING,
                        true), GridAppsDConstants.username);

                //client.publish(GridAppsDConstants.topic_simulationStatus+simulationId, "FNCS Co-Simulator started");

                //Start GridLAB-D
                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(),
                        "Calling " + getPath(GridAppsDConstants.GRIDLABD_PATH) + " " + simulationFile,
                        LogLevel.INFO, ProcessStatus.RUNNING, true), GridAppsDConstants.username);
                ProcessBuilder gridlabDBuilder = new ProcessBuilder(getPath(GridAppsDConstants.GRIDLABD_PATH),
                        simulationFile.getAbsolutePath());
                gridlabDBuilder.redirectErrorStream(true);
                gridlabDBuilder.redirectOutput(
                        new File(defaultLogDir.getAbsolutePath() + File.separator + "gridlabd.log"));
                //launch from directory containing simulation files
                gridlabDBuilder.directory(simulationFile.getParentFile());
                gridlabdProcess = gridlabDBuilder.start();
                // Watch the process
                watch(gridlabdProcess, "GridLABD");

                //TODO: check if GridLAB-D is started correctly and send publish simulation status accordingly

                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(), "GridLAB-D started", LogLevel.INFO, ProcessStatus.RUNNING, true),
                        GridAppsDConstants.username);

                //Start VVO Application
                //TODO filename really should be a constant
                String vvoInputFile = simulationFile.getParentFile().getAbsolutePath() + File.separator
                        + "vvo_inputs.json";
                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(),
                        "Calling " + "python " + getPath(GridAppsDConstants.VVO_APP_PATH) + " " + simulationId
                                + " " + vvoInputFile,
                        LogLevel.INFO, ProcessStatus.RUNNING, true), GridAppsDConstants.username);
                ProcessBuilder vvoAppBuilder = new ProcessBuilder("python",
                        getPath(GridAppsDConstants.VVO_APP_PATH), "-f", vvoInputFile, "" + simulationId);
                vvoAppBuilder.redirectErrorStream(true);
                vvoAppBuilder.redirectOutput(
                        new File(defaultLogDir.getAbsolutePath() + File.separator + "vvo_app.log"));
                vvoAppProcess = vvoAppBuilder.start();
                // Watch the process
                watch(vvoAppProcess, "VVO Application");

                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(), "VVO Application started", LogLevel.INFO, ProcessStatus.RUNNING,
                        true), GridAppsDConstants.username);

                //Start GOSS-FNCS Bridge
                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(),
                        "Calling " + "python " + getPath(GridAppsDConstants.FNCS_BRIDGE_PATH) + " "
                                + simulationConfig.getSimulation_name(),
                        LogLevel.INFO, ProcessStatus.RUNNING, true), GridAppsDConstants.username);

                ProcessBuilder fncsBridgeBuilder = new ProcessBuilder("python",
                        getPath(GridAppsDConstants.FNCS_BRIDGE_PATH), simulationConfig.getSimulation_name(),
                        broker_location);
                fncsBridgeBuilder.redirectErrorStream(true);
                fncsBridgeBuilder.redirectOutput(
                        new File(defaultLogDir.getAbsolutePath() + File.separator + "fncs_goss_bridge.log"));
                fncsBridgeProcess = fncsBridgeBuilder.start();
                // Watch the process
                watch(fncsBridgeProcess, "FNCS GOSS Bridge");

                //TODO: check if bridge is started correctly and send publish simulation status accordingly

                logManager.log(new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                        new Date().getTime(), "FNCS-GOSS Bridge started", LogLevel.INFO, ProcessStatus.RUNNING,
                        true), GridAppsDConstants.username);

                //Subscribe to fncs-goss-bridge output topic
                client.subscribe(GridAppsDConstants.topic_FNCS_output,
                        new GossFncsResponseEvent(logManager, isInitialized, simulationId));

                int initAttempts = 0;
                while (!isInitialized.isInited && initAttempts < MAX_INIT_ATTEMPTS) {
                    //Send 'isInitialized' call to fncs-goss-bridge to check initialization until it is initialized.
                    //TODO add limiting how long it checks for initialized, or cancel if the fncs process exits
                    //This call would return true/false for initialization and simulation output of time step 0.
                    logManager.log(
                            new LogMessage(this.getClass().getName() + "-" + Integer.toString(simulationId),
                                    new Date().getTime(),
                                    "Checking fncs is initialized, currently " + isInitialized.isInited,
                                    LogLevel.INFO, ProcessStatus.RUNNING, true),
                            GridAppsDConstants.username);

                    client.publish(GridAppsDConstants.topic_FNCS_input, "{\"command\": \"isInitialized\"}");
                    initAttempts++;
                    Thread.sleep(1000);

                }

                if (initAttempts < MAX_INIT_ATTEMPTS) {
                    logManager.log(
                            new LogMessage(Integer.toString(simulationId), new Date().getTime(),
                                    "FNCS Initialized", LogLevel.INFO, ProcessStatus.RUNNING, true),
                            GridAppsDConstants.username);

                    //Send the timesteps by second for the amount of time specified in the simulation config
                    sendTimesteps(simulationConfig, simulationId);
                } else {
                    logManager.log(
                            new LogMessage(Integer.toString(simulationId), new Date().getTime(),
                                    "FNCS Initialization Failed", LogLevel.ERROR, ProcessStatus.ERROR, true),
                            GridAppsDConstants.username);

                }

                //call to stop the fncs broker
                client.publish(GridAppsDConstants.topic_FNCS_input, "{\"command\":  \"stop\"}");
                logManager.log(new LogMessage(Integer.toString(simulationId), new Date().getTime(),
                        "Simulation " + simulationId + " complete", LogLevel.INFO, ProcessStatus.COMPLETE,
                        true), GridAppsDConstants.username);
            } catch (Exception e) {
                log.error("Error during simulation", e);
                try {
                    logManager.log(new LogMessage(Integer.toString(simulationId), new Date().getTime(),
                            "Simulation error: " + e.getMessage(), LogLevel.ERROR, ProcessStatus.ERROR, true),
                            GridAppsDConstants.username);
                } catch (Exception e1) {
                    log.error("Error while reporting error status", e);
                }
            } finally {
                //shut down fncs broker and gridlabd and bridge if still running
                if (fncsProcess != null) {
                    fncsProcess.destroy();
                }
                if (gridlabdProcess != null) {
                    gridlabdProcess.destroy();
                }
                if (fncsBridgeProcess != null) {
                    fncsBridgeProcess.destroy();
                }
            }
        }
    });

    thread.start();
}

From source file:org.pentaho.di.job.entries.spark.JobEntrySparkSubmit.java

/**
 * Executes the spark-submit command and returns a Result
 *
 * @return The Result of the operation
 */
public Result execute(Result result, int nr) {
    if (!validate()) {
        result.setResult(false);
        return result;
    }

    try {
        List<String> cmds = getCmds();

        logBasic("Submitting Spark Script");

        if (log.isDetailed()) {
            logDetailed(cmds.toString());
        }

        // Build the environment variable list...
        ProcessBuilder procBuilder = new ProcessBuilder(cmds);
        Map<String, String> env = procBuilder.environment();
        String[] variables = listVariables();
        for (String variable : variables) {
            env.put(variable, getVariable(variable));
        }
        proc = procBuilder.start();

        String[] jobSubmittedPatterns = new String[] { "tracking URL:" };

        final AtomicBoolean jobSubmitted = new AtomicBoolean(false);

        // any error message?
        PatternMatchingStreamLogger errorLogger = new PatternMatchingStreamLogger(log, proc.getErrorStream(),
                jobSubmittedPatterns, jobSubmitted);

        // any output?
        PatternMatchingStreamLogger outputLogger = new PatternMatchingStreamLogger(log, proc.getInputStream(),
                jobSubmittedPatterns, jobSubmitted);

        if (!blockExecution) {
            PatternMatchingStreamLogger.PatternMatchedListener cb = new PatternMatchingStreamLogger.PatternMatchedListener() {
                @Override
                public void onPatternFound(String pattern) {
                    log.logDebug("Found match in output, considering job submitted, stopping spark-submit");
                    jobSubmitted.set(true);
                    proc.destroy();
                }
            };
            errorLogger.addPatternMatchedListener(cb);
            outputLogger.addPatternMatchedListener(cb);
        }

        // kick them off
        Thread errorLoggerThread = new Thread(errorLogger);
        errorLoggerThread.start();
        Thread outputLoggerThread = new Thread(outputLogger);
        outputLoggerThread.start();

        // Stop on job stop
        final AtomicBoolean processFinished = new AtomicBoolean(false);
        new Thread(new Runnable() {
            @Override
            public void run() {
                while (!getParentJob().isStopped() && !processFinished.get()) {
                    try {
                        Thread.sleep(5000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                proc.destroy();
            }
        }).start();

        proc.waitFor();

        processFinished.set(true);

        prepareProcessThreadsToStop(proc, errorLoggerThread, outputLoggerThread);

        if (log.isDetailed()) {
            logDetailed("Spark submit finished");
        }

        // What's the exit status?
        int exitCode;
        if (blockExecution) {
            exitCode = proc.exitValue();
        } else {
            exitCode = jobSubmitted.get() ? 0 : proc.exitValue();
        }

        result.setExitStatus(exitCode);
        if (exitCode != 0) {
            if (log.isDetailed()) {
                logDetailed(
                        BaseMessages.getString(PKG, "JobEntrySparkSubmit.ExitStatus", result.getExitStatus()));
            }

            result.setNrErrors(1);
        }

        result.setResult(exitCode == 0);
    } catch (Exception e) {
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobEntrySparkSubmit.Error.SubmittingScript", e.getMessage()));
        logError(Const.getStackTracker(e));
        result.setResult(false);
    }

    return result;
}