Example usage for java.lang ProcessBuilder environment

List of usage examples for java.lang ProcessBuilder environment

Introduction

This page collects example usages of the java.lang ProcessBuilder environment() method, drawn from open source projects.

Prototype

public Map<String,String> environment()

To view the source code for java.lang ProcessBuilder environment, follow the Source Link shown with each example.
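
As a quick orientation before the usage list: environment() returns a live, modifiable copy of the calling process's environment, and entries added to or removed from that map affect only processes started from the same builder. A minimal sketch, assuming a Unix-like system where printenv is available (the variable name GREETING is a placeholder):

import java.io.IOException;
import java.util.Map;

public class EnvironmentDemo {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Every builder starts with a copy of System.getenv().
        ProcessBuilder pb = new ProcessBuilder("printenv", "GREETING");
        Map<String, String> env = pb.environment();
        env.put("GREETING", "hello");  // visible only to children of this builder
        pb.inheritIO();                // let the child write to our stdout
        int exitCode = pb.start().waitFor();
        System.out.println("exit code: " + exitCode);
    }
}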

Usage

From source file:org.apache.nifi.processors.standard.ExecuteProcess.java

protected Future<?> launchProcess(final ProcessContext context, final List<String> commandStrings,
        final Long batchNanos, final ProxyOutputStream proxyOut) throws IOException {

    final Boolean redirectErrorStream = context.getProperty(REDIRECT_ERROR_STREAM).asBoolean();

    final ProcessBuilder builder = new ProcessBuilder(commandStrings);
    final String workingDirName = context.getProperty(WORKING_DIR).getValue();
    if (workingDirName != null) {
        builder.directory(new File(workingDirName));
    }

    final Map<String, String> environment = new HashMap<>();
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        if (entry.getKey().isDynamic()) {
            environment.put(entry.getKey().getName(), entry.getValue());
        }
    }

    if (!environment.isEmpty()) {
        builder.environment().putAll(environment);
    }

    getLogger().info("Start creating new Process > {} ", new Object[] { commandStrings });
    this.externalProcess = builder.redirectErrorStream(redirectErrorStream).start();

    // Submit task to read error stream from process
    if (!redirectErrorStream) {
        executor.submit(new Runnable() {
            @Override
            public void run() {
                try (final BufferedReader reader = new BufferedReader(
                        new InputStreamReader(externalProcess.getErrorStream()))) {
                    reader.lines().filter(line -> line != null && line.length() > 0).forEach(getLogger()::warn);
                } catch (final IOException ioe) {
                    // best-effort reader of the process error stream; ignore read failures
                }
            }
        });
    }

    // Submit task to read output of Process and write to FlowFile.
    failure = new AtomicBoolean(false);
    final Future<?> future = executor.submit(new Callable<Object>() {
        @Override
        public Object call() throws IOException {
            try {
                if (batchNanos == null) {
                    // if we aren't batching, just copy the stream from the
                    // process to the flowfile.
                    try (final BufferedInputStream bufferedIn = new BufferedInputStream(
                            externalProcess.getInputStream())) {
                        final byte[] buffer = new byte[4096];
                        int len;
                        while ((len = bufferedIn.read(buffer)) > 0) {

                            // NB!!!! Maybe all data should be read from
                            // input stream in case of !isScheduled() to
                            // avoid subprocess deadlock?
                            // (we just don't write data to proxyOut)
                            // Or because we don't use this subprocess
                            // anymore anyway, we don't care?
                            if (!isScheduled()) {
                                return null;
                            }

                            proxyOut.write(buffer, 0, len);
                        }
                    }
                } else {
                    // we are batching, which means that the output of the
                    // process is text. It doesn't make sense to grab
                    // arbitrary batches of bytes from some process and send
                    // it along as a piece of data, so we assume that
                    // setting a batch duration means text.
                    // Also, we don't want that text to get split up in the
                    // middle of a line, so we use BufferedReader
                    // to read lines of text and write them as lines of text.
                    try (final BufferedReader reader = new BufferedReader(
                            new InputStreamReader(externalProcess.getInputStream()))) {
                        String line;

                        while ((line = reader.readLine()) != null) {
                            if (!isScheduled()) {
                                return null;
                            }

                            proxyOut.write((line + "\n").getBytes(StandardCharsets.UTF_8));
                        }
                    }
                }
            } catch (final IOException ioe) {
                failure.set(true);
                throw ioe;
            } finally {
                try {
                    // Since we are going to exit anyway, one sec gives it an extra chance to exit gracefully.
                    // In the future consider exposing it via configuration.
                    boolean terminated = externalProcess.waitFor(1000, TimeUnit.MILLISECONDS);
                    int exitCode = terminated ? externalProcess.exitValue() : -9999;
                    getLogger().info("Process finished with exit code {} ", new Object[] { exitCode });
                } catch (InterruptedException e1) {
                    Thread.currentThread().interrupt();
                }
            }

            return null;
        }
    });

    return future;
}
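
A note on the pattern above: builder.environment().putAll(environment) merges the dynamic properties into the environment inherited from the parent JVM, so the child sees both. If the child should see only the supplied variables, the inherited entries can be cleared first; a minimal sketch against the builder above, with a hypothetical variable name:

    Map<String, String> env = builder.environment();
    env.clear();                   // drop everything inherited from the parent
    env.put("ONLY_VAR", "value");  // the child now sees just this entry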

From source file:com.buaa.cfs.utils.Shell.java

/** Run a command */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask = null;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);

    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }

    builder.redirectErrorStream(redirectErrorStream);

    if (Shell.WINDOWS) {
        synchronized (WindowsProcessLaunchLock) {
            // To workaround the race condition issue with child processes
            // inheriting unintended handles during process launch that can
            // lead to hangs on reading output and error streams, we
            // serialize process creation. More info available at:
            // http://support.microsoft.com/kb/315939
            process = builder.start();
        }
    } else {
        process = builder.start();
    }

    if (timeOutInterval > 0) {
        timeOutTimer = new Timer("Shell command timeout");
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        //One time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(
            new InputStreamReader(process.getErrorStream(), Charset.defaultCharset()));
    BufferedReader inReader = new BufferedReader(
            new InputStreamReader(process.getInputStream(), Charset.defaultCharset()));
    final StringBuffer errMsg = new StringBuffer();

    // read error and input streams as this would free up the buffers
    // free the error stream buffer
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    errMsg.append(line);
                    errMsg.append(System.getProperty("line.separator"));
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ise) {
        // ignore: thrown only if the thread was already started
    } catch (OutOfMemoryError oe) {
        LOG.error("Caught " + oe + ". One possible reason is that ulimit"
                + " setting of 'max user processes' is too low. If so, do"
                + " 'ulimit -u <largerNum>' and try again.");
        throw oe;
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        // make sure that the error thread exits
        joinThread(errThread);
        completed.set(true);
        //the timeout thread handling
        //taken care in finally block
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if (timeOutTimer != null) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            // JDK 7 tries to automatically drain the input streams for us
            // when the process exits, but since close is not synchronized,
            // it creates a race if we close the stream first and the same
            // fd is recycled.  the stream draining thread will attempt to
            // drain that fd!!  it may block, OOM, or cause bizarre behavior
            // see: https://bugs.openjdk.java.net/browse/JDK-8024521
            //      issue is fixed in build 7u60
            InputStream stdout = process.getInputStream();
            synchronized (stdout) {
                inReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        if (!completed.get()) {
            errThread.interrupt();
            joinThread(errThread);
        }
        try {
            InputStream stderr = process.getErrorStream();
            synchronized (stderr) {
                errReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = Time.monotonicNow();
    }
}
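
Note that the null check above guards the caller-supplied map, not the builder: environment() itself never returns null. The returned map also belongs to the builder, so a configured builder can be reused to launch several processes with the same variables; a minimal sketch (the command and variable are placeholders, and IOException handling is omitted):

    ProcessBuilder builder = new ProcessBuilder("env");
    builder.environment().put("RUN_MODE", "batch");  // hypothetical variable
    Process first = builder.start();                 // both children see RUN_MODE=batch
    Process second = builder.start();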

From source file:com.twinsoft.convertigo.engine.localbuild.BuildLocally.java

private String runCommand(File launchDir, String command, List<String> parameters, boolean mergeError)
        throws Throwable {
    if (is(OS.win32)) {
        // Works for cordova and npm
        command += ".cmd";
    }

    String shellFullpath = command;
    String paths = getLocalBuildAdditionalPath();
    paths = (paths.length() > 0 ? paths + File.pathSeparator : "") + System.getenv("PATH");

    String defaultPaths = null;
    if (is(OS.mac) || is(OS.linux)) {
        defaultPaths = "/usr/local/bin";
    } else if (is(OS.win32)) {
        String programFiles = System.getenv("ProgramW6432");
        if (programFiles != null && programFiles.length() > 0) {
            defaultPaths = programFiles + File.separator + "nodejs";
        }

        programFiles = System.getenv("ProgramFiles");
        if (programFiles != null && programFiles.length() > 0) {
            defaultPaths = (defaultPaths == null ? "" : defaultPaths + File.pathSeparator) + programFiles
                    + File.separator + "nodejs";
        }

        String appData = System.getenv("APPDATA");
        if (appData != null && appData.length() > 0) {
            defaultPaths = (defaultPaths == null ? "" : defaultPaths + File.pathSeparator) + appData
                    + File.separator + "npm";
        }
    }
    paths += File.pathSeparator + defaultPaths;

    // Check whether the command is already a full path
    if (!(new File(shellFullpath).exists())) {
        // Otherwise, locate the executable and build its absolute path
        shellFullpath = getFullPath(paths, command);

        // If the executable is still not found, fall back to the bare command name
        if (shellFullpath == null) {
            shellFullpath = command;
        }
    }

    // Prepares the command
    parameters.add(0, shellFullpath);
    ProcessBuilder pb = new ProcessBuilder(parameters);
    // Set the directory from where the command will be executed
    pb.directory(launchDir.getCanonicalFile());

    Map<String, String> pbEnv = pb.environment();
    // must set "Path" for Windows 8.1 64
    pbEnv.put(pbEnv.get("PATH") == null ? "Path" : "PATH", paths);

    // Specific to npm command
    if (shellFullpath.endsWith("npm") || shellFullpath.endsWith("npm.cmd")) {

        // Set the proxy for npm
        String proxyMode = EnginePropertiesManager
                .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_MODE);
        if (proxyMode.equals(ProxyMode.manual.getValue())) {
            String proxyAuthMethod = EnginePropertiesManager
                    .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_METHOD);

            if (proxyAuthMethod.equals(ProxyMethod.anonymous.getValue())
                    || proxyAuthMethod.equals(ProxyMethod.basic.getValue())) {
                String proxyHost = EnginePropertiesManager
                        .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_HOST);
                String proxyPort = EnginePropertiesManager
                        .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_PORT);

                String npmProxy = proxyHost + ":" + proxyPort;

                if (proxyAuthMethod.equals(ProxyMethod.basic.getValue())) {
                    String proxyUser = EnginePropertiesManager
                            .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_USER);
                    String proxyPassword = EnginePropertiesManager
                            .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_PASSWORD);

                    npmProxy = proxyUser + ":" + proxyPassword + "@" + npmProxy;
                }

                pbEnv.put("http-proxy", "http://" + npmProxy);
                pbEnv.put("https-proxy", "http://" + npmProxy);
            }
        }
    }

    pb.redirectErrorStream(mergeError);

    Engine.logEngine.info("Executing command : " + parameters);

    process = pb.start();

    cmdOutput = "";
    // Logs the output
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                String line;
                processCanceled = false;

                BufferedReader bis = new BufferedReader(new InputStreamReader(process.getInputStream()));
                while ((line = bis.readLine()) != null) {
                    Engine.logEngine.info(line);
                    BuildLocally.this.cmdOutput += line;
                }
            } catch (IOException e) {
                Engine.logEngine.error("Error while executing command", e);
            }
        }
    }).start();

    if (!mergeError) {
        // Logs the error output
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    String line;
                    processCanceled = false;

                    BufferedReader bis = new BufferedReader(new InputStreamReader(process.getErrorStream()));
                    while ((line = bis.readLine()) != null) {
                        Engine.logEngine.error(line);
                        errorLines += line;
                    }
                } catch (IOException e) {
                    Engine.logEngine.error("Error while executing command", e);
                }
            }
        }).start();
    }

    int exitCode = process.waitFor();

    if (exitCode != 0 && exitCode != 127) {
        throw new Exception(
                "Exit code " + exitCode + " when running the command '" + command + "' with parameters : '"
                        + parameters + "'. The output of the command is : '" + cmdOutput + "'");
    }

    return cmdOutput;
}
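
The PATH handling above deserves a remark: on Unix the environment map is case-sensitive, so the key must be exactly "PATH", while on Windows variable names are case-insensitive at the OS level (the map returned by environment() typically reflects that), hence the fallback to the "Path" spelling. A sketch of a portable prepend, with a placeholder directory argument:

    static void prependToPath(ProcessBuilder pb, String dir) {
        java.util.Map<String, String> env = pb.environment();
        String key = env.containsKey("PATH") ? "PATH" : "Path";  // tolerate the Windows spelling
        String oldPath = env.get(key);
        env.put(key, dir + (oldPath == null ? "" : java.io.File.pathSeparator + oldPath));
    }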

From source file:org.apache.hive.spark.client.SparkClientImpl.java

private Thread startDriver(final RpcServer rpcServer, final String clientId, final String secret)
        throws IOException {
    Runnable runnable;
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());

    if (conf.containsKey(SparkClientFactory.CONF_KEY_IN_PROCESS)) {
        // Mostly for testing things quickly. Do not do this in production.
        // when invoked in-process it inherits the environment variables of the parent
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        runnable = new Runnable() {
            @Override
            public void run() {
                List<String> args = Lists.newArrayList();
                args.add("--remote-host");
                args.add(serverAddress);
                args.add("--remote-port");
                args.add(serverPort);
                args.add("--client-id");
                args.add(clientId);
                args.add("--secret");
                args.add(secret);

                for (Map.Entry<String, String> e : conf.entrySet()) {
                    args.add("--conf");
                    args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
                }
                try {
                    RemoteDriver.main(args.toArray(new String[args.size()]));
                } catch (Exception e) {
                    LOG.error("Error running driver.", e);
                }
            }
        };
    } else {
        // If a Spark installation is provided, use the spark-submit script. Otherwise, call the
        // SparkSubmit class directly, which has some caveats (like having to provide a proper
        // version of Guava on the classpath depending on the deploy mode).
        String sparkHome = Strings.emptyToNull(conf.get(SPARK_HOME_KEY));
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getenv(SPARK_HOME_ENV));
        }
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getProperty(SPARK_HOME_KEY));
        }
        String sparkLogDir = conf.get("hive.spark.log.dir");
        if (sparkLogDir == null) {
            if (sparkHome == null) {
                sparkLogDir = "./target/";
            } else {
                sparkLogDir = sparkHome + "/logs/";
            }
        }

        String osxTestOpts = "";
        if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
            osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
        }

        String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(DRIVER_OPTS_KEY));
        String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));

        // Create a file with all the job properties to be read by spark-submit. Change the
        // file's permissions so that only the owner can read it. This avoid having the
        // connection secret show up in the child process's command line.
        File properties = File.createTempFile("spark-submit.", ".properties");
        if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
            throw new IOException("Cannot change permissions of job properties file.");
        }
        properties.deleteOnExit();

        Properties allProps = new Properties();
        // first load the defaults from spark-defaults.conf if available
        try {
            URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader()
                    .getResource("spark-defaults.conf");
            if (sparkDefaultsUrl != null) {
                LOG.info("Loading spark defaults: " + sparkDefaultsUrl);
                allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
            }
        } catch (Exception e) {
            String msg = "Exception trying to load spark-defaults.conf: " + e;
            throw new IOException(msg, e);
        }
        // then load the SparkClientImpl config
        for (Map.Entry<String, String> e : conf.entrySet()) {
            allProps.put(e.getKey(), conf.get(e.getKey()));
        }
        allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
        allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
        allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
        allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);

        String isTesting = conf.get("spark.testing");
        if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
            String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
            if (!hiveHadoopTestClasspath.isEmpty()) {
                String extraDriverClasspath = Strings
                        .nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
                if (extraDriverClasspath.isEmpty()) {
                    allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator)
                            ? extraDriverClasspath
                            : extraDriverClasspath + File.pathSeparator;
                    allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
                }

                String extraExecutorClasspath = Strings
                        .nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
                if (extraExecutorClasspath.isEmpty()) {
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator)
                            ? extraExecutorClasspath
                            : extraExecutorClasspath + File.pathSeparator;
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
                }
            }
        }

        Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
        try {
            allProps.store(writer, "Spark Context configuration");
        } finally {
            writer.close();
        }

        // Define how to pass options to the child process. If launching in client (or local)
        // mode, the driver options need to be passed directly on the command line. Otherwise,
        // SparkSubmit will take care of that for us.
        String master = conf.get("spark.master");
        Preconditions.checkArgument(master != null, "spark.master is not defined.");
        String deployMode = conf.get("spark.submit.deployMode");

        List<String> argv = Lists.newLinkedList();

        if (sparkHome != null) {
            argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
        } else {
            LOG.info("No spark.home provided, calling SparkSubmit directly.");
            argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath());

            if (master.startsWith("local") || master.startsWith("mesos")
                    || SparkClientUtilities.isYarnClientMode(master, deployMode)
                    || master.startsWith("spark")) {
                String mem = conf.get("spark.driver.memory");
                if (mem != null) {
                    argv.add("-Xms" + mem);
                    argv.add("-Xmx" + mem);
                }

                String cp = conf.get("spark.driver.extraClassPath");
                if (cp != null) {
                    argv.add("-classpath");
                    argv.add(cp);
                }

                String libPath = conf.get("spark.driver.extraLibPath");
                if (libPath != null) {
                    argv.add("-Djava.library.path=" + libPath);
                }

                String extra = conf.get(DRIVER_OPTS_KEY);
                if (extra != null) {
                    for (String opt : extra.split("[ ]")) {
                        if (!opt.trim().isEmpty()) {
                            argv.add(opt.trim());
                        }
                    }
                }
            }

            argv.add("org.apache.spark.deploy.SparkSubmit");
        }

        if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
            String executorCores = conf.get("spark.executor.cores");
            if (executorCores != null) {
                argv.add("--executor-cores");
                argv.add(executorCores);
            }

            String executorMemory = conf.get("spark.executor.memory");
            if (executorMemory != null) {
                argv.add("--executor-memory");
                argv.add(executorMemory);
            }

            String numOfExecutors = conf.get("spark.executor.instances");
            if (numOfExecutors != null) {
                argv.add("--num-executors");
                argv.add(numOfExecutors);
            }
        }
        // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh
        // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive can only support doAs or
        // delegation token renewal, but not both. Since doAs is the more common case, if both
        // are needed we favor doAs. So when doAs is enabled we use the kinit command;
        // otherwise we pass the principal/keytab to Spark to support token renewal for
        // long-running applications.
        if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
            String principal = SecurityUtil
                    .getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
            String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
            if (StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(keyTabFile)) {
                if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
                    List<String> kinitArgv = Lists.newLinkedList();
                    kinitArgv.add("kinit");
                    kinitArgv.add(principal);
                    kinitArgv.add("-k");
                    kinitArgv.add("-t");
                    kinitArgv.add(keyTabFile + ";");
                    kinitArgv.addAll(argv);
                    argv = kinitArgv;
                } else {
                    // if doAs is not enabled, we pass the principal/keytab to spark-submit in order to
                    // support possible delegation token renewal in Spark
                    argv.add("--principal");
                    argv.add(principal);
                    argv.add("--keytab");
                    argv.add(keyTabFile);
                }
            }
        }
        if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
            try {
                String currentUser = Utils.getUGI().getShortUserName();
                // do not do impersonation in CLI mode
                if (!currentUser.equals(System.getProperty("user.name"))) {
                    LOG.info("Attempting impersonation of " + currentUser);
                    argv.add("--proxy-user");
                    argv.add(currentUser);
                }
            } catch (Exception e) {
                String msg = "Cannot obtain username: " + e;
                throw new IllegalStateException(msg, e);
            }
        }

        argv.add("--properties-file");
        argv.add(properties.getAbsolutePath());
        argv.add("--class");
        argv.add(RemoteDriver.class.getName());

        String jar = "spark-internal";
        if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
            jar = SparkContext.jarOfClass(this.getClass()).get();
        }
        argv.add(jar);

        argv.add("--remote-host");
        argv.add(serverAddress);
        argv.add("--remote-port");
        argv.add(serverPort);

        //hive.spark.* keys are passed down to the RemoteDriver via --conf,
        //as --properties-file contains the spark.* keys that are meant for SparkConf object.
        for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
            String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
            argv.add("--conf");
            argv.add(String.format("%s=%s", hiveSparkConfKey, value));
        }

        String cmd = Joiner.on(" ").join(argv);
        LOG.info("Running client driver with argv: {}", cmd);
        ProcessBuilder pb = new ProcessBuilder("sh", "-c", cmd);

        // Prevent hive configurations from being visible in Spark.
        pb.environment().remove("HIVE_HOME");
        pb.environment().remove("HIVE_CONF_DIR");
        // Add credential provider password to the child process's environment
        // In case of Spark the credential provider location is provided in the jobConf when the job is submitted
        String password = getSparkJobCredentialProviderPassword();
        if (password != null) {
            pb.environment().put(Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, password);
        }
        if (isTesting != null) {
            pb.environment().put("SPARK_TESTING", isTesting);
        }

        final Process child = pb.start();
        String threadName = Thread.currentThread().getName();
        final List<String> childErrorLog = Collections.synchronizedList(new ArrayList<String>());
        redirect("RemoteDriver-stdout-redir-" + threadName, new Redirector(child.getInputStream()));
        redirect("RemoteDriver-stderr-redir-" + threadName,
                new Redirector(child.getErrorStream(), childErrorLog));

        runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    int exitCode = child.waitFor();
                    if (exitCode != 0) {
                        StringBuilder errStr = new StringBuilder();
                        synchronized (childErrorLog) {
                            Iterator<String> iter = childErrorLog.iterator();
                            while (iter.hasNext()) {
                                errStr.append(iter.next());
                                errStr.append('\n');
                            }
                        }

                        LOG.warn("Child process exited with code {}", exitCode);
                        rpcServer.cancelClient(clientId,
                                "Child process (spark-submit) exited before connecting back with error log "
                                        + errStr.toString());
                    }
                } catch (InterruptedException ie) {
                    LOG.warn(
                            "Thread waiting on the child process (spark-submit) is interrupted, killing the child process.");
                    rpcServer.cancelClient(clientId,
                            "Thread waiting on the child porcess (spark-submit) is interrupted");
                    Thread.interrupted();
                    child.destroy();
                } catch (Exception e) {
                    String errMsg = "Exception while waiting for child process (spark-submit)";
                    LOG.warn(errMsg, e);
                    rpcServer.cancelClient(clientId, errMsg);
                }
            }
        };
    }

    Thread thread = new Thread(runnable);
    thread.setDaemon(true);
    thread.setName("Driver");
    thread.start();
    return thread;
}
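
One detail of this example generalizes well: the connection secret travels through an owner-only properties file and through the child's environment, never on the command line, where any local user could read it from a process listing. A minimal sketch with hypothetical names (the tool and variable names do not come from the source):

    String secret = "placeholder-secret";
    ProcessBuilder pb = new ProcessBuilder("sh", "-c", "my-tool");
    pb.environment().remove("HIVE_CONF_DIR");    // keep a parent-only setting out of the child
    pb.environment().put("TOOL_SECRET", secret); // unlike argv, the environment does not show up in ps listings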

From source file:com.aurel.track.admin.customize.category.report.execute.ReportBeansToLaTeXConverter.java

/**
 * Runs the LaTeX command on the given file inside the given working directory.
 *
 * @param workDir the directory in which to run the command
 * @param latexFile the LaTeX file to compile
 * @param nrOfRuns the requested number of compilation runs
 */
protected int runPdflatex(File workDir, File latexFile, int nrOfRuns) {

    if (latexCmd == null) {
        return -99;
    }

    int exitValue = 0;

    try {

        String[] cmd = new String[] { latexCmd, "--halt-on-error", "-output-directory=" + workDir,
                latexFile.getAbsolutePath() };

        String texpath = new File((new File(latexCmd)).getParent()).getAbsolutePath();

        ProcessBuilder latexProcessBuilder = new ProcessBuilder(cmd);
        latexProcessBuilder.directory(workDir);
        Map<String, String> env = latexProcessBuilder.environment();
        String path = env.get("PATH");
        if (path != null) {
            path = texpath + ":" + path;
            env.put("PATH", path);
        }

        File stdoutlog = new File(workDir + File.separator + "stdout.log");
        latexProcessBuilder.redirectOutput(Redirect.appendTo(stdoutlog));

        File stderrlog = new File(workDir + File.separator + "stderr.log");
        latexProcessBuilder.redirectError(Redirect.appendTo(stderrlog));

        ProcessExecutor latexProcessExecutor = new ProcessExecutor(latexProcessBuilder);

        Thread executionThread = new Thread(latexProcessExecutor);

        long timeout = 20000;

        LOGGER.debug("Run xelatex thread started!");

        long startTime = System.currentTimeMillis();

        executionThread.start();

        int imod = 0;
        while (executionThread.isAlive()) {
            ++imod;
            if (imod % 5 == 0) {
                LOGGER.debug("Run xelatex thread is alive");
            }

            if (((System.currentTimeMillis() - startTime) > timeout) && executionThread.isAlive()) {
                executionThread.interrupt();

                LOGGER.debug("Run xelatex thread interrupted!");

                latexProcessExecutor.killProcess();
            }
            Thread.sleep(100);
        }

        LOGGER.debug("Run xelatex done!");

        exitValue = latexProcessExecutor.getExitValue();

        try {
            Thread.sleep(1000);
        } catch (Exception ex) {
            LOGGER.error(ExceptionUtils.getStackTrace(ex), ex);
        }
    } catch (Exception ex) {
        LOGGER.error(ExceptionUtils.getStackTrace(ex), ex);
    }

    return exitValue;
}
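
One caveat in this example: the search path is prepended with a hard-coded ":" separator, which is correct on Unix but wrong on Windows. Swapping in File.pathSeparator makes the same line portable; a one-line sketch:

    path = texpath + File.pathSeparator + path;  // ":" on Unix, ";" on Windows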

From source file:org.pbccrc.zsls.utils.Shell.java

/** Run a command */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask = null;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);

    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }

    builder.redirectErrorStream(redirectErrorStream);

    if (Shell.WINDOWS) {
        synchronized (WindowsProcessLaunchLock) {
            // To workaround the race condition issue with child processes
            // inheriting unintended handles during process launch that can
            // lead to hangs on reading output and error streams, we
            // serialize process creation. More info available at:
            // http://support.microsoft.com/kb/315939
            process = builder.start();
        }
    } else {
        process = builder.start();
    }

    if (timeOutInterval > 0) {
        timeOutTimer = new Timer("Shell command timeout");
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        //One time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
    BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    final StringBuffer errMsg = new StringBuffer();

    // read error and input streams as this would free up the buffers
    // free the error stream buffer
    Thread errThread = new Thread() {
        @Override
        public void run() {
            boolean overErrMsg = false;
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    if (!overErrMsg) {
                        if (line.length() + errMsg.length() > ERR_MSG_BUFF_SIZE)
                            overErrMsg = true;
                        else {
                            errMsg.append(line);
                            errMsg.append(System.getProperty("line.separator"));
                        }
                    }
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ise) {
        // ignore: thrown only if the thread was already started
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        // make sure that the error thread exits
        joinThread(errThread);
        completed.set(true);
        //the timeout thread handling
        //taken care in finally block
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if (timeOutTimer != null) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            // JDK 7 tries to automatically drain the input streams for us
            // when the process exits, but since close is not synchronized,
            // it creates a race if we close the stream first and the same
            // fd is recycled.  the stream draining thread will attempt to
            // drain that fd!!  it may block, OOM, or cause bizarre behavior
            // see: https://bugs.openjdk.java.net/browse/JDK-8024521
            //      issue is fixed in build 7u60
            InputStream stdout = process.getInputStream();
            synchronized (stdout) {
                inReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        if (!completed.get()) {
            errThread.interrupt();
            joinThread(errThread);
        }
        try {
            InputStream stderr = process.getErrorStream();
            synchronized (stderr) {
                errReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = clock.getTime();
    }
}

From source file:org.cloudifysource.usm.launcher.DefaultProcessLauncher.java

private Process launch(final List<String> commandLineParams, final File workingDir, final int retries,
        final boolean redirectErrorStream, final File outputFile, final File errorFile,
        final LifecycleEvents event) throws USMException {

    if (outputFile == null && errorFile != null || outputFile != null && errorFile == null) {
        throw new IllegalArgumentException("Both output and error files must be set, or none of them");
    }

    if (redirectErrorStream && (outputFile != null || errorFile != null)) {
        throw new IllegalArgumentException(
                "If redirectError option is chosen, neither output file or error file can be set");
    }

    List<String> modifiedCommandLineParams = commandLineParams;
    if (isDebugEvent(event)) {
        // create environment for debugging the event and modify the command line.
        modifyCommandLine(modifiedCommandLineParams, workingDir, outputFile, errorFile, event);

        logger.info("DEBUG BREAKPOINT!");
        DebugHookInvoker dhi = new DebugHookInvoker();

        final ClassLoader loader = this.configutaion.getDslClassLoader();
        logger.info("DSL Class Loader is: " + loader);

        modifiedCommandLineParams = dhi.setUpDebugHook(this.configutaion.getServiceContext(),
                modifiedCommandLineParams, loader, this.debugMode);

    } else {

        modifyCommandLine(modifiedCommandLineParams, workingDir, outputFile, errorFile, event);
    }

    // JDK7 on Windows Specific modifications
    // See CLOUDIFY-1787 for more details.
    File tempBatchFile = null;
    if (shouldModifyCommandLineForJDK7(outputFile)) {
        tempBatchFile = createTempFileForJDK7(workingDir);
        modifyCommandLineForJDK7ProcessBuilder(modifiedCommandLineParams, outputFile, workingDir,
                tempBatchFile);
    }

    final String modifiedCommandLine = org.springframework.util.StringUtils
            .collectionToDelimitedString(modifiedCommandLineParams, " ");

    this.commandLine = modifiedCommandLineParams;

    int attempt = 1;
    USMException ex = null;
    while (attempt <= retries + 1) {
        final ProcessBuilder pb = new ProcessBuilder(modifiedCommandLineParams);
        pb.directory(workingDir);
        pb.redirectErrorStream(redirectErrorStream);
        final Map<String, String> env = createEnvironment();
        pb.environment().putAll(env);

        try {
            logger.fine("Parsed command line: " + commandLineParams.toString());

            final String fileInitialMessage = "Starting service process in working directory:'" + workingDir
                    + "' " + "at:'" + new Date() + "' with command:'" + modifiedCommandLineParams + "'"
                    + System.getProperty("line.separator");
            if (outputFile != null) {
                appendMessageToFile(fileInitialMessage, outputFile);
            }
            if (errorFile != null) {
                appendMessageToFile(fileInitialMessage, errorFile);
            }
            return pb.start();
        } catch (final IOException e) {
            ex = new USMException("Failed to start process with command line: " + modifiedCommandLine, e);
            logger.log(Level.SEVERE, "Process start attempt number " + attempt + " failed", ex);
        }
        ++attempt;
    }
    throw ex;
}

From source file:middleware.NewServerSocket.java

private void clearSetenv() {
    ProcessBuilder pb = new ProcessBuilder("/bin/bash", "shell/clear_setenv.sh");
    Map<String, String> env = pb.environment();
    if (mysql_user != null) {
        env.put("mMYSQL_USER", mysql_user);
    }
    if (mysql_pass != null) {
        env.put("mMYSQL_PASS", mysql_pass);
    }
    if (mysql_host != null) {
        env.put("mMYSQL_HOST", mysql_host);
    }
    if (mysql_port != null) {
        env.put("mMYSQL_PORT", mysql_port);
    }
    Process p = null;
    try {
        p = pb.start();
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (p != null) {
        try {
            p.waitFor();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
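
Both this method and configureSetenv below hand the MySQL settings to a shell script through the child's environment, where the script can read them as ordinary variables ($mMYSQL_USER and so on). A quick sketch to confirm that a variable reaches the child, assuming a Unix-like system with printenv (the value is a placeholder, and exception handling is omitted):

    ProcessBuilder pb = new ProcessBuilder("printenv", "mMYSQL_USER");
    pb.environment().put("mMYSQL_USER", "root");  // placeholder value
    pb.inheritIO();
    pb.start().waitFor();                         // prints "root"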

From source file:middleware.NewServerSocket.java

private void configureSetenv() {
    BufferedReader br = null;
    ProcessBuilder pb = new ProcessBuilder("/bin/bash", "shell/config_setenv.sh");
    Map<String, String> env = pb.environment();
    try {
        br = new BufferedReader(new FileReader(sharedData.getUserInfoFilePath()));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        return; // without the user info file there is nothing to configure
    }
    String line;
    try {
        while ((line = br.readLine()) != null) {
            String temp = line.replaceAll("\\s", "");
            if (temp.contains("mysql_user=")) {
                mysql_user = temp.substring(temp.indexOf('=') + 1);
                env.put("mMYSQL_USER", mysql_user);
                configSetenv = true;
            } else if (temp.contains("mysql_pass=")) {
                mysql_pass = temp.substring(temp.indexOf('=') + 1);
                env.put("mMYSQL_PASS", mysql_pass);
                configSetenv = true;
            } else if (temp.contains("mysql_host=")) {
                mysql_host = temp.substring(temp.indexOf('=') + 1);
                env.put("mMYSQL_HOST", mysql_host);
                configSetenv = true;
            } else if (temp.contains("mysql_port=")) {
                mysql_port = temp.substring(temp.indexOf('=') + 1);
                env.put("mMYSQL_PORT", mysql_port);
                configSetenv = true;
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (configSetenv) {
        Process p = null;
        try {
            p = pb.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (p != null) {
            try {
                p.waitFor();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}