Example usage for org.apache.commons.exec ExecuteWatchdog INFINITE_TIMEOUT

List of usage examples for org.apache.commons.exec ExecuteWatchdog INFINITE_TIMEOUT

Introduction

This page lists example usages of org.apache.commons.exec ExecuteWatchdog INFINITE_TIMEOUT.

Prototype

long INFINITE_TIMEOUT

Document

The marker for an infinite timeout

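Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: a watchdog created with INFINITE_TIMEOUT never times the child process out on its own, so it is used for long-running processes that should only be stopped explicitly via destroyProcess(). The class name and the ping command are placeholders, not taken from any of the listed projects.

import java.io.ByteArrayOutputStream;

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.PumpStreamHandler;

public class InfiniteTimeoutExample {
    public static void main(String[] args) throws Exception {
        // INFINITE_TIMEOUT: the watchdog never kills the process because of a timeout,
        // but it can still destroy the process on demand.
        ExecuteWatchdog watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        DefaultExecutor executor = new DefaultExecutor();
        executor.setStreamHandler(new PumpStreamHandler(out));
        executor.setWatchdog(watchdog);

        // Placeholder command; substitute any long-running process.
        CommandLine command = CommandLine.parse("ping localhost");

        // Asynchronous execution; the result handler is notified when the process exits.
        DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
        executor.execute(command, resultHandler);

        Thread.sleep(5000);

        // With an infinite timeout this is the only way the process gets killed.
        watchdog.destroyProcess();
        resultHandler.waitFor();
        System.out.println(out.toString());
    }
}
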
Usage

From source file:io.selendroid.android.impl.AbstractDevice.java

private void startLogging() {
    logoutput = new ByteArrayOutputStream();
    DefaultExecutor exec = new DefaultExecutor();
    exec.setStreamHandler(new PumpStreamHandler(logoutput));
    CommandLine command = adbCommand("logcat", "ResourceType:S", "dalvikvm:S", "Trace:S", "SurfaceFlinger:S",
            "StrictMode:S", "ExchangeService:S", "SVGAndroid:S", "skia:S", "LoaderManager:S",
            "ActivityThread:S", "-v", "time");
    log.info("starting logcat:");
    log.fine(command.toString());
    try {
        exec.execute(command, new DefaultExecuteResultHandler());
        logcatWatchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
        exec.setWatchdog(logcatWatchdog);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
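
The watchdog is kept in the logcatWatchdog field so that the logcat process, which never times out under INFINITE_TIMEOUT, can be stopped later. A plausible stopLogging counterpart (an illustrative sketch, not part of the listing above) would be:

private void stopLogging() {
    // The logcat process never times out on its own, so destroy it explicitly.
    if (logcatWatchdog != null && logcatWatchdog.isWatching()) {
        logcatWatchdog.destroyProcess();
        logcatWatchdog = null;
    }
}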

From source file:io.selendroid.standalone.android.impl.AbstractDevice.java

private void startLogging() {
    logoutput = new ByteArrayOutputStream();
    DefaultExecutor exec = new DefaultExecutor();
    exec.setStreamHandler(new PumpStreamHandler(logoutput));
    CommandLine command = adbCommand("logcat", "ResourceType:S", "dalvikvm:S", "Trace:S", "SurfaceFlinger:S",
            "StrictMode:S", "ExchangeService:S", "SVGAndroid:S", "skia:S", "LoaderManager:S",
            "ActivityThread:S", "-v", "time");
    log.info("starting logcat:");
    log.fine(command.toString());
    try {
        exec.execute(command, new DefaultExecuteResultHandler());
        logcatWatchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
        exec.setWatchdog(logcatWatchdog);
    } catch (IOException e) {
        log.log(Level.SEVERE, e.getMessage(), e);
    }
}

From source file:com.nts.alphamale.handler.DeviceHandler.java

public static Map<String, Object> runUiAutomatorTest(String serial) {
    return new AdbShellExecutor().execute(AdbShellCommand.cmd(serial, "cmd_uiautomator_runtest"), false,
            ExecuteWatchdog.INFINITE_TIMEOUT);
}

From source file:org.apache.karaf.decanter.kibana6.KibanaController.java

public KibanaController(File workingDirectory) {
    this.workingDirectory = workingDirectory;
    this.workingDirectory.mkdirs();
    this.executor = new DaemonExecutor();
    PumpStreamHandler pumpStreamHandler = new PumpStreamHandler(new LogOutputStream() {
        @Override
        protected void processLine(String line, int logLevel) {
            KIBANA_LOGGER.info(line);
        }
    });
    executor.setStreamHandler(pumpStreamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    executeResultHandler = new DefaultExecuteResultHandler();
}

From source file:org.apache.zeppelin.interpreter.remote.RemoteInterpreterManagedProcess.java

@Override
public void start(String userName, Boolean isUserImpersonate) {
    // start server process
    try {
        port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    } catch (IOException e1) {
        throw new InterpreterException(e1);
    }

    CommandLine cmdLine = CommandLine.parse(interpreterRunner);
    cmdLine.addArgument("-d", false);
    cmdLine.addArgument(interpreterDir, false);
    cmdLine.addArgument("-p", false);
    cmdLine.addArgument(Integer.toString(port), false);
    if (isUserImpersonate && !userName.equals("anonymous")) {
        cmdLine.addArgument("-u", false);
        cmdLine.addArgument(userName, false);
    }
    cmdLine.addArgument("-l", false);
    cmdLine.addArgument(localRepoDir, false);

    executor = new DefaultExecutor();

    ByteArrayOutputStream cmdOut = new ByteArrayOutputStream();
    ProcessLogOutputStream processOutput = new ProcessLogOutputStream(logger);
    processOutput.setOutputStream(cmdOut);

    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchdog);

    try {
        Map procEnv = EnvironmentUtils.getProcEnvironment();
        procEnv.putAll(env);

        logger.info("Run interpreter process {}", cmdLine);
        executor.execute(cmdLine, procEnv, this);
        running = true;
    } catch (IOException e) {
        running = false;
        throw new InterpreterException(e);
    }

    long startTime = System.currentTimeMillis();
    while (System.currentTimeMillis() - startTime < getConnectTimeout()) {
        if (!running) {
            try {
                cmdOut.flush();
            } catch (IOException e) {
                // nothing to do
            }
            throw new InterpreterException(new String(cmdOut.toByteArray()));
        }

        try {
            if (RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port)) {
                break;
            } else {
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    logger.error("Exception in RemoteInterpreterProcess while synchronized reference "
                            + "Thread.sleep", e);
                }
            }
        } catch (Exception e) {
            if (logger.isDebugEnabled()) {
                logger.debug("Remote interpreter not yet accessible at localhost:" + port);
            }
        }
    }
    processOutput.setOutputStream(null);
}

From source file:org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess.java

public int reference(InterpreterGroup interpreterGroup) {
    synchronized (referenceCount) {
        if (executor == null) {
            // start server process
            try {
                port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
            } catch (IOException e1) {
                throw new InterpreterException(e1);
            }

            CommandLine cmdLine = CommandLine.parse(interpreterRunner);
            cmdLine.addArgument("-d", false);
            cmdLine.addArgument(interpreterDir, false);
            cmdLine.addArgument("-p", false);
            cmdLine.addArgument(Integer.toString(port), false);

            executor = new DefaultExecutor();

            watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
            executor.setWatchdog(watchdog);

            running = true;
            try {
                Map procEnv = EnvironmentUtils.getProcEnvironment();
                procEnv.putAll(env);

                logger.info("Run interpreter process {}", cmdLine);
                executor.execute(cmdLine, procEnv, this);
            } catch (IOException e) {
                running = false;
                throw new InterpreterException(e);
            }

            long startTime = System.currentTimeMillis();
            while (System.currentTimeMillis() - startTime < connectTimeout) {
                if (RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port)) {
                    break;
                } else {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                    }
                }
            }

            clientPool = new GenericObjectPool<Client>(new ClientFactory("localhost", port));

            remoteInterpreterEventPoller.setInterpreterGroup(interpreterGroup);
            remoteInterpreterEventPoller.setInterpreterProcess(this);
            remoteInterpreterEventPoller.start();
        }
        return referenceCount.incrementAndGet();
    }
}

From source file:org.apache.zeppelin.python.IPythonInterpreter.java

private void launchIPythonKernel(int ipythonPort) throws IOException {
    LOGGER.info("Launching IPython Kernel at port: " + ipythonPort);
    // copy the python scripts to a temp directory, then launch ipython kernel in that folder
    File pythonWorkDir = Files.createTempDirectory("zeppelin_ipython").toFile();
    String[] ipythonScripts = { "ipython_server.py", "ipython_pb2.py", "ipython_pb2_grpc.py" };
    for (String ipythonScript : ipythonScripts) {
        URL url = getClass().getClassLoader().getResource("grpc/python" + "/" + ipythonScript);
        FileUtils.copyURLToFile(url, new File(pythonWorkDir, ipythonScript));
    }

    CommandLine cmd = CommandLine.parse(pythonExecutable);
    cmd.addArgument(pythonWorkDir.getAbsolutePath() + "/ipython_server.py");
    cmd.addArgument(ipythonPort + "");
    DefaultExecutor executor = new DefaultExecutor();
    ProcessLogOutputStream processOutput = new ProcessLogOutputStream(LOGGER);
    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchDog);

    if (useBuiltinPy4j) {
        //TODO(zjffdu) don't do hard code on py4j here
        File py4jDestFile = new File(pythonWorkDir, "py4j-src-0.10.7.zip");
        FileUtils.copyURLToFile(getClass().getClassLoader().getResource("python/py4j-src-0.10.7.zip"),
                py4jDestFile);
        if (additionalPythonPath != null) {
            // put the py4j at the end, because additionalPythonPath may already contain py4j.
            // e.g. PySparkInterpreter
            additionalPythonPath = additionalPythonPath + ":" + py4jDestFile.getAbsolutePath();
        } else {
            additionalPythonPath = py4jDestFile.getAbsolutePath();
        }
    }

    Map<String, String> envs = setupIPythonEnv();
    executor.execute(cmd, envs, this);

    // wait until IPython kernel is started or timeout
    long startTime = System.currentTimeMillis();
    while (true) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            LOGGER.error("Interrupted by something", e);
        }

        try {
            StatusResponse response = ipythonClient.status(StatusRequest.newBuilder().build());
            if (response.getStatus() == IPythonStatus.RUNNING) {
                LOGGER.info("IPython Kernel is Running");
                break;
            } else {
                LOGGER.info("Wait for IPython Kernel to be started");
            }
        } catch (Exception e) {
            // ignore the exception, because it may happen when the grpc server has not started yet.
            LOGGER.info("Wait for IPython Kernel to be started");
        }

        if ((System.currentTimeMillis() - startTime) > ipythonLaunchTimeout) {
            throw new IOException(
                    "Fail to launch IPython Kernel in " + ipythonLaunchTimeout / 1000 + " seconds");
        }
    }
}

From source file:org.apache.zeppelin.python.PythonInterpreter.java

private void createGatewayServerAndStartScript() throws IOException {
    // start gateway server in JVM side
    int port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    // use the FQDN as the server address instead of 127.0.0.1 so that python process in docker
    // container can also connect to this gateway server.
    String serverAddress = PythonUtils.getLocalIP(properties);
    String secret = PythonUtils.createSecret(256);
    this.gatewayServer = PythonUtils.createGatewayServer(this, serverAddress, port, secret, usePy4jAuth);
    gatewayServer.start();

    // launch python process to connect to the gateway server in JVM side
    createPythonScript();
    String pythonExec = getPythonExec();
    CommandLine cmd = CommandLine.parse(pythonExec);
    if (!pythonExec.endsWith(".py")) {
        // PythonDockerInterpreter set pythonExec with script
        cmd.addArgument(pythonWorkDir + "/zeppelin_python.py", false);
    }
    cmd.addArgument(serverAddress, false);
    cmd.addArgument(Integer.toString(port), false);

    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(LOGGER);
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    Map<String, String> env = setupPythonEnv();
    if (usePy4jAuth) {
        env.put("PY4J_GATEWAY_SECRET", secret);
    }
    LOGGER.info("Launching Python Process Command: " + cmd.getExecutable() + " "
            + StringUtils.join(cmd.getArguments(), " "));
    executor.execute(cmd, env, this);
    pythonScriptRunning.set(true);
}

From source file:org.apache.zeppelin.spark.PySparkInterpreter.java

private void createGatewayServerAndStartScript() {
    // create python script
    createPythonScript();

    port = findRandomOpenPortOnAllLocalInterfaces();

    gatewayServer = new GatewayServer(this, port);
    gatewayServer.start();

    // Run python shell
    CommandLine cmd = CommandLine.parse(getProperty("zeppelin.pyspark.python"));
    cmd.addArgument(scriptPath, false);
    cmd.addArgument(Integer.toString(port), false);
    cmd.addArgument(Integer.toString(getSparkInterpreter().getSparkVersion().toNumber()), false);
    executor = new DefaultExecutor();
    outputStream = new ByteArrayOutputStream();
    PipedOutputStream ps = new PipedOutputStream();
    in = null;
    try {
        in = new PipedInputStream(ps);
    } catch (IOException e1) {
        throw new InterpreterException(e1);
    }
    ins = new BufferedWriter(new OutputStreamWriter(ps));

    input = new ByteArrayOutputStream();

    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

    try {
        Map env = EnvironmentUtils.getProcEnvironment();

        executor.execute(cmd, env, this);
        pythonscriptRunning = true;
    } catch (IOException e) {
        throw new InterpreterException(e);
    }

    try {
        input.write("import sys, getopt\n".getBytes());
        ins.flush();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
}

From source file:org.apache.zeppelin.spark.SparkRInterpreter.java

@Override
public void open() {
    // create R script
    createRScript();

    int backendTimeout = Integer.parseInt(System.getenv().getOrDefault("SPARKR_BACKEND_TIMEOUT", "120"));

    // Launch a SparkR backend server for the R process to connect to; this will let it see our
    // Java system properties etc.
    ZeppelinRBackend sparkRBackend = new ZeppelinRBackend();

    Semaphore initialized = new Semaphore(0);
    Thread sparkRBackendThread = new Thread("SparkR backend") {
        @Override
        public void run() {
            sparkRBackendPort = sparkRBackend.init();
            initialized.release();
            sparkRBackend.run();
        }
    };

    sparkRBackendThread.start();

    // Wait for RBackend initialization to finish
    try {
        if (initialized.tryAcquire(backendTimeout, TimeUnit.SECONDS)) {
            // Launch R
            CommandLine cmd = CommandLine.parse(getProperty("zeppelin.sparkr.r"));
            cmd.addArgument(scriptPath, false);
            cmd.addArgument("--no-save", false);
            //      cmd.addArgument(getJavaSparkContext().version(), false);
            executor = new DefaultExecutor();
            outputStream = new ByteArrayOutputStream();
            PipedOutputStream ps = new PipedOutputStream();
            in = null;
            try {
                in = new PipedInputStream(ps);
            } catch (IOException e1) {
                throw new InterpreterException(e1);
            }
            ins = new BufferedWriter(new OutputStreamWriter(ps));

            input = new ByteArrayOutputStream();

            PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
            executor.setStreamHandler(streamHandler);
            executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

            Map env = EnvironmentUtils.getProcEnvironment();

            String sparkRInterpreterObjId = sparkRBackend.put(this);
            String uberdataContextObjId = sparkRBackend.put(getUberdataContext());
            env.put("R_PROFILE_USER", scriptPath);
            env.put("SPARK_HOME", getSparkHome());
            env.put("EXISTING_SPARKR_BACKEND_PORT", String.valueOf(sparkRBackendPort));
            env.put("SPARKR_INTERPRETER_ID", sparkRInterpreterObjId);
            env.put("UBERDATA_CONTEXT_ID", uberdataContextObjId);
            logger.info("executing {} {}", env, cmd.toString());
            executor.execute(cmd, env, this);
            logger.info("executed");
            rScriptRunning = true;

        } else {
            System.err.println("SparkR backend did not initialize in " + backendTimeout + " seconds");
            System.exit(-1);
        }
    } catch (InterruptedException e) {
        throw new InterpreterException(e);
    } catch (IOException e) {
        throw new InterpreterException(e);
    }

}