Example usage for com.google.common.io Resources toByteArray

List of usage examples for com.google.common.io Resources toByteArray

Introduction

On this page you can find example usages of com.google.common.io Resources.toByteArray.

Prototype

public static byte[] toByteArray(URL url) throws IOException 

Document

Reads all bytes from a URL into a byte array.
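For a quick orientation before the full examples below, here is a minimal, self-contained sketch of the typical call pattern. It assumes a resource named config.properties is available on the classpath; the resource name and class name are illustrative only and do not come from any project shown below.

import com.google.common.io.Resources;

import java.io.IOException;
import java.net.URL;

public class ResourcesToByteArrayExample {

    public static void main(String[] args) throws IOException {
        // Resolve a classpath resource to a URL (throws IllegalArgumentException if it is missing).
        URL url = Resources.getResource("config.properties");

        // Read the entire contents of the URL into memory as a byte array.
        byte[] bytes = Resources.toByteArray(url);

        System.out.println("Read " + bytes.length + " bytes from " + url);
    }
}

Because the whole resource is buffered in memory, toByteArray is best suited to small inputs such as configuration files, templates, and static web assets, which is how the examples below use it.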

Usage

From source file:com.google.gct.intellij.endpoints.generator.sample.AppEngineMavenGcmGenerator.java

/** Pull in appEngineSampleCode from somewhere; right now it's the in-plugin templating, but maybe this whole thing changes */
protected void addAppEngineSampleCode(final String rootPackage, final String appId, final String apiKey)
        throws IOException {

    ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<Void, IOException>() {
        @Override
        public Void compute() throws IOException {
            LocalFileSystem fs = LocalFileSystem.getInstance();
            PsiManager psiManager = PsiManager.getInstance(myProject);
            PsiDirectory moduleRootDir = psiManager.findDirectory(myModuleRootDir);
            PsiDirectory javaSrcDirectory = psiManager
                    .findDirectory(myModuleRootDir.findFileByRelativePath("src/main/java"));
            PsiDirectory resourcesDirectory = psiManager
                    .findDirectory(myModuleRootDir.findFileByRelativePath("src/main/resources"));
            PsiDirectory webappDirectory = psiManager
                    .findDirectory(myModuleRootDir.findFileByRelativePath("src/main/webapp"));

            // Create package directory
            PsiDirectory curPackageDir = javaSrcDirectory;
            for (String packageComponent : rootPackage.split("\\.")) {
                curPackageDir = curPackageDir.createSubdirectory(packageComponent);
            }

            // Add entity manager
            curPackageDir.add(TemplateHelper.loadJpaEntityManagerFactoryClass(myProject, rootPackage));

            TemplateHelper.EndpointPackageInfo endpointPackageInfo = TemplateHelper
                    .getEndpointPackageInfo(rootPackage);

            // Add entities
            for (String entityName : SAMPLE_ENTITY_NAMES) {
                curPackageDir.add(TemplateHelper.generateJavaTemplateContentWithOwnerDomain(myProject,
                        entityName, rootPackage, endpointPackageInfo));
            }

            // Add endpoints
            for (String endpointName : SAMPLE_ENDPOINTS_NAMES) {
                PsiElement addedEndpoint = curPackageDir
                        .add(TemplateHelper.generateJavaSampleTemplateWithOwnerDomainAndApiKey(myProject,
                                endpointName, rootPackage, endpointPackageInfo, apiKey));
                PsiDocumentManager docManager = PsiDocumentManager.getInstance(myProject);
                docManager.doPostponedOperationsAndUnblockDocument(
                        docManager.getDocument((PsiFile) addedEndpoint));
            }

            // Add/replace persistence.xml
            final PsiDirectory metaInfDir = resourcesDirectory
                    .findSubdirectory(GctConstants.APP_ENGINE_META_INF_DIR);
            PsiUtils.addOrReplaceFile(metaInfDir, TemplateHelper.loadPersistenceXml(myProject));

            // Add static content

            // css
            PsiDirectory cssDir = webappDirectory.createSubdirectory(GctConstants.APP_ENGINE_CSS_DIR);
            cssDir.add(TemplateHelper.generateStaticContent(myProject, "bootstrap.min.css"));

            // js
            PsiDirectory jsDir = webappDirectory.createSubdirectory(GctConstants.APP_ENGINE_JS_DIR);
            jsDir.add(TemplateHelper.generateStaticContent(myProject, "bootstrap.min.js"));
            jsDir.add(TemplateHelper.generateStaticContent(myProject, "jquery-1.9.0.min.js"));

            // images (to support twitter bootstrap)
            VirtualFile moduleImgDir = webappDirectory.getVirtualFile().createChildDirectory(null,
                    GctConstants.APP_ENGINE_IMG_DIR);
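            // Copy the bundled glyphicon images out of the plugin resources into the new img directory
            // (Resources.toByteArray reads the classpath resource, Files.write persists it to disk).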
            File img = new File(moduleImgDir.getPath() + "/glyphicons-halflings.png");
            Files.write(Resources.toByteArray(TemplateHelper.class.getResource("glyphicons-halflings.png")),
                    img);

            img = new File(moduleImgDir.getPath() + "/glyphicons-halflings-white.png");
            Files.write(
                    Resources.toByteArray(TemplateHelper.class.getResource("glyphicons-halflings-white.png")),
                    img);
            moduleImgDir.refresh(false, true);

            // xml
            PsiDirectory webInfDirectory = webappDirectory
                    .createSubdirectory(GctConstants.APP_ENGINE_WEB_INF_DIR);
            webInfDirectory.add(TemplateHelper.loadWebXml(myProject));
            webInfDirectory.add(TemplateHelper.generateAppEngineWebXml(myProject, appId));

            // html
            webappDirectory.add(TemplateHelper.generateStaticContent(myProject, "index.html"));
            return null; // to Void
        }
    });
}

From source file:org.apache.hive.spark.client.AbstractSparkClient.java

private Future<Void> startDriver(final RpcServer rpcServer, final String clientId, final String secret)
        throws IOException {
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());

    String sparkHome = getSparkHome();

    String sparkLogDir = conf.get("hive.spark.log.dir");
    if (sparkLogDir == null) {
        if (sparkHome == null) {
            sparkLogDir = "./target/";
        } else {
            sparkLogDir = sparkHome + "/logs/";
        }
    }

    String osxTestOpts = "";
    if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
        osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
    }

    String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir, osxTestOpts,
            conf.get(DRIVER_OPTS_KEY));
    String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
            osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));

    // Create a file with all the job properties to be read by spark-submit. Change the
    // file's permissions so that only the owner can read it. This avoids having the
    // connection secret show up in the child process's command line.
    File properties = File.createTempFile("spark-submit.", ".properties");
    if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
        throw new IOException("Cannot change permissions of job properties file.");
    }
    properties.deleteOnExit();

    Properties allProps = new Properties();
    // first load the defaults from spark-defaults.conf if available
    try {
        URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader()
                .getResource("spark-defaults.conf");
        if (sparkDefaultsUrl != null) {
            LOG.info("Loading spark defaults configs from: " + sparkDefaultsUrl);
            allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
        }
    } catch (Exception e) {
        String msg = "Exception trying to load spark-defaults.conf: " + e;
        throw new IOException(msg, e);
    }
    // then load the SparkClientImpl config
    for (Map.Entry<String, String> e : conf.entrySet()) {
        allProps.put(e.getKey(), conf.get(e.getKey()));
    }
    allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
    allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
    allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
    allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);

    String isTesting = conf.get("spark.testing");
    if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
        String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
        if (!hiveHadoopTestClasspath.isEmpty()) {
            String extraDriverClasspath = Strings.nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
            if (extraDriverClasspath.isEmpty()) {
                allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
            } else {
                extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator) ? extraDriverClasspath
                        : extraDriverClasspath + File.pathSeparator;
                allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
            }

            String extraExecutorClasspath = Strings
                    .nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
            if (extraExecutorClasspath.isEmpty()) {
                allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
            } else {
                extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator)
                        ? extraExecutorClasspath
                        : extraExecutorClasspath + File.pathSeparator;
                allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
            }
        }
    }

    Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
    try {
        allProps.store(writer, "Spark Context configuration");
    } finally {
        writer.close();
    }

    // Define how to pass options to the child process. If launching in client (or local)
    // mode, the driver options need to be passed directly on the command line. Otherwise,
    // SparkSubmit will take care of that for us.
    String master = conf.get("spark.master");
    Preconditions.checkArgument(master != null, "spark.master is not defined.");
    String deployMode = conf.get(SPARK_DEPLOY_MODE);

    if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
        String executorCores = conf.get("spark.executor.cores");
        if (executorCores != null) {
            addExecutorCores(executorCores);
        }

        String executorMemory = conf.get("spark.executor.memory");
        if (executorMemory != null) {
            addExecutorMemory(executorMemory);
        }

        String numOfExecutors = conf.get("spark.executor.instances");
        if (numOfExecutors != null) {
            addNumExecutors(numOfExecutors);
        }
    }
    // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh
    // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive can only support doAs or
    // delegation token renewal, but not both. Since doAs is the more common case, if both
    // are needed we choose to favor doAs. So when doAs is enabled we use the kinit command;
    // otherwise we pass the principal/keytab to Spark to support token renewal for
    // long-running applications.
    if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
        String principal = SecurityUtil
                .getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
        String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
        boolean isDoAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
        if (StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(keyTabFile)) {
            addKeytabAndPrincipal(isDoAsEnabled, keyTabFile, principal);
        }
    }
    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
        try {
            String currentUser = Utils.getUGI().getShortUserName();
            // do not do impersonation in CLI mode
            if (!currentUser.equals(System.getProperty("user.name"))) {
                LOG.info("Attempting impersonation of " + currentUser);
                addProxyUser(currentUser);
            }
        } catch (Exception e) {
            String msg = "Cannot obtain username: " + e;
            throw new IllegalStateException(msg, e);
        }
    }

    String regStr = conf.get("spark.kryo.registrator");
    if (HIVE_KRYO_REG_NAME.equals(regStr)) {
        addJars(SparkClientUtilities.findKryoRegistratorJar(hiveConf));
    }

    addPropertiesFile(properties.getAbsolutePath());
    addClass(RemoteDriver.class.getName());

    String jar = "spark-internal";
    if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
        jar = SparkContext.jarOfClass(this.getClass()).get();
    }
    addExecutableJar(jar);

    addAppArg(RemoteDriver.REMOTE_DRIVER_HOST_CONF);
    addAppArg(serverAddress);
    addAppArg(RemoteDriver.REMOTE_DRIVER_PORT_CONF);
    addAppArg(serverPort);

    //hive.spark.* keys are passed down to the RemoteDriver via REMOTE_DRIVER_CONF
    // so that they are not used in sparkContext but only in remote driver,
    //as --properties-file contains the spark.* keys that are meant for SparkConf object.
    for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
        String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
        addAppArg(RemoteDriver.REMOTE_DRIVER_CONF);
        addAppArg(String.format("%s=%s", hiveSparkConfKey, value));
    }

    return launchDriver(isTesting, rpcServer, clientId);
}

From source file:org.apache.hive.spark.client.SparkClientImpl.java

private Thread startDriver(final RpcServer rpcServer, final String clientId, final String secret)
        throws IOException {
    Runnable runnable;
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());

    if (conf.containsKey(SparkClientFactory.CONF_KEY_IN_PROCESS)) {
        // Mostly for testing things quickly. Do not do this in production.
                // When invoked in-process, it inherits the environment variables of the parent.
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        runnable = new Runnable() {
            @Override
            public void run() {
                List<String> args = Lists.newArrayList();
                args.add("--remote-host");
                args.add(serverAddress);
                args.add("--remote-port");
                args.add(serverPort);
                args.add("--client-id");
                args.add(clientId);
                args.add("--secret");
                args.add(secret);

                for (Map.Entry<String, String> e : conf.entrySet()) {
                    args.add("--conf");
                    args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
                }
                try {
                    RemoteDriver.main(args.toArray(new String[args.size()]));
                } catch (Exception e) {
                    LOG.error("Error running driver.", e);
                }
            }
        };
    } else {
        // If a Spark installation is provided, use the spark-submit script. Otherwise, call the
        // SparkSubmit class directly, which has some caveats (like having to provide a proper
        // version of Guava on the classpath depending on the deploy mode).
        String sparkHome = Strings.emptyToNull(conf.get(SPARK_HOME_KEY));
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getenv(SPARK_HOME_ENV));
        }
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getProperty(SPARK_HOME_KEY));
        }
        String sparkLogDir = conf.get("hive.spark.log.dir");
        if (sparkLogDir == null) {
            if (sparkHome == null) {
                sparkLogDir = "./target/";
            } else {
                sparkLogDir = sparkHome + "/logs/";
            }
        }

        String osxTestOpts = "";
        if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
            osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
        }

        String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(DRIVER_OPTS_KEY));
        String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));

        // Create a file with all the job properties to be read by spark-submit. Change the
        // file's permissions so that only the owner can read it. This avoids having the
        // connection secret show up in the child process's command line.
        File properties = File.createTempFile("spark-submit.", ".properties");
        if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
            throw new IOException("Cannot change permissions of job properties file.");
        }
        properties.deleteOnExit();

        Properties allProps = new Properties();
        // first load the defaults from spark-defaults.conf if available
        try {
            URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader()
                    .getResource("spark-defaults.conf");
            if (sparkDefaultsUrl != null) {
                LOG.info("Loading spark defaults: " + sparkDefaultsUrl);
                allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
            }
        } catch (Exception e) {
            String msg = "Exception trying to load spark-defaults.conf: " + e;
            throw new IOException(msg, e);
        }
        // then load the SparkClientImpl config
        for (Map.Entry<String, String> e : conf.entrySet()) {
            allProps.put(e.getKey(), conf.get(e.getKey()));
        }
        allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
        allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
        allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
        allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);

        String isTesting = conf.get("spark.testing");
        if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
            String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
            if (!hiveHadoopTestClasspath.isEmpty()) {
                String extraDriverClasspath = Strings
                        .nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
                if (extraDriverClasspath.isEmpty()) {
                    allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator)
                            ? extraDriverClasspath
                            : extraDriverClasspath + File.pathSeparator;
                    allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
                }

                String extraExecutorClasspath = Strings
                        .nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
                if (extraExecutorClasspath.isEmpty()) {
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator)
                            ? extraExecutorClasspath
                            : extraExecutorClasspath + File.pathSeparator;
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
                }
            }
        }

        Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
        try {
            allProps.store(writer, "Spark Context configuration");
        } finally {
            writer.close();
        }

        // Define how to pass options to the child process. If launching in client (or local)
        // mode, the driver options need to be passed directly on the command line. Otherwise,
        // SparkSubmit will take care of that for us.
        String master = conf.get("spark.master");
        Preconditions.checkArgument(master != null, "spark.master is not defined.");
        String deployMode = conf.get("spark.submit.deployMode");

        List<String> argv = Lists.newLinkedList();

        if (sparkHome != null) {
            argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
        } else {
            LOG.info("No spark.home provided, calling SparkSubmit directly.");
            argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath());

            if (master.startsWith("local") || master.startsWith("mesos")
                    || SparkClientUtilities.isYarnClientMode(master, deployMode)
                    || master.startsWith("spark")) {
                String mem = conf.get("spark.driver.memory");
                if (mem != null) {
                    argv.add("-Xms" + mem);
                    argv.add("-Xmx" + mem);
                }

                String cp = conf.get("spark.driver.extraClassPath");
                if (cp != null) {
                    argv.add("-classpath");
                    argv.add(cp);
                }

                String libPath = conf.get("spark.driver.extraLibPath");
                if (libPath != null) {
                    argv.add("-Djava.library.path=" + libPath);
                }

                String extra = conf.get(DRIVER_OPTS_KEY);
                if (extra != null) {
                    for (String opt : extra.split("[ ]")) {
                        if (!opt.trim().isEmpty()) {
                            argv.add(opt.trim());
                        }
                    }
                }
            }

            argv.add("org.apache.spark.deploy.SparkSubmit");
        }

        if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
            String executorCores = conf.get("spark.executor.cores");
            if (executorCores != null) {
                argv.add("--executor-cores");
                argv.add(executorCores);
            }

            String executorMemory = conf.get("spark.executor.memory");
            if (executorMemory != null) {
                argv.add("--executor-memory");
                argv.add(executorMemory);
            }

            String numOfExecutors = conf.get("spark.executor.instances");
            if (numOfExecutors != null) {
                argv.add("--num-executors");
                argv.add(numOfExecutors);
            }
        }
        // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh
        // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive can only support doAs or
        // delegation token renewal, but not both. Since doAs is the more common case, if both
        // are needed we choose to favor doAs. So when doAs is enabled we use the kinit command;
        // otherwise we pass the principal/keytab to Spark to support token renewal for
        // long-running applications.
        if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
            String principal = SecurityUtil
                    .getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
            String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
            if (StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(keyTabFile)) {
                if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
                    List<String> kinitArgv = Lists.newLinkedList();
                    kinitArgv.add("kinit");
                    kinitArgv.add(principal);
                    kinitArgv.add("-k");
                    kinitArgv.add("-t");
                    kinitArgv.add(keyTabFile + ";");
                    kinitArgv.addAll(argv);
                    argv = kinitArgv;
                } else {
                    // if doAs is not enabled, we pass the principal/keytab to spark-submit in order to
                    // support the possible delegation token renewal in Spark
                    argv.add("--principal");
                    argv.add(principal);
                    argv.add("--keytab");
                    argv.add(keyTabFile);
                }
            }
        }
        if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
            try {
                String currentUser = Utils.getUGI().getShortUserName();
                // do not do impersonation in CLI mode
                if (!currentUser.equals(System.getProperty("user.name"))) {
                    LOG.info("Attempting impersonation of " + currentUser);
                    argv.add("--proxy-user");
                    argv.add(currentUser);
                }
            } catch (Exception e) {
                String msg = "Cannot obtain username: " + e;
                throw new IllegalStateException(msg, e);
            }
        }

        argv.add("--properties-file");
        argv.add(properties.getAbsolutePath());
        argv.add("--class");
        argv.add(RemoteDriver.class.getName());

        String jar = "spark-internal";
        if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
            jar = SparkContext.jarOfClass(this.getClass()).get();
        }
        argv.add(jar);

        argv.add("--remote-host");
        argv.add(serverAddress);
        argv.add("--remote-port");
        argv.add(serverPort);

        //hive.spark.* keys are passed down to the RemoteDriver via --conf,
        //as --properties-file contains the spark.* keys that are meant for SparkConf object.
        for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
            String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
            argv.add("--conf");
            argv.add(String.format("%s=%s", hiveSparkConfKey, value));
        }

        String cmd = Joiner.on(" ").join(argv);
        LOG.info("Running client driver with argv: {}", cmd);
        ProcessBuilder pb = new ProcessBuilder("sh", "-c", cmd);

        // Prevent hive configurations from being visible in Spark.
        pb.environment().remove("HIVE_HOME");
        pb.environment().remove("HIVE_CONF_DIR");
        // Add credential provider password to the child process's environment
        // In case of Spark the credential provider location is provided in the jobConf when the job is submitted
        String password = getSparkJobCredentialProviderPassword();
        if (password != null) {
            pb.environment().put(Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, password);
        }
        if (isTesting != null) {
            pb.environment().put("SPARK_TESTING", isTesting);
        }

        final Process child = pb.start();
        String threadName = Thread.currentThread().getName();
        final List<String> childErrorLog = Collections.synchronizedList(new ArrayList<String>());
        redirect("RemoteDriver-stdout-redir-" + threadName, new Redirector(child.getInputStream()));
        redirect("RemoteDriver-stderr-redir-" + threadName,
                new Redirector(child.getErrorStream(), childErrorLog));

        runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    int exitCode = child.waitFor();
                    if (exitCode != 0) {
                        StringBuilder errStr = new StringBuilder();
                        synchronized (childErrorLog) {
                            Iterator iter = childErrorLog.iterator();
                            while (iter.hasNext()) {
                                errStr.append(iter.next());
                                errStr.append('\n');
                            }
                        }

                        LOG.warn("Child process exited with code {}", exitCode);
                        rpcServer.cancelClient(clientId,
                                "Child process (spark-submit) exited before connecting back with error log "
                                        + errStr.toString());
                    }
                } catch (InterruptedException ie) {
                    LOG.warn(
                            "Thread waiting on the child process (spark-submit) is interrupted, killing the child process.");
                    rpcServer.cancelClient(clientId,
                            "Thread waiting on the child porcess (spark-submit) is interrupted");
                    Thread.interrupted();
                    child.destroy();
                } catch (Exception e) {
                    String errMsg = "Exception while waiting for child process (spark-submit)";
                    LOG.warn(errMsg, e);
                    rpcServer.cancelClient(clientId, errMsg);
                }
            }
        };
    }

    Thread thread = new Thread(runnable);
    thread.setDaemon(true);
    thread.setName("Driver");
    thread.start();
    return thread;
}

From source file:org.glowroot.agent.weaving.AnalyzedWorld.java

private AnalyzedClass createAnalyzedClass(String className, @Nullable ClassLoader loader)
        throws ClassNotFoundException, IOException {
    String path = ClassNames.toInternalName(className) + ".class";
    URL url;
    if (loader == null) {
        // null loader means the bootstrap class loader
        url = ClassLoader.getSystemResource(path);
    } else {
        url = loader.getResource(path);
        if (url != null) {
            AnalyzedClass parentLoaderAnalyzedClass = tryToReuseFromParentLoader(className, loader, path, url);
            if (parentLoaderAnalyzedClass != null) {
                return parentLoaderAnalyzedClass;
            }
        }
    }
    if (url == null) {
        // what follows is just a best attempt in the sort-of-rare case when a custom class
        // loader does not expose .class file contents via getResource(), e.g.
        // org.codehaus.groovy.runtime.callsite.CallSiteClassLoader
        return createAnalyzedClassPlanB(className, loader);
    }
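    // Read the raw .class bytes from the URL exposed by the class loader.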
    byte[] bytes = Resources.toByteArray(url);
    List<Advice> advisors = mergeInstrumentationAnnotations(this.advisors.get(), bytes, loader, className);
    ThinClassVisitor accv = new ThinClassVisitor();
    new ClassReader(bytes).accept(accv, ClassReader.SKIP_FRAMES + ClassReader.SKIP_CODE);
    // passing noLongerNeedToWeaveMainMethods=true since not really weaving bytecode here
    ClassAnalyzer classAnalyzer = new ClassAnalyzer(accv.getThinClass(), advisors, shimTypes, mixinTypes,
            loader, this, null, bytes, null, true);
    classAnalyzer.analyzeMethods();
    return classAnalyzer.getAnalyzedClass();
}

From source file:org.glowroot.weaving.AnalyzedWorld.java

private AnalyzedClass createAnalyzedClass(String className, @Nullable ClassLoader loader)
        throws ClassNotFoundException, IOException {
    String path = ClassNames.toInternalName(className) + ".class";
    URL url;
    if (loader == null) {
        // null loader means the bootstrap class loader
        url = ClassLoader.getSystemResource(path);
    } else {
        url = loader.getResource(path);
        if (url != null) {
            AnalyzedClass parentLoaderAnalyzedClass = tryToReuseFromParentLoader(className, loader, path, url);
            if (parentLoaderAnalyzedClass != null) {
                return parentLoaderAnalyzedClass;
            }
        }
    }
    if (url == null && extraBootResourceFinder != null) {
        url = extraBootResourceFinder.findResource(path);
    }
    if (url == null) {
        // what follows is just a best attempt in the sort-of-rare case when a custom class
        // loader does not expose .class file contents via getResource(), e.g.
        // org.codehaus.groovy.runtime.callsite.CallSiteClassLoader
        return createAnalyzedClassPlanB(className, loader);
    }
    AnalyzingClassVisitor cv = new AnalyzingClassVisitor(advisors.get(), shimTypes, mixinTypes, loader, this,
            null);
    byte[] bytes = Resources.toByteArray(url);
    ClassReader cr = new ClassReader(bytes);
    try {
        cr.accept(cv, ClassReader.SKIP_CODE);
    } catch (ShortCircuitException e) {
        // this is ok, in either case analyzed class is now available
    }
    AnalyzedClass analyzedClass = cv.getAnalyzedClass();
    checkNotNull(analyzedClass); // analyzedClass is non-null after visiting the class
    return analyzedClass;
}

From source file:org.glowroot.local.ui.HttpServerHandler.java

private FullHttpResponse handleStaticResource(String path, HttpRequest request) throws IOException {
    URL url = getSecureUrlForPath(RESOURCE_BASE + path);
    if (url == null) {
        logger.warn("unexpected path: {}", path);
        return new DefaultFullHttpResponse(HTTP_1_1, NOT_FOUND);
    }
    Date expires = getExpiresForPath(path);
    if (request.headers().contains(Names.IF_MODIFIED_SINCE) && expires == null) {
        // all static resources without explicit expires are versioned and can be safely
        // cached forever
        return new DefaultFullHttpResponse(HTTP_1_1, NOT_MODIFIED);
    }
    ByteBuf content = Unpooled.copiedBuffer(Resources.toByteArray(url));
    FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, OK, content);
    if (expires != null) {
        response.headers().add(Names.EXPIRES, expires);
    } else {
        response.headers().add(Names.LAST_MODIFIED, new Date(0));
        response.headers().add(Names.EXPIRES, new Date(System.currentTimeMillis() + TEN_YEARS));
    }
    int extensionStartIndex = path.lastIndexOf('.');
    checkState(extensionStartIndex != -1, "found path under %s with no extension: %s", RESOURCE_BASE, path);
    String extension = path.substring(extensionStartIndex + 1);
    MediaType mediaType = mediaTypes.get(extension);
    checkNotNull(mediaType, "found extension under %s with no media type: %s", RESOURCE_BASE, extension);
    response.headers().add(Names.CONTENT_TYPE, mediaType);
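    // Note: the resource is read a second time here solely to compute the Content-Length header.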
    response.headers().add(Names.CONTENT_LENGTH, Resources.toByteArray(url).length);
    return response;
}

From source file:org.dyndns.jkiddo.dmp.util.DmapUtil.java

public static byte[] uriTobuffer(final URI uri) throws IOException {
    return Resources.toByteArray(uri.toURL());
}

From source file:org.glowroot.agent.live.ClasspathCache.java

private static byte[] getBytes(Location location, String className) throws IOException {
    String name = className.replace('.', '/') + ".class";
    File dir = location.directory();
    File jarFile = location.jarFile();
    if (dir != null) {
        URI uri = new File(dir, name).toURI();
        return Resources.toByteArray(uri.toURL());
    } else if (jarFile != null) {
        String jarFileInsideJarFile = location.jarFileInsideJarFile();
        String directoryInsideJarFile = location.directoryInsideJarFile();
        if (jarFileInsideJarFile == null && directoryInsideJarFile == null) {
            return getBytesFromJarFile(name, jarFile);
        } else if (jarFileInsideJarFile != null) {
            return getBytesFromJarFileInsideJarFile(name, jarFile, jarFileInsideJarFile);
        } else {
            // directoryInsideJarFile is not null based on above conditionals
            checkNotNull(directoryInsideJarFile);
            return getBytesFromDirectoryInsideJarFile(name, jarFile, directoryInsideJarFile);
        }
    } else {
        throw new AssertionError("Both Location directory() and jarFile() are null");
    }
}

From source file:org.glowroot.agent.live.ClasspathCache.java

private static byte[] getBytesFromJarFile(String name, File jarFile) throws IOException {
    String path = jarFile.getPath();
    URI uri;
    try {
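        // Build a "jar:file:<path>!/<entry>" URL so the class file entry inside the jar can be read directly.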
        uri = new URI("jar", "file:" + path + "!/" + name, "");
    } catch (URISyntaxException e) {
        // this is a programmatic error
        throw new RuntimeException(e);
    }
    return Resources.toByteArray(uri.toURL());
}

From source file:org.glowroot.agent.live.ClasspathCache.java

private static byte[] getBytesFromDirectoryInsideJarFile(String name, File jarFile,
        String directoryInsideJarFile) throws IOException {
    String path = jarFile.getPath();
    URI uri;
    try {
        uri = new URI("jar", "file:" + path + "!/" + directoryInsideJarFile + name, "");
    } catch (URISyntaxException e) {
        // this is a programmatic error
        throw new RuntimeException(e);
    }
    return Resources.toByteArray(uri.toURL());
}