Example usage for java.lang.Thread.interrupted()

List of usage examples for java.lang.Thread.interrupted()

Introduction

On this page you can find example usages of java.lang.Thread.interrupted().

Prototype

public static boolean interrupted() 

Document

Tests whether the current thread has been interrupted. The interrupted status of the thread is cleared by this method.
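
A minimal, self-contained sketch (not taken from any of the projects below) illustrating the defining property of Thread.interrupted(): unlike Thread.currentThread().isInterrupted(), it clears the interrupted status as a side effect, so a second call returns false unless the thread is interrupted again.

public class InterruptedStatusDemo {
    public static void main(String[] args) {
        // Set the current thread's interrupt status.
        Thread.currentThread().interrupt();

        // First call reports the pending interrupt and clears the flag.
        System.out.println(Thread.interrupted());                   // true
        // The flag was cleared by the previous call.
        System.out.println(Thread.interrupted());                   // false

        // isInterrupted() tests the flag without clearing it.
        Thread.currentThread().interrupt();
        System.out.println(Thread.currentThread().isInterrupted()); // true
        System.out.println(Thread.currentThread().isInterrupted()); // true
    }
}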

Usage

From source file:com.ksc.http.KSCHttpClient.java

/**
 * Determine if an interrupted exception is caused by the client execution timer interrupting the current thread or
 * some other task interrupting the thread for another purpose.
 *
 * @param executionContext the execution context of the current request
 * @param e                the {@link InterruptedException} to translate
 * @return {@link ClientExecutionTimeoutException} if the {@link InterruptedException} was caused by the {@link
 * ClientExecutionTimer}. Otherwise re-interrupts the current thread and returns a {@link KscClientException}
 * wrapping an {@link InterruptedException}
 */
private RuntimeException handleInterruptedException(ExecutionContext executionContext, InterruptedException e) {
    if (e instanceof SdkInterruptedException) {
        if (((SdkInterruptedException) e).getResponse() != null) {
            ((SdkInterruptedException) e).getResponse().getHttpResponse().getHttpRequest().abort();
        }
    }
    if (executionContext.getClientExecutionTrackerTask().hasTimeoutExpired()) {
        // Clear the interrupt status
        Thread.interrupted();
        return new ClientExecutionTimeoutException();
    } else {
        Thread.currentThread().interrupt();
        return new KscClientException(e);
    }
}
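
The method above makes a deliberate distinction: when the client execution timer caused the interrupt, the status is cleared with Thread.interrupted() because the timeout is already reported through the returned exception; for any other interrupt the status is restored with Thread.currentThread().interrupt() so code further up the stack can still observe it. A hedged sketch of a typical call site (executeOneRequest and request are illustrative names, not actual KSC client members):

try {
    return executeOneRequest(request, executionContext);
} catch (InterruptedException e) {
    // Becomes either ClientExecutionTimeoutException (timer fired, interrupt flag
    // cleared) or KscClientException (flag restored for the caller to observe).
    throw handleInterruptedException(executionContext, e);
}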

From source file:ei.ne.ke.cassandra.cql3.AstyanaxCql3Repository.java

/**
 * {@inheritDoc}
 */
@Override
public synchronized void delete(Iterable<? extends T> entities) {
    int count = Iterables.size(entities);
    List<Callable<List<? extends T>>> todo = Lists.newArrayListWithExpectedSize(count / batchSize);
    for (Iterable<? extends T> partition : Iterables.partition(entities, batchSize)) {
        todo.add(new Deleter(partition));
    }
    try {
        List<Future<List<? extends T>>> futureResults = executorService.invokeAll(todo);
        waitUntilCompletion(futureResults);
    } catch (InterruptedException e) {
        // Swallow the interrupt: clear any residual interrupt status instead of propagating it.
        Thread.interrupted();
    }
}

From source file:org.apache.jackrabbit.oak.segment.CompactionAndCleanupIT.java

/**
 * Regression test for OAK-2192 testing for mixed segments. This test does not
 * cover OAK-3348. I.e. it does not assert the segment graph is free of cross
 * gc generation references.
 */
@Test
public void testMixedSegments() throws Exception {
    FileStore store = fileStoreBuilder(getFileStoreFolder()).withMaxFileSize(2).withMemoryMapping(true)
            .withGCOptions(defaultGCOptions().setForceAfterFail(true)).build();
    final SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(store).build();
    final AtomicBoolean compactionSuccess = new AtomicBoolean(true);

    NodeBuilder root = nodeStore.getRoot().builder();
    createNodes(root.setChildNode("test"), 10, 3);
    nodeStore.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    final Set<UUID> beforeSegments = new HashSet<UUID>();
    collectSegments(store.getReader(), store.getRevisions(), beforeSegments);

    final AtomicReference<Boolean> run = new AtomicReference<Boolean>(true);
    final List<String> failedCommits = newArrayList();
    Thread[] threads = new Thread[10];
    for (int k = 0; k < threads.length; k++) {
        final int threadId = k;
        threads[k] = new Thread(new Runnable() {
            @Override
            public void run() {
                for (int j = 0; run.get(); j++) {
                    String nodeName = "b-" + threadId + "," + j;
                    try {
                        NodeBuilder root = nodeStore.getRoot().builder();
                        root.setChildNode(nodeName);
                        nodeStore.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY);
                        Thread.sleep(5);
                    } catch (CommitFailedException e) {
                        failedCommits.add(nodeName);
                    } catch (InterruptedException e) {
                        // Stop this writer thread; clear any residual interrupt status before exiting.
                        Thread.interrupted();
                        break;
                    }
                }
            }
        });
        threads[k].start();
    }
    store.compact();
    run.set(false);
    for (Thread t : threads) {
        t.join();
    }
    store.flush();

    assumeTrue("Failed to acquire compaction lock", compactionSuccess.get());
    assertTrue("Failed commits: " + failedCommits, failedCommits.isEmpty());

    Set<UUID> afterSegments = new HashSet<UUID>();
    collectSegments(store.getReader(), store.getRevisions(), afterSegments);
    try {
        for (UUID u : beforeSegments) {
            assertFalse("Mixed segments found: " + u, afterSegments.contains(u));
        }
    } finally {
        store.close();
    }
}

From source file:com.cloudbees.jenkins.plugins.bitbucket.client.BitbucketCloudApiClient.java

/**
 * {@inheritDoc}
 */
@NonNull
@Override
public List<BitbucketRepositoryHook> getWebHooks() throws IOException, InterruptedException {
    List<BitbucketRepositoryHook> repositoryHooks = new ArrayList<>();
    int pageNumber = 1;
    UriTemplate template = UriTemplate.fromTemplate(REPO_URL_TEMPLATE + "/hooks{?page,pagelen}")
            .set("owner", owner).set("repo", repositoryName).set("page", pageNumber).set("pagelen", 50);
    String url = template.expand();
    try {
        String response = getRequest(url);
        BitbucketRepositoryHooks page = parsePaginatedRepositoryHooks(response);
        repositoryHooks.addAll(page.getValues());
        while (page.getNext() != null) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }
            pageNumber++;
            response = getRequest(url = template.set("page", pageNumber).expand());
            page = parsePaginatedRepositoryHooks(response);
            repositoryHooks.addAll(page.getValues());
        }
        return repositoryHooks;
    } catch (IOException e) {
        throw new IOException("I/O error when parsing response from URL: " + url, e);
    }
}

From source file:com.dlya.facturews.DlyaPdfExporter2.java

/**
 * Exports the filled report(s) to the supplied output stream as a PDF document.
 */
protected void exportReportToStream(OutputStream os) throws JRException {
    //ByteArrayOutputStream baos = new ByteArrayOutputStream();

    document = new Document(new Rectangle(jasperPrint.getPageWidth(), jasperPrint.getPageHeight()));

    imageTesterDocument = new Document(new Rectangle(10, //jasperPrint.getPageWidth(),
            10 //jasperPrint.getPageHeight()
    ));

    boolean closeDocuments = true;
    try {
        pdfWriter = PdfWriter.getInstance(document, os);
        pdfWriter.setCloseStream(false);

        if (pdfVersion != null) {
            pdfWriter.setPdfVersion(pdfVersion.charValue());
        }
        if (isCompressed) {
            pdfWriter.setFullCompression();
        }
        if (isEncrypted) {
            pdfWriter.setEncryption(is128BitKey, userPassword, ownerPassword, permissions);
        }

        if (printScaling != null) {
            if (JRPdfExporterParameter.PRINT_SCALING_DEFAULT.equals(printScaling)) {
                //pdfWriter.addViewerPreference(PdfName.PRINTSCALING, PdfName.APPDEFAULT);
            } else if (JRPdfExporterParameter.PRINT_SCALING_NONE.equals(printScaling)) {
                //pdfWriter.addViewerPreference(PdfName.PRINTSCALING, PdfName.NONE);
            }
        }

        // Add meta-data parameters to generated PDF document
        // mtclough@users.sourceforge.net 2005-12-05
        String title = (String) parameters.get(JRPdfExporterParameter.METADATA_TITLE);
        if (title != null) {
            document.addTitle(title);
        }
        String author = (String) parameters.get(JRPdfExporterParameter.METADATA_AUTHOR);
        if (author != null) {
            document.addAuthor(author);
        }
        String subject = (String) parameters.get(JRPdfExporterParameter.METADATA_SUBJECT);
        if (subject != null) {
            document.addSubject(subject);
        }
        String keywords = (String) parameters.get(JRPdfExporterParameter.METADATA_KEYWORDS);
        if (keywords != null) {
            document.addKeywords(keywords);
        }
        String creator = (String) parameters.get(JRPdfExporterParameter.METADATA_CREATOR);
        if (creator != null) {
            document.addCreator(creator);
        } else {
            document.addCreator("JasperReports (" + jasperPrint.getName() + ")");
        }

        // BEGIN: PDF/A support
        String pdfaConformance = getStringParameter(JRPdfExporterParameter.PDFA_CONFORMANCE,
                JRPdfExporterParameter.PROPERTY_PDFA_CONFORMANCE);
        boolean gotPdfa = false;
        if (pdfaConformance != null
                && !JRPdfExporterParameter.PDFA_CONFORMANCE_NONE.equalsIgnoreCase(pdfaConformance)) {
            if (JRPdfExporterParameter.PDFA_CONFORMANCE_1A.equalsIgnoreCase(pdfaConformance)) {
                //pdfWriter.setPDFXConformance(PdfWriter.PDFA1A);
                gotPdfa = true;
            } else if (JRPdfExporterParameter.PDFA_CONFORMANCE_1B.equalsIgnoreCase(pdfaConformance)) {
                //pdfWriter.setPDFXConformance(PdfWriter.PDFA1B);
                gotPdfa = true;
            }
        }

        if (gotPdfa) {

            //pdfWriter.createXmpMetadata();
        } else {
            //pdfWriter.setRgbTransparencyBlending(true);
        }
        // END: PDF/A support

        document.open();

        // BEGIN: PDF/A support
        if (gotPdfa) {
            String iccProfilePath = getStringParameter(JRPdfExporterParameter.PDFA_ICC_PROFILE_PATH,
                    JRPdfExporterParameter.PROPERTY_PDFA_ICC_PROFILE_PATH);
            if (iccProfilePath != null) {
                PdfDictionary pdfDictionary = new PdfDictionary(PdfName.OUTPUTINTENT);
                pdfDictionary.put(PdfName.OUTPUTCONDITIONIDENTIFIER, new PdfString("sRGB IEC61966-2.1"));
                pdfDictionary.put(PdfName.INFO, new PdfString("sRGB IEC61966-2.1"));
                //pdfDictionary.put(PdfName.S, PdfName.GTS_PDFA1);

                InputStream iccIs = RepositoryUtil.getInstance(jasperReportsContext)
                        .getInputStreamFromLocation(iccProfilePath);
                //PdfICCBased pdfICCBased = new PdfICCBased(ICC_Profile.getInstance(iccIs));
                //pdfICCBased.remove(PdfName.ALTERNATE);
                //pdfDictionary.put(PdfName.DESTOUTPUTPROFILE, pdfWriter.addToBody(pdfICCBased).getIndirectReference());

                pdfWriter.getExtraCatalog().put(PdfName.OUTPUTINTENTS, new PdfArray(pdfDictionary));
            } else {
                throw new JRPdfaIccProfileNotFoundException();
            }
        }
        // END: PDF/A support

        if (pdfJavaScript != null) {
            pdfWriter.addJavaScript(pdfJavaScript);
        }

        pdfContentByte = pdfWriter.getDirectContent();

        //tagHelper.init(pdfContentByte);

        initBookmarks();

        PdfWriter imageTesterPdfWriter = PdfWriter.getInstance(imageTesterDocument, new NullOutputStream() // discard the output
        );
        imageTesterDocument.open();
        imageTesterDocument.newPage();
        imageTesterPdfContentByte = imageTesterPdfWriter.getDirectContent();
        imageTesterPdfContentByte.setLiteral("\n");

        for (reportIndex = 0; reportIndex < jasperPrintList.size(); reportIndex++) {
            setJasperPrint(jasperPrintList.get(reportIndex));
            loadedImagesMap = new HashMap<Renderable, com.lowagie.text.Image>();

            setPageSize(null);

            BorderOffset.setLegacy(JRPropertiesUtil.getInstance(jasperReportsContext)
                    .getBooleanProperty(jasperPrint, BorderOffset.PROPERTY_LEGACY_BORDER_OFFSET, false));

            boolean sizePageToContent = JRPropertiesUtil.getInstance(jasperReportsContext).getBooleanProperty(
                    jasperPrint, JRPdfExporterParameter.PROPERTY_SIZE_PAGE_TO_CONTENT, false);

            List<JRPrintPage> pages = jasperPrint.getPages();
            if (pages != null && pages.size() > 0) {
                if (isModeBatch) {
                    document.newPage();

                    if (isCreatingBatchModeBookmarks) {
                        //add a new level to our outline for this report
                        addBookmark(0, jasperPrint.getName(), 0, 0);
                    }

                    startPageIndex = 0;
                    endPageIndex = pages.size() - 1;
                }

                for (int pageIndex = startPageIndex; pageIndex <= endPageIndex; pageIndex++) {
                    if (Thread.interrupted()) {
                        throw new JRException("Current thread interrupted.");
                    }

                    JRPrintPage page = pages.get(pageIndex);

                    if (sizePageToContent) {
                        setPageSize(page);
                    }

                    document.newPage();

                    pdfContentByte = pdfWriter.getDirectContent();

                    pdfContentByte.setLineCap(2);//PdfContentByte.LINE_CAP_PROJECTING_SQUARE since iText 1.02b

                    writePageAnchor(pageIndex);

                    /*   */
                    exportPage(page);
                }
            } else {
                document.newPage();
                pdfContentByte = pdfWriter.getDirectContent();
                pdfContentByte.setLiteral("\n");
            }
        }

        closeDocuments = false;
        document.close();
        imageTesterDocument.close();
    } catch (DocumentException e) {
        throw new JRException("PDF Document error : " + jasperPrint.getName(), e);
    } catch (IOException e) {
        throw new JRException("Error generating PDF report : " + jasperPrint.getName(), e);
    } finally {
        if (closeDocuments) //only on exception
        {
            try {
                document.close();
            } catch (Exception e) {
                // ignore, let the original exception propagate
            }

            try {
                imageTesterDocument.close();
            } catch (Exception e) {
                // ignore, let the original exception propagate
            }
        }
    }

    //return os.toByteArray();
}

From source file:org.micromanager.plugins.magellan.imagedisplay.DisplayOverlayer.java

private void addConvexHull(Overlay overlay) throws InterruptedException {
    //draw convex hull
    Vector2D[] hullPoints = display_.getCurrentSurface().getConvexHullPoints();

    LongPoint lastPoint = null, firstPoint = null;
    for (Vector2D v : hullPoints) {
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }
        //convert to image coords
        LongPoint p = display_.imageCoordsFromStageCoords(v.getX(), v.getY());
        if (lastPoint != null) {
            Line l = new Line(p.x_, p.y_, lastPoint.x_, lastPoint.y_);
            l.setStrokeColor(CONVEX_HULL_COLOR);
            overlay.add(l);
        } else {
            firstPoint = p;
        }
        lastPoint = p;
    }
    //draw last connection         
    Line l = new Line(firstPoint.x_, firstPoint.y_, lastPoint.x_, lastPoint.y_);
    l.setStrokeColor(CONVEX_HULL_COLOR);
    overlay.add(l);
}

From source file:com.cyberway.issue.crawler.extractor.ExtractorHTML.java

/**
 * Run extractor.
 * This method is package visible to ease testing.
 * @param curi CrawlURI we're processing.
 * @param cs Sequence from underlying ReplayCharSequence. This
 * is TRANSIENT data. Make a copy if you want the data to live outside
 * of this extractor's lifetime.
 */
void extract(CrawlURI curi, CharSequence cs) {
    Matcher tags = TextUtils.getMatcher(RELEVANT_TAG_EXTRACTOR, cs);
    while (tags.find()) {
        if (Thread.interrupted()) {
            break;
        }
        if (tags.start(8) > 0) {
            // comment match
            // for now do nothing
        } else if (tags.start(7) > 0) {
            // <meta> match
            int start = tags.start(5);
            int end = tags.end(5);
            assert start >= 0 : "Start is: " + start + ", " + curi;
            assert end >= 0 : "End is :" + end + ", " + curi;
            if (processMeta(curi, cs.subSequence(start, end))) {

                // meta tag included NOFOLLOW; abort processing
                break;
            }
        } else if (tags.start(5) > 0) {
            // generic <whatever> match
            int start5 = tags.start(5);
            int end5 = tags.end(5);
            assert start5 >= 0 : "Start is: " + start5 + ", " + curi;
            assert end5 >= 0 : "End is :" + end5 + ", " + curi;
            int start6 = tags.start(6);
            int end6 = tags.end(6);
            assert start6 >= 0 : "Start is: " + start6 + ", " + curi;
            assert end6 >= 0 : "End is :" + end6 + ", " + curi;
            processGeneralTag(curi, cs.subSequence(start6, end6), cs.subSequence(start5, end5));

        } else if (tags.start(1) > 0) {
            // <script> match
            int start = tags.start(1);
            int end = tags.end(1);
            assert start >= 0 : "Start is: " + start + ", " + curi;
            assert end >= 0 : "End is :" + end + ", " + curi;
            assert tags.end(2) >= 0 : "Tags.end(2) illegal " + tags.end(2) + ", " + curi;
            processScript(curi, cs.subSequence(start, end), tags.end(2) - start);

        } else if (tags.start(3) > 0) {
            // <style... match
            int start = tags.start(3);
            int end = tags.end(3);
            assert start >= 0 : "Start is: " + start + ", " + curi;
            assert end >= 0 : "End is :" + end + ", " + curi;
            assert tags.end(4) >= 0 : "Tags.end(4) illegal " + tags.end(4) + ", " + curi;
            processStyle(curi, cs.subSequence(start, end), tags.end(4) - start);
        }
    }
    TextUtils.recycleMatcher(tags);
}
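
Unlike the examples that throw, the extractor above polls Thread.interrupted() at the top of its matching loop and simply stops early. A condensed sketch of that cooperative-stop pattern (Chunk, chunks and processChunk are illustrative names, not part of the crawler):

for (Chunk chunk : chunks) {
    if (Thread.interrupted()) {
        // An interrupt was observed (and the flag cleared): abandon the rest of
        // the scan. Re-interrupt here if callers need to see the status.
        break;
    }
    processChunk(chunk);
}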

From source file:org.apache.hadoop.hbase.ipc.BlockingRpcConnection.java

/**
 * Initiates a call by sending the parameter to the remote server. Note: this is not called from
 * the Connection thread, but by other threads.
 * @see #readResponse()
 */
private void writeRequest(Call call) throws IOException {
    ByteBuffer cellBlock = this.rpcClient.cellBlockBuilder.buildCellBlock(this.codec, this.compressor,
            call.cells);
    CellBlockMeta cellBlockMeta;
    if (cellBlock != null) {
        cellBlockMeta = CellBlockMeta.newBuilder().setLength(cellBlock.limit()).build();
    } else {
        cellBlockMeta = null;
    }
    RequestHeader requestHeader = buildRequestHeader(call, cellBlockMeta);

    setupIOstreams();

    // Now we're going to write the call. We take the lock, then check that the connection
    // is still valid, and, if so we do the write to the socket. If the write fails, we don't
    // know where we stand, we have to close the connection.
    if (Thread.interrupted()) {
        throw new InterruptedIOException();
    }

    calls.put(call.id, call); // We put first as we don't want the connection to become idle.
    // from here, we do not throw any exception to upper layer as the call has been tracked in the
    // pending calls map.
    try {
        call.callStats.setRequestSizeBytes(write(this.out, requestHeader, call.param, cellBlock));
    } catch (IOException e) {
        closeConn(e);
        return;
    }
    notifyAll();
}
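
Checking the flag right before the blocking socket write lets the RPC layer fail fast instead of starting I/O while an interrupt is pending, and InterruptedIOException keeps the method's IOException-only contract. A minimal sketch of that conversion, assuming a hypothetical helper (not part of HBase) built on java.io types:

static void writeInterruptibly(OutputStream out, byte[] payload) throws IOException {
    // Consume a pending interrupt and surface it as an I/O failure, since this
    // code path only declares IOException (InterruptedIOException is a subclass).
    if (Thread.interrupted()) {
        throw new InterruptedIOException("interrupted before write");
    }
    out.write(payload);
    out.flush();
}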

From source file:com.ksc.http.KSCHttpClient.java

/**
 * Check if the thread has been interrupted. If so throw an {@link InterruptedException}. Long-running tasks should
 * periodically check whether the current thread has been interrupted and handle it appropriately.
 *
 * @param response Response to be closed before returning control to the caller to avoid leaking the connection.
 * @throws InterruptedException If thread has been interrupted
 */
private void checkInterrupted(Response<?> response) throws InterruptedException {
    if (Thread.interrupted()) {
        throw new SdkInterruptedException(response);
    }
}
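
The Javadoc above describes the intended usage: long-running work should call this helper periodically so a pending interrupt is noticed promptly and surfaced as an SdkInterruptedException carrying the response that needs to be cleaned up. A hedged sketch of such a polling loop (pollOnce, isComplete, pollIntervalMillis and the RequestT/OutputT types are illustrative, not the actual KSC request pipeline):

private Response<OutputT> executeWithPolling(RequestT request) throws InterruptedException {
    while (true) {
        Response<OutputT> response = pollOnce(request);  // one bounded unit of work
        if (isComplete(response)) {
            return response;
        }
        // Abort promptly (and let the handler close the response) if interrupted.
        checkInterrupted(response);
        Thread.sleep(pollIntervalMillis);
    }
}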

From source file:org.apache.hive.spark.client.SparkClientImpl.java

private Thread startDriver(final RpcServer rpcServer, final String clientId, final String secret)
        throws IOException {
    Runnable runnable;
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());

    if (conf.containsKey(SparkClientFactory.CONF_KEY_IN_PROCESS)) {
        // Mostly for testing things quickly. Do not do this in production.
        // when invoked in-process it inherits the environment variables of the parent
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        runnable = new Runnable() {
            @Override
            public void run() {
                List<String> args = Lists.newArrayList();
                args.add("--remote-host");
                args.add(serverAddress);
                args.add("--remote-port");
                args.add(serverPort);
                args.add("--client-id");
                args.add(clientId);
                args.add("--secret");
                args.add(secret);

                for (Map.Entry<String, String> e : conf.entrySet()) {
                    args.add("--conf");
                    args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
                }
                try {
                    RemoteDriver.main(args.toArray(new String[args.size()]));
                } catch (Exception e) {
                    LOG.error("Error running driver.", e);
                }
            }
        };
    } else {
        // If a Spark installation is provided, use the spark-submit script. Otherwise, call the
        // SparkSubmit class directly, which has some caveats (like having to provide a proper
        // version of Guava on the classpath depending on the deploy mode).
        String sparkHome = Strings.emptyToNull(conf.get(SPARK_HOME_KEY));
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getenv(SPARK_HOME_ENV));
        }
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getProperty(SPARK_HOME_KEY));
        }
        String sparkLogDir = conf.get("hive.spark.log.dir");
        if (sparkLogDir == null) {
            if (sparkHome == null) {
                sparkLogDir = "./target/";
            } else {
                sparkLogDir = sparkHome + "/logs/";
            }
        }

        String osxTestOpts = "";
        if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
            osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
        }

        String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(DRIVER_OPTS_KEY));
        String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));

        // Create a file with all the job properties to be read by spark-submit. Change the
        // file's permissions so that only the owner can read it. This avoids having the
        // connection secret show up in the child process's command line.
        File properties = File.createTempFile("spark-submit.", ".properties");
        if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
            throw new IOException("Cannot change permissions of job properties file.");
        }
        properties.deleteOnExit();

        Properties allProps = new Properties();
        // first load the defaults from spark-defaults.conf if available
        try {
            URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader()
                    .getResource("spark-defaults.conf");
            if (sparkDefaultsUrl != null) {
                LOG.info("Loading spark defaults: " + sparkDefaultsUrl);
                allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
            }
        } catch (Exception e) {
            String msg = "Exception trying to load spark-defaults.conf: " + e;
            throw new IOException(msg, e);
        }
        // then load the SparkClientImpl config
        for (Map.Entry<String, String> e : conf.entrySet()) {
            allProps.put(e.getKey(), conf.get(e.getKey()));
        }
        allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
        allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
        allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
        allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);

        String isTesting = conf.get("spark.testing");
        if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
            String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
            if (!hiveHadoopTestClasspath.isEmpty()) {
                String extraDriverClasspath = Strings
                        .nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
                if (extraDriverClasspath.isEmpty()) {
                    allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator)
                            ? extraDriverClasspath
                            : extraDriverClasspath + File.pathSeparator;
                    allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
                }

                String extraExecutorClasspath = Strings
                        .nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
                if (extraExecutorClasspath.isEmpty()) {
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator)
                            ? extraExecutorClasspath
                            : extraExecutorClasspath + File.pathSeparator;
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
                }
            }
        }

        Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
        try {
            allProps.store(writer, "Spark Context configuration");
        } finally {
            writer.close();
        }

        // Define how to pass options to the child process. If launching in client (or local)
        // mode, the driver options need to be passed directly on the command line. Otherwise,
        // SparkSubmit will take care of that for us.
        String master = conf.get("spark.master");
        Preconditions.checkArgument(master != null, "spark.master is not defined.");
        String deployMode = conf.get("spark.submit.deployMode");

        List<String> argv = Lists.newLinkedList();

        if (sparkHome != null) {
            argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
        } else {
            LOG.info("No spark.home provided, calling SparkSubmit directly.");
            argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath());

            if (master.startsWith("local") || master.startsWith("mesos")
                    || SparkClientUtilities.isYarnClientMode(master, deployMode)
                    || master.startsWith("spark")) {
                String mem = conf.get("spark.driver.memory");
                if (mem != null) {
                    argv.add("-Xms" + mem);
                    argv.add("-Xmx" + mem);
                }

                String cp = conf.get("spark.driver.extraClassPath");
                if (cp != null) {
                    argv.add("-classpath");
                    argv.add(cp);
                }

                String libPath = conf.get("spark.driver.extraLibPath");
                if (libPath != null) {
                    argv.add("-Djava.library.path=" + libPath);
                }

                String extra = conf.get(DRIVER_OPTS_KEY);
                if (extra != null) {
                    for (String opt : extra.split("[ ]")) {
                        if (!opt.trim().isEmpty()) {
                            argv.add(opt.trim());
                        }
                    }
                }
            }

            argv.add("org.apache.spark.deploy.SparkSubmit");
        }

        if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
            String executorCores = conf.get("spark.executor.cores");
            if (executorCores != null) {
                argv.add("--executor-cores");
                argv.add(executorCores);
            }

            String executorMemory = conf.get("spark.executor.memory");
            if (executorMemory != null) {
                argv.add("--executor-memory");
                argv.add(executorMemory);
            }

            String numOfExecutors = conf.get("spark.executor.instances");
            if (numOfExecutors != null) {
                argv.add("--num-executors");
                argv.add(numOfExecutors);
            }
        }
        // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh
        // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive can only support doAs or
        // delegation token renewal, but not both. Since doAs is the more common case, if both
        // are needed we choose to favor doAs. So when doAs is enabled we use the kinit command;
        // otherwise we pass the principal/keytab to Spark to support token renewal for
        // long-running applications.
        if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
            String principal = SecurityUtil
                    .getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
            String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
            if (StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(keyTabFile)) {
                if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
                    List<String> kinitArgv = Lists.newLinkedList();
                    kinitArgv.add("kinit");
                    kinitArgv.add(principal);
                    kinitArgv.add("-k");
                    kinitArgv.add("-t");
                    kinitArgv.add(keyTabFile + ";");
                    kinitArgv.addAll(argv);
                    argv = kinitArgv;
                } else {
                    // if doAs is not enabled, we pass the principal/keytab to spark-submit in order to
                    // support the possible delegation token renewal in Spark
                    argv.add("--principal");
                    argv.add(principal);
                    argv.add("--keytab");
                    argv.add(keyTabFile);
                }
            }
        }
        if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
            try {
                String currentUser = Utils.getUGI().getShortUserName();
                // do not do impersonation in CLI mode
                if (!currentUser.equals(System.getProperty("user.name"))) {
                    LOG.info("Attempting impersonation of " + currentUser);
                    argv.add("--proxy-user");
                    argv.add(currentUser);
                }
            } catch (Exception e) {
                String msg = "Cannot obtain username: " + e;
                throw new IllegalStateException(msg, e);
            }
        }

        argv.add("--properties-file");
        argv.add(properties.getAbsolutePath());
        argv.add("--class");
        argv.add(RemoteDriver.class.getName());

        String jar = "spark-internal";
        if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
            jar = SparkContext.jarOfClass(this.getClass()).get();
        }
        argv.add(jar);

        argv.add("--remote-host");
        argv.add(serverAddress);
        argv.add("--remote-port");
        argv.add(serverPort);

        //hive.spark.* keys are passed down to the RemoteDriver via --conf,
        //as --properties-file contains the spark.* keys that are meant for SparkConf object.
        for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
            String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
            argv.add("--conf");
            argv.add(String.format("%s=%s", hiveSparkConfKey, value));
        }

        String cmd = Joiner.on(" ").join(argv);
        LOG.info("Running client driver with argv: {}", cmd);
        ProcessBuilder pb = new ProcessBuilder("sh", "-c", cmd);

        // Prevent hive configurations from being visible in Spark.
        pb.environment().remove("HIVE_HOME");
        pb.environment().remove("HIVE_CONF_DIR");
        // Add credential provider password to the child process's environment
        // In case of Spark the credential provider location is provided in the jobConf when the job is submitted
        String password = getSparkJobCredentialProviderPassword();
        if (password != null) {
            pb.environment().put(Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, password);
        }
        if (isTesting != null) {
            pb.environment().put("SPARK_TESTING", isTesting);
        }

        final Process child = pb.start();
        String threadName = Thread.currentThread().getName();
        final List<String> childErrorLog = Collections.synchronizedList(new ArrayList<String>());
        redirect("RemoteDriver-stdout-redir-" + threadName, new Redirector(child.getInputStream()));
        redirect("RemoteDriver-stderr-redir-" + threadName,
                new Redirector(child.getErrorStream(), childErrorLog));

        runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    int exitCode = child.waitFor();
                    if (exitCode != 0) {
                        StringBuilder errStr = new StringBuilder();
                        synchronized (childErrorLog) {
                            Iterator iter = childErrorLog.iterator();
                            while (iter.hasNext()) {
                                errStr.append(iter.next());
                                errStr.append('\n');
                            }
                        }

                        LOG.warn("Child process exited with code {}", exitCode);
                        rpcServer.cancelClient(clientId,
                                "Child process (spark-submit) exited before connecting back with error log "
                                        + errStr.toString());
                    }
                } catch (InterruptedException ie) {
                    LOG.warn(
                            "Thread waiting on the child process (spark-submit) is interrupted, killing the child process.");
                    rpcServer.cancelClient(clientId,
                            "Thread waiting on the child porcess (spark-submit) is interrupted");
                    Thread.interrupted();
                    child.destroy();
                } catch (Exception e) {
                    String errMsg = "Exception while waiting for child process (spark-submit)";
                    LOG.warn(errMsg, e);
                    rpcServer.cancelClient(clientId, errMsg);
                }
            }
        };
    }

    Thread thread = new Thread(runnable);
    thread.setDaemon(true);
    thread.setName("Driver");
    thread.start();
    return thread;
}