Example usage for java.util.concurrent TimeoutException getCause

List of usage examples for java.util.concurrent TimeoutException getCause

Introduction

On this page you can find usage examples for java.util.concurrent.TimeoutException.getCause().

Prototype

public synchronized Throwable getCause() 

Document

Returns the cause of this throwable or null if the cause is nonexistent or unknown.
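Before the full examples below, here is a minimal, self-contained sketch of the pattern most of them follow: submit a task, wait on Future.get(timeout, unit), and handle TimeoutException, ExecutionException and InterruptedException separately. The class name TimeoutCauseDemo and the simulated slow task are illustrative only, not taken from the examples below. A TimeoutException raised by Future.get() normally has no cause, so getCause() returns null ("nonexistent or unknown"), whereas the root cause of a task failure is recovered from ExecutionException.getCause().

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class TimeoutCauseDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        Future<String> future = executor.submit(() -> {
            Thread.sleep(5000); // simulate slow work
            return "done";
        });
        try {
            future.get(100, TimeUnit.MILLISECONDS);
        } catch (TimeoutException e) {
            // A timeout from Future.get() normally carries no cause, so this prints null
            System.out.println("Timed out, cause: " + e.getCause());
            future.cancel(true);
        } catch (ExecutionException e) {
            // If the task itself failed, the original exception is the cause
            System.out.println("Task failed, cause: " + e.getCause());
        } finally {
            executor.shutdownNow();
        }
    }
}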

Usage

From source file:com.quantiply.druid.HTTPTranquilityLoader.java

/**
 * Issue flush request to writer thread and block until complete
 *
 * Error contract: will throw an Exception if a fatal error occurs in the writer thread
 */
public void flush() throws Throwable {
    WriterCommand flushCmd = WriterCommand.getFlushCmd();
    sendCmd(flushCmd);
    try {
        //Wait on flush to complete - may block if writer is dead so we must periodically check
        boolean waiting = true;
        while (waiting) {
            try {
                flushCmd.flushCompletedFuture.get(100, TimeUnit.MILLISECONDS);
                waiting = false;
            } catch (TimeoutException e) {
                checkWriter();
            }
        }
    } catch (InterruptedException e) {
        /* If the main Samza thread is interrupted, it's likely a shutdown command.
           Try for a clean shutdown by waiting a little longer on the flush.
         */
        try {
            flushCmd.flushCompletedFuture.get(SHUTDOWN_WAIT_MS, TimeUnit.MILLISECONDS);
        } catch (Exception retryEx) {
            throw new IOException("Error trying to flush to Tranquility server on shutdown", e);
        }
    } catch (ExecutionException e) {
        throw e.getCause();
    }
}

From source file:com.sap.research.connectivity.gw.GWOperationsUtils.java

public String getMetadataString(String url, String user, String pass, String host, String port, int timeOut)
        throws Exception {
    String returnString = "";

    try {
        String execArgs[] = new String[] { "java", "-jar",
                System.getProperty("user.home") + SEPARATOR + "appToRetrieveOdataMetadata.jar", url, user, pass,
                host, port };

        final Process theProcess = Runtime.getRuntime().exec(execArgs);

        Callable<String> call = new Callable<String>() {
            public String call() throws Exception {
                String returnString = "";
                try {
                    BufferedReader inStream = new BufferedReader(
                            new InputStreamReader(theProcess.getInputStream()));
                    returnString = IOUtils.toString(inStream);
                    IOUtils.closeQuietly(inStream);
                    //if (theProcess.exitValue() != 0)
                    theProcess.waitFor();
                } catch (InterruptedException e) {
                    throw new TimeoutException();
                    //log.severe("The call to the Gateway Service was interrupted.");
                }
                return returnString;
            }
        };

        final ExecutorService theExecutor = Executors.newSingleThreadExecutor();
        Future<String> futureResultOfCall = theExecutor.submit(call);
        try {
            returnString = futureResultOfCall.get(timeOut, TimeUnit.SECONDS);
        } catch (TimeoutException ex) {
            throw new TimeoutException(
                    "The Gateway Service call timed out. Please try again or check your settings.");
        } catch (ExecutionException ex) {
            throw new RuntimeException("The Gateway Service call did not complete due to an execution error. "
                    + ex.getCause().getLocalizedMessage());
        } finally {
            theExecutor.shutdownNow();
        }
    } catch (InterruptedException ex) {
        throw new InterruptedException(
                "The Gateway Service call did not complete due to an unexpected interruption.");
    } catch (IOException e) {
        throw new IOException("Error when retrieving metadata from the Gateway Service.");
    }

    return returnString;
}

From source file:org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter.java

/**
 * Calls the {@link AbstractMappingMetadataExtracter#extractRaw(ContentReader)} method
 * using the given limits.
 * <p>
 * Currently the only limit supported by {@link MetadataExtracterLimits} is a timeout
 * so this method uses {@link AbstractMappingMetadataExtracter#getExecutorService()}
 * to execute a {@link FutureTask} with any timeout defined.
 * <p>
 * If no timeout limit is defined or is unlimited (-1),
 * the <code>extractRaw</code> method is called directly.
 * 
 * @param reader        the document to extract the values from.  This stream provided by
 *                      the reader must be closed if accessed directly.
 * @param limits        the limits to impose on the extraction
 * @return              Returns a map of document property values keyed by property name.
 * @throws Throwable    All exception conditions can be handled.
 */
private Map<String, Serializable> extractRaw(ContentReader reader, MetadataExtracterLimits limits)
        throws Throwable {
    if (limits == null || limits.getTimeoutMs() == -1) {
        return extractRaw(reader);
    }
    FutureTask<Map<String, Serializable>> task = null;
    StreamAwareContentReaderProxy proxiedReader = null;
    try {
        proxiedReader = new StreamAwareContentReaderProxy(reader);
        task = new FutureTask<Map<String, Serializable>>(new ExtractRawCallable(proxiedReader));
        getExecutorService().execute(task);
        return task.get(limits.getTimeoutMs(), TimeUnit.MILLISECONDS);
    } catch (TimeoutException e) {
        task.cancel(true);
        if (null != proxiedReader) {
            proxiedReader.release();
        }
        throw e;
    } catch (InterruptedException e) {
        // We were asked to stop
        task.cancel(true);
        return null;
    } catch (ExecutionException e) {
        // Unwrap our cause and throw that
        Throwable cause = e.getCause();
        if (cause != null && cause instanceof ExtractRawCallableException) {
            cause = ((ExtractRawCallableException) cause).getCause();
        }
        throw cause;
    }
}

From source file:org.alfresco.repo.content.transform.AbstractContentTransformer2.java

/**
 * @see org.alfresco.repo.content.transform.ContentTransformer#transform(org.alfresco.service.cmr.repository.ContentReader, org.alfresco.service.cmr.repository.ContentWriter, org.alfresco.service.cmr.repository.TransformationOptions)
 */
public final void transform(ContentReader reader, ContentWriter writer, TransformationOptions options)
        throws ContentIOException {
    try {
        depth.set(depth.get() + 1);

        // begin timing
        long before = System.currentTimeMillis();

        String sourceMimetype = reader.getMimetype();
        String targetMimetype = writer.getMimetype();

        // check options map
        if (options == null) {
            options = new TransformationOptions();
        }

        try {
            if (transformerDebug.isEnabled()) {
                transformerDebug.pushTransform(this, reader.getContentUrl(), sourceMimetype, targetMimetype,
                        reader.getSize(), options);
            }

            // MNT-16381: check the mimetype of the file supplied by the user
            // matches the sourceMimetype of the reader. Intermediate files are
            // not checked.
            strictMimetypeCheck(reader, options, sourceMimetype);

            // Check the transformability
            checkTransformable(reader, writer, options);

            // Pass on any limits to the reader
            setReaderLimits(reader, writer, options);

            // Transform
            // MNT-12238: CLONE - CLONE - Upload of PPTX causes very high memory usage leading to system instability
            // Limiting transformation up to configured amount of milliseconds to avoid very high RAM consumption
            // and OOM during transforming problematic documents
            TransformationOptionLimits limits = getLimits(reader.getMimetype(), writer.getMimetype(), options);

            long timeoutMs = (limits == null) ? -1 : limits.getTimeoutMs();
            if (!useTimeoutThread || (null == limits) || (-1 == timeoutMs)) {
                transformInternal(reader, writer, options);
            } else {
                Future<?> submittedTask = null;
                StreamAwareContentReaderProxy proxiedReader = new StreamAwareContentReaderProxy(reader);
                StreamAwareContentWriterProxy proxiedWriter = new StreamAwareContentWriterProxy(writer);

                try {
                    submittedTask = getExecutorService()
                            .submit(new TransformInternalCallable(proxiedReader, proxiedWriter, options));
                    submittedTask.get(timeoutMs + additionalThreadTimout, TimeUnit.MILLISECONDS);
                } catch (TimeoutException e) {
                    releaseResources(submittedTask, proxiedReader, proxiedWriter);
                    throw new TimeoutException("Transformation failed due to timeout limit");
                } catch (InterruptedException e) {
                    releaseResources(submittedTask, proxiedReader, proxiedWriter);
                    throw new InterruptedException(
                            "Transformation failed, because the thread of the transformation was interrupted");
                } catch (ExecutionException e) {
                    Throwable cause = e.getCause();
                    if (cause instanceof TransformInternalCallableException) {
                        cause = ((TransformInternalCallableException) cause).getCause();
                    }

                    throw cause;
                }
            }

            // record time
            long after = System.currentTimeMillis();
            recordTime(sourceMimetype, targetMimetype, after - before);
        } catch (ContentServiceTransientException cste) {
            // A transient failure has occurred within the content transformer.
            // This should not be interpreted as a failure and therefore we should not
            // update the transformer's average time.
            if (logger.isDebugEnabled()) {
                logger.debug("Transformation has been transiently declined: \n" + "   reader: " + reader + "\n"
                        + "   writer: " + writer + "\n" + "   options: " + options + "\n" + "   transformer: "
                        + this);
            }
            // the finally block below will still perform tidyup. Otherwise we're done.
            // We rethrow the exception
            throw cste;
        } catch (UnsupportedTransformationException e) {
            // Don't record an error or even the time, as this is normal in compound transformations.
            transformerDebug.debug("          Failed", e);
            throw e;
        } catch (Throwable e) {
            // Make sure that this transformation gets set back in terms of time taken.
            // This will ensure that transformers that compete for the same transformation
            // will be prejudiced against transformers that tend to fail
            long after = System.currentTimeMillis();
            recordError(sourceMimetype, targetMimetype, after - before);

            // Ask Tika to detect the document, and report back on whether
            // the current mime type is plausible
            String differentType = getMimetypeService().getMimetypeIfNotMatches(reader.getReader());

            // Report the error
            if (differentType == null) {
                transformerDebug.debug("          Failed", e);
                throw new ContentIOException("Content conversion failed: \n" + "   reader: " + reader + "\n"
                        + "   writer: " + writer + "\n" + "   options: " + options.toString(false) + "\n"
                        + "   limits: " + getLimits(reader, writer, options), e);
            } else {
                transformerDebug.debug("          Failed: Mime type was '" + differentType + "'", e);

                if (retryTransformOnDifferentMimeType) {
                    // MNT-11015 fix.
                    // Set a new reader to refresh the input stream.
                    reader = reader.getReader();
                    // set the actual file MIME type detected by Tika for content reader
                    reader.setMimetype(differentType);

                    // Get the correct transformer according to the actual file MIME type and try to
                    // transform the file with that transformer
                    ContentTransformer transformer = this.registry.getTransformer(differentType,
                            reader.getSize(), targetMimetype, options);
                    if (null != transformer) {
                        transformer.transform(reader, writer, options);
                    } else {
                        transformerDebug.debug("          Failed", e);
                        throw new ContentIOException("Content conversion failed: \n" + "   reader: " + reader
                                + "\n" + "   writer: " + writer + "\n" + "   options: "
                                + options.toString(false) + "\n" + "   limits: "
                                + getLimits(reader, writer, options) + "\n" + "   claimed mime type: "
                                + reader.getMimetype() + "\n" + "   detected mime type: " + differentType + "\n"
                                + "   transformer not found" + "\n", e);
                    }
                } else {
                    throw new ContentIOException("Content conversion failed: \n" + "   reader: " + reader + "\n"
                            + "   writer: " + writer + "\n" + "   options: " + options.toString(false) + "\n"
                            + "   limits: " + getLimits(reader, writer, options) + "\n"
                            + "   claimed mime type: " + reader.getMimetype() + "\n" + "   detected mime type: "
                            + differentType, e);
                }
            }
        } finally {
            transformerDebug.popTransform();

            // check that the reader and writer are both closed
            if (reader.isChannelOpen()) {
                logger.error("Content reader not closed by transformer: \n" + "   reader: " + reader + "\n"
                        + "   transformer: " + this);
            }
            if (writer.isChannelOpen()) {
                logger.error("Content writer not closed by transformer: \n" + "   writer: " + writer + "\n"
                        + "   transformer: " + this);
            }
        }

        // done
        if (logger.isDebugEnabled()) {
            logger.debug("Completed transformation: \n" + "   reader: " + reader + "\n" + "   writer: " + writer
                    + "\n" + "   options: " + options + "\n" + "   transformer: " + this);
        }
    } finally {
        depth.set(depth.get() - 1);
    }
}

From source file:org.apache.camel.impl.DefaultShutdownStrategy.java

protected boolean doShutdown(CamelContext context, List<RouteStartupOrder> routes, long timeout,
        TimeUnit timeUnit, boolean suspendOnly, boolean abortAfterTimeout) throws Exception {
    StopWatch watch = new StopWatch();

    // at first sort according to route startup order
    List<RouteStartupOrder> routesOrdered = new ArrayList<RouteStartupOrder>(routes);
    Collections.sort(routesOrdered, new Comparator<RouteStartupOrder>() {
        public int compare(RouteStartupOrder o1, RouteStartupOrder o2) {
            return o1.getStartupOrder() - o2.getStartupOrder();
        }
    });
    if (shutdownRoutesInReverseOrder) {
        Collections.reverse(routesOrdered);
    }

    if (timeout > 0) {
        LOG.info("Starting to graceful shutdown " + routesOrdered.size() + " routes (timeout " + timeout + " "
                + timeUnit.toString().toLowerCase() + ")");
    } else {
        LOG.info("Starting to graceful shutdown " + routesOrdered.size() + " routes (no timeout)");
    }

    // use another thread to perform the shutdowns so we can support timeout
    Future future = getExecutorService()
            .submit(new ShutdownTask(context, routesOrdered, suspendOnly, abortAfterTimeout));
    try {
        if (timeout > 0) {
            future.get(timeout, timeUnit);
        } else {
            future.get();
        }
    } catch (TimeoutException e) {
        // timeout then cancel the task
        future.cancel(true);

        // if set, stop processing and return false to indicate that the shutdown is aborting
        if (abortAfterTimeout) {
            LOG.warn("Timeout occurred. Aborting the shutdown now.");
            return false;
        } else {
            if (shutdownNowOnTimeout) {
                LOG.warn("Timeout occurred. Now forcing the routes to be shutdown now.");
                // force the routes to shutdown now
                shutdownRoutesNow(routesOrdered);
            } else {
                LOG.warn("Timeout occurred. Will ignore shutting down the remainder routes.");
            }
        }
    } catch (ExecutionException e) {
        // unwrap execution exception
        throw ObjectHelper.wrapRuntimeCamelException(e.getCause());
    }

    // convert to seconds as it's easier to read than a big millisecond number
    long seconds = TimeUnit.SECONDS.convert(watch.stop(), TimeUnit.MILLISECONDS);

    LOG.info("Graceful shutdown of " + routesOrdered.size() + " routes completed in " + seconds + " seconds");
    return true;
}

From source file:org.eclipse.gyrex.cloud.tests.internal.locking.DurableLockTests.java

@Test
public void testAcquire002() throws Exception {
    final String lockId = "test." + ZooKeeperGate.get().getSessionId() + "." + System.currentTimeMillis();

    final DurableLockImpl lock1 = new DurableLockImpl(lockId, null);
    final Future<DurableLockImpl> lock1f = executorService.submit(newAcquireLockCall(lock1, 1000));

    final DurableLockImpl lock1lock = lock1f.get(15, TimeUnit.SECONDS);
    assertNotNull(lock1lock);
    assertNotNull("lock1 must have a name at this point", lock1.getMyLockName());
    assertTrue(lock1lock.isValid());

    // check that it's impossible to acquire a second lock
    final DurableLockImpl lock2 = new DurableLockImpl(lockId, null);
    final Future<DurableLockImpl> lock2f = executorService.submit(newAcquireLockCall(lock2, 0));
    try {
        lock2f.get(10, TimeUnit.SECONDS);
        fail("timeout expected, call should never succeed");
    } catch (final TimeoutException e) {
        // good
    }
    assertNotNull("lock2 is still waiting so it must have a name", lock2.getMyLockName());

    // check that acquire timeouts work
    final DurableLockImpl lock3 = new DurableLockImpl(lockId, null);
    final Future<DurableLockImpl> lock3f = executorService.submit(newAcquireLockCall(lock3, 2000));
    try {
        lock3f.get(10, TimeUnit.SECONDS);
        fail("timeout expected, call should never succeed");
    } catch (final ExecutionException e) {
        // check exception
        assertTrue("timeout expected but wrong exception thrown", e.getCause() instanceof TimeoutException);
        // also check that no lock is left in ZooKeeper
        final Collection<String> childrenNames = ZooKeeperGate.get()
                .readChildrenNames(IZooKeeperLayout.PATH_LOCKS_DURABLE.append(lockId), null);
        assertEquals("only two children are allowed for lock node", 2, childrenNames.size());
        assertTrue("lock2 must exist", childrenNames.contains(lock1.getMyLockName()));
        assertTrue("lock2 must exist", childrenNames.contains(lock2.getMyLockName()));
        assertNull("lock3 should not have a lock name anymore", lock3.getMyLockName());
    }

    // release lock 1
    lock1lock.release();
    assertFalse(lock1lock.isValid());

    // check lock 2 is now available
    final DurableLockImpl lock2lock = lock2f.get(10, TimeUnit.SECONDS);
    assertNotNull(lock2lock);
    assertTrue(lock2lock.isValid());

    // release lock 2
    lock2lock.release();
    assertFalse(lock2lock.isValid());
}

From source file:org.springframework.amqp.rabbit.admin.RabbitBrokerAdmin.java

private boolean waitForState(final StatusCallback callable, String state) {

    if (timeout <= 0) {
        return true;
    }

    RabbitStatus status = getStatus();

    if (!callable.get(status)) {

        logger.info("Waiting for broker to enter state: " + state);

        Future<RabbitStatus> started = executor.submit(new Callable<RabbitStatus>() {
            public RabbitStatus call() throws Exception {
                RabbitStatus status = getStatus();
                while (!callable.get(status)) {
                    // Any less than 1000L and we tend to clog up the socket?
                    Thread.sleep(500L);
                    status = getStatus();
                }
                return status;
            }
        });

        try {
            status = started.get(timeout, TimeUnit.MILLISECONDS);
            // This seems to help... really it just means we didn't get the right status data
            Thread.sleep(500L);
        } catch (TimeoutException e) {
            started.cancel(true);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            logger.error("Exception checking broker status for " + state, e.getCause());
        }

        if (!callable.get(status)) {
            logger.error("Rabbit broker not in " + state + " state after timeout. Stopping process.");
            stopNode();
            return false;
        } else {
            logger.info("Finished waiting for broker to enter state: " + state);
            if (logger.isDebugEnabled()) {
                logger.info("Status: " + status);
            }
            return true;
        }

    } else {
        logger.info("Broker already in state: " + state);
    }

    return true;

}

From source file:org.springframework.ide.eclipse.beans.core.internal.model.BeansJavaConfig.java

@Override
protected void readConfig() {
    if (!isModelPopulated) {

        w.lock();
        if (this.isModelPopulated) {
            w.unlock();
            return;
        }

        try {
            if (this.configClass == null) {
                return;
            }

            IBeansProject beansProject = BeansModelUtils.getParentOfClass(this, IBeansProject.class);
            if (beansProject == null) {
                return;
            }

            final ClassLoader cl = JdtUtils.getClassLoader(beansProject.getProject(),
                    ApplicationContext.class.getClassLoader());

            if (cl.getResource(this.configClass.getFullyQualifiedName().replace('.', '/') + ".class") == null) {
                return;
            }

            Callable<Integer> loadBeanDefinitionOperation = new Callable<Integer>() {
                public Integer call() throws Exception {
                    // Obtain thread context classloader and override with the project classloader
                    ClassLoader threadClassLoader = Thread.currentThread().getContextClassLoader();
                    Thread.currentThread().setContextClassLoader(cl);

                    // Create special ReaderEventListener that essentially just passes through component definitions
                    ReaderEventListener eventListener = new BeansConfigPostProcessorReaderEventListener();
                    problemReporter = new BeansConfigProblemReporter();
                    beanNameGenerator = new UniqueBeanNameGenerator(BeansJavaConfig.this);
                    registry = new ScannedGenericBeanDefinitionSuppressingBeanDefinitionRegistry();

                    try {
                        registerAnnotationProcessors(eventListener);
                        registerBean(eventListener, cl);

                        IBeansConfigPostProcessor[] postProcessors = BeansConfigPostProcessorFactory
                                .createPostProcessor(ConfigurationClassPostProcessor.class.getName());
                        for (IBeansConfigPostProcessor postProcessor : postProcessors) {
                            executePostProcessor(postProcessor, eventListener);
                        }
                    } finally {
                        // Reset the context classloader
                        Thread.currentThread().setContextClassLoader(threadClassLoader);
                        LogFactory.release(cl); //Otherwise permgen leak?
                    }
                    return 0;
                }
            };

            FutureTask<Integer> task = new FutureTask<Integer>(loadBeanDefinitionOperation);
            BeansCorePlugin.getExecutorService().submit(task);
            task.get(BeansCorePlugin.getDefault().getPreferenceStore()
                    .getInt(BeansCorePlugin.TIMEOUT_CONFIG_LOADING_PREFERENCE_ID), TimeUnit.SECONDS);
        } catch (TimeoutException e) {
            problems.add(new ValidationProblem(IMarker.SEVERITY_ERROR,
                    "Loading of configuration '" + this.configClass.getFullyQualifiedName()
                            + "' took more than "
                            + BeansCorePlugin.getDefault().getPreferenceStore()
                                    .getInt(BeansCorePlugin.TIMEOUT_CONFIG_LOADING_PREFERENCE_ID)
                            + "sec",
                    file, 1));
        } catch (Exception e) {
            problems.add(new ValidationProblem(IMarker.SEVERITY_ERROR,
                    String.format("Error occured processing Java config '%s'. See Error Log for more details",
                            e.getCause().getMessage()),
                    getElementResource()));
            BeansCorePlugin.log(new Status(IStatus.INFO, BeansCorePlugin.PLUGIN_ID,
                    String.format("Error occured processing '%s'", this.configClass.getFullyQualifiedName()),
                    e.getCause()));
        } finally {
            // Prepare the internal cache of all children for faster access
            List<ISourceModelElement> allChildren = new ArrayList<ISourceModelElement>(imports);
            allChildren.addAll(aliases.values());
            allChildren.addAll(components);
            allChildren.addAll(beans.values());
            Collections.sort(allChildren, new Comparator<ISourceModelElement>() {
                public int compare(ISourceModelElement element1, ISourceModelElement element2) {
                    return element1.getElementStartLine() - element2.getElementStartLine();
                }
            });
            this.children = allChildren.toArray(new IModelElement[allChildren.size()]);

            this.isModelPopulated = true;
            w.unlock();
        }

    }
}