Example usage for java.lang RuntimeException getCause

Introduction

On this page you can find usage examples for java.lang.RuntimeException.getCause().

Prototype

public synchronized Throwable getCause() 

Document

Returns the cause of this throwable or null if the cause is nonexistent or unknown.
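
Before the project examples, here is a minimal, self-contained sketch (the class name GetCauseDemo is invented for this page, not taken from any of the projects below). It shows the common pattern the examples rely on: a checked exception is wrapped in a RuntimeException, and the caller uses getCause() to recover it. Note that getCause() may return null, so callers should check before dereferencing.

import java.io.IOException;

public class GetCauseDemo {

    public static void main(String[] args) {
        try {
            doWork();
        } catch (RuntimeException e) {
            // getCause() returns the wrapped Throwable, or null if no cause was set
            Throwable cause = e.getCause();
            if (cause instanceof IOException) {
                System.out.println("Underlying I/O failure: " + cause.getMessage());
            } else {
                throw e; // not the failure we expected, propagate it
            }
        }
    }

    private static void doWork() {
        try {
            throw new IOException("simulated disk failure");
        } catch (IOException e) {
            // common idiom: tunnel a checked exception through an unchecked one
            throw new RuntimeException("doWork failed", e);
        }
    }
}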

Usage

From source file:ubic.gemma.core.loader.expression.geo.DatasetCombinerTest.java

@Test
public void testFindGDSGrouping() {
    try {
        Collection<String> result = DatasetCombiner.findGDSforGSE("GSE674");
        assertEquals(2, result.size());
        assertTrue(result.contains("GDS472") && result.contains("GDS473"));
    } catch (RuntimeException e) {
        if (e.getCause() instanceof java.net.UnknownHostException) {
            DatasetCombinerTest.log.warn("Test skipped due to unknown host exception");
            return;
        } else if (e.getCause() instanceof java.io.IOException && e.getCause().getMessage().contains("503")) {
            DatasetCombinerTest.log.warn("Test skipped due to 503 from NCBI");
            DatasetCombinerTest.log.error(e, e);
            return;
        }
        throw e;
    }
}

From source file:com.wavemaker.tools.data.ExportDB.java

@Override
protected void customRun() {

    init();

    final Configuration cfg = new Configuration();

    // cfg.addDirectory(this.hbmFilesDir);

    this.hbmFilesDir.find().files().performOperation(new ResourceOperation<com.wavemaker.tools.io.File>() {

        @Override
        public void perform(com.wavemaker.tools.io.File file) {
            if (file.getName().endsWith(".hbm.xml")) {
                cfg.addInputStream(file.getContent().asInputStream());
            }
        }
    });

    Properties connectionProperties = getHibernateConnectionProperties();

    cfg.addProperties(connectionProperties);

    SchemaExport export = null;
    SchemaUpdate update = null;
    File ddlFile = null;

    try {
        if (this.overrideTable) {
            Callable<SchemaExport> t = new Callable<SchemaExport>() {

                @Override
                public SchemaExport call() {
                    return new SchemaExport(cfg);
                }
            };

            if (this.classesDir == null) {
                try {
                    export = t.call();
                } catch (Exception e) {
                    ReflectionUtils.rethrowRuntimeException(e);
                }
            } else {
                export = ResourceClassLoaderUtils.runInClassLoaderContext(true, t, this.classesDir);
            }

            ddlFile = File.createTempFile("ddl", ".sql");
            ddlFile.deleteOnExit();

            export.setOutputFile(ddlFile.getAbsolutePath());
            export.setDelimiter(";");
            export.setFormat(true);

            String extraddl = prepareForExport(this.exportToDatabase);

            export.create(this.verbose, this.exportToDatabase);

            this.errors = CastUtils.cast(export.getExceptions());
            this.errors = filterError(this.errors, connectionProperties);

            this.ddl = IOUtils.read(ddlFile);

            if (!ObjectUtils.isNullOrEmpty(extraddl)) {
                this.ddl = extraddl + "\n" + this.ddl;
            }
        } else {
            Callable<SchemaUpdate> t = new Callable<SchemaUpdate>() {

                @Override
                public SchemaUpdate call() {
                    return new SchemaUpdate(cfg);
                }
            };

            if (this.classesDir == null) {
                try {
                    update = t.call();
                } catch (Exception e) {
                    ReflectionUtils.rethrowRuntimeException(e);
                }
            } else {
                update = ResourceClassLoaderUtils.runInClassLoaderContext(t, this.classesDir);
            }

            prepareForExport(this.exportToDatabase);

            Connection conn = JDBCUtils.getConnection(this.connectionUrl.toString(), this.username,
                    this.password, this.driverClassName);

            Dialect dialect = Dialect.getDialect(connectionProperties);

            DatabaseMetadata meta = new DatabaseMetadata(conn, dialect);

            String[] updateSQL = cfg.generateSchemaUpdateScript(dialect, meta);

            update.execute(this.verbose, this.exportToDatabase);

            this.errors = CastUtils.cast(update.getExceptions());
            StringBuilder sb = new StringBuilder();
            for (String line : updateSQL) {
                sb = sb.append(line);
                sb = sb.append("\n");
            }
            this.ddl = sb.toString();

        }
    } catch (IOException ex) {
        throw new DataServiceRuntimeException(ex);
    } catch (SQLException qex) {
        throw new DataServiceRuntimeException(qex);
    } catch (RuntimeException rex) {
        if (rex.getCause() != null && rex.getCause().getMessage().contains(NO_SUITABLE_DRIVER)
                && WMAppContext.getInstance().isCloudFoundry()) {
            String msg = rex.getMessage() + " - " + UNKNOWN_DATABASE;
            throw new DataServiceRuntimeException(msg);
        } else {
            throw new DataServiceRuntimeException(rex);
        }
    } finally {
        try {
            ddlFile.delete();
        } catch (Exception ignore) {
        }
    }
}

From source file:org.apache.cassandra.db.commitlog.CommitLogReader.java

/**
 * Reads mutations from file, handing them off to handler
 * @param handler Handler that will take action based on deserialized Mutations
 * @param file CommitLogSegment file to read
 * @param minPosition Optional minimum CommitLogPosition - all segments with id > or matching w/greater position will be read
 * @param mutationLimit Optional limit on # of mutations to replay. Local ALL_MUTATIONS serves as marker to play all.
 * @param tolerateTruncation Whether or not we should allow truncation of this file or throw if EOF found
 *
 * @throws IOException
 */
public void readCommitLogSegment(CommitLogReadHandler handler, File file, CommitLogPosition minPosition,
        int mutationLimit, boolean tolerateTruncation) throws IOException {
    // just transform from the file name (no reading of headers) to determine version
    CommitLogDescriptor desc = CommitLogDescriptor.fromFileName(file.getName());

    try (ChannelProxy channel = new ChannelProxy(file);
            RandomAccessReader reader = RandomAccessReader.open(channel)) {
        if (desc.version < CommitLogDescriptor.VERSION_21) {
            if (!shouldSkipSegmentId(file, desc, minPosition)) {
                if (minPosition.segmentId == desc.id)
                    reader.seek(minPosition.position);
                ReadStatusTracker statusTracker = new ReadStatusTracker(mutationLimit, tolerateTruncation);
                statusTracker.errorContext = desc.fileName();
                readSection(handler, reader, minPosition, (int) reader.length(), statusTracker, desc);
            }
            return;
        }

        final long segmentIdFromFilename = desc.id;
        try {
            // The following call can either throw or legitimately return null. For either case, we need to check
            // desc outside this block and set it to null in the exception case.
            desc = CommitLogDescriptor.readHeader(reader, DatabaseDescriptor.getEncryptionContext());
        } catch (Exception e) {
            desc = null;
        }
        if (desc == null) {
            // don't care about whether or not the handler thinks we can continue. We can't w/out descriptor.
            handler.handleUnrecoverableError(new CommitLogReadException(
                    String.format("Could not read commit log descriptor in file %s", file),
                    CommitLogReadErrorReason.UNRECOVERABLE_DESCRIPTOR_ERROR, false));
            return;
        }

        if (segmentIdFromFilename != desc.id) {
            if (handler.shouldSkipSegmentOnError(new CommitLogReadException(
                    String.format("Segment id mismatch (filename %d, descriptor %d) in file %s",
                            segmentIdFromFilename, desc.id, file),
                    CommitLogReadErrorReason.RECOVERABLE_DESCRIPTOR_ERROR, false))) {
                return;
            }
        }

        if (shouldSkipSegmentId(file, desc, minPosition))
            return;

        CommitLogSegmentReader segmentReader;
        try {
            segmentReader = new CommitLogSegmentReader(handler, desc, reader, tolerateTruncation);
        } catch (Exception e) {
            handler.handleUnrecoverableError(new CommitLogReadException(
                    String.format("Unable to create segment reader for commit log file: %s", e),
                    CommitLogReadErrorReason.UNRECOVERABLE_UNKNOWN_ERROR, tolerateTruncation));
            return;
        }

        try {
            ReadStatusTracker statusTracker = new ReadStatusTracker(mutationLimit, tolerateTruncation);
            for (CommitLogSegmentReader.SyncSegment syncSegment : segmentReader) {
                statusTracker.tolerateErrorsInSection &= syncSegment.toleratesErrorsInSection;

                // Skip segments that are completely behind the desired minPosition
                if (desc.id == minPosition.segmentId && syncSegment.endPosition < minPosition.position)
                    continue;

                statusTracker.errorContext = String.format("Next section at %d in %s",
                        syncSegment.fileStartPosition, desc.fileName());

                readSection(handler, syncSegment.input, minPosition, syncSegment.endPosition, statusTracker,
                        desc);
                if (!statusTracker.shouldContinue())
                    break;
            }
        }
        // Unfortunately AbstractIterator cannot throw a checked exception, so we check to see if a RuntimeException
        // is wrapping an IOException.
        catch (RuntimeException re) {
            if (re.getCause() instanceof IOException)
                throw (IOException) re.getCause();
            throw re;
        }
        logger.debug("Finished reading {}", file);
    }
}
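
The comment in the Cassandra example above points at a general idiom: code running inside an Iterator (or any interface that cannot declare checked exceptions) wraps the checked exception in a RuntimeException, and the caller uses getCause() to restore the original checked type. The following is a minimal sketch of that idiom with invented class names (LineSource, LineReaderDemo); it is not Cassandra code.

import java.io.IOException;
import java.util.Iterator;

class LineSource implements Iterator<String> {
    private int remaining = 3;

    @Override
    public boolean hasNext() {
        return remaining > 0;
    }

    @Override
    public String next() {
        try {
            return readLine();
        } catch (IOException e) {
            throw new RuntimeException(e); // tunnel the checked exception
        }
    }

    private String readLine() throws IOException {
        if (--remaining == 1) {
            throw new IOException("simulated read failure");
        }
        return "line";
    }
}

class LineReaderDemo {
    static void readAll(Iterator<String> lines) throws IOException {
        try {
            while (lines.hasNext()) {
                System.out.println(lines.next());
            }
        } catch (RuntimeException re) {
            if (re.getCause() instanceof IOException) {
                throw (IOException) re.getCause(); // restore the original checked type
            }
            throw re;
        }
    }

    public static void main(String[] args) throws IOException {
        readAll(new LineSource());
    }
}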

From source file:org.xwiki.test.escaping.framework.AbstractEscapingTest.java

/**
 * Check for unescaped data in the given {@code content}. Throws {@link RuntimeException} on errors.
 *
 * @param url URL used in the test
 * @return list of found validation errors
 */
protected List<ValidationError> getUnderEscapingErrors(String url) {
    // TODO better use XWiki logging
    System.out.println("Testing URL: " + url);

    URLContent content = null;
    try {
        content = AbstractEscapingTest.getUrlContent(url);
    } catch (RuntimeException e) {
        if (e.getCause() instanceof InvalidRedirectLocationException) {
            // Don't fail the test if we can't follow a redirect because the redirect location can be taken from the
            // request parameters which are controlled by the test and most of the tests use values that are not
            // valid URLs. The code that performs the redirect always assumes the redirect URL is valid.
            System.out.println(e.getCause().getMessage());
            return Collections.emptyList();
        } else {
            throw e;
        }
    }

    // TODO: add support for other types than XML
    if (content.getType() == null || XML_MIMETYPES.contains(content.getType().getMimeType())
            || content.getType().getMimeType().endsWith("+xml")) {
        String where = "  Template: " + this.name + "\n  URL: " + url;
        Assert.assertNotNull("Response is null\n" + where, content);
        XMLEscapingValidator validator = new XMLEscapingValidator();
        validator.setDocument(new ByteArrayInputStream(content.getContent()));
        try {
            return validator.validate();
        } catch (EscapingError error) {
            // most probably false positive, generate an error instead of failing the test
            throw new RuntimeException(EscapingError.formatMessage(error.getMessage(), this.name, url, null));
        }
    } else {
        System.err.println("WARN: Unsupported content type [" + content.getType() + "] for URL [" + url + "]");

        return Collections.emptyList();
    }
}

From source file:org.apache.hadoop.mapreduce.Cluster.java

/**
 * Get job corresponding to jobid.
 * 
 * @param jobId
 * @return object of {@link Job}
 * @throws IOException
 * @throws InterruptedException
 */
public Job getJob(JobID jobId) throws IOException, InterruptedException {
    JobStatus status = client.getJobStatus(jobId);
    if (status != null) {
        JobConf conf;
        try {
            conf = new JobConf(status.getJobFile());
        } catch (RuntimeException ex) {
            // If job file doesn't exist it means we can't find the job
            if (ex.getCause() instanceof FileNotFoundException) {
                return null;
            } else {
                throw ex;
            }
        }
        return Job.getInstance(this, status, conf);
    }
    return null;
}
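
A hedged sketch of how a caller might use the method above; the Configuration and the job id are placeholders, not taken from the Hadoop source. Because getJob() converts a RuntimeException whose cause is a FileNotFoundException into a null return, the caller only needs a null check rather than a try/catch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;

public class JobLookup {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // assumes cluster settings are on the classpath
        Cluster cluster = new Cluster(conf);
        Job job = cluster.getJob(JobID.forName("job_1700000000000_0001")); // made-up job id
        if (job == null) {
            System.out.println("Job not found (no status, or job file missing)");
        } else {
            System.out.println("State: " + job.getJobState());
        }
        cluster.close();
    }
}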

From source file:org.apache.ode.axis2.soapbinding.SoapExternalService.java

private ServiceClient getServiceClient() throws AxisFault {
    try {
        // manually call the check procedure
        // we don't want a dedicated thread for that
        _axisServiceWatchDog.check();
        _axisOptionsWatchDog.check();
    } catch (RuntimeException e) {
        throw AxisFault.makeFault(e.getCause() != null ? e.getCause() : e);
    }
    AxisService anonymousService = _axisServiceWatchDog.getObserver().get();
    ServiceClient client = _cachedClients.get();
    if (client == null || !client.getAxisService().getName().equals(anonymousService.getName())) {
        // avoid race conditions in AxisConfiguration
        synchronized (_axisConfig) {
            // if the service has changed, discard the client and create a new one
            if (client != null) {
                if (__log.isDebugEnabled())
                    __log.debug("Clean up and discard ServiceClient");
                client.cleanup();
            }
            if (__log.isDebugEnabled())
                __log.debug("Create a new ServiceClient for " + anonymousService.getName());
            client = new ServiceClient(_configContext, null);
            client.setAxisService(anonymousService);
        }
        _cachedClients.set(client);
    }

    // apply the options to the service client
    client.setOptions(_axisOptionsWatchDog.getObserver().get());
    return client;
}

From source file:com.github.tomakehurst.wiremock.StandaloneAcceptanceTest.java

private Matcher<Exception> causedByHttpHostConnectException() {
    return new TypeSafeMatcher<Exception>() {
        @Override
        public boolean matchesSafely(Exception o) {
            if (!(o instanceof RuntimeException)) {
                return false;
            }
            RuntimeException re = (RuntimeException) o;
            return re.getCause() instanceof HttpHostConnectException;
        }

        @Override
        public void describeTo(Description description) {
            description.appendText("Expected RuntimeException with nested HttpHostConnectException");
        }
    };
}

From source file:org.neo4j.backup.TestBackup.java

@Test
public void makeSureStoreIdIsEnforced() throws Exception {
    // Create data set X on server A
    DbRepresentation initialDataSetRepresentation = createInitialDataSet(serverPath);
    ServerInterface server = startServer(serverPath);

    // Grab initial backup from server A
    OnlineBackup backup = OnlineBackup.from("127.0.0.1");
    backup.full(backupPath.getPath());
    assertTrue("Should be consistent", backup.isConsistent());
    assertEquals(initialDataSetRepresentation, DbRepresentation.of(backupPath));
    shutdownServer(server);

    // Create data set X+Y on server B
    createInitialDataSet(otherServerPath);
    addMoreData(otherServerPath);
    server = startServer(otherServerPath);

    // Try to grab incremental backup from server B.
    // Data should be OK, but store id check should prevent that.
    try {
        backup.incremental(backupPath.getPath());
        fail("Shouldn't work");
    } catch (RuntimeException e) {
        assertThat(e.getCause(), instanceOf(MismatchingStoreIdException.class));
    }
    shutdownServer(server);
    // Just make sure incremental backup can be received properly from
    // server A, even after a failed attempt from server B
    DbRepresentation furtherRepresentation = addMoreData(serverPath);
    server = startServer(serverPath);
    backup.incremental(backupPath.getPath());
    assertTrue("Should be consistent", backup.isConsistent());
    assertEquals(furtherRepresentation, DbRepresentation.of(backupPath));
    shutdownServer(server);
}

From source file:org.geotools.data.complex.XmlMappingFeatureIterator.java

@Override
protected String extractIdForAttribute(final Expression idExpression, Object sourceInstance) {
    try {
        if (idExpression instanceof Function) {
            // special handling for functions
            XmlXpathFilterData data = new XmlXpathFilterData(namespaces, xmlResponse.getDoc(), -1,
                    XmlMappingFeatureIterator.createIndexedItemXpathString((XmlFeatureTypeMapping) mapping,
                            xmlResponse, indexCounter));
            Object value = idExpression.evaluate(data);
            return (value == null ? "" : value.toString());
        } else {
            return XmlXpathUtilites.getSingleXPathValue(mapping.getNamespaces(),
                    createIndexedItemXpathString((XmlFeatureTypeMapping) mapping, xmlResponse, indexCounter)
                            + XPATH_SEPARATOR + idXpath,
                    xmlResponse.getDoc());
        }
    } catch (RuntimeException e) {
        if (e.getCause() instanceof JXPathException) {
            // only log info since id is not always compulsory
            LOGGER.info("Feature id is not mapped for: " + mapping.getTargetFeature().getName());
        } else {
            throw e;
        }
    }
    return null;
}

From source file:org.springframework.orm.jpa.vendor.HibernateJpaDialect.java

@Override
    @Nullable
public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
    if (ex instanceof HibernateException) {
        return convertHibernateAccessException((HibernateException) ex);
    }
    if (ex instanceof PersistenceException && ex.getCause() instanceof HibernateException) {
        return convertHibernateAccessException((HibernateException) ex.getCause());
    }
    return EntityManagerFactoryUtils.convertJpaAccessExceptionIfPossible(ex);
}