Example usage for java.lang Throwable toString

List of usage examples for java.lang Throwable toString

Introduction

On this page you can find example usages of java.lang.Throwable#toString.

Prototype

public String toString() 

Source Link

Document

Returns a short description of this throwable.

Usage

From source file:com.vmware.photon.controller.nsxclient.apis.FabricApiTest.java

@Test
public void testGetTransportZone() throws IOException, InterruptedException {
    // Mock the REST layer to return a serialized TransportZone with a known id.
    final TransportZone mockResponse = new TransportZone();
    mockResponse.setId("id");
    setupMocks(objectMapper.writeValueAsString(mockResponse), HttpStatus.SC_OK);

    final CountDownLatch latch = new CountDownLatch(1);
    com.google.common.util.concurrent.FutureCallback<TransportZone> callback =
            new com.google.common.util.concurrent.FutureCallback<TransportZone>() {
                @Override
                public void onSuccess(TransportZone result) {
                    // The deserialized zone must round-trip to the mocked object.
                    assertEquals(result, mockResponse);
                    latch.countDown();
                }

                @Override
                public void onFailure(Throwable t) {
                    // Report the failure reason before releasing the latch.
                    fail(t.toString());
                    latch.countDown();
                }
            };

    new FabricApi(restClient).getTransportZone("id", callback);

    // Bounded wait so a callback that never fires fails the test instead of hanging it.
    assertThat(latch.await(COUNTDOWNLATCH_AWAIT_TIMEOUT, TimeUnit.SECONDS), is(true));
}

From source file:info.magnolia.cms.core.ie.XmlExport.java

/**
 * Serializes a single JCR property value and appends it as text content of the
 * given JDOM element.
 *
 * Binary properties are either exported as a path link (when {@code binaryAsLink}
 * is set) or Base64-encoded; DATE properties use the underlying java.util.Date's
 * string form; everything else uses the property's string value. Any failure is
 * logged and an error marker string is exported instead.
 *
 * @param pElt     the JDOM element to append the value to
 * @param property the JCR property to export
 */
private void exportValue(final org.jdom.Element pElt, final Property property) {

    String sContent;

    try {
        if (property.getType() == PropertyType.BINARY) {

            if (this.binaryAsLink) {
                sContent = property.getPath();
            } else {
                // Collect the raw bytes and Base64-encode them directly.
                // The previous implementation appended the entire 8K buffer per
                // read() call (ignoring the actual byte count returned) and
                // round-tripped the binary data through the platform-default
                // charset before encoding, both of which corrupt the output.
                java.io.ByteArrayOutputStream bytes = new java.io.ByteArrayOutputStream();
                InputStream is = null;
                try {
                    is = property.getStream();
                    byte[] buffer = new byte[8192];
                    int read;
                    while ((read = is.read(buffer)) != -1) {
                        bytes.write(buffer, 0, read);
                    }
                } catch (Exception e) {
                    log.error("Failed to read input stream", e);
                } finally {
                    // Close in finally so the stream is not leaked when read() throws.
                    IOUtils.closeQuietly(is);
                }
                // Base64 output is pure ASCII, so the default-charset String ctor is safe here.
                sContent = new String(Base64.encodeBase64(bytes.toByteArray()));
            }
        } else if (property.getType() == PropertyType.DATE) {
            sContent = property.getDate().getTime().toString();
        } else {
            sContent = property.getString();
        }
    } catch (final Throwable t) {

        log.warn("exportValue() failure", t); //$NON-NLS-1$

        // Export a marker instead of aborting the whole document export.
        sContent = "exportValue() failure " + t.toString(); //$NON-NLS-1$
    }
    pElt.addContent(new org.jdom.Text(sContent));
}

From source file:jetbrains.exodus.entitystore.BackupTests.java

/**
 * Stress-tests hot backup: while background threads continuously mutate blob
 * strings on 1000 "Issue" entities, takes a backup, restores it into a fresh
 * store, and verifies the restored data forms a consistent snapshot.
 *
 * @param useBackupBean whether to back up through a BackupBean wrapper or the store directly
 */
public void doStressTest(final boolean useBackupBean) throws Exception {
    final PersistentEntityStoreImpl store = getEntityStore();
    store.getConfig().setMaxInPlaceBlobSize(0); // no in-place blobs
    final int issueCount = 1000;
    store.executeInTransaction(new StoreTransactionalExecutable() {
        @Override // seed the store: issueCount issues, each with a random blob description
        public void execute(@NotNull StoreTransaction txn) {
            for (int i = 0; i < issueCount; ++i) {
                final Entity issue = txn.newEntity("Issue");
                issue.setBlobString("description", Double.toString(Math.random()));
            }
        }
    });
    final Random rnd = new Random();
    // Single-element arrays are mutable holders shared with the anonymous jobs below:
    // finish[0] signals the writer threads to stop; backgroundChanges[0] counts writes.
    final boolean[] finish = { false };
    final int[] backgroundChanges = { 0 };
    final int threadCount = 4;
    final ThreadJobProcessor[] threads = new ThreadJobProcessor[threadCount];
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new ThreadJobProcessor("BackupTest Job Processor " + i);
        threads[i].start();
        threads[i].setExceptionHandler(new JobProcessorExceptionHandler() {
            @Override
            public void handle(JobProcessor processor, Job job, Throwable t) {
                // Background failures are only reported; they must not kill the test thread.
                System.out.println(t.toString());
            }
        });
        threads[i].queue(new Job() {
            @Override
            protected void execute() throws Throwable {
                // Keep overwriting a random issue's blob until the backup has been taken.
                while (!finish[0]) {
                    store.executeInTransaction(new StoreTransactionalExecutable() {
                        @Override
                        public void execute(@NotNull final StoreTransaction txn) {
                            final Entity issue = txn.getAll("Issue").skip(rnd.nextInt(issueCount - 1))
                                    .getFirst();
                            assertNotNull(issue);
                            issue.setBlobString("description", Double.toString(Math.random()));
                            System.out.print("\r" + (++backgroundChanges[0]));
                        }
                    });
                }
            }
        });
    }
    // Let the writers run for a while so the backup happens under active load.
    Thread.sleep(1000);
    final File backupDir = TestUtil.createTempDir();
    try {
        final File backup = CompressBackupUtil.backup(useBackupBean ? new BackupBean(store) : store, backupDir,
                null, true);
        finish[0] = true;
        final File restoreDir = TestUtil.createTempDir();
        try {
            extractEntireZip(backup, restoreDir);
            final PersistentEntityStoreImpl newStore = PersistentEntityStores.newInstance(restoreDir);
            try {
                final long[] lastUsedBlobHandle = { -1L };
                newStore.executeInReadonlyTransaction(new StoreTransactionalExecutable() {
                    @Override
                    public void execute(@NotNull final StoreTransaction t) {
                        final PersistentStoreTransaction txn = (PersistentStoreTransaction) t;
                        // The snapshot must contain exactly the seeded issues, each with a
                        // non-empty description blob, despite the concurrent writes.
                        assertEquals(issueCount, txn.getAll("Issue").size());
                        lastUsedBlobHandle[0] = newStore
                                .getSequence(txn, PersistentEntityStoreImpl.BLOB_HANDLES_SEQUENCE)
                                .loadValue(txn);
                        for (final Entity issue : txn.getAll("Issue")) {
                            final String description = issue.getBlobString("description");
                            assertNotNull(description);
                            assertFalse(description.isEmpty());
                        }
                    }
                });
                // Consistency check: no blob file in the restored vault may carry a handle
                // newer than the last handle recorded in the snapshot's sequence.
                final FileSystemBlobVault blobVault = (FileSystemBlobVault) newStore.getBlobVault();
                for (final BackupStrategy.FileDescriptor fd : blobVault.getBackupStrategy().listFiles()) {
                    final File file = fd.getFile();
                    if (file.isFile() && !file.getName().equals(FileSystemBlobVaultOld.VERSION_FILE)) {
                        assertTrue(
                                "" + blobVault.getBlobHandleByFile(fd.getFile()) + " > "
                                        + lastUsedBlobHandle[0],
                                blobVault.getBlobHandleByFile(fd.getFile()) <= lastUsedBlobHandle[0]);
                    }
                }
            } finally {
                newStore.close();
            }
        } finally {
            IOUtil.deleteRecursively(restoreDir);
        }
    } finally {
        IOUtil.deleteRecursively(backupDir);
    }
    // Wait for all background writers to terminate before the test ends.
    for (final ThreadJobProcessor thread : threads) {
        thread.finish();
    }
}

From source file:com.vmware.photon.controller.api.client.resource.ClusterApiTest.java

@Test
public void testGetVmsAsyncForPagination() throws IOException, InterruptedException {
    // Three VMs split across two pages: page one holds vm1/vm2, page two holds vm3.
    Vm vm1 = new Vm();
    vm1.setId("vm1");
    Vm vm2 = new Vm();
    vm2.setId("vm2");
    Vm vm3 = new Vm();
    vm3.setId("vm3");

    String nextPageLink = "nextPageLink";
    final ResourceList<Vm> vmList = new ResourceList<>(Arrays.asList(vm1, vm2), nextPageLink, null);
    final ResourceList<Vm> vmListNextPage = new ResourceList<>(Arrays.asList(vm3));

    ObjectMapper mapper = new ObjectMapper();
    setupMocksForPagination(mapper.writeValueAsString(vmList), mapper.writeValueAsString(vmListNextPage),
            nextPageLink, HttpStatus.SC_OK);

    ClusterApi clusterApi = new ClusterApi(restClient);
    final CountDownLatch latch = new CountDownLatch(1);

    FutureCallback<ResourceList<Vm>> callback = new FutureCallback<ResourceList<Vm>>() {
        @Override
        public void onSuccess(ResourceList<Vm> result) {
            // Both pages must be merged into one result list.
            assertEquals(result.getItems().size(), vmList.getItems().size() + vmListNextPage.getItems().size());
            assertTrue(result.getItems().containsAll(vmList.getItems()));
            assertTrue(result.getItems().containsAll(vmListNextPage.getItems()));
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            // Report the failure reason before releasing the latch.
            fail(t.toString());
            latch.countDown();
        }
    };
    clusterApi.getVmsInClusterAsync("foo", callback);

    // Bounded wait so a never-invoked callback fails the test instead of hanging it.
    assertThat(latch.await(COUNTDOWNLATCH_AWAIT_TIMEOUT, TimeUnit.SECONDS), is(true));
}

From source file:com.vmware.photon.controller.api.client.resource.ClusterRestApiTest.java

@Test
public void testGetVmsAsyncForPagination() throws IOException, InterruptedException {
    // Three VMs split across two pages: page one holds vm1/vm2, page two holds vm3.
    Vm vm1 = new Vm();
    vm1.setId("vm1");
    Vm vm2 = new Vm();
    vm2.setId("vm2");
    Vm vm3 = new Vm();
    vm3.setId("vm3");

    String nextPageLink = "nextPageLink";
    final ResourceList<Vm> vmList = new ResourceList<>(Arrays.asList(vm1, vm2), nextPageLink, null);
    final ResourceList<Vm> vmListNextPage = new ResourceList<>(Arrays.asList(vm3));

    ObjectMapper mapper = new ObjectMapper();
    setupMocksForPagination(mapper.writeValueAsString(vmList), mapper.writeValueAsString(vmListNextPage),
            nextPageLink, HttpStatus.SC_OK);

    ClusterApi clusterApi = new ClusterRestApi(restClient);
    final CountDownLatch latch = new CountDownLatch(1);

    FutureCallback<ResourceList<Vm>> callback = new FutureCallback<ResourceList<Vm>>() {
        @Override
        public void onSuccess(ResourceList<Vm> result) {
            // Both pages must be merged into one result list.
            assertEquals(result.getItems().size(), vmList.getItems().size() + vmListNextPage.getItems().size());
            assertTrue(result.getItems().containsAll(vmList.getItems()));
            assertTrue(result.getItems().containsAll(vmListNextPage.getItems()));
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            // Report the failure reason before releasing the latch.
            fail(t.toString());
            latch.countDown();
        }
    };
    clusterApi.getVmsInClusterAsync("foo", callback);

    // Bounded wait so a never-invoked callback fails the test instead of hanging it.
    assertThat(latch.await(COUNTDOWNLATCH_AWAIT_TIMEOUT, TimeUnit.SECONDS), is(true));
}

From source file:org.onehippo.forge.camel.demo.rest.services.AbstractRestUpdateResource.java

/**
 * Creates a generic JAX-RS Error Response from the given error.
 * @param th/* w w w.j a v  a 2  s.c o m*/
 * @return
 */
protected Response buildServerErrorResponse(Throwable th) {
    return Response.serverError().entity(th.getCause() != null ? th.getCause().toString() : th.toString())
            .build();
}

From source file:edu.pitt.dbmi.facebase.hd.InstructionQueueManager.java

/** Tell's Hub DB how much data is being packaged
 * Writes the value (in bytes) of the size of the files requests to the "results" column in the database in JSON format. 
 * Tells the Hub DB how big (and therefore how long to process) the requested data is. 
 *
 * @param session the Hibernate session object
 * @param qid the id of the QueueItem (and the row in the table) being processed
 * @return true if successful/*from   ww w.ja  v a 2  s.  c o  m*/
 */
boolean updateInstructionSize(long size, long qid) {
    log.debug("InstructionQueueManager.updateInstructionSize() called.");
    Session session = null;
    Transaction transaction = null;
    try {
        session = conf.openSession();
        transaction = session.beginTransaction();
        List<InstructionQueueItem> items = getPendingQueueItems(session, qid);
        String sizeString = (new Long(size)).toString();
        Map resultsJSON = new LinkedHashMap();
        resultsJSON.put("size", sizeString);
        String jsonResultsString = JSONValue.toJSONString(resultsJSON);
        if (items != null && items.size() >= 1) {
            InstructionQueueItem item = items.get(0);
            item.setResults(jsonResultsString);
            session.update(item);
            transaction.commit();
        }
        session.close();
        return true;
    } catch (Throwable t) {
        String errorString = "InstructionQueueManager caught a t in updateInstructionSize(): " + t.toString();
        String logString = t.getMessage();
        edu.pitt.dbmi.facebase.hd.HumanDataController.addError(errorString, logString);
        log.error(errorString, t);
        handleThrowable(t, session, transaction);
    }
    return false;
}

From source file:se.vgregion.javg.portlet.filter.PortletErrorHandlingFilter.java

@Override
public void doFilter(ActionRequest arg0, ActionResponse arg1, FilterChain arg2)
        throws IOException, PortletException {
    try {
        // Delegate to the next element in the chain (another filter or the portlet itself).
        arg2.doFilter(arg0, arg1);
    } catch (Throwable e) {
        // Stash the failure as a render parameter for the view phase, where we
        // have full control over the response rendering.
        arg1.setRenderParameter("errorInActionPhase", e.toString());
    }
}

From source file:com.ctrip.infosec.rule.executor.WhiteListRulesExecutorService.java

/**
 * Runs every whitelist rule that matches the given fact through the stateless
 * whitelist rule engine, tracing and timing each rule individually. A rule failure
 * is logged and traced but never aborts processing of the remaining rules.
 *
 * NOTE(review): several trace/log string literals below contain mojibake ("???")
 * from a lost source encoding; they are runtime strings and are preserved as-is.
 */
void execute(RiskFact fact) {

    // matchRules      
    List<WhitelistRule> matchedRules = Configs.matchWhitelistRules(fact);
    List<String> scriptRulePackageNames = Collections3.extractToList(matchedRules, "ruleNo");
    logger.debug(Contexts.getLogPrefix() + "matched whitelist rules: "
            + StringUtils.join(scriptRulePackageNames, ", "));
    TraceLogger.traceLog("? " + matchedRules.size() + " ???? ...");

    StatelessWhitelistRuleEngine statelessWhitelistRuleEngine = SpringContextHolder
            .getBean(StatelessWhitelistRuleEngine.class);
    for (WhitelistRule rule : matchedRules) {
        // Open a monitoring transaction and a nested trace scope per rule.
        RuleMonitorHelper.newTrans(fact, RuleMonitorType.WB_RULE, rule.getRuleNo());
        TraceLogger.beginNestedTrans(fact.eventId);
        TraceLogger.setNestedLogPrefix("[" + rule.getRuleNo() + "]");
        Contexts.setPolicyOrRuleNo(rule.getRuleNo());
        try {
            long start = System.currentTimeMillis();

            // add current execute ruleNo and logPrefix before execution
            fact.ext.put(Constants.key_ruleNo, rule.getRuleNo());
            fact.ext.put(Constants.key_isAsync, false);

            statelessWhitelistRuleEngine.execute(rule.getRuleNo(), fact);

            // remove current execute ruleNo when finished execution.
            fact.ext.remove(Constants.key_ruleNo);
            fact.ext.remove(Constants.key_isAsync);

            // Only rules slower than 100ms are worth logging individually.
            long handlingTime = System.currentTimeMillis() - start;
            if (handlingTime > 100) {
                logger.info(Contexts.getLogPrefix() + "whitelistRule: " + rule.getRuleNo() + ", usage: "
                        + handlingTime + "ms");
            }

            // Trace either "no whitelist hit" or the risk details the rule produced.
            if (fact.finalWhitelistResult.isEmpty()) {
                TraceLogger.traceLog(
                        ">>>> [" + rule.getRuleNo() + "] ???. usage: " + handlingTime + "ms");
            } else {
                TraceLogger.traceLog(">>>> [" + rule.getRuleNo() + "] ???: riskLevel = "
                        + fact.finalWhitelistResult.get(Constants.riskLevel) + ", riskMessage = "
                        + fact.finalWhitelistResult.get(Constants.riskMessage) + ", usage = "
                        + fact.finalWhitelistResult.get(Constants.timeUsage) + "ms");
            }

        } catch (Throwable ex) {
            // A failing rule is traced and skipped; remaining rules still run.
            logger.warn(Contexts.getLogPrefix() + "???. whitelistRule: "
                    + rule.getRuleNo(), ex);
            TraceLogger.traceLog("[" + rule.getRuleNo() + "] EXCEPTION: " + ex.toString());
        } finally {
            // Always close the trace/monitor scopes opened at the top of the loop.
            TraceLogger.commitNestedTrans();
            RuleMonitorHelper.commitTrans(fact);
            Contexts.clearLogPrefix();
        }
    }

}

From source file:com.vmware.photon.controller.nsxclient.apis.FabricApiTest.java

@Test
public void testGetFabricNodeState() throws IOException, InterruptedException {
    // Mock the REST layer to return a serialized FabricNodeState in SUCCESS state.
    final FabricNodeState mockResponse = new FabricNodeState();
    mockResponse.setState(com.vmware.photon.controller.nsxclient.datatypes.FabricNodeState.SUCCESS);
    setupMocks(objectMapper.writeValueAsString(mockResponse), HttpStatus.SC_OK);

    final CountDownLatch latch = new CountDownLatch(1);
    com.google.common.util.concurrent.FutureCallback<FabricNodeState> callback =
            new com.google.common.util.concurrent.FutureCallback<FabricNodeState>() {
                @Override
                public void onSuccess(FabricNodeState result) {
                    // The deserialized state must round-trip to the mocked object.
                    assertEquals(result, mockResponse);
                    latch.countDown();
                }

                @Override
                public void onFailure(Throwable t) {
                    // Report the failure reason before releasing the latch.
                    fail(t.toString());
                    latch.countDown();
                }
            };

    new FabricApi(restClient).getFabricNodeState("nodeId", callback);

    // Bounded wait so a callback that never fires fails the test instead of hanging it.
    assertThat(latch.await(COUNTDOWNLATCH_AWAIT_TIMEOUT, TimeUnit.SECONDS), is(true));
}