List of usage examples for com.google.common.base Stopwatch elapsed
@CheckReturnValue public long elapsed(TimeUnit desiredUnit)
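elapsed(TimeUnit) returns the current elapsed time shown on the stopwatch, expressed in the requested unit with any fraction rounded down; it can be read while the stopwatch is running or after it has been stopped. Before the source-file examples below, here is a minimal, self-contained sketch of that behavior (the slowCall() helper is hypothetical, standing in for whatever work is being timed):

    import com.google.common.base.Stopwatch;
    import java.util.concurrent.TimeUnit;

    public class StopwatchElapsedExample {

        public static void main(String[] args) throws InterruptedException {
            // Create and start a stopwatch in one call.
            Stopwatch stopwatch = Stopwatch.createStarted();

            slowCall(); // hypothetical workload to be timed

            // elapsed() may be read while the stopwatch is still running ...
            System.out.println("So far: " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");

            stopwatch.stop();

            // ... or after it has been stopped; the value is rounded down to the unit asked for.
            System.out.println("Total: " + stopwatch.elapsed(TimeUnit.SECONDS) + " s");
        }

        private static void slowCall() throws InterruptedException {
            Thread.sleep(120); // stand-in for real work
        }
    }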
From source file:org.opendaylight.controller.config.persist.impl.ConfigPusherImpl.java
    private void executeWithMissingModuleFactoryRetries(ConfigSubsystemFacade facade, ConfigExecution configExecution)
            throws DocumentedException, ValidationException, ModuleFactoryNotFoundException {
        Stopwatch stopwatch = Stopwatch.createStarted();
        ModuleFactoryNotFoundException lastException = null;
        do {
            try {
                facade.executeConfigExecution(configExecution);
                return;
            } catch (ModuleFactoryNotFoundException e) {
                LOG.debug("{} - will retry after timeout", e.toString());
                lastException = e;
                sleep();
            }
        } while (stopwatch.elapsed(TimeUnit.MILLISECONDS) < maxWaitForCapabilitiesMillis);
        throw lastException;
    }
From source file:qa.qcri.nadeef.core.pipeline.ViolationRepair.java
    /**
     * Execute the operator.
     *
     * @param violations input object.
     * @return output object.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Collection<Collection<Fix>> execute(Collection<Violation> violations) throws Exception {
        Stopwatch stopwatch = Stopwatch.createStarted();
        Rule rule = getCurrentContext().getRule();
        List<Collection<Fix>> result = Lists.newArrayList();
        int count = 0;
        for (Violation violation : violations) {
            try {
                Collection<Fix> fix = (Collection<Fix>) rule.repair(violation);
                result.add(fix);
                count++;
            } catch (Exception ex) {
                Tracer tracer = Tracer.getTracer(ViolationRepair.class);
                tracer.err("Exception in repair method.", ex);
            }
            setPercentage(count / violations.size());
        }

        long elapseTime;
        if (violations.size() != 0) {
            elapseTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / violations.size();
        } else {
            elapseTime = stopwatch.elapsed(TimeUnit.MILLISECONDS);
        }
        PerfReport.appendMetric(PerfReport.Metric.RepairCallTime, elapseTime);
        stopwatch.stop();
        return result;
    }
From source file:co.rsk.bitcoinj.params.AbstractBitcoinNetParams.java
    @Override
    public void checkDifficultyTransitions(final StoredBlock storedPrev, final BtcBlock nextBlock,
            final BtcBlockStore blockStore) throws VerificationException, BlockStoreException {
        BtcBlock prev = storedPrev.getHeader();

        // Is this supposed to be a difficulty transition point?
        if (!isDifficultyTransitionPoint(storedPrev)) {
            // No ... so check the difficulty didn't actually change.
            if (nextBlock.getDifficultyTarget() != prev.getDifficultyTarget())
                throw new VerificationException("Unexpected change in difficulty at height "
                        + storedPrev.getHeight() + ": " + Long.toHexString(nextBlock.getDifficultyTarget())
                        + " vs " + Long.toHexString(prev.getDifficultyTarget()));
            return;
        }

        // We need to find a block far back in the chain. It's OK that this is expensive because it only occurs every
        // two weeks after the initial block chain download.
        final Stopwatch watch = Stopwatch.createStarted();
        StoredBlock cursor = blockStore.get(prev.getHash());
        for (int i = 0; i < this.getInterval() - 1; i++) {
            if (cursor == null) {
                // This should never happen. If it does, it means we are following an incorrect or busted chain.
                throw new VerificationException(
                        "Difficulty transition point but we did not find a way back to the genesis block.");
            }
            cursor = blockStore.get(cursor.getHeader().getPrevBlockHash());
        }
        watch.stop();
        if (watch.elapsed(TimeUnit.MILLISECONDS) > 50)
            log.info("Difficulty transition traversal took {}", watch);

        BtcBlock blockIntervalAgo = cursor.getHeader();
        int timespan = (int) (prev.getTimeSeconds() - blockIntervalAgo.getTimeSeconds());
        // Limit the adjustment step.
        final int targetTimespan = this.getTargetTimespan();
        if (timespan < targetTimespan / 4)
            timespan = targetTimespan / 4;
        if (timespan > targetTimespan * 4)
            timespan = targetTimespan * 4;

        BigInteger newTarget = Utils.decodeCompactBits(prev.getDifficultyTarget());
        newTarget = newTarget.multiply(BigInteger.valueOf(timespan));
        newTarget = newTarget.divide(BigInteger.valueOf(targetTimespan));

        if (newTarget.compareTo(this.getMaxTarget()) > 0) {
            log.info("Difficulty hit proof of work limit: {}", newTarget.toString(16));
            newTarget = this.getMaxTarget();
        }

        int accuracyBytes = (int) (nextBlock.getDifficultyTarget() >>> 24) - 3;
        long receivedTargetCompact = nextBlock.getDifficultyTarget();

        // The calculated difficulty is to a higher precision than received, so reduce here.
        BigInteger mask = BigInteger.valueOf(0xFFFFFFL).shiftLeft(accuracyBytes * 8);
        newTarget = newTarget.and(mask);
        long newTargetCompact = Utils.encodeCompactBits(newTarget);

        if (newTargetCompact != receivedTargetCompact)
            throw new VerificationException("Network provided difficulty bits do not match what was calculated: "
                    + Long.toHexString(newTargetCompact) + " vs " + Long.toHexString(receivedTargetCompact));
    }
From source file:org.bitcoinj_extra.params.AbstractBitcoinNetParams.java
    @Override
    public void checkDifficultyTransitions(final StoredBlock storedPrev, final Block nextBlock,
            final BlockStore blockStore) throws VerificationException, BlockStoreException {
        Block prev = storedPrev.getHeader();

        // Is this supposed to be a difficulty transition point?
        if (!isDifficultyTransitionPoint(storedPrev)) {
            // No ... so check the difficulty didn't actually change.
            if (nextBlock.getDifficultyTarget() != prev.getDifficultyTarget())
                throw new VerificationException("Unexpected change in difficulty at height "
                        + storedPrev.getHeight() + ": " + Long.toHexString(nextBlock.getDifficultyTarget())
                        + " vs " + Long.toHexString(prev.getDifficultyTarget()));
            return;
        }

        // We need to find a block far back in the chain. It's OK that this is expensive because it only occurs every
        // two weeks after the initial block chain download.
        final Stopwatch watch = Stopwatch.createStarted();
        StoredBlock cursor = blockStore.get(prev.getHash());
        for (int i = 0; i < this.getInterval() - 1; i++) {
            if (cursor == null) {
                // This should never happen. If it does, it means we are following an incorrect or busted chain.
                throw new VerificationException(
                        "Difficulty transition point but we did not find a way back to the genesis block.");
            }
            cursor = blockStore.get(cursor.getHeader().getPrevBlockHash());
        }
        watch.stop();
        if (watch.elapsed(TimeUnit.MILLISECONDS) > 50)
            log.info("Difficulty transition traversal took {}", watch);

        Block blockIntervalAgo = cursor.getHeader();
        int timespan = (int) (prev.getTimeSeconds() - blockIntervalAgo.getTimeSeconds());
        // Limit the adjustment step.
        final int targetTimespan = this.getTargetTimespan();
        if (timespan < targetTimespan / 4)
            timespan = targetTimespan / 4;
        if (timespan > targetTimespan * 4)
            timespan = targetTimespan * 4;

        BigInteger newTarget = Utils.decodeCompactBits(prev.getDifficultyTarget());
        newTarget = newTarget.multiply(BigInteger.valueOf(timespan));
        newTarget = newTarget.divide(BigInteger.valueOf(targetTimespan));

        if (newTarget.compareTo(this.getMaxTarget()) > 0) {
            log.info("Difficulty hit proof of work limit: {}", newTarget.toString(16));
            newTarget = this.getMaxTarget();
        }

        int accuracyBytes = (int) (nextBlock.getDifficultyTarget() >>> 24) - 3;
        long receivedTargetCompact = nextBlock.getDifficultyTarget();

        // The calculated difficulty is to a higher precision than received, so reduce here.
        BigInteger mask = BigInteger.valueOf(0xFFFFFFL).shiftLeft(accuracyBytes * 8);
        newTarget = newTarget.and(mask);
        long newTargetCompact = Utils.encodeCompactBits(newTarget);

        if (newTargetCompact != receivedTargetCompact)
            throw new VerificationException("Network provided difficulty bits do not match what was calculated: "
                    + Long.toHexString(newTargetCompact) + " vs " + Long.toHexString(receivedTargetCompact));
    }
From source file:com.google.api.ads.adwords.jaxws.extensions.processors.onmemory.ReportProcessorOnMemory.java
    /**
     * Generate all the mapped reports to the given account IDs.
     *
     * @param dateRangeType the date range type.
     * @param dateStart the starting date.
     * @param dateEnd the ending date.
     * @param accountIdsSet the account IDs.
     * @param properties the properties file
     * @throws Exception error reaching the API.
     */
    @Override
    public void generateReportsForMCC(String userId, String mccAccountId,
            ReportDefinitionDateRangeType dateRangeType, String dateStart, String dateEnd,
            Set<Long> accountIdsSet, Properties properties) throws Exception {

        LOGGER.info("*** Retrieving account IDs ***");

        if (accountIdsSet == null || accountIdsSet.size() == 0) {
            accountIdsSet = this.retrieveAccountIds(userId, mccAccountId);
        } else {
            LOGGER.info("Accounts loaded from file.");
        }

        AdWordsSessionBuilderSynchronizer sessionBuilder = new AdWordsSessionBuilderSynchronizer(
                authenticator.authenticate(userId, mccAccountId, false));

        LOGGER.info("*** Generating Reports for " + accountIdsSet.size() + " accounts ***");
        Stopwatch stopwatch = Stopwatch.createStarted();

        Set<ReportDefinitionReportType> reports = this.csvReportEntitiesMapping.getDefinedReports();

        // reports
        for (ReportDefinitionReportType reportType : reports) {
            if (properties.containsKey(reportType.name())) {
                this.downloadAndProcess(mccAccountId, sessionBuilder, reportType, dateRangeType, dateStart,
                        dateEnd, accountIdsSet, properties);
            }
        }

        stopwatch.stop();
        LOGGER.info("*** Finished processing all reports in "
                + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000) + " seconds ***\n");
    }
From source file:pl.llp.aircasting.view.presenter.MeasurementPresenter.java
    private synchronized void updateTimelineView() {
        Stopwatch stopwatch = new Stopwatch().start();

        Measurement measurement = aggregator.getAverage();

        if (aggregator.isComposite()) {
            if (!timelineView.isEmpty()) {
                timelineView.remove(timelineView.size() - 1);
            }
            timelineView.add(measurement);
            Logger.logGraphPerformance(
                    "updateTimelineView step 0 took " + stopwatch.elapsed(TimeUnit.MILLISECONDS));
        } else {
            long firstToDisplay = measurement.getTime().getTime() - visibleMilliseconds;
            while (!timelineView.isEmpty() && firstToDisplay >= timelineView.get(0).getTime().getTime()) {
                timelineView.remove(0);
            }
            Logger.logGraphPerformance(
                    "updateTimelineView step 1 took " + stopwatch.elapsed(TimeUnit.MILLISECONDS));
            measurementsSize += 1;
            timelineView.add(measurement);
        }

        Logger.logGraphPerformance("updateTimelineView step n took " + stopwatch.elapsed(TimeUnit.MILLISECONDS));
    }
From source file:com.google.devtools.build.android.AndroidDataSerializer.java
    /**
     * Reads the serialized {@link DataKey} and {@link DataValue} to the {@link KeyValueConsumers}.
     *
     * @param inPath The path to the serialized protocol buffer.
     * @param consumers The {@link KeyValueConsumers} for the entries {@link DataKey} -> {@link DataValue}.
     * @throws DeserializationException Raised for an IOException or when the inPath is not a valid
     *     proto buffer.
     */
    public void read(Path inPath, KeyValueConsumers consumers) throws DeserializationException {
        Stopwatch timer = Stopwatch.createStarted();
        try (InputStream in = Files.newInputStream(inPath, StandardOpenOption.READ)) {
            FileSystem currentFileSystem = inPath.getFileSystem();
            Header header = Header.parseDelimitedFrom(in);
            if (header == null) {
                throw new DeserializationException("No Header found in " + inPath);
            }
            readEntriesSegment(consumers, in, currentFileSystem, header);
        } catch (IOException e) {
            throw new DeserializationException(e);
        } finally {
            logger.fine(String.format("Deserialized in merged in %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
        }
    }
From source file:uk.ac.ebi.atlas.search.diffanalytics.DiffAnalyticsDao.java
    public List<DiffAnalytics> fetchTopExpressions(Optional<Collection<IndexedAssayGroup>> indexedContrasts,
            Optional<? extends Collection<String>> geneIds, String species) {

        Optional<ImmutableSet<IndexedAssayGroup>> uniqueIndexedContrasts = uniqueIndexedContrasts(indexedContrasts);

        log("fetchTopExpressions", uniqueIndexedContrasts, geneIds);

        Stopwatch stopwatch = Stopwatch.createStarted();

        DatabaseQuery<Object> indexedContrastQuery = buildSelect(uniqueIndexedContrasts, geneIds, species);

        jdbcTemplate.setMaxRows(RESULT_SIZE);

        List<DiffAnalytics> results;

        try {
            results = jdbcTemplate.query(indexedContrastQuery.getQuery(), dbeRowMapper,
                    indexedContrastQuery.getParameters().toArray());

            stopwatch.stop();
            LOGGER.debug(String.format("fetchTopExpressions returned %s expressions in %.2f seconds",
                    results.size(), stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000D));
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            throw e;
        }

        return results;
    }
From source file:org.hashtrees.manager.HashTreesManager.java
    private void rebuildHashTree(final long treeId, long fullRebuildPeriod) throws IOException {
        Stopwatch watch = Stopwatch.createStarted();
        int dirtySegsCount = hashTrees.rebuildHashTree(treeId, fullRebuildPeriod);
        watch.stop();
        LOG.info("Total no of dirty segments : {} ", dirtySegsCount);
        LOG.info("Time taken for rebuilding (treeId: {}) (in ms) : {}", treeId,
                watch.elapsed(TimeUnit.MILLISECONDS));
    }
From source file:tds.dll.common.diagnostic.services.impl.DiagnosticDatabaseServiceImpl.java
    private DatabaseOperation readOperation(LegacyDbNameUtility.Databases dbName) {
        Stopwatch stopwatch = Stopwatch.createStarted();
        try {
            switch (dbName) {
            case Archive:
                readTestDao.readArchiveDatabase();
                break;
            case Config:
                readTestDao.readConfigsDatabase();
                break;
            case Itembank:
                readTestDao.readItemBankDatabase();
                break;
            case Session:
                readTestDao.readSessionDatabase();
                break;
            }
        } catch (DiagnosticException diagnosticException) {
            stopwatch.stop();
            return new DatabaseOperation(Rating.FAILED, DatabaseOperationType.READ,
                    stopwatch.elapsed(TimeUnit.MILLISECONDS), diagnosticException.getMessage());
        }
        return new DatabaseOperation(Rating.IDEAL, DatabaseOperationType.READ,
                stopwatch.elapsed(TimeUnit.MILLISECONDS));
    }