List of usage examples for com.google.common.base.Stopwatch.createStarted()
@CheckReturnValue public static Stopwatch createStarted()
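Before the project examples below, here is a minimal standalone sketch of the typical pattern: create a running stopwatch, do the work, stop it, then read the elapsed time. The class name and the sleep stand-in are illustrative only and are not taken from any of the source files listed afterwards.

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchExample {
    public static void main(String[] args) throws InterruptedException {
        // createStarted() returns a Stopwatch that is already running
        Stopwatch stopwatch = Stopwatch.createStarted();

        Thread.sleep(250); // stand-in for the work being timed

        // stop() ends timing and returns the same instance, so it can be chained
        stopwatch.stop();

        // toString() yields a human-readable duration; elapsed() yields a numeric value
        System.out.println("Work took " + stopwatch);
        System.out.println("Work took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    }
}

The examples that follow all use the same lifecycle, varying only in how the elapsed time is reported (logging the stopwatch directly via toString(), or calling elapsed() with an explicit TimeUnit).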
From source file:com.b2international.snowowl.snomed.reasoner.server.ontology.SnomedOntologyService.java
private OWLOntology createOntology(final IRI ontologyIRI, final IRI versionIRI) throws OWLOntologyCreationException {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    LOGGER.info(MessageFormat.format(">>> Creating ontology ''{0}''", ontologyIRI));

    OWLOntology ontology = null;
    final OWLOntologyID ontologyID = new OWLOntologyID(ontologyIRI, versionIRI);
    ontology = manager.getOntology(ontologyID);

    if (null != ontology) {
        LOGGER.info(MessageFormat.format("<<< Creating ontology ''{0}'' [cached] [{1}]", ontologyIRI,
                stopwatch.toString()));
        return ontology;
    }

    ontology = manager.createOntology(ontologyID);
    LOGGER.info(MessageFormat.format("<<< Creating ontology ''{0}'' [built] [{1}]", ontologyIRI,
            stopwatch.toString()));
    return ontology;
}
From source file:pl.coffeepower.blog.messagebus.MessageBusTestHelper.java
Publisher executePublisher(final Engine engine) throws InterruptedException {
    Publisher publisher = Guice.createInjector(Stage.PRODUCTION, new TestConfigurationModule(),
            new BytesEventModule(), engine.getModule()).getInstance(Publisher.class);
    ExecutorService executorService = Executors.newSingleThreadExecutor();
    executorService.execute(() -> {
        Fixtures fixtures = new Fixtures();
        Stopwatch stopwatch = Stopwatch.createStarted();
        LongStream.rangeClosed(fixtures.getFirstMessageId(), fixtures.getNumberOfMessages()).forEach(value -> {
            IdleStrategy idleStrategy = new SleepingIdleStrategy(TimeUnit.MICROSECONDS.toNanos(1L));
            while (!publisher.send(Bytes.concat(Longs.toByteArray(value), fixtures.getAdditionalData()))) {
                idleStrategy.idle();
            }
        });
        System.out.println("Sent all messages in " + stopwatch.stop());
    });
    executorService.shutdown();
    executorService.awaitTermination(1L, TimeUnit.MINUTES);
    return publisher;
}
From source file:org.opendaylight.yangtools.yang.parser.system.test.Main.java
private static void runSystemTest(final List<String> yangLibDirs, final List<String> yangFiles,
        final HashSet<QName> supportedFeatures, final boolean recursiveSearch) {
    LOG.log(Level.INFO, "Yang model dirs: {0} ", yangLibDirs);
    LOG.log(Level.INFO, "Yang model files: {0} ", yangFiles);
    LOG.log(Level.INFO, "Supported features: {0} ", supportedFeatures);

    SchemaContext context = null;

    printMemoryInfo("start");
    final Stopwatch stopWatch = Stopwatch.createStarted();

    try {
        context = SystemTestUtils.parseYangSources(yangLibDirs, yangFiles, supportedFeatures, recursiveSearch);
    } catch (final Exception e) {
        LOG.log(Level.SEVERE, "Failed to create SchemaContext.", e);
        System.exit(1);
    }

    stopWatch.stop();
    LOG.log(Level.INFO, "Elapsed time: {0}", stopWatch);
    printMemoryInfo("end");
    LOG.log(Level.INFO, "SchemaContext resolved Successfully. {0}", context);
    Runtime.getRuntime().gc();
    printMemoryInfo("after gc");
}
From source file:ch.ge.ve.protopoc.service.simulation.ElectionAdministrationSimulator.java
public List<Long> getTally() throws InvalidDecryptionProofException {
    TallyData tallyData = bulletinBoardService.getTallyData();
    List<DecryptionProof> decryptionProofs = tallyData.getDecryptionProofs();
    List<BigInteger> publicKeyShares = tallyData.getPublicKeyShares();
    List<Encryption> finalShuffle = tallyData.getFinalShuffle();
    List<List<BigInteger>> partialDecryptions = tallyData.getPartialDecryptions();

    Stopwatch decryptionProofCheckWatch = Stopwatch.createStarted();
    if (!tallyingAuthoritiesAlgorithm.checkDecryptionProofs(decryptionProofs, publicKeyShares, finalShuffle,
            partialDecryptions)) {
        throw new InvalidDecryptionProofException("An invalid decryption proof was found");
    }
    decryptionProofCheckWatch.stop();
    perfLog.info(String.format("Administration : checked decryption proofs in %dms",
            decryptionProofCheckWatch.elapsed(TimeUnit.MILLISECONDS)));

    List<BigInteger> decryptions = tallyingAuthoritiesAlgorithm.getDecryptions(finalShuffle, partialDecryptions);
    List<List<Boolean>> votes = tallyingAuthoritiesAlgorithm.getVotes(decryptions, totalCandidateCount);
    // Additional verifications on the votes validity may be performed here.
    return IntStream.range(0, totalCandidateCount)
            .mapToLong(i -> votes.stream().filter(vote -> vote.get(i)).count()).boxed()
            .collect(Collectors.toList());
}
From source file:brooklyn.entity.rebind.persister.MementoFileWriterSync.java
public void append(T val) {
    try {
        lock.writeLock().lockInterruptibly();
    } catch (InterruptedException e) {
        throw Exceptions.propagate(e);
    }
    try {
        Stopwatch stopwatch = Stopwatch.createStarted();

        // Write to the temp file, then atomically move it to the permanent file location
        Files.append(serializer.toString(val), file, Charsets.UTF_8);
        modCount.incrementAndGet();

        if (LOG.isTraceEnabled())
            LOG.trace("Wrote {}, took {}; modified file {} times",
                    new Object[] { file, Time.makeTimeStringRounded(stopwatch), modCount });
    } catch (IOException e) {
        throw Exceptions.propagate(e);
    } finally {
        lock.writeLock().unlock();
    }
}
From source file:org.yql4j.impl.HttpComponentsYqlClient.java
@Override
public YqlResult query(YqlQuery query) throws YqlException {
    checkNotNull(query);
    try {
        HttpUriRequest request = createHttpRequest(query);
        request = signHttpRequest(request, query);

        Stopwatch timer = Stopwatch.createStarted();
        try (CloseableHttpResponse response = httpClient.execute(request)) {
            logger.debug("YQL query (URL=" + query.toUri() + ") took "
                    + timer.stop().elapsed(TimeUnit.MILLISECONDS) + "ms");
            if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                HttpEntity entity = response.getEntity();
                Map<String, String> headers = new HashMap<>();
                for (Header header : response.getAllHeaders()) {
                    headers.put(header.getName(), header.getValue());
                }
                return new YqlResult(EntityUtils.toString(entity), headers,
                        query.getFormat() != null ? query.getFormat() : ResultFormat.XML,
                        getAppropriateMapper(query));
            } else if (isClientError(response)) {
                HttpEntity entity = response.getEntity();
                ObjectMapper mapper = getAppropriateMapper(query);
                ErrorType error = mapper.readValue(EntityUtils.toString(entity), ErrorType.class);
                throw new YqlException(
                        "Failed to execute YQL query (URL=" + query.toUri() + "): " + error.getDescription());
            } else {
                throw new YqlException("Failed to execute YQL query (URL=" + query.toUri()
                        + "): Received unexpected status code " + response.getStatusLine().getStatusCode());
            }
        }
    } catch (ParseException | OAuthException | IOException e) {
        throw new YqlException("Failed to execute YQL query (URL=" + query.toUri() + "): " + e.getMessage(), e);
    }
}
From source file:com.github.steveash.jg2p.seq.PhonemeHmmTrainer.java
public void trainForInstances(InstanceList examples) {
    if (state == State.Initializing) {
        initializeFor(examples);
    }
    state = State.Training;

    Stopwatch watch = Stopwatch.createStarted();
    HMMTrainerByLikelihood trainer = makeNewTrainer(hmm);
    this.lastTrainer = trainer;

    trainer.train(examples, opts.maxIterations);

    watch.stop();
    log.info("Training took " + watch);
    if (printEval) {
        log.info("Accuracy on training data: " + accuracyFor(examples));
    }
}
From source file:org.apache.drill.exec.store.hive.HiveMetadataProvider.java
/**
 * Return stats for table/partitions in given {@link HiveReadEntry}. If valid stats are available in MetaStore,
 * return it. Otherwise estimate using the size of the input data.
 *
 * @param hiveReadEntry Subset of the {@link HiveReadEntry} used when creating this cache object.
 * @return stats for the table/partitions
 * @throws IOException
 */
public HiveStats getStats(final HiveReadEntry hiveReadEntry) throws IOException {
    final Stopwatch timeGetStats = Stopwatch.createStarted();

    final Table table = hiveReadEntry.getTable();
    try {
        if (!isPartitionedTable) {
            final Properties properties = MetaStoreUtils.getTableMetadata(table);
            final HiveStats stats = getStatsFromProps(properties);
            if (stats.valid()) {
                return stats;
            }

            // estimate the stats from the InputSplits.
            return getStatsEstimateFromInputSplits(getTableInputSplits());
        } else {
            final HiveStats aggStats = new HiveStats(0, 0);
            for (Partition partition : hiveReadEntry.getPartitions()) {
                final Properties properties = HiveUtilities.getPartitionMetadata(partition, table);
                HiveStats stats = getStatsFromProps(properties);

                if (!stats.valid()) {
                    // estimate the stats from InputSplits
                    stats = getStatsEstimateFromInputSplits(getPartitionInputSplits(partition));
                }
                aggStats.add(stats);
            }

            return aggStats;
        }
    } catch (final Exception e) {
        throw new IOException("Failed to get numRows from HiveTable", e);
    } finally {
        // elapsed(NANOSECONDS) / 1000 is microseconds
        logger.debug("Took {} µs to get stats from {}.{}", timeGetStats.elapsed(TimeUnit.NANOSECONDS) / 1000,
                table.getDbName(), table.getTableName());
    }
}
From source file:eu.project.ttc.engines.CasStatCounter.java
@Override
public void initialize(UimaContext context) throws ResourceInitializationException {
    super.initialize(context);
    this.sw = Stopwatch.createStarted();
    if (traceFileName != null) {
        File file = new File(traceFileName);
        try {
            this.fileWriter = new FileWriter(file);
        } catch (IOException e) {
            LOGGER.error("Could not create a writer to file {}", traceFileName);
            throw new ResourceInitializationException(e);
        }
        this.periodicStatEnabled = docPeriod > 0;
        LOGGER.info("Tracing time performance to file {}", file.getAbsolutePath());
    }
}
From source file:org.icgc.dcc.submission.validation.first.file.DelegatingFileRowChecker.java
@Override
@SneakyThrows
public void checkFile(String fileName) {
    log.info(banner());
    log.info("Start performing {} validation...", name);

    val fileSchema = getFileSchema(fileName);
    @Cleanup
    val inputStream = new BufferedInputStream(getFileSystem().getDecompressingInputStream(fileName),
            LINE_BUFFER_SIZE);

    val watch = Stopwatch.createStarted();
    val line = new StringBuilder(512);
    long lineNumber = 1;
    int nextByte = 0;
    while ((nextByte = inputStream.read()) > 0) {
        if ((char) nextByte == LINE_SEPARATOR_CHAR) {
            // Delegate
            checkRow(fileName, fileSchema, line, lineNumber);

            // Book-keeping
            ++lineNumber;
            if (lineNumber % 10000 == 0) {
                // Check for cancellation
                checkInterrupted(name);
            }
            if (lineNumber % LINE_STATUS_THRESHOLD == 0L) {
                // Log status
                log.info("Checked {} lines of '{}' in {}",
                        new Object[] { formatCount(lineNumber), fileName, watch });
            }

            // Reset
            line.setLength(0);
        } else {
            // Buffer
            line.appendCodePoint(nextByte);
        }
    }

    // Check buffer to be empty, otherwise we have a file with no trailing new line
    if (line.length() > 0) {
        log.info("Missing new line at end of file '{}'", fileName);
        reportError(
                error().fileName(fileName).lineNumber(lineNumber).type(LINE_TERMINATOR_MISSING_ERROR).build());
    }

    log.info("Finishing performing {} validation...", name);
    finish(fileName, fileSchema);
    log.info("Completed '{}' validation on '{}' in {}. Number of errors found: {}",
            new Object[] { name, fileName, watch, formatCount(checkErrorCount) });
}