Example usage for com.google.common.base Stopwatch stop

Introduction

On this page you can find usage examples for com.google.common.base Stopwatch#stop(), collected from the open-source projects listed below.

Prototype

public Stopwatch stop() 

Document

Stops the stopwatch; future reads will return the fixed duration that had elapsed up to this point. The call returns this Stopwatch instance, so it can be chained (for example stopwatch.stop().elapsed(TimeUnit.NANOSECONDS), as in the GOMEvolver example below), and it throws an IllegalStateException if the stopwatch is already stopped.
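
A minimal, self-contained sketch of this contract (the Thread.sleep call is only a stand-in for the work being timed; everything else is the Guava API itself):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class StopwatchStopExample {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted();
        Thread.sleep(50); // stand-in for the work being timed
        stopwatch.stop(); // freezes the reading; returns this, so calls can chain
        long ms = stopwatch.elapsed(TimeUnit.MILLISECONDS); // now returns a fixed duration
        System.out.println("took " + ms + " ms (" + stopwatch + ")");
        // stopwatch.stop();      // a second stop() would throw IllegalStateException
        stopwatch.reset().start(); // reset() clears the reading; the instance can be reused
    }
}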

Usage

From source file:org.eclipse.viatra.modelobfuscator.application.common.ModelObfuscatorHeadless.java

/**
 * @param outputDirectory
 * @param inputs
 * @param obfuscatorBuilder
 */
private void performObfuscation(File outputDirectory, Map<String, FileInputStream> inputs,
        XMLModelObfuscatorBuilder obfuscatorBuilder) {
    for (Entry<String, FileInputStream> input : inputs.entrySet()) {
        BufferedInputStream bufferedInputStream = new BufferedInputStream(input.getValue());
        obfuscatorBuilder.setInput(bufferedInputStream);
        String fileName = input.getKey();
        File output = new File(outputDirectory, fileName);
        BufferedOutputStream bufferedOutputStream;
        try {
            bufferedOutputStream = new BufferedOutputStream(new FileOutputStream(output));
            obfuscatorBuilder.setOutput(bufferedOutputStream);
            XMLModelObfuscator obfuscator = obfuscatorBuilder.build();
            System.out.println("Obfuscating " + fileName);
            Stopwatch stopwatch = Stopwatch.createStarted();
            obfuscator.obfuscate();
            stopwatch.stop();
            System.out.println("Obfuscation finished in: " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms ("
                    + stopwatch.elapsed(TimeUnit.NANOSECONDS) + " ns)");
            bufferedOutputStream.close();
            bufferedInputStream.close();
        } catch (FileNotFoundException e) {
            reportError("Could not ouput to file " + output.getPath());
        } catch (IOException e) {
            reportError("Could not close output file " + output.getPath());
        }
    }
}

From source file:uk.ac.open.kmi.iserve.discovery.disco.impl.SparqlLogicConceptMatcher.java

/**
 * Obtain all match results for the set of origins that are within a range of match types
 * TODO: This method is buggy. To be fixed
 *
 * @param origins
 * @param minType
 * @param maxType
 * @return
 */
private Table<URI, URI, MatchResult> obtainMatchResults(Set<URI> origins, MatchType minType,
        MatchType maxType) {

    log.debug("Obtain match results for {}, with {} <= Match Result <= {}", origins, minType, maxType);

    Table<URI, URI, MatchResult> result = HashBasedTable.create();
    // Exit fast if no data is provided or no matches can be found
    if (origins == null || origins.isEmpty() || minType.compareTo(maxType) > 0)
        return result;

    // Create the query
    String queryStr = new StringBuffer().append(generateQueryHeader())
            .append(generateRangeMatchWhereClause(origins, minType, maxType)).append(generateQueryFooter())
            .toString();

    log.debug("SPARQL Query generated: \n {}", queryStr);

    // Query the engine
    Query query = QueryFactory.create(queryStr);
    QueryExecution qe = QueryExecutionFactory.sparqlService(this.sparqlEndpoint.toASCIIString(), query);
    MonitoredQueryExecution qexec = new MonitoredQueryExecution(qe);
    try {
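        // Note: the no-arg Stopwatch constructor used below was deprecated and later
        // removed from Guava; with current Guava, Stopwatch.createStarted() is the equivalent.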
        Stopwatch stopwatch = new Stopwatch().start();
        ResultSet qResults = qexec.execSelect();
        stopwatch.stop();
        log.debug("Obtained matches for {} concepts within range {} - {} in {}", origins.size(), minType,
                maxType, stopwatch);

        Resource origin;
        Resource destination;
        URI originUri;
        URI matchUri;
        int index = 0;
        // Iterate over the results obtained starting with the matches for class0 onwards
        while (qResults.hasNext()) {
            QuerySolution soln = qResults.nextSolution();
            origin = soln.getResource(ORIGIN_VAR);
            destination = soln.getResource(MATCH_VAR + index);

            if (origin != null && origin.isURIResource() && destination != null
                    && destination.isURIResource()) {
                originUri = new URI(origin.getURI());
                matchUri = new URI(destination.getURI());
                MatchType type = getMatchType(soln);
                result.put(originUri, matchUri, new AtomicMatchResult(originUri, matchUri, type, this));
                log.debug("Concept {} was matched to {} with type {}", originUri, matchUri, type);
            } else {
                log.warn("Skipping result as some URI is null: Origin - {}, Destination - {}", origin,
                        destination);
                break;
            }
        }
    } catch (URISyntaxException e) {
        log.error("Error obtaining match result. Expected a correct URI", e);
    } finally {
        qexec.close();
    }
    return result;

}

From source file:co.cask.cdap.examples.wordcount.RetrieveCountsHandler.java

/**
 * Returns the counts for all words in the input.  The request body is expected to contain
 * a comma-separated list of words.
 *
 * <p>
 * This endpoint method differs from {@link RetrieveCountsHandler#getCounts(HttpServiceRequest,HttpServiceResponder)}
 * in using {@link KeyValueTable#readAll(byte[][])} to perform a batched read.
 * </p>
 */
@Path("multicounts")
@POST
public void getMultiCounts(HttpServiceRequest request, HttpServiceResponder responder) {
    String wordString = Charsets.UTF_8.decode(request.getContent()).toString();
    String[] words = wordString.split(",");
    byte[][] wordBytes = new byte[words.length][];
    for (int i = 0; i < words.length; i++) {
        wordBytes[i] = Bytes.toBytes(words[i]);
    }
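    // Note: new Stopwatch() was removed in later Guava releases;
    // Stopwatch.createStarted() is the current equivalent.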
    Stopwatch timer = new Stopwatch().start();
    Map<byte[], byte[]> results = wordCountsTable.readAll(wordBytes);
    Map<String, Long> wordCounts = Maps.newHashMap();
    for (Map.Entry<byte[], byte[]> entry : results.entrySet()) {
        byte[] val = entry.getValue();
        wordCounts.put(Bytes.toString(entry.getKey()), val != null ? Bytes.toLong(val) : 0);
    }
    timer.stop();
    Map<String, Object> response = Maps.newHashMap();
    response.put("counts", wordCounts);
    response.put("elapsed", timer.toString());
    responder.sendJson(response);
}

From source file:com.google.pubsub.clients.kafka.KafkaPublisherTask.java

@Override
public void run() {
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    Callback callback = (metadata, exception) -> {
        if (exception != null) {
            log.error(exception.getMessage(), exception);
            return;
        }
        addNumberOfMessages(1);
        metricsHandler.recordLatency(stopwatch.elapsed(TimeUnit.MILLISECONDS));
    };
    stopwatch.start();
    for (int i = 0; i < batchSize; i++) {
        publisher.send(new ProducerRecord<>(topic, null, System.currentTimeMillis(), null, payload), callback);
    }
    publisher.flush();
    stopwatch.stop();
}

From source file:fr.ens.transcriptome.aozan.collectors.AbstractFastqProcessThread.java

@Override
public void run() {

    // Timer
    final Stopwatch timer = Stopwatch.createStarted();

    notifyStartLogger();

    try {
        // Launch process treatment related to each collector
        process();

        setSuccess(true);

    } catch (final AozanException e) {
        setException(e);
    } finally {

        final String duration = toTimeHumanReadable(timer.elapsed(TimeUnit.MILLISECONDS));
        timer.stop();

        notifyEndLogger(duration);
    }

}

From source file:org.lenskit.cli.commands.GlobalRecommend.java

@Override
public void execute(Namespace opts) throws IOException, RecommenderBuildException {
    ScriptEnvironment env = new ScriptEnvironment(opts);
    InputData input = new InputData(env, opts);
    RecommenderLoader loader = new RecommenderLoader(input, env, opts);
    LenskitRecommenderEngine engine = loader.loadEngine();

    List<Long> items = opts.get("items");
    final int n = opts.getInt("num_recs");

    try (LenskitRecommender rec = engine.createRecommender()) {
        ItemBasedItemRecommender irec = rec.getItemBasedItemRecommender();
        ItemNameDAO indao = rec.get(ItemNameDAO.class);
        if (irec == null) {
            logger.error("recommender has no global recommender");
            throw new UnsupportedOperationException("no global recommender");
        }

        logger.info("using {} reference items: {}", items.size(), items);
        Stopwatch timer = Stopwatch.createStarted();

        ResultList recs = irec.recommendRelatedItemsWithDetails(LongUtils.packedSet(items), n, null, null);
        for (Result item : recs) {
            System.out.format("%d", item.getId());
            if (indao != null) {
                System.out.format(" (%s)", indao.getItemName(item.getId()));
            }
            System.out.format(": %.3f", item.getScore());
            System.out.println();
        }

        timer.stop();
        logger.info("recommended in {}", timer);
    }
}

From source file:com.b2international.snowowl.snomed.reasoner.server.classification.Reasoner.java

public ReasonerTaxonomy classify(final String userId, final String parentContextDescription,
        final List<ConceptDefinition> additionalDefinitions) {

    try {

        //         if (CONSTRAINED_HEAP) {
        //            getApplicationContext().getService(ICDORepositoryManager.class).clearRevisionCache();
        //            System.gc();
        //         }

        final Stopwatch stopwatch = Stopwatch.createStarted();

        if (!isSynchronized()) {
            classifyWithContext(createLockContext(userId, parentContextDescription), 5000L,
                    additionalDefinitions);
        }

        final ReasonerTaxonomy taxonomy = computeTaxonomy(stopwatch);
        stopwatch.stop();
        SnomedReasonerServerActivator
                .logInfo(MessageFormat.format("Classified ontology in {0}.", TimeUtil.toString(stopwatch)));

        if (CONSTRAINED_HEAP) {
            unload();
            stateMachine.unload();
        }

        return taxonomy;

    } catch (final RuntimeException | InvocationTargetException | InterruptedException | OutOfMemoryError e) {
        LOGGER.error(MessageFormat.format("Caught exception while classifying ontology on branch path ''{0}''.",
                branchPath), e);

        try {
            unload();
        } catch (final Exception suppressed) {
            e.addSuppressed(suppressed);
        }

        stateMachine.fail();
        throw SnowowlRuntimeException.wrap(e);
    }
}

From source file:it.units.malelab.ege.core.evolver.geneoptimalmixing.GOMEvolver.java

protected Callable<List<Individual<G, T, F>>> gomCallable(final List<Individual<G, T, F>> population,
        final Individual<G, T, F> parent, final Set<Set<Integer>> fos, final Random random,
        final int generation, final Ranker<Individual<G, T, F>> ranker,
        final AbstractMutation<G> mutationOperator,
        final LoadingCache<G, Pair<Node<T>, Map<String, Object>>> mappingCache,
        final LoadingCache<Node<T>, F> fitnessCache, final int maxEvaluations,
        final List<EvolverListener<G, T, F>> listeners, final ExecutorService executor,
        final Stopwatch elapsedStopwatch) {
    final Evolver<G, T, F> evolver = this;
    return new Callable<List<Individual<G, T, F>>>() {
        @Override
        public List<Individual<G, T, F>> call() throws Exception {
            try {
                //randomize fos
                List<Set<Integer>> randomizedFos = new ArrayList<>(fos);
                Collections.shuffle(randomizedFos, random);
                //iterate
                Individual<G, T, F> child = parent;
                for (Set<Integer> subset : fos) {
                    //check evaluations
                    if (actualBirths(0, fitnessCache) > maxEvaluations) {
                        break;
                    }
                    if (configuration.getMaxElapsed() > 0) {
                        //check if elapsed time exceeded
                        if (elapsedStopwatch.elapsed(TimeUnit.SECONDS) > configuration.getMaxElapsed()) {
                            break;
                        }
                    }
                    //mix genes
                    Individual<G, T, F> donor = population.get(random.nextInt(population.size()));
                    G donorGenotype = donor.getGenotype();
                    G childGenotype = (G) child.getGenotype().clone();
                    for (Integer locus : subset) {
                        childGenotype.set(locus, donorGenotype.get(locus));
                    }
                    //map
                    Stopwatch stopwatch = Stopwatch.createStarted();
                    Pair<Node<T>, Map<String, Object>> mappingOutcome = mappingCache
                            .getUnchecked(childGenotype);
                    Node<T> phenotype = mappingOutcome.getFirst();
                    long elapsed = stopwatch.stop().elapsed(TimeUnit.NANOSECONDS);
                    Utils.broadcast(
                            new MappingEvent<>(childGenotype, phenotype, elapsed, generation, evolver, null),
                            listeners, executor);
                    //compute fitness
                    stopwatch.reset().start();
                    F fitness = fitnessCache.getUnchecked(phenotype);
                    elapsed = stopwatch.stop().elapsed(TimeUnit.NANOSECONDS);
                    Individual<G, T, F> individual = new Individual<>(childGenotype, phenotype, fitness,
                            generation, saveAncestry ? Arrays.asList(child, donor) : null,
                            mappingOutcome.getSecond());
                    Utils.broadcast(new BirthEvent<>(individual, elapsed, generation, evolver, null), listeners,
                            executor);
                    //rank
                    List<List<Individual<G, T, F>>> ranked = ranker.rank(Arrays.asList(child, individual),
                            random);
                    child = ranked.get(0).get(0);
                }
                if (mutationOperator != null) {
                    while (child.getPhenotype().equals(parent.getPhenotype())) {
                        //check evaluations
                        if (actualBirths(0, fitnessCache) > maxEvaluations) {
                            break;
                        }
                        //mutate
                        G childGenotype = mutationOperator
                                .apply(Collections.singletonList(child.getGenotype()), random).get(0);
                        //map
                        Stopwatch stopwatch = Stopwatch.createStarted();
                        Pair<Node<T>, Map<String, Object>> mappingOutcome = mappingCache
                                .getUnchecked(childGenotype);
                        Node<T> phenotype = mappingOutcome.getFirst();
                        long elapsed = stopwatch.stop().elapsed(TimeUnit.NANOSECONDS);
                        Utils.broadcast(new MappingEvent<>(childGenotype, phenotype, elapsed, generation,
                                evolver, null), listeners, executor);
                        //compute fitness
                        stopwatch.reset().start();
                        F fitness = fitnessCache.getUnchecked(phenotype);
                        elapsed = stopwatch.stop().elapsed(TimeUnit.NANOSECONDS);
                        Individual<G, T, F> individual = new Individual<>(childGenotype, phenotype, fitness,
                                generation, saveAncestry ? Arrays.asList(child) : null,
                                mappingOutcome.getSecond());
                        Utils.broadcast(new BirthEvent<>(individual, elapsed, generation, evolver, null),
                                listeners, executor);
                        child = individual;
                    }
                }
                return Collections.singletonList(child);
            } catch (Throwable t) {
                t.printStackTrace();
                System.exit(-1);
                return null;
            }
        }
    };
}

From source file:grakn.core.server.keyspace.KeyspaceManager.java

public void loadSystemSchema() {
    Stopwatch timer = Stopwatch.createStarted();
    try (TransactionOLTP tx = systemKeyspaceSession.transaction().write()) {
        if (tx.getSchemaConcept(KEYSPACE_ENTITY) != null) {
            LOG.info("System schema has been previously loaded");
            return;
        }
        LOG.info("Loading schema");
        loadSystemSchema(tx);
        tx.commit();
        LOG.info("Loaded system schema to system keyspace. Took: {}", timer.stop());
    } catch (RuntimeException e) {
        LOG.error("Error while loading system schema in {}. The error was: {}", timer.stop(), e.getMessage(),
                e);
        throw e;
    }
}

From source file:org.hashtrees.manager.HashTreesManager.java

private void rebuildHashTree(final long treeId, long fullRebuildPeriod) throws IOException {
    Stopwatch watch = Stopwatch.createStarted();
    int dirtySegsCount = hashTrees.rebuildHashTree(treeId, fullRebuildPeriod);
    watch.stop();
    LOG.info("Total no of dirty segments : {} ", dirtySegsCount);
    LOG.info("Time taken for rebuilding (treeId: {}) (in ms) : {}", treeId,
            watch.elapsed(TimeUnit.MILLISECONDS));
}