Example usage for com.google.common.base Stopwatch stop

Introduction

On this page you can find example usage for com.google.common.base Stopwatch stop.

Prototype

public Stopwatch stop() 

Document

Stops the stopwatch. Future reads will return the fixed duration that had elapsed up to this point. Calling stop() on a stopwatch that is already stopped throws an IllegalStateException.

Usage

From source file:com.webbfontaine.valuewebb.irms.factory.risk.RiskActionProcessor.java

private void doApply() {
    Stopwatch stopwatch = Stopwatch.createStarted();

    TtGenHome ttGenHome = getTtGenHome();

    List<Criteria> criteria = criteriaRepository.loadRules(criteriaType);

    LOGGER.trace("Loaded Risk Criteria: {}", criteria);

    RiskResultCollector riskResultCollector = new RiskResultCollector();

    for (Criteria criterion : criteria) {
        RuleCallable<List<RiskResult>> runnable = createCallable(ttGenHome, criterion);
        runnable.setRuleRepository(riskRuleRepository);

        riskResultCollector.add(executor.submit(runnable));
    }

    riskResultCollector.calculateResults();

    getRiskResultMerger().merge(ttGenHome.getInstance(), riskResultCollector);

    stopwatch.stop();
    LOGGER.info("IRMS Risk for TT with id {} took: {}", ttGenHome.getId(), stopwatch);
}

From source file:org.locationtech.geogig.repository.RevTreeBuilder2.java

/**
 * Traverses the nodes in the {@link NodeIndex}, deletes the ones with {@link ObjectId#NULL
 * NULL} ObjectIds, and adds the ones with non "NULL" ids.
 *
 * @return the new tree, not saved to the object database. Any bucket tree though is saved when
 *         this method returns.
 */
public RevTree build() {
    if (nodeIndex == null) {
        return original.builder(db).build();
    }

    Stopwatch sw = Stopwatch.createStarted();
    RevTreeBuilder builder;
    try {
        builder = new RevTreeBuilder(db, original);
        Iterator<Node> nodes = nodeIndex.nodes();
        while (nodes.hasNext()) {
            Node node = nodes.next();
            if (node.getObjectId().isNull()) {
                builder.remove(node.getName());
            } else {
                builder.put(node);
            }
        }
    } catch (RuntimeException e) {
        e.printStackTrace();
        throw e;
    } finally {
        nodeIndex.close();
    }
    LOGGER.debug("Index traversed in {}", sw.stop());
    sw.reset().start();

    RevTree namedTree = builder.build();
    saveExtraFeatureTypes();
    LOGGER.debug("RevTreeBuilder.build() in {}", sw.stop());
    return namedTree;
}

From source file:qa.qcri.nadeef.core.utils.RuleBuilder.java

/**
 * Generates the <code>Rule</code> class code.
 *
 * @return generated rule class.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public Collection<Rule> build() throws Exception {
    List<Rule> result = Lists.newArrayList();
    Collection<File> outputFiles = compile();
    Logger tracer = Logger.getLogger(RuleBuilder.class);
    for (File outputFile : outputFiles) {
        Stopwatch stopwatch = Stopwatch.createStarted();
        String className = Files.getNameWithoutExtension(outputFile.getName());

        URL url = new URL("file://" + outputFile.getParent() + File.separator);
        Class ruleClass = CommonTools.loadClass(className, url);
        Rule rule = (Rule) ruleClass.getConstructor().newInstance();

        rule.initialize(Files.getNameWithoutExtension(outputFile.getName()), tableNames);
        result.add(rule);
        tracer.fine("Rule file : " + outputFile.getAbsolutePath() + " is loaded in "
                + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms.");
        stopwatch.stop();
    }
    return result;
}

From source file:eu.project.ttc.engines.SyntacticTermGatherer.java

@Override
public void collectionProcessComplete() throws AnalysisEngineProcessException {
    LOGGER.info("Starting syntactic term gathering for TermIndex {}",
            this.termIndexResource.getTermIndex().getName());

    TermIndex termIndex = this.termIndexResource.getTermIndex();

    if (termIndexResource.getTermIndex().getTerms().isEmpty())
        return;

    /*
     * Prepare observer and indexes
     */
    for (RunConfig runConfig : RUN_CONFIGS) {
        CustomTermIndex customIndex = termIndex.getCustomIndex(runConfig.indexName);
        customIndex.cleanSingletonKeys();

        // clean biggest classes
        customIndex.cleanEntriesByMaxSize(WARNING_CRITICAL_SIZE);

        CustomIndexStats stats = new CustomIndexStats(customIndex);

        // Display class sizes
        Stopwatch sw1 = Stopwatch.createStarted();
        int k = 0;
        LOGGER.debug("Biggest class is {}, size: {}", stats.getBiggestClass(), stats.getBiggestSize());

        int size;
        for (Integer i : stats.getSizeCounters().keySet()) {
            k++;
            size = stats.getSizeCounters().get(i).size();
            totalComparisons = totalComparisons.add(BigInteger.valueOf(size * i * (i - 1)));
        }
        LOGGER.debug("Number of term pairs to test: " + totalComparisons);
        sw1.stop();
        LOGGER.debug("Time to get the comparisons number: " + sw1.elapsed(TimeUnit.MILLISECONDS));
        LOGGER.debug("Number of classes: " + k);
        if (taskObserver.isPresent())
            taskObserver.get().setTotalTaskWork(totalComparisons.longValue());
    }

    LOGGER.debug("Gathering with default variant rule indexing (source and target patterns)");
    for (RunConfig runConfig : RUN_CONFIGS) {
        gather(runConfig.indexName, runConfig.variantRuleIndex);
        termIndex.dropCustomIndex(runConfig.indexName);
    }

}

From source file:qa.qcri.nadeef.core.util.RuleBuilder.java

/**
 * Generates the <code>Rule</code> class code.
 *
 * @return generated rule class.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public Collection<Rule> build() throws Exception {
    List<Rule> result = Lists.newArrayList();
    Collection<File> outputFiles = compile();
    Tracer tracer = Tracer.getTracer(RuleBuilder.class);
    for (File outputFile : outputFiles) {
        Stopwatch stopwatch = Stopwatch.createStarted();
        String className = Files.getNameWithoutExtension(outputFile.getName());

        URL url = new URL("file://" + outputFile.getParent() + File.separator);
        Class ruleClass = CommonTools.loadClass(className, url);
        Rule rule = (Rule) ruleClass.getConstructor().newInstance();

        rule.initialize(Files.getNameWithoutExtension(outputFile.getName()), tableNames);
        result.add(rule);
        tracer.verbose("Rule file : " + outputFile.getAbsolutePath() + " is loaded in "
                + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms.");
        stopwatch.stop();
    }
    return result;
}

From source file:eu.numberfour.n4js.ui.editor.syntaxcoloring.HighlightingParser.java

private List<Token> doParse(CharStream in) {
    TokenSource tokenSource = createLexer(in);
    LazyTokenStream tokenStream = createTokenStream(tokenSource);
    setInitialHiddenTokens(tokenStream);
    InternalN4JSParser parser = createParser(tokenStream);
    IUnorderedGroupHelper helper = unorderedGroupHelper.get();
    if (!(helper instanceof IUnorderedGroupHelper.Null)) {
        throw new IllegalStateException("Unexpected usage of unordered groups.");
    }
    Stopwatch stopwatch = null;
    boolean debug = LOGGER.isDebugEnabled();
    // boolean debug = true;
    if (debug) {
        stopwatch = Stopwatch.createStarted();
    }
    try {
        parser.entryRuleScript();
        while (tokenStream.LT(1) != Token.EOF_TOKEN) {
            tokenStream.consume();
        }
        @SuppressWarnings("unchecked")
        List<Token> result = tokenStream.getTokens();
        return result;
    } catch (Exception re) {
        throw new ParseException(re.getMessage(), re);
    } finally {
        if (debug) {
            assert stopwatch != null;
            long elapsed = stopwatch.stop().elapsed(TimeUnit.MILLISECONDS);
            if (elapsed > 5) {
                LOGGER.warn("Coloring parser took: " + elapsed);
            }
        }
    }
}

From source file:eu.numberfour.n4jsx.ui.editor.syntaxcoloring.HighlightingParser.java

private List<Token> doParse(CharStream in) {
    TokenSource tokenSource = createLexer(in);
    LazyTokenStream tokenStream = createTokenStream(tokenSource);
    setInitialHiddenTokens(tokenStream);
    InternalN4JSXParser parser = createParser(tokenStream);
    IUnorderedGroupHelper helper = unorderedGroupHelper.get();
    if (!(helper instanceof IUnorderedGroupHelper.Null)) {
        throw new IllegalStateException("Unexpected usage of unordered groups.");
    }
    Stopwatch stopwatch = null;
    boolean debug = LOGGER.isDebugEnabled();
    // boolean debug = true;
    if (debug) {
        stopwatch = Stopwatch.createStarted();
    }
    try {
        parser.entryRuleIDLScript();
        while (tokenStream.LT(1) != Token.EOF_TOKEN) {
            tokenStream.consume();
        }
        @SuppressWarnings("unchecked")
        List<Token> result = tokenStream.getTokens();
        return result;
    } catch (Exception re) {
        throw new ParseException(re.getMessage(), re);
    } finally {
        if (debug) {
            assert stopwatch != null;
            long elapsed = stopwatch.stop().elapsed(TimeUnit.MILLISECONDS);
            if (elapsed > 5) {
                LOGGER.warn("Coloring parser took: " + elapsed);
            }
        }
    }
}

From source file:com.mycelium.wapi.api.WapiClient.java

/**
 * Attempt to connect and send the request to a URL in our list of URLs within the given
 * timeout; if it fails, try the next one until we have cycled through all URLs.
 */
private Response getConnectionAndSendRequestWithTimeout(Object request, String function, int timeout) {
    int originalConnectionIndex = _serverEndpoints.getCurrentEndpointIndex();
    while (true) {
        // currently active server-endpoint
        HttpEndpoint serverEndpoint = _serverEndpoints.getCurrentEndpoint();
        try {
            OkHttpClient client = serverEndpoint.getClient();
            _logger.logInfo("Connecting to " + serverEndpoint.getBaseUrl() + " ("
                    + _serverEndpoints.getCurrentEndpointIndex() + ")");

            client.setConnectTimeout(timeout, TimeUnit.MILLISECONDS);
            client.setReadTimeout(timeout, TimeUnit.MILLISECONDS);
            client.setWriteTimeout(timeout, TimeUnit.MILLISECONDS);

            Stopwatch callDuration = Stopwatch.createStarted();
            // build request
            final String toSend = getPostBody(request);
            Request rq = new Request.Builder().addHeader(MYCELIUM_VERSION_HEADER, versionCode)
                    .post(RequestBody.create(MediaType.parse("application/json"), toSend))
                    .url(serverEndpoint.getUri(WapiConst.WAPI_BASE_PATH, function).toString()).build();

            // execute request
            Response response = client.newCall(rq).execute();
            callDuration.stop();
            _logger.logInfo(String.format(Locale.ENGLISH, "Wapi %s finished (%dms)", function,
                    callDuration.elapsed(TimeUnit.MILLISECONDS)));

            // Check for status code 2XX
            if (response.isSuccessful()) {
                if (serverEndpoint instanceof FeedbackEndpoint) {
                    ((FeedbackEndpoint) serverEndpoint).onSuccess();
                }
                return response;
            } else {
                // If the status code is not 200 we cycle to the next server
                logError(String.format(Locale.ENGLISH, "Http call to %s failed with %d %s", function,
                        response.code(), response.message()));
                // throw...
            }
        } catch (IOException e) {
            logError("IOException when sending request " + function, e);
            if (serverEndpoint instanceof FeedbackEndpoint) {
                _logger.logInfo("Resetting tor");
                ((FeedbackEndpoint) serverEndpoint).onError();
            }
        }
        // Try the next server
        _serverEndpoints.switchToNextEndpoint();
        if (_serverEndpoints.getCurrentEndpointIndex() == originalConnectionIndex) {
            // We have tried all URLs
            return null;
        }

    }
}

From source file:uk.ac.ebi.atlas.solr.query.SolrQueryService.java

public Set<String> fetchGeneIds(String geneQuery, boolean exactMatch, String species) {

    Stopwatch stopwatch = Stopwatch.createStarted();

    //eg: {!lucene q.op=OR df=property_value_lower}(property_value_lower:Q9NHV9) AND (bioentity_type:"mirna" OR bioentity_type:"ensgene")
    // fl=bioentity_identifier&group=true&group.field=bioentity_identifier&group.main=true
    SolrQuery solrQuery = solrQueryBuilderFactory.createGeneBioentityIdentifierQueryBuilder()
            .forQueryString(geneQuery, true).withExactMatch(exactMatch).withSpecies(species)
            .withBioentityTypes(GENE.getSolrAliases()).build();

    Set<String> geneIds = solrServer.query(solrQuery, BIOENTITY_IDENTIFIER_FIELD, false);

    stopwatch.stop();
    LOGGER.debug(String.format("Fetched gene ids for %s: returned %s results in %s secs", geneQuery,
            geneIds.size(), stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000D));

    return geneIds;
}

From source file:cosmos.mapred.MediawikiQueries.java

public void groupBy(Store id, Column colToFetch, Map<Column, Long> columnCounts, long totalResults)
        throws Exception {
    // new Stopwatch() is no longer public in recent Guava versions; createUnstarted() is the equivalent
    Stopwatch sw = Stopwatch.createUnstarted();

    sw.start();
    final CloseableIterable<Entry<RecordValue<?>, Long>> results = this.sorts.groupResults(id, colToFetch);
    TreeMap<RecordValue<?>, Long> counts = Maps.newTreeMap();

    for (Entry<RecordValue<?>, Long> entry : results) {
        counts.put(entry.getKey(), entry.getValue());
    }

    results.close();
    sw.stop();

    System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Took " + sw.toString()
            + " to group results");
    logTiming(totalResults, sw.elapsed(TimeUnit.MILLISECONDS), "groupBy:" + colToFetch);

    //    System.out.println(counts);

    final CloseableIterable<MultimapRecord> verifyResults = this.sorts.fetch(id, Index.define(colToFetch));
    TreeMap<RecordValue<?>, Long> records = Maps.newTreeMap();
    for (MultimapRecord r : verifyResults) {
        if (r.containsKey(colToFetch)) {
            for (RecordValue<?> val : r.get(colToFetch)) {
                if (records.containsKey(val)) {
                    records.put(val, records.get(val) + 1);
                } else {
                    records.put(val, 1L);
                }
            }
        }
    }

    verifyResults.close();

    if (counts.size() != records.size()) {
        System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Expected "
                + records.size() + " groups but found " + counts.size());
        System.exit(1);
    }

    Set<RecordValue<?>> countKeys = counts.keySet(), recordKeys = records.keySet();
    for (RecordValue<?> k : countKeys) {
        if (!recordKeys.contains(k)) {
            System.out.println(Thread.currentThread().getName() + ": " + colToFetch
                    + " - Expected to have count for " + k);
            System.exit(1);
        }

        Long actual = counts.get(k), expected = records.get(k);

        if (!actual.equals(expected)) {
            System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Expected " + expected
                    + " value(s) but found " + actual + " value(s) for " + k.value());
            System.exit(1);
        }
    }
}