Example usage for com.google.common.base Stopwatch createStarted

Introduction

This page shows example usages of com.google.common.base.Stopwatch#createStarted, drawn from the source files listed below.

Prototype

@CheckReturnValue
public static Stopwatch createStarted() 

Document

Creates (and starts) a new stopwatch using System#nanoTime as its time source.
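
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects under Usage; the class name and the sleep are placeholders) showing the typical createStarted lifecycle: the stopwatch is returned already running, elapsed time can be read while it runs, and stop() freezes the reading. The @CheckReturnValue annotation signals that the returned stopwatch should not be discarded.

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchExample {
    public static void main(String[] args) throws InterruptedException {
        // createStarted() returns a stopwatch that is already running.
        Stopwatch stopwatch = Stopwatch.createStarted();

        Thread.sleep(250); // stand-in for the work being timed

        // elapsed(TimeUnit) reads the running time without stopping the watch.
        System.out.println("Elapsed ms: " + stopwatch.elapsed(TimeUnit.MILLISECONDS));

        // stop() freezes the elapsed time; toString() renders it in a human-readable unit.
        stopwatch.stop();
        System.out.println("Total: " + stopwatch);
    }
}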

Usage

From source file:fi.helsinki.moodi.service.synchronize.SynchronizationService.java

public SynchronizationSummary synchronize(final SynchronizationType type) {

    if (synchronizationJobRunService.isSynchronizationInProgress()) {
        throw new SynchronizationInProgressException(type);
    }

    final Stopwatch stopwatch = Stopwatch.createStarted();
    final long jobId = begin(type);

    logger.info("Synchronization of type {} started with jobId {}", type, jobId);

    final List<Course> courses = loadCourses(type);
    final List<SynchronizationItem> items = makeItems(courses, type);
    final List<SynchronizationItem> enrichedItems = enrichItems(items);
    final List<SynchronizationItem> processedItems = processItems(enrichedItems);

    final SynchronizationSummary summary = complete(type, jobId, stopwatch, processedItems);

    logger.info("Synchronization with jobId {} completed in {}", jobId, stopwatch.toString());

    applyNotifiers(processedItems);

    return logSummary(summary);
}

From source file:me.lazerka.gae.jersey.oauth2.google.TokenVerifierGoogleRemote.java

@Override
public GoogleUserPrincipal verify(String authToken) throws IOException, InvalidKeyException {
    logger.trace("Requesting endpoint to validate token");

    URL url = UriBuilder.fromUri(TOKEN_INFO).queryParam("id_token", authToken).build().toURL();

    HTTPRequest httpRequest = new HTTPRequest(url, GET, validateCertificate());

    Stopwatch stopwatch = Stopwatch.createStarted();
    HTTPResponse response = urlFetchService.fetch(httpRequest);
    logger.debug("Remote call took {}ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));

    int responseCode = response.getResponseCode();
    String content = new String(response.getContent(), UTF_8);

    if (responseCode != 200) {
        logger.warn("{}: {}", responseCode, content);

        String msg = "Endpoint response code " + responseCode;

        // Something is wrong with our request.
        // If signature is invalid, then response code is 403.
        if (responseCode >= 400 && responseCode < 500) {
            try {
                TokenErrorResponse tokenErrorResponse = jsonFactory.fromString(content,
                        TokenErrorResponse.class);
                msg += ": " + tokenErrorResponse.getErrorDescription();
            } catch (IOException e) {
                logger.warn("Cannot parse response as " + TokenErrorResponse.class.getSimpleName());
            }
        }

        throw new InvalidKeyException(msg);
    }

    // Signature verification is done remotely (the whole point of this class).
    // Expiration verification is done

    Payload payload = jsonFactory.fromString(content, Payload.class);

    // Issuers verification have been done remotely.

    Set<String> trustedClientIds = Collections.singleton(oauthClientId);
    // Note containsAll.
    if (!trustedClientIds.containsAll(payload.getAudienceAsList())) {
        throw new InvalidKeyException("Audience invalid");
    }

    if (!payload.getEmailVerified()) {
        throw new InvalidKeyException("Email not verified");
    }

    return new GoogleUserPrincipal(payload.getSubject(), payload.getEmail());
}

From source file:com.github.benmanes.caffeine.cache.Stresser.java

public Stresser() {
    ThreadFactory threadFactory = new ThreadFactoryBuilder().setPriority(Thread.MAX_PRIORITY).setDaemon(true)
            .build();
    Executors.newSingleThreadScheduledExecutor(threadFactory).scheduleAtFixedRate(this::status, STATUS_INTERVAL,
            STATUS_INTERVAL, SECONDS);
    cache = Caffeine.newBuilder().maximumSize(operation.maxEntries).recordStats().build(key -> key);
    local = (BoundedLocalCache<Integer, Integer>) cache.asMap();
    ints = new Integer[TOTAL_KEYS];
    Arrays.setAll(ints, key -> {
        cache.put(key, key);
        return key;
    });
    cache.cleanUp();
    stopwatch = Stopwatch.createStarted();
    status();
}

From source file:benchmarkio.producer.kafka.KafkaMessageProducer.java

private void produce(final String topic, final String message) {
    for (int i = 0; i < numberOfMessagesToProduce; i++) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Publishing message to Kafka topic {}\n{}", topic, message.toString());
            }

            final KeyedMessage<String, String> data = new KeyedMessage<>(topic, message);

            // Start
            final Stopwatch stopwatch = Stopwatch.createStarted();

            producer.send(data);

            // End
            stopwatch.stop();
            histogram.recordValue(stopwatch.elapsed(Consts.TIME_UNIT_FOR_REPORTING));

        } catch (final Exception e) {
            log.error("Error publishing message to kafka topic {}\n{}", topic, message.toString());
        }
    }

    log.info("Finished production of {} messages", numberOfMessagesToProduce);
}

From source file:fr.ymanvieu.trading.rate.scheduler.SchedulerService.java

@Retryable
public void updateRates(LatestRateProvider provider) throws IOException {

    // Use retry to compensate for the poor QoS of the Yahoo provider,
    // which sometimes responds with HttpServerErrorException: 504 Maximum Transaction Time Exceeded,
    // HttpClientErrorException: 404 Not Found, ResourceAccessException: Connection timed out, etc.

    String providerName = provider.getClass().getSimpleName();

    log.debug("{}: Updating rates", providerName);

    Stopwatch startWatch = Stopwatch.createStarted();

    List<Quote> quotes = provider.getRates();

    if (quotes == null || quotes.isEmpty()) {
        log.info("{}: No rate to update", providerName);
        return;
    }

    Stopwatch saveWatch = Stopwatch.createStarted();

    dataUpdater.updateRates(quotes);

    log.debug("{}: Rates stored in {}", providerName, saveWatch);
    log.info("{}: Update done in {}", providerName, startWatch);
}

From source file:benchmarkio.producer.rabbitmq.RabbitMQMessageProducer.java

private void produce(final String topic, final String message) {
    for (int i = 0; i < numberOfMessagesToProduce; i++) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Publishing message to RabbitMQ topic {}\n{}", topic, message.toString());
            }

            // Start
            final Stopwatch stopwatch = Stopwatch.createStarted();

            channel.basicPublish(topic, "#", null, message.getBytes());

            // End
            stopwatch.stop();
            histogram.recordValue(stopwatch.elapsed(Consts.TIME_UNIT_FOR_REPORTING));

        } catch (final Exception e) {
            log.error("Error publishing message to RabbitMQ topic {}\n{}", topic, message.toString());
        }
    }

    log.info("Finished production of {} messages", numberOfMessagesToProduce);
}

From source file:com.google.cloud.tools.eclipse.test.util.ThreadDumpingWatchdog.java

protected void install() {
    // Surefire doesn't output anything until the test is complete,
    // so it's hard to tell what test we're associated with
    System.out.println("[Watchdog] > " + description);
    timer = new Timer("Thread Dumping Watchdog");
    timer.scheduleAtFixedRate(this, unit.toMillis(period), unit.toMillis(period));
    stopwatch = Stopwatch.createStarted();
}

From source file:brooklyn.entity.rebind.persister.BrooklynMementoPersisterToFile.java

@Override
public BrooklynMemento loadMemento(LookupContext lookupContext, RebindExceptionHandler exceptionHandler) {
    Stopwatch stopwatch = Stopwatch.createStarted();

    String xml = readFile();
    serializer.setLookupContext(lookupContext);
    try {
        BrooklynMemento result = serializer.fromString(xml);

        if (LOG.isDebugEnabled())
            LOG.debug("Loaded memento; took {}", Time.makeTimeStringRounded(stopwatch));
        return result;

    } finally {
        serializer.unsetLookupContext();
    }
}

From source file:uk.ac.ebi.atlas.search.baseline.BaselineExperimentAssayGroupsDao.java

public SetMultimap<String, String> fetchExperimentAssayGroupsWithNonSpecificExpression(
        Optional<? extends Collection<IndexedAssayGroup>> indexedAssayGroups,
        Optional<? extends Collection<String>> geneIds) {
    if (isEmpty(indexedAssayGroups) && isEmpty(geneIds)) {
        return ImmutableSetMultimap.<String, String>builder().build();
    }

    Optional<ImmutableSet<IndexedAssayGroup>> uniqueIndexedAssayGroups = uniqueIndexedContrasts(
            indexedAssayGroups);

    log("fetchExperimentAssayGroupsWithNonSpecificExpression", uniqueIndexedAssayGroups, geneIds);

    Stopwatch stopwatch = Stopwatch.createStarted();

    DatabaseQuery<Object> baselineExpressionQuery = buildSelect(uniqueIndexedAssayGroups, geneIds);

    try {
        SetMultimap<String, String> results = jdbcTemplate.query(baselineExpressionQuery.getQuery(),
                baselineExpressionQuery.getParameters().toArray(),
                new ResultSetExtractor<SetMultimap<String, String>>() {
                    @Override
                    public SetMultimap<String, String> extractData(ResultSet rs)
                            throws SQLException, DataAccessException {

                        ImmutableSetMultimap.Builder<String, String> builder = ImmutableSetMultimap.builder();

                        while (rs.next()) {
                            String experimentAccession = rs.getString("experiment");
                            String assayGroupId = rs.getString("assaygroupid");

                            builder.put(experimentAccession, assayGroupId);
                        }

                        return builder.build();
                    }
                });

        stopwatch.stop();

        LOGGER.debug(String.format(
                "fetchExperimentAssayGroupsWithNonSpecificExpression returned %s results in %.2f seconds",
                results.size(), stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000D));

        return results;

    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        throw e;
    }

}

From source file:org.apache.drill.test.framework.DrillTestJdbc.java

public void run() {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    this.thread = Thread.currentThread();
    setTestStatus(TestStatus.RUNNING);
    int mainQueryIndex = 0;
    String[] queries = null;
    try {
        connection = connectionPool.getOrCreateConnection(modeler);
    } catch (SQLException e) {
        LOG.error(e.getMessage());
        throw new RuntimeException(e);
    }
    try {
        LOG.debug("Running test " + modeler.queryFilename + " (connection: " + connection.hashCode() + ")");

        if (!modeler.type.equalsIgnoreCase("impersonation")) {
            executeSetupQuery(String.format("use `%s`", matrix.schema));
        }

        queries = Utils.getSqlStatements(modeler.queryFilename);
        mainQueryIndex = queries.length / 2; // Currently, the main query must be in the middle of the list of queries

        for (int i = 0; i < mainQueryIndex; i++) {
            executeSetupQuery(queries[i]);
            Thread.sleep(1000);
        }

        query = queries[mainQueryIndex];
        executeQuery(query);

        testVerifier = new TestVerifier(columnTypes, query, columnLabels, matrix.verificationTypes);
        if (query.startsWith("explain") || matrix.verificationTypes.get(0).equalsIgnoreCase("regex")
                || matrix.verificationTypes.get(0).equalsIgnoreCase("regex-no-order")
                || matrix.verificationTypes.get(0).equalsIgnoreCase("filter-ratio")) {
            setTestStatus(testVerifier.verifyTextPlan(modeler.expectedFilename, outputFilename));
        } else {
            setTestStatus(testVerifier.verifyResultSet(modeler.expectedFilename, outputFilename));
        }

        if (modeler.type.equalsIgnoreCase("limit 0")) {
            String limitZeroQuery = "select * from (" + query + ") t limit 0";
            executeLimitZeroQuery(limitZeroQuery);
        }
    } catch (VerificationException e) {
        fail(TestStatus.VERIFICATION_FAILURE, e);
    } catch (Exception e) {
        fail(TestStatus.EXECUTION_FAILURE, e);
    } finally {
        try {
            for (int i = mainQueryIndex + 1; i < queries.length; i++) {
                Thread.sleep(1000);
                executeSetupQuery(queries[i]);
            }
            Thread.sleep(1000);
            connectionPool.releaseConnection(modeler, connection);
        } catch (Exception e) {
            LOG.error("Failed while running cleanup query. Not returning connection to pool.", e);
            try {
                connection.close();
            } catch (SQLException e1) {
                LOG.warn(e.getMessage());
                e1.printStackTrace();
            }
        }
        if (testStatus == TestStatus.PASS && !TestDriver.OPTIONS.outputQueryResult) {
            Utils.deleteFile(outputFilename);
        }
        duration = stopwatch;
        LOG.info(testStatus + " (" + stopwatch + ") " + modeler.queryFilename + " (connection: "
                + connection.hashCode() + ")");
    }
}