Example usage for com.google.common.base Stopwatch stop

Introduction

This page lists usage examples for the stop() method of com.google.common.base.Stopwatch.

Prototype

public Stopwatch stop() 

Document

Stops the stopwatch; future reads will return the fixed duration that had elapsed up to this point. The method returns this Stopwatch instance, so calls can be chained (as several examples below do), and it throws an IllegalStateException if the stopwatch is already stopped.
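
Before the real-world usages below, here is a minimal, self-contained sketch of the typical start/stop/read lifecycle (the class name and the sleep are illustrative placeholders, not taken from the examples):

import com.google.common.base.Stopwatch;

import java.util.concurrent.TimeUnit;

public class StopwatchStopDemo {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted();
        Thread.sleep(100); // stand-in for the work being timed
        stopwatch.stop(); // freezes the elapsed time and returns this, so calls can be chained

        // Reads after stop() return the fixed duration measured up to that point
        System.out.println("Took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");

        // A second stop() would throw IllegalStateException; start() would resume timing
    }
}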

Usage

From source file:demos.SynchronousRead.java
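
This example times a year of synchronous Cassandra reads across NUM_METRICS metrics, then logs the count and the elapsed milliseconds once the stopwatch is stopped.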

public void run() {
    logger.info("Preparing to read data points");

    Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
    Session session = cluster.connect("demo");
    PreparedStatement query = session.prepare(
            "SELECT metric_id, time, value FROM metric_data WHERE metric_id = ? AND time >= ? AND time <= ?");
    DateTime end = DateTime.now();
    DateTime start = end.minusYears(1);
    List<DataPoint> dataPoints = new ArrayList<>();

    Stopwatch stopwatch = Stopwatch.createStarted(); // originally new Stopwatch().start(); that constructor was removed in later Guava releases
    for (int i = 0; i < NUM_METRICS; ++i) {
        ResultSet resultSet = session.execute(query.bind("metric-" + i, start.toDate(), end.toDate()));
        resultSet.forEach(
                row -> dataPoints.add(new DataPoint(row.getString(0), row.getDate(1), row.getDouble(2))));
    }
    stopwatch.stop();

    logger.info("Retrieved {} data points in {} ms", dataPoints.size(),
            stopwatch.elapsed(TimeUnit.MILLISECONDS));
}

From source file:es.usc.citius.composit.cli.command.CompositionCommand.java
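
Here a single unstarted stopwatch is reused across benchmark cycles: each cycle chains stop().elapsed(...) to read its time, then calls reset() before the next run.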

private void benchmark(ComposIT<Concept, Boolean> composit, WSCTest.Dataset dataset, int cycles) {
    // Compute benchmark
    String bestSample = null;
    Stopwatch watch = Stopwatch.createUnstarted();
    long minMS = Long.MAX_VALUE;
    for (int i = 0; i < cycles; i++) {
        System.out.println("[ComposIT Search] Starting benchmark cycle " + (i + 1));
        watch.start();
        composit.search(dataset.getRequest());
        long ms = watch.stop().elapsed(TimeUnit.MILLISECONDS);
        if (ms < minMS) {
            minMS = ms;
        }
        watch.reset();

        if (cli.isMetrics()) {
            cli.println(" > Metrics: ");
            cli.println(" METRICS NOT IMPLEMENTED");
        }
    }
    System.out.println(
            "[Benchmark Result] " + cycles + "-cycle benchmark completed. Best time: " + minMS + " ms.");
    if (cli.isMetrics() && bestSample != null) {
        cli.println("Best sample: " + bestSample);
    }
}

From source file:org.lenskit.eval.traintest.ExperimentJob.java
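
This LensKit evaluation job uses separate stopwatches for the build, test, and per-user phases, stopping each one before recording its elapsed time in the output tables.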

@Override
protected void compute() {
    ExperimentOutputLayout layout = experiment.getOutputLayout();
    TableWriter globalOutput = layout.prefixTable(experiment.getGlobalOutput(), dataSet, algorithm);
    TableWriter userOutput = layout.prefixTable(experiment.getUserOutput(), dataSet, algorithm);
    RowBuilder outputRow = globalOutput.getLayout().newRowBuilder();

    logger.info("Building {} on {}", algorithm, dataSet);
    Stopwatch buildTimer = Stopwatch.createStarted();
    try (LenskitRecommender rec = buildRecommender()) {
        buildTimer.stop();
        logger.info("Built {} in {}", algorithm.getName(), buildTimer);

        logger.info("Measuring {} on {}", algorithm.getName(), dataSet.getName());

        RowBuilder userRow = userOutput != null ? userOutput.getLayout().newRowBuilder() : null;

        Stopwatch testTimer = Stopwatch.createStarted();

        List<ConditionEvaluator> accumulators = Lists.newArrayList();

        for (EvalTask task : experiment.getTasks()) {
            ConditionEvaluator ce = task.createConditionEvaluator(algorithm, dataSet, rec);
            if (ce != null) {
                accumulators.add(ce);
            } else {
                logger.warn("Could not instantiate task {} for algorithm {} on data set {}", task, algorithm,
                        dataSet);
            }
        }

        LongSet testUsers = dataSet.getTestData().getUserDAO().getUserIds();
        UserEventDAO trainEvents = dataSet.getTrainingData().getUserEventDAO();
        UserEventDAO userEvents = dataSet.getTestData().getUserEventDAO();
        final NumberFormat pctFormat = NumberFormat.getPercentInstance();
        pctFormat.setMaximumFractionDigits(2);
        pctFormat.setMinimumFractionDigits(2);
        final int nusers = testUsers.size();
        logger.info("Testing {} on {} ({} users)", algorithm, dataSet, nusers);
        ProgressLogger progress = ProgressLogger.create(logger).setCount(nusers).setLabel("testing users")
                .start();
        for (LongIterator iter = testUsers.iterator(); iter.hasNext();) {
            if (Thread.interrupted()) {
                throw new EvaluationException("eval job interrupted");
            }
            long uid = iter.nextLong();
            if (userRow != null) {
                userRow.add("User", uid);
            }

            UserHistory<Event> trainData = trainEvents.getEventsForUser(uid);
            if (trainData == null) {
                trainData = History.forUser(uid);
            }
            UserHistory<Event> userData = userEvents.getEventsForUser(uid);
            TestUser user = new TestUser(trainData, userData);

            Stopwatch userTimer = Stopwatch.createStarted();

            for (ConditionEvaluator eval : accumulators) {
                Map<String, Object> ures = eval.measureUser(user);
                if (userRow != null) {
                    userRow.addAll(ures);
                }
            }
            userTimer.stop();
            if (userRow != null) {
                userRow.add("TestTime", userTimer.elapsed(TimeUnit.MILLISECONDS) * 0.001);
                assert userOutput != null;
                try {
                    userOutput.writeRow(userRow.buildList());
                } catch (IOException e) {
                    throw new EvaluationException("error writing user row", e);
                }
                userRow.clear();
            }

            progress.advance();
        }

        progress.finish();
        testTimer.stop();
        logger.info("Tested {} in {}", algorithm.getName(), testTimer);
        outputRow.add("BuildTime", buildTimer.elapsed(TimeUnit.MILLISECONDS) * 0.001);
        outputRow.add("TestTime", testTimer.elapsed(TimeUnit.MILLISECONDS) * 0.001);
        for (ConditionEvaluator eval : accumulators) {
            outputRow.addAll(eval.finish());
        }
    } catch (UncheckedInterruptException ex) {
        logger.info("evaluation interrupted");
        throw ex;
    } catch (Throwable th) {
        logger.error("Error evaluating " + algorithm + " on " + dataSet, th);
        throw th;
    }

    try {
        globalOutput.writeRow(outputRow.buildList());
    } catch (IOException e) {
        throw new EvaluationException("error writing output row", e);
    }
}

From source file:com.github.steveash.jg2p.seq.PhonemeHmmTrainer.java
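
Training time for the HMM is measured with a started stopwatch; after stop(), the stopwatch is logged directly via its human-readable toString() form.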

public void trainForInstances(InstanceList examples) {

    if (state == State.Initializing) {
        initializeFor(examples);
    }
    state = State.Training;
    Stopwatch watch = Stopwatch.createStarted();
    HMMTrainerByLikelihood trainer = makeNewTrainer(hmm);
    this.lastTrainer = trainer;

    trainer.train(examples, opts.maxIterations);

    watch.stop();
    log.info("Training took " + watch);
    if (printEval) {
        log.info("Accuracy on training data: " + accuracyFor(examples));
    }
}

From source file:com.madgag.agit.filepath.FilterableFileListAdapter.java
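
In this Android adapter, the stopped stopwatch is concatenated into a debug log line to report how long filtering the file list took.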

public Filter getFilter() {
    if (filter == null) {
        filter = new Filter() {

            @Override
            protected FilterResults performFiltering(CharSequence constraint) {
                if (originalValues == null) {
                    synchronized (mLock) {
                        originalValues = newArrayList(items);
                    }
                }

                List<FilePath> originalValuesCopy;
                synchronized (mLock) {
                    originalValuesCopy = newArrayList(originalValues);
                }

                FilterResults results = new FilterResults();
                if (TextUtils.isEmpty(constraint)) {
                    results.values = originalValuesCopy;
                    results.count = originalValuesCopy.size();
                } else {
                    //                        String tn = "FLA." + originalValuesCopy.size() + "." + constraint + "." + currentTimeMillis();
                    //                        Debug.startMethodTracing(tn, 64 * 1024 * 1024);
                    Stopwatch stopwatch = Stopwatch.createStarted(); // originally new Stopwatch().start(); that constructor was removed in later Guava releases
                    List<FilePath> matchingFiles = cachingFilePathListMatcher.get(constraint.toString());
                    Log.d(TAG, "Filtered with '" + constraint + "' to " + matchingFiles.size() + " files "
                            + stopwatch.stop());
                    //                        Debug.stopMethodTracing();

                    results.values = matchingFiles;
                    results.count = matchingFiles.size();
                }

                return results;
            }

            @Override
            protected void publishResults(CharSequence constraint, FilterResults results) {
                visibleFilePathMatcher
                        .set(TextUtils.isEmpty(constraint) ? null : new FilePathMatcher(constraint.toString()));
                setList((List<FilePath>) results.values);
            }
        };
    }
    return filter;
}

From source file:de.hybris.platform.acceleratorcms.component.slot.impl.DefaultCMSPageSlotComponentService.java
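
Rendering is timed only when debug logging is enabled, and a log entry is written for any component that takes more than a millisecond to render.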

@Override
public void renderComponent(final PageContext pageContext, final AbstractCMSComponentModel component)
        throws ServletException, IOException {
    validateParameterNotNull(pageContext, "Parameter pageContext must not be null");
    validateParameterNotNull(component, "Parameter component must not be null");

    if (LOG.isDebugEnabled()) {
        final Stopwatch stopwatch = Stopwatch.createStarted(); // originally new Stopwatch() followed by start(); both were removed in later Guava releases
        getCmsComponentRenderer().renderComponent(pageContext, component);
        stopwatch.stop();

        if (stopwatch.elapsed(TimeUnit.MILLISECONDS) > 1) { // elapsedMillis() in the original was removed in later Guava releases
            LOG.debug("Rendered component [" + component.getUid() + "] of type [" + component.getItemtype()
                    + "].. (" + stopwatch.toString() + ")");
        }
    } else {
        getCmsComponentRenderer().renderComponent(pageContext, component);
    }
}

From source file:org.apache.drill.exec.store.kafka.MessageIterator.java
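
Each Kafka poll is timed individually; after stop(), the elapsed milliseconds are logged and accumulated into a running total of fetch time.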

@Override
public boolean hasNext() {
    if (recordIter != null && recordIter.hasNext()) {
        return true;
    }

    long nextPosition = kafkaConsumer.position(topicPartition);
    if (nextPosition >= endOffset) {
        return false;
    }

    ConsumerRecords<byte[], byte[]> consumerRecords = null;
    Stopwatch stopwatch = Stopwatch.createStarted();
    try {
        consumerRecords = kafkaConsumer.poll(kafkaPollTimeOut);
    } catch (KafkaException ke) {
        logger.error(ke.getMessage(), ke);
        throw UserException.dataReadError(ke).message(ke.getMessage()).build(logger);
    }
    stopwatch.stop();

    if (consumerRecords.isEmpty()) {
        String errorMsg = new StringBuilder().append("Failed to fetch messages within ")
                .append(kafkaPollTimeOut)
                .append(" milliseconds. Consider increasing the value of the property : ")
                .append(ExecConstants.KAFKA_POLL_TIMEOUT).toString();
        throw UserException.dataReadError().message(errorMsg).build(logger);
    }

    long lastFetchTime = stopwatch.elapsed(TimeUnit.MILLISECONDS);
    logger.debug("Total number of messages fetched : {}", consumerRecords.count());
    logger.debug("Time taken to fetch : {} milliseconds", lastFetchTime);
    totalFetchTime += lastFetchTime;

    recordIter = consumerRecords.iterator();
    return recordIter.hasNext();
}

From source file:eu.amidst.huginlink.learning.ParallelPC.java
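
The NPC structural learning step is timed with a started stopwatch, and the chained watch.stop() is printed directly.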

/**
 * Learns a Bayesian network structure from data using the NPC algorithm included in the Hugin API.
 * Parallel learning is performed only if the parallel mode was set to true.
 * @param dataStream a stream of data instances to be processed during the structural learning.
 * @return a <code>DAG</code> structure in AMIDST format.
 * @throws ExceptionHugin
 */
public DAG learnDAG(DataStream dataStream) throws ExceptionHugin {
    Variables modelHeader = new Variables(dataStream.getAttributes());
    DAG dag = new DAG(modelHeader);
    BayesianNetwork bn = new BayesianNetwork(dag);

    Domain huginNetwork = null;

    try {
        huginNetwork = BNConverterToHugin.convertToHugin(bn);

        DataOnMemory dataOnMemory = ReservoirSampling.samplingNumberOfSamples(this.numSamplesOnMemory,
                dataStream);

        // Set the number of cases
        int numCases = dataOnMemory.getNumberOfDataInstances();
        huginNetwork.setNumberOfCases(numCases);
        huginNetwork.setConcurrencyLevel(this.numCores);
        NodeList nodeList = huginNetwork.getNodes();

        // It is more efficient to loop the matrix of values in this way. 1st variables and 2nd cases
        for (int i = 0; i < nodeList.size(); i++) {
            Variable var = bn.getDAG().getVariables().getVariableById(i);
            Node n = nodeList.get(i);
            if (n.getKind().compareTo(NetworkModel.H_KIND_DISCRETE) == 0) {
                ((DiscreteChanceNode) n).getExperienceTable();
                for (int j = 0; j < numCases; j++) {
                    double state = dataOnMemory.getDataInstance(j).getValue(var);
                    if (!Utils.isMissingValue(state))
                        ((DiscreteChanceNode) n).setCaseState(j, (int) state);
                }
            } else {
                ((ContinuousChanceNode) n).getExperienceTable();
                for (int j = 0; j < numCases; j++) {
                    double value = dataOnMemory.getDataInstance(j).getValue(var);
                    if (!Utils.isMissingValue(value))
                        ((ContinuousChanceNode) n).setCaseValue(j, value);
                }
            }
        }

        //Structural learning
        Stopwatch watch = Stopwatch.createStarted();
        huginNetwork.learnStructureNPC();
        System.out.println("Structural Learning in Hugin: " + watch.stop());

        DAG dagLearned = (BNConverterToAMIDST.convertToAmidst(huginNetwork)).getDAG();
        dagLearned.getVariables().setAttributes(dataStream.getAttributes());
        return dagLearned;
    } catch (ExceptionHugin exceptionHugin) {
        throw new IllegalStateException("Hugin Exception: " + exceptionHugin.getMessage());
    }
}

From source file:es.usc.citius.hipster.algorithm.BellmanFord.java
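
The whole Bellman-Ford search loop is timed, and the stopped stopwatch is passed into the SearchResult along with the iteration count.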

@Override
public SearchResult search(Predicate<N> condition) {
    int iteration = 0;
    Iterator it = iterator();
    Stopwatch w = Stopwatch.createStarted();
    N currentNode = null;
    N goalNode = null;
    while (it.hasNext()) {
        iteration++;
        currentNode = it.next(); // assignment restored: without it, currentNode stays null and the goal is never found
        if (condition.apply(currentNode)) {
            goalNode = currentNode;
        }

    }
    w.stop();
    if (goalNode != null) {
        N goal = it.explored.get(goalNode.state());
        return new SearchResult(goal, iteration, w);
    }

    return new SearchResult(Collections.<N>emptyList(), iteration, w);
}

From source file:com.google.cloud.genomics.dataflow.readers.bam.Reader.java
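
Sequential BAM reading is timed end to end; the elapsed milliseconds are then used to report a reads-per-second rate.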

/**
 * To compare how sharded reading works vs. plain HTSJDK sequential iteration,
 * this method implements such iteration.
 * This makes it easier to discover errors such as reads that are somehow
 * skipped by a sharded approach.
 */
public static Iterable<Read> readSequentiallyForTesting(Objects storageClient, String storagePath,
        Contig contig, ReaderOptions options) throws IOException {
    Stopwatch timer = Stopwatch.createStarted();
    SamReader samReader = BAMIO.openBAM(storageClient, storagePath, options.getStringency());
    SAMRecordIterator iterator = samReader.queryOverlapping(contig.referenceName, (int) contig.start + 1,
            (int) contig.end);
    List<Read> reads = new ArrayList<Read>();

    int recordsBeforeStart = 0;
    int recordsAfterEnd = 0;
    int mismatchedSequence = 0;
    int recordsProcessed = 0;
    Filter filter = setupFilter(options, contig.referenceName);
    while (iterator.hasNext()) {
        SAMRecord record = iterator.next();
        final boolean passesFilter = passesFilter(record, filter, contig.referenceName);

        if (!passesFilter) {
            mismatchedSequence++;
            continue;
        }
        if (record.getAlignmentStart() < contig.start) {
            recordsBeforeStart++;
            continue;
        }
        if (record.getAlignmentStart() > contig.end) {
            recordsAfterEnd++;
            continue;
        }
        reads.add(ReadUtils.makeReadGrpc(record));
        recordsProcessed++;
    }
    timer.stop();
    LOG.info("NON SHARDED: Processed " + recordsProcessed + " in " + timer + ". Speed: "
            + (recordsProcessed * 1000) / timer.elapsed(TimeUnit.MILLISECONDS) + " reads/sec"
            + ", skipped other sequences " + mismatchedSequence + ", skippedBefore " + recordsBeforeStart
            + ", skipped after " + recordsAfterEnd);
    return reads;
}