Example usage for com.google.common.base Stopwatch stop

Introduction

On this page you can find example usages of com.google.common.base.Stopwatch.stop(), collected from open-source projects.

Prototype

public Stopwatch stop() 

Document

Stops the stopwatch. Future reads will return the fixed duration that had elapsed up to this point. Calling stop() on a stopwatch that is already stopped throws an IllegalStateException.
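
A minimal sketch of the typical lifecycle, assuming nothing beyond the Guava API itself: create a started stopwatch, stop it after the measured work, then read the elapsed time.

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchDemo {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted();
        Thread.sleep(100); // stand-in for the work being measured
        stopwatch.stop(); // the elapsed time is now frozen
        System.out.println("took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    }
}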

Usage

From source file:tds.dll.common.diagnostic.services.impl.DiagnosticDatabaseServiceImpl.java

private DatabaseOperation writeOperation(LegacyDbNameUtility.Databases dbName) {

    Stopwatch stopwatch = Stopwatch.createStarted();
    try {
        switch (dbName) {
        case Archive:
            writeTestDao.writeArchiveDatabase();
            break;
        case Config:
            writeTestDao.writeConfigsDatabase();
            break;
        case Itembank:
            writeTestDao.writeItemBankDatabase();
            break;
        case Session:
            writeTestDao.writeSessionDatabase();
            break;
        }
    } catch (DiagnosticException diagnosticException) {
        stopwatch.stop();
        return new DatabaseOperation(Rating.FAILED, DatabaseOperationType.WRITE,
                stopwatch.elapsed(TimeUnit.MILLISECONDS), diagnosticException.getMessage());
    }
    return new DatabaseOperation(Rating.IDEAL, DatabaseOperationType.WRITE,
            stopwatch.elapsed(TimeUnit.MILLISECONDS));
}
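
Note that on the success path this example reads elapsed(...) from a still-running stopwatch, which Guava allows; only the failure path calls stop(). When the watch should be stopped on every path, a try/finally keeps the two branches symmetric. A simplified sketch, where runTest is a hypothetical stand-in for the writeTestDao calls:

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class TimedWrite {
    // runTest is a hypothetical stand-in for writeTestDao.write*Database()
    static long timeWrite(Runnable runTest) {
        Stopwatch stopwatch = Stopwatch.createStarted();
        try {
            runTest.run();
        } finally {
            stopwatch.stop(); // stopped on both the success and the failure path
        }
        return stopwatch.elapsed(TimeUnit.MILLISECONDS);
    }
}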

From source file:ai.grakn.engine.GraknEngineServer.java

public void start() {
    Stopwatch timer = Stopwatch.createStarted();
    logStartMessage(prop.getProperty(GraknEngineConfig.SERVER_HOST_NAME),
            prop.getProperty(GraknEngineConfig.SERVER_PORT_NUMBER));
    synchronized (this) {
        lockAndInitializeSystemOntology();
        startHTTP();
    }
    graknEngineStatus.setReady(true);
    LOG.info("Engine started in {}", timer.stop());
}
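
Passing timer.stop() straight to the logger works because stop() returns the Stopwatch itself, and Stopwatch.toString() renders the elapsed time in a human-readable form with an appropriate unit. A minimal illustration:

import com.google.common.base.Stopwatch;

public class LogStop {
    public static void main(String[] args) {
        Stopwatch timer = Stopwatch.createStarted();
        // ... startup work would run here ...
        System.out.println("Engine started in " + timer.stop()); // prints e.g. "Engine started in 1.234 s"
    }
}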

From source file:org.locationtech.geogig.repository.impl.WorkingTreeImpl.java

@Override
public ObjectId insert(Iterator<FeatureInfo> featureInfos, ProgressListener progress) {
    checkArgument(featureInfos != null);
    checkArgument(progress != null);

    final RevTree currentWorkHead = getTree();
    final Map<String, NodeRef> currentTrees = Maps
            .newHashMap(Maps.uniqueIndex(getFeatureTypeTrees(), (nr) -> nr.path()));

    Map<String, CanonicalTreeBuilder> parentBuilders = new HashMap<>();

    progress.setProgress(0);
    final AtomicLong p = new AtomicLong();
    Function<FeatureInfo, RevFeature> treeBuildingTransformer = (fi) -> {
        final String parentPath = NodeRef.parentPath(fi.getPath());
        final String fid = NodeRef.nodeFromPath(fi.getPath());
        @Nullable
        ObjectId metadataId = fi.getFeatureTypeId();
        CanonicalTreeBuilder parentBuilder = getTreeBuilder(currentTrees, parentBuilders, parentPath,
                metadataId);

        if (fi.isDelete()) {
            if (parentBuilder != null) {
                parentBuilder.remove(fid);
            }
            return null;
        }

        Preconditions.checkState(parentBuilder != null);
        RevFeature feature = fi.getFeature();
        NodeRef parentRef = currentTrees.get(parentPath);
        Preconditions.checkNotNull(parentRef);
        if (fi.getFeatureTypeId().equals(parentRef.getMetadataId())) {
            metadataId = ObjectId.NULL;// use the parent's default
        }

        ObjectId oid = feature.getId();
        Envelope bounds = SpatialOps.boundsOf(feature);
        Node featureNode = Node.create(fid, oid, metadataId, TYPE.FEATURE, bounds);

        parentBuilder.put(featureNode);

        progress.setProgress(p.incrementAndGet());
        return feature;
    };

    Iterator<RevFeature> features = Iterators.transform(featureInfos, treeBuildingTransformer);
    features = Iterators.filter(features, Predicates.notNull());
    features = Iterators.filter(features, (f) -> !progress.isCanceled());

    Stopwatch insertTime = Stopwatch.createStarted();
    indexDatabase.putAll(features);
    insertTime.stop();
    if (progress.isCanceled()) {
        return currentWorkHead.getId();
    }

    progress.setDescription(String.format("%,d features inserted in %s", p.get(), insertTime));

    UpdateTree updateTree = context.command(UpdateTree.class).setRoot(currentWorkHead);
    parentBuilders.forEach((path, builder) -> {

        final NodeRef oldTreeRef = currentTrees.get(path);
        progress.setDescription(String.format("Building final tree %s...", oldTreeRef.name()));
        Stopwatch treeTime = Stopwatch.createStarted();
        final RevTree newFeatureTree = builder.build();
        treeTime.stop();
        progress.setDescription(
                String.format("%,d features tree built in %s", newFeatureTree.size(), treeTime));
        final NodeRef newTreeRef = oldTreeRef.update(newFeatureTree.getId(),
                SpatialOps.boundsOf(newFeatureTree));
        updateTree.setChild(newTreeRef);
    });

    final RevTree newWorkHead = updateTree.call();
    return updateWorkHead(newWorkHead.getId());
}

From source file:ec.nbdemetra.spreadsheet.SpreadSheetBasicFileHandler.java

@Override
public Object asyncLoad(File file, BasicFileViewer.ProgressCallback progress) throws Exception {
    ArrayBook.Builder result = ArrayBook.builder();
    Stopwatch sw = Stopwatch.createStarted();
    Book.Factory factory = factories.firstMatch(filePredicate(file)).get();
    try (Book book = factory.load(file)) {
        for (int s = 0; s < book.getSheetCount(); s++) {
            result.sheet(book.getSheet(s));
            progress.setProgress(0, book.getSheetCount(), s);
        }
    }
    return new Model(factory.getName(), file, result.build(), sw.stop().elapsed(TimeUnit.MILLISECONDS));
}

From source file:monasca.persister.repository.vertica.VerticaMetricRepo.java

@Override
public int flush(String id) throws RepoException {

    try {

        Stopwatch swOuter = Stopwatch.createStarted();

        Timer.Context context = commitTimer.time();

        executeBatches(id);

        writeRowsFromTempStagingTablesToPermTables(id);

        Stopwatch swInner = Stopwatch.createStarted();

        handle.commit();
        swInner.stop();

        logger.debug("[{}]: committing transaction took: {}", id, swInner);

        swInner.reset().start();
        handle.begin();
        swInner.stop();

        logger.debug("[{}]: beginning new transaction took: {}", id, swInner);

        context.stop();

        swOuter.stop();

        logger.debug(
                "[{}]: total time for writing measurements, definitions, and dimensions to vertica took {}", id,
                swOuter);

        updateIdCaches(id);

        int commitCnt = this.measurementCnt;

        this.measurementCnt = 0;

        return commitCnt;

    } catch (Exception e) {

        logger.error("[{}]: failed to write measurements, definitions, and dimensions to vertica", id, e);

        throw new RepoException("failed to commit batch to vertica", e);

    }
}
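
The inner stopwatch above is reused for two measurements via reset().start(): reset() zeroes the elapsed time and leaves the watch stopped, so it has to be started again before timing the next phase. A minimal sketch of that cycle, with stepOne and stepTwo as hypothetical stand-ins for the commit and begin calls:

import com.google.common.base.Stopwatch;

public class ReuseDemo {
    public static void main(String[] args) {
        Stopwatch sw = Stopwatch.createStarted();
        stepOne();
        sw.stop();
        System.out.println("first phase took " + sw);

        sw.reset().start(); // zero the elapsed time, then time the second phase
        stepTwo();
        sw.stop();
        System.out.println("second phase took " + sw);
    }

    // hypothetical stand-ins for handle.commit() / handle.begin()
    static void stepOne() {}
    static void stepTwo() {}
}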

From source file:org.locationtech.geogig.remotes.pack.PackImpl.java

private void applyIndex(PackProcessor target, RefRequest req, Deduplicator deduplicator,
        ProgressListener progress) {

    progress.setDescription("Updating spatial indexes for " + req.name);
    ObjectReporter objectReport = new ObjectReporter(progress);

    // back up current progress indicator
    final Function<ProgressListener, String> defaultProgressIndicator;
    defaultProgressIndicator = progress.progressIndicator();
    // set our custom progress indicator
    progress.setProgressIndicator((p) -> objectReport.toString());

    final List<IndexDef> indexes = missingIndexes.get(req);
    checkNotNull(indexes);

    final IndexDatabase sourceStore = source.indexDatabase();
    try {

        final Stopwatch sw = Stopwatch.createStarted();
        for (IndexDef def : indexes) {
            target.putIndex(def, sourceStore, objectReport, deduplicator);
        }
        progress.complete();
        if (objectReport.total.get() > 0) {
            progress.started();
            String description = String.format("Indexes updated: %,d, repeated: %,d, time: %s",
                    objectReport.inserted(), objectReport.found(), sw.stop());
            progress.setDescription(description);
        }
    } finally {
        // restore previous progress indicator
        progress.setProgressIndicator(defaultProgressIndicator);
    }
}

From source file:joshelser.LimitAndSumColumnFamilyIterator.java

@Override
public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
    Stopwatch seekSw = Stopwatch.createStarted();
    // Make sure we invalidate our last record
    nextRecordNotFound();

    log.debug("Seeking to " + range);

    getSource().seek(range, columnFamilies, inclusive);
    currentRange = range;
    currentColumnFamilies = columnFamilies;
    currentColumnFamiliesInclusive = inclusive;
    aggregate();
    seekSw.stop();
    log.info("Seek duration: " + seekSw.elapsed(TimeUnit.MILLISECONDS));
}

From source file:com.arpnetworking.tsdaggregator.perf.FilePerfTestBase.java

/**
 * Runs a test.
 *
 * @param pipelineConfigurationFile Pipeline configuration file.
 * @param duration Timeout period.
 */
protected void benchmark(final File pipelineConfigurationFile, final Duration duration) {
    LOGGER.debug(String.format("Launching pipeline; configuration=%s", pipelineConfigurationFile));

    // Create custom "canary" sink
    final CountDownLatch latch = new CountDownLatch(1);
    final Stopwatch timer = Stopwatch.createUnstarted();
    final ListeningSink sink = new ListeningSink(new Function<Collection<AggregatedData>, Void>() {
        @Nullable
        @Override
        public Void apply(@Nullable final Collection<AggregatedData> input) {
            if (input != null) {
                final AggregatedData datum = Iterables.getFirst(input, null);
                if (datum != null && TestFileGenerator.CANARY.equals(datum.getFQDSN().getMetric())
                        && timer.isRunning()) {
                    timer.stop();
                    latch.countDown();
                }
            }
            return null;
        }
    });

    // Load the specified stock configuration
    final PipelineConfiguration stockPipelineConfiguration = new StaticConfiguration.Builder()
            .addSource(new JsonNodeFileSource.Builder().setFile(pipelineConfigurationFile).build())
            .setObjectMapper(PipelineConfiguration.createObjectMapper(_injector)).build()
            .getRequiredAs(PipelineConfiguration.class);

    // Add the custom "canary" sink
    final List<Sink> benchmarkSinks = Lists.newArrayList(stockPipelineConfiguration.getSinks());
    benchmarkSinks.add(sink);

    // Create the custom configuration
    final PipelineConfiguration benchmarkPipelineConfiguration = OvalBuilder.<PipelineConfiguration, PipelineConfiguration.Builder>clone(
            stockPipelineConfiguration).setSinks(benchmarkSinks).build();

    // Instantiate the pipeline
    final Pipeline pipeline = new Pipeline(benchmarkPipelineConfiguration);

    // Execute the pipeline until the canary flies the coop
    try {
        timer.start();
        pipeline.launch();

        if (!latch.await(duration.getMillis(), TimeUnit.MILLISECONDS)) {
            LOGGER.error("Test timed out");
            throw new RuntimeException("Test timed out");
        }
    } catch (final InterruptedException e) {
        Thread.interrupted();
        throw new RuntimeException("Test interrupted");
    } finally {
        pipeline.shutdown();
    }
}
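
Two details in this example are worth calling out: createUnstarted() builds the timer in a stopped state so it can be started later, just before the pipeline launches, and the isRunning() guard matters because stop() throws an IllegalStateException if the stopwatch is not running, and the sink callback may fire more than once. A minimal sketch of the guard:

import com.google.common.base.Stopwatch;

public class GuardedStop {
    private final Stopwatch timer = Stopwatch.createUnstarted(); // created stopped

    void launch() {
        timer.start();
        // ... run the pipeline ...
    }

    void onCanary() { // may be invoked more than once
        if (timer.isRunning()) {
            timer.stop(); // the guard avoids IllegalStateException on repeat calls
        }
    }
}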

From source file:org.jboss.hal.meta.processing.MetadataProcessor.java

@SuppressWarnings("unchecked")
private void processInternal(Set<AddressTemplate> templates, boolean recursive, Progress progress,
        AsyncCallback<Void> callback) {
    // we can skip the tasks if the metadata is already in the registries
    LookupRegistryTask lookupRegistries = new LookupRegistryTask(resourceDescriptionRegistry,
            securityContextRegistry);
    if (lookupRegistries.allPresent(templates, recursive)) {
        logger.debug("All metadata have been already processed -> callback.onSuccess(null)");
        callback.onSuccess(null);

    } else {
        boolean ie = Browser.isIE();
        List<Task<LookupContext>> tasks = new ArrayList<>();
        tasks.add(lookupRegistries);
        if (!ie) {
            tasks.add(new LookupDatabaseTask(resourceDescriptionDatabase, securityContextDatabase));
        }
        tasks.add(new RrdTask(environment, dispatcher, statementContext, settings, BATCH_SIZE, RRD_DEPTH));
        tasks.add(new UpdateRegistryTask(resourceDescriptionRegistry, securityContextRegistry));
        if (!ie) {
            tasks.add(new UpdateDatabaseTask(workerChannel));
        }

        LookupContext context = new LookupContext(progress, templates, recursive);
        Stopwatch stopwatch = Stopwatch.createStarted();
        series(context, tasks).subscribe(new Outcome<LookupContext>() {
            @Override
            public void onError(LookupContext context, Throwable error) {
                logger.debug("Failed to process metadata: {}", error.getMessage());
                callback.onFailure(error);
            }

            @Override
            public void onSuccess(LookupContext context) {
                stopwatch.stop();
                logger.info("Successfully processed metadata in {} ms", stopwatch.elapsed(MILLISECONDS));
                callback.onSuccess(null);
            }
        });
    }
}

From source file:com.google.api.ads.adwords.jaxws.extensions.processors.onmemory.ReportProcessorOnMemory.java

/**
 * Generate all the mapped reports to the given account IDs.
 *
 * @param dateRangeType
 *            the date range type.
 * @param dateStart
 *            the starting date.
 * @param dateEnd
 *            the ending date.
 * @param accountIdsSet
 *            the account IDs.
 * @param properties
 *            the properties file
 * @throws Exception
 *             error reaching the API.
 */
@Override
public void generateReportsForMCC(String userId, String mccAccountId,
        ReportDefinitionDateRangeType dateRangeType, String dateStart, String dateEnd, Set<Long> accountIdsSet,
        Properties properties) throws Exception {

    LOGGER.info("*** Retrieving account IDs ***");

    if (accountIdsSet == null || accountIdsSet.size() == 0) {
        accountIdsSet = this.retrieveAccountIds(userId, mccAccountId);
    } else {
        LOGGER.info("Accounts loaded from file.");
    }

    AdWordsSessionBuilderSynchronizer sessionBuilder = new AdWordsSessionBuilderSynchronizer(
            authenticator.authenticate(userId, mccAccountId, false));

    LOGGER.info("*** Generating Reports for " + accountIdsSet.size() + " accounts ***");

    Stopwatch stopwatch = Stopwatch.createStarted();

    Set<ReportDefinitionReportType> reports = this.csvReportEntitiesMapping.getDefinedReports();

    // reports
    for (ReportDefinitionReportType reportType : reports) {

        if (properties.containsKey(reportType.name())) {
            this.downloadAndProcess(mccAccountId, sessionBuilder, reportType, dateRangeType, dateStart, dateEnd,
                    accountIdsSet, properties);
        }
    }

    stopwatch.stop();
    LOGGER.info("*** Finished processing all reports in " + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000)
            + " seconds ***\n");
}