Example usage for com.google.common.base.Stopwatch: the Stopwatch() constructor

Introduction

On this page you can find usage examples for the com.google.common.base.Stopwatch no-argument constructor, Stopwatch().

Prototype

Stopwatch() 
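
The examples on this page predate Guava 15.0, which deprecated this public constructor (it was removed in a later release); modern code obtains an instance through the static factories Stopwatch.createUnstarted() and Stopwatch.createStarted(). Below is a minimal, self-contained sketch of the basic start/stop/read cycle against the current factory API; the Thread.sleep(50) merely stands in for the work being timed.

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchBasics {
    public static void main(String[] args) throws InterruptedException {
        // createUnstarted() is the modern replacement for "new Stopwatch()".
        Stopwatch watch = Stopwatch.createUnstarted();
        watch.start();
        Thread.sleep(50); // stand-in for the work being timed
        watch.stop();
        // elapsed(TimeUnit) supersedes the elapsedMillis() and
        // elapsedTime(TimeUnit) calls seen in the older examples below.
        System.out.println("took " + watch.elapsed(TimeUnit.MILLISECONDS) + " ms");
        // toString() renders a human-readable duration, e.g. "50.12 ms".
        System.out.println(watch);
    }
}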

Usage

From source file: org.eclipse.osee.orcs.core.internal.types.impl.OrcsTypesIndexer.java

public OrcsTypesIndex index(IResource source) throws Exception {
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.start();

    OseeDslResource resource = null;
    InputStream inputStream = null;
    try {
        inputStream = source.getContent();
        inputStream = upConvertTo17(inputStream);
        resource = OseeDslResourceUtil.loadModel(source.getLocation().toASCIIString(), inputStream);
    } finally {
        Lib.close(inputStream);
    }
    logger.trace("Converted OrcsTypes to model in [%s]", Lib.getElapseString(stopwatch.elapsedMillis()));

    Conditions.checkNotNull(resource, "osee dsl model", "Error reading osee dsl resource");
    OseeDsl model = resource.getModel();
    ArtifactTypeIndex artifactTypeIndex = new ArtifactTypeIndex(hierarchyProvider);
    AttributeTypeIndex attributeTypeIndex = new AttributeTypeIndex();
    EnumTypeIndex enumTypeIndex = new EnumTypeIndex();
    RelationTypeIndex relationTypeIndex = new RelationTypeIndex(artifactTypeIndex);
    OrcsIndeces index = new OrcsIndeces(source, artifactTypeIndex, attributeTypeIndex, enumTypeIndex,
            relationTypeIndex);

    try {
        for (XOseeArtifactTypeOverride xArtifactTypeOverride : model.getArtifactTypeOverrides()) {
            applyArtifactTypeOverrides(xArtifactTypeOverride);
        }

        for (XOseeEnumOverride xEnumOverride : model.getEnumOverrides()) {
            applyEnumOverrides(xEnumOverride);
        }

        for (XAttributeType dslType : model.getAttributeTypes()) {
            getOrCreateToken(attributeTypeIndex, dslType);
        }

        for (XArtifactType dslType : model.getArtifactTypes()) {
            IArtifactType token = getOrCreateToken(artifactTypeIndex, dslType);
            indexSuperTypes(artifactTypeIndex, token, dslType);
            indexAttributes(artifactTypeIndex, attributeTypeIndex, dslType);
        }

        for (XRelationType dslType : model.getRelationTypes()) {
            getOrCreateToken(relationTypeIndex, dslType);
        }

        for (XOseeEnumType dslType : model.getEnumTypes()) {
            getOrCreateEnumType(enumTypeIndex, dslType);
        }
    } finally {
        logger.trace("Indexed OseeDsl model in [%s]", Lib.getElapseString(stopwatch.elapsedMillis()));
        stopwatch.stop();
    }
    return index;
}

From source file: com.couchbase.roadrunner.workloads.GetsCasWorkload.java

private void casWorkloadWithMeasurement(String key, long cas, SampleDocument doc) {
    Stopwatch watch = new Stopwatch().start();
    casWorkload(key, cas, doc);
    watch.stop();
    addMeasure("cas", watch);
}

From source file: org.jclouds.samples.googleappengine.GetAllResourcesController.java

private void addResourcesToRequest(HttpServletRequest request) {
    Stopwatch watch = new Stopwatch().start();
    logger.info("ready to list views: %s", transform(views, ViewToId.INSTANCE));
    Iterable<ListenableFuture<? extends Iterable<? extends ResourceMetadata<?>>>> asyncResources = transform(
            views, viewToAsyncResources);
    logger.info("launched %s tasks with %sms remaining", size(asyncResources), remainingMillis.get());

    Set<Iterable<? extends ResourceMetadata<?>>> done = allResourcesWithinDeadline(asyncResources);
    logger.info("%s tasks completed in %sms with %sms remaining", size(done), watch.stop().elapsedMillis(),
            remainingMillis.get());

    Iterable<ResourceMetadata<?>> flattened = concat(done);

    Set<ResourceResult> results = FluentIterable.from(flattened).transform(resourceMetadataToStatusResult)
            .toImmutableSet();

    request.setAttribute("resources", results);
}

From source file: com.madgag.agit.filepath.FilterableFileListAdapter.java

public Filter getFilter() {
    if (filter == null) {
        filter = new Filter() {

            @Override
            protected FilterResults performFiltering(CharSequence constraint) {
                if (originalValues == null) {
                    synchronized (mLock) {
                        originalValues = newArrayList(items);
                    }
                }

                List<FilePath> originalValuesCopy;
                synchronized (mLock) {
                    originalValuesCopy = newArrayList(originalValues);
                }

                FilterResults results = new FilterResults();
                if (TextUtils.isEmpty(constraint)) {
                    results.values = originalValuesCopy;
                    results.count = originalValuesCopy.size();
                } else {
                    //                        String tn = "FLA." + originalValuesCopy.size() + "." + constraint + "." + currentTimeMillis();
                    //                        Debug.startMethodTracing(tn, 64 * 1024 * 1024);
                    Stopwatch stopwatch = new Stopwatch().start();
                    List<FilePath> matchingFiles = cachingFilePathListMatcher.get(constraint.toString());
                    Log.d(TAG, "Filtered with '" + constraint + "' to " + matchingFiles.size() + " files "
                            + stopwatch.stop());
                    //                        Debug.stopMethodTracing();

                    results.values = matchingFiles;
                    results.count = matchingFiles.size();
                }

                return results;
            }

            @Override
            protected void publishResults(CharSequence constraint, FilterResults results) {
                visibleFilePathMatcher
                        .set(TextUtils.isEmpty(constraint) ? null : new FilePathMatcher(constraint.toString()));
                setList((List<FilePath>) results.values);
            }
        };
    }
    return filter;
}

From source file: uk.ac.open.kmi.iserve.discovery.disco.impl.SparqlIndexedLogicConceptMatcher.java

@Inject
protected SparqlIndexedLogicConceptMatcher(RegistryManager registryManager,
        @iServeProperty(ConfigurationProperty.SERVICES_SPARQL_QUERY) String sparqlEndpoint,
        CacheFactory cacheFactory) throws SalException, URISyntaxException {

    super(EnumMatchTypes.of(LogicConceptMatchType.class));

    this.sparqlMatcher = new SparqlLogicConceptMatcher(sparqlEndpoint);
    this.manager = registryManager;
    this.manager.registerAsObserver(this);
    if (indexedMatches == null) {
        try {
            this.indexedMatches = cacheFactory.createPersistentCache("concept-matcher-index");
        } catch (CacheException e) {
            this.indexedMatches = cacheFactory.createInMemoryCache("concept-matcher-index");
        }
    }
    if (indexedMatches.isEmpty()) {
        log.info("Populating Matcher Index...");// if index is empty
        Stopwatch w = new Stopwatch().start();
        populate();
        log.info("Population done in {}. Number of entries {}", w.stop().toString(), indexedMatches.size());
    }

}

From source file: org.apache.tez.common.TezUtilsInternal.java

public static byte[] uncompressBytes(byte[] inBytes) throws IOException {
    Stopwatch sw = new Stopwatch().start();
    byte[] uncompressed = uncompressBytesInflateDeflate(inBytes);
    sw.stop();
    if (LOG.isDebugEnabled()) {
        LOG.debug("CompressedSize: " + inBytes.length + ", UncompressedSize: " + uncompressed.length
                + ", UncompressTimeTaken: " + sw.elapsedMillis());
    }
    return uncompressed;
}

From source file: com.Grande.GSM.BACCWS_WAR.WS.REST.EOS.FirmwareDefinitionsEndpoint.java

public String fetchFirmwareDefinitions(@QueryParam("filter") String strFilter) {

    // <editor-fold defaultstate="collapsed" desc="****** Method vars ******">
    final Stopwatch timer = new Stopwatch();
    final QueryResponse qRes = new QueryResponse();
    String strResponse = null;
    String strMake = null;
    String strModel = null;
    List<SenchaFilter> lstFilters = null;
    List<FirmwareDefinition> lstFirmwareDefs = null;
    // start the execution timer
    timer.start();
    // </editor-fold>

    try {
        qRes.vSetNode(java.net.InetAddress.getLocalHost().getHostName());

        // <editor-fold defaultstate="collapsed" desc="****** No filters (return all records) ******">
        if (strFilter == null || strFilter.equals("")) {
            SimpleLogging.vLogEvent(this.strThreadId, "No filter detected, fetching all definitions");
            lstFirmwareDefs = this.trnBN.lstGetFirmwareDefinitions();
            SimpleLogging.vLogEvent(this.strThreadId, "Returning " + lstFirmwareDefs.size() + " definitions");
            // </editor-fold>

            // <editor-fold defaultstate="collapsed" desc="****** Evaluate/apply Sencha filters ******">
        } else {

            // <editor-fold defaultstate="collapsed" desc="****** Filter extraction logic ******">
            // Deserialize the filters
            SimpleLogging.vLogEvent(this.strThreadId, "Processing filter JSON: " + strFilter);
            lstFilters = this.trnBN.lstDeserializeSenchaFilter(strFilter);
            // extract filters that contain 'make' and 'model'
            //lstFilters = this.trnBN.lstFilterSenchaFiltersByStrings(lstFilters, "make", "model");
            SimpleLogging.vLogEvent(this.strThreadId,
                    "Extracted " + lstFilters.size() + " make/model filters: " + lstFilters);
            // </editor-fold>

            // <editor-fold defaultstate="collapsed" desc="****** Filter handling logic ******">
            lstFirmwareDefs = this.trnBN.lstGetFirmwareDefinitionByFilter(lstFilters);
            SimpleLogging.vLogEvent(this.strThreadId, "Returning " + lstFirmwareDefs.size() + " definitions");
            // </editor-fold>
        }
        qRes.vAddResult(lstFirmwareDefs.toArray());
        for (int x = 0; x < lstFirmwareDefs.size(); x++) {
            qRes.vAddResult(lstFirmwareDefs.get(x));
        }
        qRes.vSetSuccessFlag(true);
        // </editor-fold>

    } catch (Exception e) {

        // <editor-fold defaultstate="collapsed" desc="****** Handle failures ******">
        qRes.vSetSuccessFlag(false);
        // handle NPE differently since getMessage() is null
        if (e instanceof NullPointerException) {
            qRes.vSetMessage("NPE occured when serializing result to JSON! " + "File: "
                    + e.getStackTrace()[0].getFileName() + ", " + "Method: "
                    + e.getStackTrace()[0].getMethodName() + ", " + "Line: "
                    + e.getStackTrace()[0].getLineNumber());
        } else {
            qRes.vSetMessage(e.getMessage());
        }
        SimpleLogging.vLogException(this.strThreadId, e);
        // </editor-fold>

    } finally {

        // <editor-fold defaultstate="collapsed" desc="****** Stop timer, convert response to JSON ******">
        timer.stop();
        qRes.vSetRoundTrip(String.valueOf(timer.elapsedTime(TimeUnit.SECONDS)) + "."
                + String.valueOf(timer.elapsedTime(TimeUnit.MILLISECONDS)));
        strResponse = this.trnBN.strQueryResponseToJSON(qRes);
        SimpleLogging.vLogEvent(this.strThreadId + "|" + qRes.strGetRoundTripInSeconds() + "s",
                "retrieved " + qRes.intGetDataCount() + " records");
        // </editor-fold>

    }
    return strResponse;
}

From source file: processing.MPurCalculator.java

public static List<Map<Integer, Double>> startLanguageModelCreation(BookmarkReader reader, int sampleSize,
        boolean sorting, boolean userBased, boolean resBased, int beta) {
    int size = reader.getBookmarks().size();
    int trainSize = size - sampleSize;

    Stopwatch timer = new Stopwatch();
    timer.start();
    MPurCalculator calculator = new MPurCalculator(reader, trainSize, beta, userBased, resBased);
    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);
    List<Map<Integer, Double>> results = new ArrayList<Map<Integer, Double>>();
    if (trainSize == size) {
        trainSize = 0;
    }

    timer.reset();
    timer.start();
    for (int i = trainSize; i < size; i++) { // the test-set
        Bookmark data = reader.getBookmarks().get(i);
        Map<Integer, Double> map = calculator.getRankedTagList(data.getUserID(), data.getResourceID(), sorting);
        results.add(map);
    }
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);

    timeString = PerformanceMeasurement.addTimeMeasurement(timeString, true, trainingTime, testTime,
            sampleSize);
    return results;
}

From source file: org.apache.accumulo.gc.replication.CloseWriteAheadLogReferences.java

@Override
public void run() {
    // As long as we depend on a newer Guava than Hadoop uses, we have to make sure we're compatible with
    // what the version they bundle uses.
    Stopwatch sw = new Stopwatch();

    Connector conn;
    try {
        conn = context.getConnector();
    } catch (Exception e) {
        log.error("Could not create connector", e);
        throw new RuntimeException(e);
    }

    if (!ReplicationTable.isOnline(conn)) {
        log.debug("Replication table isn't online, not attempting to clean up wals");
        return;
    }

    Span findWalsSpan = Trace.start("findReferencedWals");
    HashSet<String> closed = null;
    try {
        sw.start();
        closed = getClosedLogs(conn);
    } finally {
        sw.stop();
        findWalsSpan.stop();
    }

    log.info("Found " + closed.size() + " WALs referenced in metadata in " + sw.toString());
    sw.reset();

    Span updateReplicationSpan = Trace.start("updateReplicationTable");
    long recordsClosed = 0;
    try {
        sw.start();
        recordsClosed = updateReplicationEntries(conn, closed);
    } finally {
        sw.stop();
        updateReplicationSpan.stop();
    }

    log.info(
            "Closed " + recordsClosed + " WAL replication references in replication table in " + sw.toString());
}

From source file: org.apache.hadoop.hbase.ScanPerformanceEvaluation.java

protected void testHdfsStreaming(Path filename) throws IOException {
    byte[] buf = new byte[1024];
    FileSystem fs = filename.getFileSystem(getConf());

    // read the file from start to finish
    Stopwatch fileOpenTimer = new Stopwatch();
    Stopwatch streamTimer = new Stopwatch();

    fileOpenTimer.start();
    FSDataInputStream in = fs.open(filename);
    fileOpenTimer.stop();

    long totalBytes = 0;
    streamTimer.start();
    while (true) {
        int read = in.read(buf);
        if (read < 0) {
            break;
        }
        totalBytes += read;
    }
    streamTimer.stop();

    double throughput = (double) totalBytes / streamTimer.elapsedTime(TimeUnit.SECONDS);

    System.out.println("HDFS streaming: ");
    System.out.println("total time to open: " + fileOpenTimer.elapsedMillis() + " ms");
    System.out.println("total time to read: " + streamTimer.elapsedMillis() + " ms");
    System.out.println(
            "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
    System.out.println("throghput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
}
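
One caveat in the example above: elapsedTime(TimeUnit.SECONDS) truncates to whole seconds, so a read that finishes in under a second divides by zero and reports an infinite throughput. A small sketch, assuming the modern Guava API and a hypothetical stand-in for the read loop, that derives fractional seconds from nanoseconds instead:

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class ThroughputTiming {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch streamTimer = Stopwatch.createStarted();
        long totalBytes = readAll();
        streamTimer.stop();

        // Deriving seconds from the nanosecond count keeps the fractional
        // part, so sub-second reads no longer divide by zero.
        double seconds = streamTimer.elapsed(TimeUnit.NANOSECONDS) / 1e9;
        double throughput = totalBytes / seconds;
        System.out.printf("read %d bytes at %.0f B/s%n", totalBytes, throughput);
    }

    // Hypothetical stand-in for the buffered HDFS read loop above.
    static long readAll() throws InterruptedException {
        Thread.sleep(100);
        return 1024 * 1024;
    }
}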