Example usage for com.google.common.base Stopwatch stop

Introduction

This page lists example usages of com.google.common.base.Stopwatch#stop(), collected from open-source projects.

Prototype

public Stopwatch stop() 

Document

Stops the stopwatch; future reads will return the fixed duration that had elapsed up to this point. The method returns this Stopwatch instance and throws IllegalStateException if the stopwatch is already stopped.
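
Below is a minimal, self-contained sketch of the usual start/stop/read cycle. It is not taken from the catalogued projects; the Thread.sleep calls simply stand in for the work being timed.

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchStopExample {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted();
        Thread.sleep(50); // stand-in for the work being timed
        stopwatch.stop(); // freezes the elapsed time; later reads return this fixed duration
        System.out.println("Took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");

        // A stopped stopwatch can be restarted (time keeps accumulating) or reset back to zero.
        stopwatch.start();
        Thread.sleep(10);
        stopwatch.stop();
        stopwatch.reset();
    }
}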

Usage

From source file:itemrecommendations.CFResourceCalculator.java

private static List<Map<Integer, Double>> startBM25CreationForResourcesPrediction(BookmarkReader reader,
        int sampleSize, boolean userBased, boolean resBased, boolean allResources, boolean bll,
        Features features) {
    int size = reader.getBookmarks().size();
    int trainSize = size - sampleSize;

    Stopwatch timer = new Stopwatch();
    timer.start();
    CFResourceCalculator calculator = new CFResourceCalculator(reader, trainSize, false, userBased, resBased, 5,
            Similarity.COSINE, features);
    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);

    timer.reset();
    timer.start();
    List<Map<Integer, Double>> results = new ArrayList<Map<Integer, Double>>();
    for (Integer userID : reader.getUniqueUserListFromTestSet(trainSize)) {
        Map<Integer, Double> map = null;
        map = calculator.getRankedResourcesList(userID, -1, true, allResources, bll, true, false); // TODO
        results.add(map);
    }
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);

    timeString = PerformanceMeasurement.addTimeMeasurement(timeString, true, trainingTime, testTime,
            sampleSize);
    return results;
}
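
Note: this example and several others below call the no-argument Stopwatch constructor, which was public in older Guava releases; in current Guava the constructor is no longer public and the Stopwatch.createStarted() / Stopwatch.createUnstarted() factory methods shown in the sketch above are used instead.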

From source file:org.apache.drill.exec.store.schedule.BlockMapBuilder.java

/**
 * Builds a mapping of Drillbit endpoints to hostnames
 */
private static ImmutableMap<String, DrillbitEndpoint> buildEndpointMap(Collection<DrillbitEndpoint> endpoints) {
    Stopwatch watch = new Stopwatch();
    watch.start();
    HashMap<String, DrillbitEndpoint> endpointMap = Maps.newHashMap();
    for (DrillbitEndpoint d : endpoints) {
        String hostName = d.getAddress();
        endpointMap.put(hostName, d);
    }
    watch.stop();
    logger.debug("Took {} ms to build endpoint map", watch.elapsed(TimeUnit.MILLISECONDS));
    return ImmutableMap.copyOf(endpointMap);
}

From source file:processing.BM25Calculator.java

private static List<Map<Integer, Double>> startBM25CreationForTagPrediction(BookmarkReader reader,
        int sampleSize, boolean userBased, boolean resBased, int beta) {
    timeString = "";
    int size = reader.getUserLines().size();
    int trainSize = size - sampleSize;
    Stopwatch timer = new Stopwatch();
    timer.start();
    BM25Calculator calculator = new BM25Calculator(reader, trainSize, true, userBased, resBased, beta);
    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);

    List<Map<Integer, Double>> results = new ArrayList<Map<Integer, Double>>();
    timer = new Stopwatch();
    timer.start();
    for (int i = trainSize; i < size; i++) {
        UserData data = reader.getUserLines().get(i);
        Map<Integer, Double> map = null;
        map = calculator.getRankedTagList(data.getUserID(), data.getWikiID(), true);
        results.add(map);
        //System.out.println(data.getTags() + "|" + map.keySet());
    }
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);
    timeString += ("Full training time: " + trainingTime + "\n");
    timeString += ("Full test time: " + testTime + "\n");
    timeString += ("Average test time: " + testTime / (double) sampleSize) + "\n";
    timeString += ("Total time: " + (trainingTime + testTime) + "\n");

    return results;
}

From source file:com.persinity.common.db.RelDbUtil.java

/**
 * @param schema
 *         to warm up the cache
 */
public static void warmUpCache(final Schema schema, final boolean warmUpFks) {
    log.debug("Warming up {} of type {}", schema, schema.getClass().getSimpleName());
    final Stopwatch st = Stopwatch.createStarted();
    schema.getUserName();
    final Set<String> tableNames = schema.getTableNames();
    log.info("Warming up tables: {} {} {}...", tableNames.size(), warmUpFks ? "with FKs" : "without FKs",
            schema);
    for (String tableName : tableNames) {
        schema.getTableCols(tableName);
        schema.getTablePk(tableName);
        if (warmUpFks) {
            schema.getTableFks(tableName);
        }
    }
    st.stop();
    log.info("Warm up done for {}", st);
}

From source file:org.apache.jackrabbit.oak.plugins.backup.FileStoreRestore.java

public static void restore(File source, File destination) throws IOException {
    if (!validFileStore(source)) {
        throw new IOException("Folder " + source + " is not a valid FileStore directory");
    }

    FileStore restore = FileStore.builder(source).buildReadOnly();
    Stopwatch watch = Stopwatch.createStarted();

    FileStore store = FileStore.builder(destination).build();
    SegmentNodeState current = store.getHead();
    try {
        Compactor compactor = new Compactor(store.getTracker());
        compactor.setDeepCheckLargeBinaries(true);
        SegmentNodeState after = compactor.compact(current, restore.getHead(), current);
        store.setHead(current, after);
    } finally {
        restore.close();
        store.close();
    }
    watch.stop();
    log.info("Restore finished in {}.", watch);
}

From source file:processing.BLLCalculator.java

private static List<Map<Integer, Double>> startActCreation(BookmarkReader reader, int sampleSize,
        boolean sorting, boolean userBased, boolean resBased, double dVal, int beta, CalculationType cType,
        Double lambda) {
    int size = reader.getBookmarks().size();
    int trainSize = size - sampleSize;

    Stopwatch timer = new Stopwatch();
    timer.start();
    BLLCalculator calculator = new BLLCalculator(reader, trainSize, dVal, beta, userBased, resBased, cType,
            lambda);
    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);
    List<Map<Integer, Double>> results = new ArrayList<Map<Integer, Double>>();
    if (trainSize == size) {
        trainSize = 0;
    }

    timer.reset();
    timer.start();
    for (int i = trainSize; i < size; i++) { // the test-set
        Bookmark data = reader.getBookmarks().get(i);
        Map<Integer, Double> map = calculator.getRankedTagList(data.getUserID(), data.getResourceID(), sorting,
                cType);
        results.add(map);
    }
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);

    timeString = PerformanceMeasurement.addTimeMeasurement(timeString, true, trainingTime, testTime,
            sampleSize);
    return results;
}

From source file:org.apache.beam.sdk.io.synthetic.delay.SyntheticDelay.java

/** Keep cpu busy for {@code delayMillis} by calculating lots of hashes. */
private static void cpuDelay(long delayMillis) {
    // Note that the delay is enforced in terms of walltime. That implies this thread may not
    // keep CPU busy if it gets preempted by other threads. There is more of chance of this
    // occurring in a streaming pipeline as there could be lots of threads running this. The loop
    // measures cpu time spent for each iteration, so that these effects are some what minimized.

    long cpuMicros = delayMillis * 1000;
    Stopwatch timer = Stopwatch.createUnstarted();

    while (timer.elapsed(TimeUnit.MICROSECONDS) < cpuMicros) {
        // Find a long which hashes to HASH in lowest MASK bits.
        // Values chosen to roughly take 1ms on typical workstation.
        timer.start();
        long p = INIT_PLAINTEXT;
        while (true) {
            long t = Hashing.murmur3_128().hashLong(p).asLong();
            if ((t & MASK) == (HASH & MASK)) {
                break;
            }
            p++;
        }
        timer.stop();
    }
}

From source file:org.geogit.gwc.TruncateHelper.java

public static void issueTruncateTasks(Context geogit, Optional<Ref> oldRef, Optional<Ref> newRef,
        GeoServerTileLayer tileLayer, TileBreeder breeder) {

    final ObjectId oldCommit = oldRef.isPresent() ? oldRef.get().getObjectId() : ObjectId.NULL;
    final ObjectId newCommit = newRef.isPresent() ? newRef.get().getObjectId() : ObjectId.NULL;

    final String tileLayerName = tileLayer.getName();
    final String layerTreeName = tileLayer.getLayerInfo().getResource().getNativeName();

    LOGGER.debug("Computing minimal bounds geometry on layer '{}' (tree '{}') for change {}...{} ",
            tileLayerName, layerTreeName, oldCommit, newCommit);
    final Geometry minimalBounds;
    Stopwatch sw = Stopwatch.createStarted();
    try {
        MinimalDiffBounds geomBuildCommand = geogit.command(MinimalDiffBounds.class)
                .setOldVersion(oldCommit.toString()).setNewVersion(newCommit.toString());

        geomBuildCommand.setTreeNameFilter(layerTreeName);

        minimalBounds = geomBuildCommand.call();
        sw.stop();
        if (minimalBounds.isEmpty()) {
            LOGGER.debug("Feature tree '{}' not affected by change {}...{} (took {})", layerTreeName, oldCommit,
                    newCommit, sw);
            return;
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Minimal bounds on layer '{}' computed in {}: {}", tileLayerName, sw,
                    formattedWKT(minimalBounds));
        }
    } catch (Exception e) {
        sw.stop();
        LOGGER.error("Error computing minimal bounds for {}...{} on layer '{}' after {}", oldCommit, newCommit,
                tileLayerName, sw);
        throw Throwables.propagate(e);
    }
    final Set<String> gridSubsets = tileLayer.getGridSubsets();

    LayerInfo layerInfo = tileLayer.getLayerInfo();
    ResourceInfo resource = layerInfo.getResource();
    final CoordinateReferenceSystem nativeCrs = resource.getNativeCRS();

    for (String gridsetId : gridSubsets) {
        GridSubset gridSubset = tileLayer.getGridSubset(gridsetId);
        final CoordinateReferenceSystem gridSetCrs = getGridsetCrs(gridSubset);

        LOGGER.debug("Reprojecting geometry mask to gridset {}", gridsetId);
        Geometry geomInGridsetCrs = transformToGridsetCrs(minimalBounds, nativeCrs, gridSetCrs);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("geometry mask reprojected to gridset {}: {}", gridsetId,
                    formattedWKT(geomInGridsetCrs));
        }
        geomInGridsetCrs = bufferAndSimplifyBySizeOfSmallerTile(geomInGridsetCrs, gridSetCrs, gridSubset);
        try {
            truncate(tileLayer, gridsetId, geomInGridsetCrs, breeder);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

From source file:org.geogig.geoserver.gwc.TruncateHelper.java

public static void issueTruncateTasks(Context context, Optional<Ref> oldRef, Optional<Ref> newRef,
        GeoServerTileLayer tileLayer, TileBreeder breeder) {

    final ObjectId oldCommit = oldRef.isPresent() ? oldRef.get().getObjectId() : ObjectId.NULL;
    final ObjectId newCommit = newRef.isPresent() ? newRef.get().getObjectId() : ObjectId.NULL;

    final String tileLayerName = tileLayer.getName();
    final String layerTreeName = tileLayer.getLayerInfo().getResource().getNativeName();

    LOGGER.debug(
            String.format("Computing minimal bounds geometry on layer '%s' (tree '%s') for change %s...%s ",
                    tileLayerName, layerTreeName, oldCommit, newCommit));
    final Geometry minimalBounds;
    Stopwatch sw = Stopwatch.createStarted();
    try {
        MinimalDiffBounds geomBuildCommand = context.command(MinimalDiffBounds.class)
                .setOldVersion(oldCommit.toString()).setNewVersion(newCommit.toString());

        geomBuildCommand.setTreeNameFilter(layerTreeName);

        minimalBounds = geomBuildCommand.call();
        sw.stop();
        if (minimalBounds.isEmpty()) {
            LOGGER.debug(String.format("Feature tree '%s' not affected by change %s...%s (took %s)",
                    layerTreeName, oldCommit, newCommit, sw));
            return;
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(String.format("Minimal bounds on layer '%s' computed in %s: %s", tileLayerName, sw,
                    formattedWKT(minimalBounds)));
        }
    } catch (Exception e) {
        sw.stop();
        LOGGER.error(String.format("Error computing minimal bounds for %s...%s on layer '%s' after %s",
                oldCommit, newCommit, tileLayerName, sw));
        throw Throwables.propagate(e);
    }
    final Set<String> gridSubsets = tileLayer.getGridSubsets();

    LayerInfo layerInfo = tileLayer.getLayerInfo();
    ResourceInfo resource = layerInfo.getResource();
    final CoordinateReferenceSystem sourceCrs;
    {
        CoordinateReferenceSystem nativeCrs = resource.getNativeCRS();
        if (nativeCrs == null) {
            // no native CRS specified, layer must have been configured with an overriding one
            sourceCrs = resource.getCRS();
        } else {
            sourceCrs = nativeCrs;
        }
    }
    for (String gridsetId : gridSubsets) {
        GridSubset gridSubset = tileLayer.getGridSubset(gridsetId);
        final CoordinateReferenceSystem gridSetCrs = getGridsetCrs(gridSubset);

        LOGGER.debug("Reprojecting geometry mask to gridset {}", gridsetId);
        Geometry geomInGridsetCrs = transformToGridsetCrs(minimalBounds, sourceCrs, gridSetCrs);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("geometry mask reprojected to gridset {}: {}", gridsetId,
                    formattedWKT(geomInGridsetCrs));
        }
        geomInGridsetCrs = bufferAndSimplifyBySizeOfSmallerTile(geomInGridsetCrs, gridSetCrs, gridSubset);
        try {
            truncate(tileLayer, gridsetId, geomInGridsetCrs, breeder);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

From source file:com.hortonworks.streamline.common.util.ParallelStreamUtil.java

public static <T> T execute(Supplier<T> supplier, Executor executor) {
    Stopwatch stopwatch = Stopwatch.createStarted();
    LOG.debug("execute start");

    try {
        CompletableFuture<T> resultFuture = CompletableFuture.supplyAsync(supplier, executor);
        return resultFuture.get();
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    } catch (ExecutionException e) {
        handleExecutionException(e);
        // shouldn't reach here
        throw new IllegalStateException("Shouldn't reach here");
    } finally {
        LOG.debug("execute complete - elapsed: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
        stopwatch.stop();
    }
}