Example usage for com.google.common.cache LoadingCache stats

List of usage examples for com.google.common.cache LoadingCache stats

Introduction

On this page you can find example usage of com.google.common.cache LoadingCache#stats().

Prototype

CacheStats stats();

Source Link

Document

Returns a current snapshot of this cache's cumulative statistics.

Usage

From source file:org.apache.tajo.pullserver.PullServerUtil.java

/**
 * Resolves the byte range of the output data file for {@code outDir} that covers the
 * requested key range [start, end), using a cached BST index reader.
 *
 * @param queryId                  query identifier used as part of the index-cache key
 * @param ebSeqId                  execution-block sequence id used as part of the index-cache key
 * @param outDir                   directory containing the index and the "output" data file
 * @param startKey                 Base64-encoded, row-store-encoded start key (inclusive)
 * @param endKey                   Base64-encoded, row-store-encoded end key (exclusive)
 * @param last                     if true, the range extends to the end of the data file
 * @param indexReaderCache         cache of index readers keyed by (outDir, queryId, ebSeqId)
 * @param lowCacheHitCheckThreshold minimum cache size before the hit-rate warning is checked
 * @return the matching file region, or {@code null} if the index is empty or the request
 *         does not overlap the indexed key range
 * @throws IOException        if reading the index fails
 * @throws ExecutionException if loading the index reader into the cache fails
 */
private static SearchResult searchCorrespondPart(String queryId, String ebSeqId, Path outDir, String startKey,
        String endKey, boolean last, LoadingCache<IndexCacheKey, BSTIndexReader> indexReaderCache,
        int lowCacheHitCheckThreshold) throws IOException, ExecutionException {
    BSTIndexReader idxReader = indexReaderCache.get(new IndexCacheKey(outDir, queryId, ebSeqId));
    idxReader.retain(); // pin the reader while in use; released in the finally block below

    File data;
    long startOffset;
    long endOffset;
    try {
        // Warn (at debug level) when the index-reader cache is both populated and ineffective.
        if (LOG.isDebugEnabled()) {
            if (indexReaderCache.size() > lowCacheHitCheckThreshold
                    && indexReaderCache.stats().hitRate() < 0.5) {
                LOG.debug("Too low cache hit rate: " + indexReaderCache.stats());
            }
        }

        Tuple indexedFirst = idxReader.getFirstKey();
        Tuple indexedLast = idxReader.getLastKey();

        if (indexedFirst == null && indexedLast == null) { // if # of rows is zero
            if (LOG.isDebugEnabled()) {
                LOG.debug("There is no contents");
            }
            return null;
        }

        byte[] startBytes = Base64.decodeBase64(startKey);
        byte[] endBytes = Base64.decodeBase64(endKey);

        Tuple start;
        Tuple end;
        Schema keySchema = idxReader.getKeySchema();
        RowStoreDecoder decoder = RowStoreUtil.createDecoder(keySchema);

        // Decode failures are reported with the offending key so the caller can be debugged;
        // Throwable is caught because the decoder may fail with errors, not just exceptions.
        try {
            start = decoder.toTuple(startBytes);
        } catch (Throwable t) {
            throw new IllegalArgumentException(
                    "StartKey: " + startKey + ", decoded byte size: " + startBytes.length, t);
        }

        try {
            end = decoder.toTuple(endBytes);
        } catch (Throwable t) {
            throw new IllegalArgumentException("EndKey: " + endKey + ", decoded byte size: " + endBytes.length,
                    t);
        }

        data = new File(URI.create(outDir.toUri() + "/output"));
        if (LOG.isDebugEnabled()) {
            LOG.debug("GET Request for " + data.getAbsolutePath() + " (start=" + start + ", end=" + end
                    + (last ? ", last=true" : "") + ")");
        }

        TupleComparator comparator = idxReader.getComparator();

        // Requested range does not overlap the indexed range at all.
        if (comparator.compare(end, indexedFirst) < 0 || comparator.compare(indexedLast, start) < 0) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Out of Scope (indexed data [" + indexedFirst + ", " + indexedLast
                        + "], but request start:" + start + ", end: " + end);
            }
            return null;
        }

        try {
            idxReader.init();
            startOffset = idxReader.find(start);
        } catch (IOException ioe) {
            LOG.error("State Dump (the requested range: " + "[" + start + ", " + end + ")" + ", idx min: "
                    + idxReader.getFirstKey() + ", idx max: " + idxReader.getLastKey());
            throw ioe;
        }
        try {
            endOffset = idxReader.find(end);
            if (endOffset == -1) {
                // End key not present in the index: fall back to the nearest following entry.
                endOffset = idxReader.find(end, true);
            }
        } catch (IOException ioe) {
            LOG.error("State Dump (the requested range: " + "[" + start + ", " + end + ")" + ", idx min: "
                    + idxReader.getFirstKey() + ", idx max: " + idxReader.getLastKey());
            throw ioe;
        }

        // if startOffset == -1 then case 2-1 or case 3
        if (startOffset == -1) { // this is a hack
            // Start key not present in the index: retry with nearest-match lookup.
            try {
                startOffset = idxReader.find(start, true);
            } catch (IOException ioe) {
                LOG.error("State Dump (the requested range: " + "[" + start + ", " + end + ")" + ", idx min: "
                        + idxReader.getFirstKey() + ", idx max: " + idxReader.getLastKey());
                throw ioe;
            }
        }

        // Still unresolved after the nearest-match retry: the index is inconsistent.
        if (startOffset == -1) {
            throw new IllegalStateException("startOffset " + startOffset + " is negative \n"
                    + "State Dump (the requested range: " + "[" + start + ", " + end + ")" + ", idx min: "
                    + idxReader.getFirstKey() + ", idx max: " + idxReader.getLastKey());
        }

        // if greater than indexed values
        if (last || (endOffset == -1 && comparator.compare(idxReader.getLastKey(), end) < 0)) {
            endOffset = data.length();
        }
    } finally {
        idxReader.release();
    }

    return new SearchResult(data, startOffset, endOffset);
}

From source file:it.units.malelab.ege.core.evolver.StandardEvolver.java

/**
 * Returns the number of births to report: when the configuration is set to count
 * actual evaluations, the fitness-cache miss count is used (each miss corresponds
 * to one real fitness computation); otherwise the nominal birth count is returned.
 */
protected int actualBirths(int births, LoadingCache<Node<T>, F> fitnessCache) {
    if (configuration.isActualEvaluations()) {
        return (int) fitnessCache.stats().missCount();
    }
    return births;
}

From source file:it.units.malelab.ege.core.evolver.StandardEvolver.java

/**
 * Builds a snapshot map of the current statistics of the mapping and fitness caches,
 * keyed by the cache-name constants.
 *
 * <p>Uses bounded wildcard types instead of raw {@code LoadingCache} so the method
 * accepts any cache without raw-type unchecked warnings; only {@code stats()} is
 * called, so the key/value types are irrelevant here.
 *
 * @param mappingCache the genotype-to-phenotype mapping cache
 * @param fitnessCache the fitness-evaluation cache
 * @return a map with one {@code CacheStats} entry per cache, in insertion order
 */
protected Map<String, Object> cacheStats(LoadingCache<?, ?> mappingCache, LoadingCache<?, ?> fitnessCache) {
    Map<String, Object> map = new LinkedHashMap<>();
    map.put(MAPPING_CACHE_NAME, mappingCache.stats());
    map.put(FITNESS_CACHE_NAME, fitnessCache.stats());
    return map;
}

From source file:com.pinterest.deployservice.handler.PingHandler.java

/**
 * Loads a value through the given cache, logging the cache's cumulative miss rate
 * at debug level.
 *
 * @param cache the loading cache to read from
 * @param key   the key to look up (loaded via the cache's loader on a miss)
 * @return the cached or freshly loaded value
 * @throws ExecutionException if the cache loader fails
 */
private <K, V> V getFromCache(LoadingCache<K, V> cache, K key) throws ExecutionException {
    // Guard the stats call: the original snapshotted CacheStats and built the message
    // string on every lookup, even when debug logging was disabled.
    if (LOG.isDebugEnabled()) {
        LOG.debug(key + " Miss Rate: " + cache.stats().missRate());
    }
    return cache.get(key);
}

From source file:org.apache.s4.core.util.S4Metrics.java

/**
 * Registers three gauges for the given PE cache — entry count, eviction count and
 * miss count — under the prototype's concrete class, with names derived from it.
 */
public void createCacheGauges(ProcessingElement prototype,
        final LoadingCache<String, ProcessingElement> cache) {

    // Hoist the repeated class/name lookups; all three gauges share the same prefix.
    final Class<?> peClass = prototype.getClass();
    final String namePrefix = peClass.getName();

    Metrics.newGauge(peClass, namePrefix + "-cache-entries", new Gauge<Long>() {

        @Override
        public Long value() {
            return cache.size();
        }
    });
    Metrics.newGauge(peClass, namePrefix + "-cache-evictions", new Gauge<Long>() {

        @Override
        public Long value() {
            return cache.stats().evictionCount();
        }
    });
    Metrics.newGauge(peClass, namePrefix + "-cache-misses", new Gauge<Long>() {

        @Override
        public Long value() {
            return cache.stats().missCount();
        }
    });
}

From source file:org.mapsforge.map.writer.CB_MapFileWriter.java

/**
 * Writes the map file according to the given configuration using the given data processor.
 * /*from   w  w  w  . j  a v  a  2  s.  c o m*/
 * @param configuration
 *            the configuration
 * @param dataProcessor
 *            the data processor
 * @throws IOException
 *             thrown if any IO error occurs
 */
/**
 * Writes the map file according to the given configuration using the given data processor.
 *
 * @param configuration
 *            the configuration
 * @param dataProcessor
 *            the data processor
 * @throws IOException
 *             thrown if any IO error occurs
 */
public static void writeFile(CB_MapWriterConfiguration configuration, CB_TileBasedDataProcessor dataProcessor)
        throws IOException {
    // try-with-resources: the original leaked the file handle whenever a write threw
    // before the explicit close() call.
    try (RandomAccessFile randomAccessFile = new RandomAccessFile(configuration.getOutputFile(), "rw")) {

        int amountOfZoomIntervals = dataProcessor.getZoomIntervalConfiguration().getNumberOfZoomIntervals();
        ByteBuffer containerHeaderBuffer = ByteBuffer.allocate(HEADER_BUFFER_SIZE);
        // CONTAINER HEADER
        int totalHeaderSize = writeHeaderBuffer(configuration, dataProcessor, containerHeaderBuffer);

        // set to mark where zoomIntervalConfig starts
        containerHeaderBuffer.reset();

        final LoadingCache<CB_TDWay, Geometry> jtsGeometryCache = CacheBuilder.newBuilder()
                .maximumSize(JTS_GEOMETRY_CACHE_SIZE)
                .concurrencyLevel(Runtime.getRuntime().availableProcessors() * 2)
                .build(new JTSGeometryCacheLoader(dataProcessor));

        // SUB FILES
        // for each zoom interval write a sub file
        long currentFileSize = totalHeaderSize;
        for (int i = 0; i < amountOfZoomIntervals; i++) {
            // SUB FILE INDEX AND DATA
            long subfileSize = writeSubfile(currentFileSize, i, dataProcessor, jtsGeometryCache,
                    randomAccessFile, configuration);
            // SUB FILE META DATA IN CONTAINER HEADER
            writeSubfileMetaDataToContainerHeader(dataProcessor.getZoomIntervalConfiguration(), i,
                    currentFileSize, subfileSize, containerHeaderBuffer);
            currentFileSize += subfileSize;
        }

        // The container header can only be completed once all sub-file sizes are known.
        randomAccessFile.seek(0);
        randomAccessFile.write(containerHeaderBuffer.array(), 0, totalHeaderSize);

        // WRITE FILE SIZE TO HEADER
        long fileSize = randomAccessFile.length();
        randomAccessFile.seek(OFFSET_FILE_SIZE);
        randomAccessFile.writeLong(fileSize);

        CacheStats stats = jtsGeometryCache.stats();
        LOGGER.info("JTS Geometry cache hit rate: " + stats.hitRate());
        LOGGER.info("JTS Geometry total load time: " + stats.totalLoadTime() / 1000);
    }

    LOGGER.info("Finished writing file.");
}

From source file:org.mapsforge.map.writer.MapFileWriter.java

/**
 * Writes the map file according to the given configuration using the given data processor.
 *
 * @param configuration the configuration
 * @param dataProcessor the data processor
 * @throws IOException thrown if any IO error occurs
 *///from  w w  w . ja v  a  2  s.  c o m
/**
 * Writes the map file according to the given configuration using the given data processor.
 *
 * @param configuration the configuration
 * @param dataProcessor the data processor
 * @throws IOException thrown if any IO error occurs
 */
public static void writeFile(MapWriterConfiguration configuration, TileBasedDataProcessor dataProcessor)
        throws IOException {
    EXECUTOR_SERVICE = Executors.newFixedThreadPool(configuration.getThreads());
    // try-with-resources: the original leaked the file handle whenever a write threw
    // before the explicit close() call.
    try (RandomAccessFile randomAccessFile = new RandomAccessFile(configuration.getOutputFile(), "rw")) {

        int amountOfZoomIntervals = dataProcessor.getZoomIntervalConfiguration().getNumberOfZoomIntervals();
        ByteBuffer containerHeaderBuffer = ByteBuffer.allocate(HEADER_BUFFER_SIZE);
        // CONTAINER HEADER
        int totalHeaderSize = writeHeaderBuffer(configuration, dataProcessor, containerHeaderBuffer);

        // set to mark where zoomIntervalConfig starts
        containerHeaderBuffer.reset();

        final LoadingCache<TDWay, Geometry> jtsGeometryCache = CacheBuilder.newBuilder()
                .maximumSize(JTS_GEOMETRY_CACHE_SIZE)
                .concurrencyLevel(Runtime.getRuntime().availableProcessors() * 2)
                .build(new JTSGeometryCacheLoader(dataProcessor));

        // SUB FILES
        // for each zoom interval write a sub file
        long currentFileSize = totalHeaderSize;
        for (int i = 0; i < amountOfZoomIntervals; i++) {
            // SUB FILE INDEX AND DATA
            long subfileSize = writeSubfile(currentFileSize, i, dataProcessor, jtsGeometryCache,
                    randomAccessFile, configuration);
            // SUB FILE META DATA IN CONTAINER HEADER
            writeSubfileMetaDataToContainerHeader(dataProcessor.getZoomIntervalConfiguration(), i,
                    currentFileSize, subfileSize, containerHeaderBuffer);
            currentFileSize += subfileSize;
        }

        // The container header can only be completed once all sub-file sizes are known.
        randomAccessFile.seek(0);
        randomAccessFile.write(containerHeaderBuffer.array(), 0, totalHeaderSize);

        // WRITE FILE SIZE TO HEADER
        long fileSize = randomAccessFile.length();
        randomAccessFile.seek(OFFSET_FILE_SIZE);
        randomAccessFile.writeLong(fileSize);

        CacheStats stats = jtsGeometryCache.stats();
        LOGGER.fine("Tag values stats:\n" + OSMUtils.logValueTypeCount());
        LOGGER.info("JTS Geometry cache hit rate: " + stats.hitRate());
        LOGGER.info("JTS Geometry total load time: " + stats.totalLoadTime() / 1000);
    }

    LOGGER.info("Finished writing file.");
}

From source file:org.apache.brooklyn.location.basic.SshMachineLocation.java

/**
 * Executes the given task against a pooled SSH connection for the given properties.
 * When the CLOSE_CONNECTION property is truthy, the task additionally invalidates
 * and cleans up the whole SSH pool cache after it has produced its result.
 */
protected <T> T execSsh(final Map<String, ?> props, final Function<ShellTool, T> task) {
    final LoadingCache<Map<String, ?>, Pool<SshTool>> poolCache = getSshPoolCache();
    final Pool<SshTool> sshPool = poolCache.getUnchecked(props);
    if (LOG.isTraceEnabled()) {
        LOG.trace("{} execSsh got pool: {}", this, sshPool);
    }

    // Common path: no close requested, just run the task against the pool.
    if (!truth(props.get(CLOSE_CONNECTION.getName()))) {
        return sshPool.exec(task);
    }

    // Close requested: wrap the task so that after it runs, all cached pools are
    // invalidated and cleaned up (dropping the connections they hold).
    Function<SshTool, T> taskThenInvalidate = new Function<SshTool, T>() {
        @Override
        public T apply(SshTool input) {
            T result = task.apply(input);
            if (LOG.isDebugEnabled()) {
                LOG.debug("{} invalidating all sshPoolCache entries: {}", SshMachineLocation.this,
                        poolCache.stats().toString());
            }
            poolCache.invalidateAll();
            poolCache.cleanUp();
            return result;
        }
    };
    return sshPool.exec(taskThenInvalidate);
}

From source file:it.units.malelab.ege.core.evolver.geneoptimalmixing.GOMEvolver.java

/**
 * Runs the gene-optimal-mixing (GOM) evolutionary loop: initializes a population,
 * then repeatedly learns a family-of-subsets (FOS) from the current genotypes and
 * applies GOM to every individual, until a generation/time/optimum stop condition
 * triggers. Returns the phenotypes of the top-ranked individuals.
 */
@Override
public List<Node<T>> solve(ExecutorService executor, Random random, List<EvolverListener<G, T, F>> listeners)
        throws InterruptedException, ExecutionException {
    // Both caches record stats so actualBirths() and cacheStats() can report on them.
    LoadingCache<G, Pair<Node<T>, Map<String, Object>>> mappingCache = CacheBuilder.newBuilder()
            .maximumSize(CACHE_SIZE).recordStats().build(getMappingCacheLoader());
    LoadingCache<Node<T>, F> fitnessCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).recordStats()
            .build(getFitnessCacheLoader());
    Stopwatch stopwatch = Stopwatch.createStarted();
    //initialize population
    int births = 0;
    List<Callable<List<Individual<G, T, F>>>> tasks = new ArrayList<>();
    for (G genotype : configuration.getPopulationInitializer().build(configuration.getPopulationSize(),
            configuration.getInitGenotypeValidator(), random)) {
        tasks.add(individualFromGenotypeCallable(genotype, 0, mappingCache, fitnessCache, listeners, null, null,
                executor));
        births = births + 1;
    }
    List<Individual<G, T, F>> population = new ArrayList<>(Utils.getAll(executor.invokeAll(tasks)));
    Utils.broadcast(new EvolutionStartEvent<>(this, cacheStats(mappingCache, fitnessCache)), listeners,
            executor);
    Utils.broadcast(new GenerationEvent<>(configuration.getRanker().rank(population, random),
            (int) Math.floor(actualBirths(births, fitnessCache) / configuration.getPopulationSize()), this,
            cacheStats(mappingCache, fitnessCache)), listeners, executor);
    Set<Individual<G, T, F>> bests = new LinkedHashSet<>();
    //iterate until the (actual-births-based) generation count reaches the configured limit
    // NOTE(review): actualBirths(...) / getPopulationSize() is integer division, so the
    // surrounding Math.floor/Math.round calls are effectively no-ops for positive values.
    while (Math.round(actualBirths(births, fitnessCache) / configuration.getPopulationSize()) < configuration
            .getNumberOfGenerations()) {
        //learn fos (family of subsets) from the genotypes of the current population
        List<ConstrainedSequence> genotypes = new ArrayList<>(population.size());
        for (Individual<G, T, F> individual : population) {
            genotypes.add(individual.getGenotype());
        }
        Set<Set<Integer>> fos = configuration.getFosBuilder().build(genotypes, random);
        //apply gom to every individual in parallel
        int lastIterationActualBirths = actualBirths(births, fitnessCache);
        tasks.clear();
        for (Individual<G, T, F> individual : population) {
            tasks.add(gomCallable(population, individual, fos, random,
                    (int) Math.floor(actualBirths(births, fitnessCache) / configuration.getPopulationSize()),
                    configuration.getRanker(), configuration.getMutationOperator(), mappingCache, fitnessCache,
                    population.size() * configuration.getNumberOfGenerations(), listeners, executor,
                    stopwatch));
        }
        List<Individual<G, T, F>> newPopulation = new ArrayList<>();
        for (Future<List<Individual<G, T, F>>> result : executor.invokeAll(tasks)) {
            List<Individual<G, T, F>> newIndividuals = result.get();
            newPopulation.add(newIndividuals.get(0));
            // each GOM application counts one birth per FOS subset
            births = births + fos.size();
            if (Math.round(
                    actualBirths(births, fitnessCache) / configuration.getPopulationSize()) >= configuration
                            .getNumberOfGenerations()) {
                break;
            }
        }
        // overwrite the processed prefix of the population with the GOM results
        for (int i = 0; i < newPopulation.size(); i++) {
            population.set(i, newPopulation.get(i));
        }
        //update best rank: re-rank population plus previous bests, keep the top front
        List<Individual<G, T, F>> populationWithBests = new ArrayList<>(population);
        populationWithBests.addAll(bests);
        List<List<Individual<G, T, F>>> rankedPopulationWithBests = configuration.getRanker()
                .rank(populationWithBests, random);
        bests.clear();
        for (Individual<G, T, F> individual : rankedPopulationWithBests.get(0)) {
            bests.add(individual);
            if (bests.size() >= configuration.getPopulationSize()) {
                break;
            }
        }
        Utils.broadcast(new GenerationEvent<>(rankedPopulationWithBests,
                (int) Math.floor(actualBirths(births, fitnessCache) / configuration.getPopulationSize()), this,
                cacheStats(mappingCache, fitnessCache)), listeners, executor);
        if (configuration.getMaxRelativeElapsed() > 0) {
            //check if relative elapsed time exceeded (elapsed time measured in units of
            //average fitness-computation time, taken from the fitness cache's load penalty)
            double avgFitnessComputationNanos = fitnessCache.stats().averageLoadPenalty();
            double elapsedNanos = stopwatch.elapsed(TimeUnit.NANOSECONDS);
            if (elapsedNanos / avgFitnessComputationNanos > configuration.getMaxRelativeElapsed()) {
                break;
            }
        }
        if (configuration.getMaxElapsed() > 0) {
            //check if absolute elapsed time (seconds) exceeded
            if (stopwatch.elapsed(TimeUnit.SECONDS) > configuration.getMaxElapsed()) {
                break;
            }
        }
        if (configuration.getProblem().getLearningFitnessComputer().bestValue() != null) {
            //check if optimal solution found
            if (rankedPopulationWithBests.get(0).get(0).getFitness()
                    .equals(configuration.getProblem().getLearningFitnessComputer().bestValue())) {
                break;
            }
        }
    }
    //end: broadcast final ranking and collect the phenotypes of the top front
    List<Node<T>> bestPhenotypes = new ArrayList<>();
    List<Individual<G, T, F>> populationWithBests = new ArrayList<>(population);
    populationWithBests.addAll(bests);
    List<List<Individual<G, T, F>>> rankedPopulationWithBests = configuration.getRanker()
            .rank(populationWithBests, random);
    Utils.broadcast(new EvolutionEndEvent<>((List) rankedPopulationWithBests,
            (int) Math.floor(births / configuration.getPopulationSize()), this,
            cacheStats(mappingCache, fitnessCache)), listeners, executor);
    for (Individual<G, T, F> individual : rankedPopulationWithBests.get(0)) {
        bestPhenotypes.add(individual.getPhenotype());
    }
    return bestPhenotypes;
}

From source file:it.units.malelab.ege.core.evolver.StandardEvolver.java

@Override
public List<Node<T>> solve(ExecutorService executor, Random random, List<EvolverListener<G, T, F>> listeners)
        throws InterruptedException, ExecutionException {
    LoadingCache<G, Pair<Node<T>, Map<String, Object>>> mappingCache = CacheBuilder.newBuilder()
            .maximumSize(CACHE_SIZE).recordStats().build(getMappingCacheLoader());
    LoadingCache<Node<T>, F> fitnessCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).recordStats()
            .build(getFitnessCacheLoader());
    Stopwatch stopwatch = Stopwatch.createStarted();
    //initialize population
    int births = 0;
    List<Callable<List<Individual<G, T, F>>>> tasks = new ArrayList<>();
    for (G genotype : configuration.getPopulationInitializer().build(configuration.getPopulationSize(),
            configuration.getInitGenotypeValidator(), random)) {
        tasks.add(individualFromGenotypeCallable(genotype, 0, mappingCache, fitnessCache, listeners, null, null,
                executor));/*  w  ww.  ja v  a2 s  .  c  o  m*/
        births = births + 1;
    }
    List<Individual<G, T, F>> population = new ArrayList<>(Utils.getAll(executor.invokeAll(tasks)));
    int lastBroadcastGeneration = (int) Math
            .floor(actualBirths(births, fitnessCache) / configuration.getPopulationSize());
    Utils.broadcast(new EvolutionStartEvent<>(this, cacheStats(mappingCache, fitnessCache)), listeners,
            executor);
    Utils.broadcast(new GenerationEvent<>(configuration.getRanker().rank(population, random),
            lastBroadcastGeneration, this, cacheStats(mappingCache, fitnessCache)), listeners, executor);
    //iterate
    while (Math.round(actualBirths(births, fitnessCache) / configuration.getPopulationSize()) < configuration
            .getNumberOfGenerations()) {
        int currentGeneration = (int) Math.floor(births / configuration.getPopulationSize());
        tasks.clear();
        //re-rank
        List<List<Individual<G, T, F>>> rankedPopulation = configuration.getRanker().rank(population, random);
        //produce offsprings
        int i = 0;
        while (i < configuration.getOffspringSize()) {
            GeneticOperator<G> operator = Utils.selectRandom(configuration.getOperators(), random);
            List<Individual<G, T, F>> parents = new ArrayList<>(operator.getParentsArity());
            for (int j = 0; j < operator.getParentsArity(); j++) {
                parents.add(configuration.getParentSelector().select(rankedPopulation, random));
            }
            tasks.add(operatorApplicationCallable(operator, parents, random, currentGeneration, mappingCache,
                    fitnessCache, listeners, executor));
            i = i + operator.getChildrenArity();
        }
        List<Individual<G, T, F>> newPopulation = new ArrayList<>(Utils.getAll(executor.invokeAll(tasks)));
        births = births + newPopulation.size();
        //build new population
        if (configuration.isOverlapping()) {
            population.addAll(newPopulation);
        } else {
            if (newPopulation.size() >= configuration.getPopulationSize()) {
                population = newPopulation;
            } else {
                //keep missing individuals from old population
                int targetSize = population.size() - newPopulation.size();
                while (population.size() > targetSize) {
                    Individual<G, T, F> individual = configuration.getUnsurvivalSelector()
                            .select(rankedPopulation, random);
                    population.remove(individual);
                }
                population.addAll(newPopulation);
            }
        }
        //select survivals
        while (population.size() > configuration.getPopulationSize()) {
            //re-rank
            rankedPopulation = configuration.getRanker().rank(population, random);
            Individual<G, T, F> individual = configuration.getUnsurvivalSelector().select(rankedPopulation,
                    random);
            population.remove(individual);
        }
        if ((int) Math.floor(actualBirths(births, fitnessCache)
                / configuration.getPopulationSize()) > lastBroadcastGeneration) {
            lastBroadcastGeneration = (int) Math
                    .floor(actualBirths(births, fitnessCache) / configuration.getPopulationSize());
            Utils.broadcast(new GenerationEvent<>((List) rankedPopulation, lastBroadcastGeneration, this,
                    cacheStats(mappingCache, fitnessCache)), listeners, executor);
        }
        if (configuration.getMaxRelativeElapsed() > 0) {
            //check if relative elapsed time exceeded
            double avgFitnessComputationNanos = fitnessCache.stats().averageLoadPenalty();
            double elapsedNanos = stopwatch.elapsed(TimeUnit.NANOSECONDS);
            if (elapsedNanos / avgFitnessComputationNanos > configuration.getMaxRelativeElapsed()) {
                break;
            }
        }
        if (configuration.getMaxElapsed() > 0) {
            //check if elapsed time exceeded
            if (stopwatch.elapsed(TimeUnit.SECONDS) > configuration.getMaxElapsed()) {
                break;
            }
        }
        if (configuration.getProblem().getLearningFitnessComputer().bestValue() != null) {
            //check if optimal solution found
            if (rankedPopulation.get(0).get(0).getFitness()
                    .equals(configuration.getProblem().getLearningFitnessComputer().bestValue())) {
                break;
            }
        }
    }
    //end
    List<Node<T>> bestPhenotypes = new ArrayList<>();
    List<List<Individual<G, T, F>>> rankedPopulation = configuration.getRanker().rank(population, random);
    Utils.broadcast(new EvolutionEndEvent<>((List) rankedPopulation,
            (int) Math.floor(actualBirths(births, fitnessCache) / configuration.getPopulationSize()), this,
            cacheStats(mappingCache, fitnessCache)), listeners, executor);
    for (Individual<G, T, F> individual : rankedPopulation.get(0)) {
        bestPhenotypes.add(individual.getPhenotype());
    }
    return bestPhenotypes;
}