Example usage for com.google.common.cache Cache size

List of usage examples for com.google.common.cache Cache size

Introduction

This page lists usage examples for the com.google.common.cache Cache.size() method.

Prototype

long size();

Source Link

Document

Returns the approximate number of entries in this cache.

Usage

From source file:com.twitter.common.stats.Stats.java

/**
 * Exports a metric that tracks the size of a cache.
 *
 * @param name Name of the stat to export.
 * @param cache Cache whose size should be tracked.
 */
public static void exportSize(String name, final Cache<?, ?> cache) {
    final StatImpl<Long> sizeStat = new StatImpl<Long>(name) {
        @Override
        public Long read() {
            // Sampled on every read; Cache#size() is approximate by contract.
            return cache.size();
        }
    };
    export(sizeStat);
}

From source file:com.attribyte.essem.metrics.GuavaCacheMetrics.java

/**
 * Exposes standard metrics for a Guava cache.
 * <p>
 * Registered gauges: {@code size} (approximate entry count), {@code hit-ratio},
 * {@code load-exceptions} and {@code load-penalty}. The stats-based gauges only
 * report meaningful values if the cache was built with {@code recordStats()}.
 *
 * @param cache The cache to instrument.
 */
public GuavaCacheMetrics(final Cache<?, ?> cache) {

    // Approximate number of entries currently in the cache.
    final Gauge<Long> sizeGauge = new Gauge<Long>() {
        @Override
        public Long getValue() {
            return cache.size();
        }
    };

    // Fraction of lookups that were hits (0.0 .. 1.0).
    final Gauge<Double> hitRatioGauge = new Gauge<Double>() {
        @Override
        public Double getValue() {
            return cache.stats().hitRate();
        }
    };

    // Count of loads that threw an exception.
    final Gauge<Long> loadExceptions = new Gauge<Long>() {
        @Override
        public Long getValue() {
            return cache.stats().loadExceptionCount();
        }
    };

    // Average time spent loading new values, in nanoseconds.
    final Gauge<Double> loadPenaltyGauge = new Gauge<Double>() {
        @Override
        public Double getValue() {
            return cache.stats().averageLoadPenalty();
        }
    };

    this.metrics = ImmutableMap.<String, Metric>builder().put("size", sizeGauge).put("hit-ratio", hitRatioGauge)
            .put("load-exceptions", loadExceptions).put("load-penalty", loadPenaltyGauge).build();
}

From source file:com.google.gerrit.server.cache.CacheMetrics.java

/**
 * Registers callback metrics for every cache in the dynamic cache map.
 * <p>
 * Memory metrics are recorded for all caches; disk metrics are additionally
 * recorded for caches implementing {@code PersistentCache}.
 *
 * @param metrics factory used to create the callback metrics and the trigger.
 * @param cacheMap dynamic map of all registered caches.
 */
@Inject
public CacheMetrics(MetricMaker metrics, DynamicMap<Cache<?, ?>> cacheMap) {
    // Every metric value is broken out per cache via this field.
    Field<String> F_NAME = Field.ofString("cache_name");

    CallbackMetric1<String, Long> memEnt = metrics.newCallbackMetric("caches/memory_cached", Long.class,
            new Description("Memory entries").setGauge().setUnit("entries"), F_NAME);
    CallbackMetric1<String, Double> memHit = metrics.newCallbackMetric("caches/memory_hit_ratio", Double.class,
            new Description("Memory hit ratio").setGauge().setUnit("percent"), F_NAME);
    CallbackMetric1<String, Long> memEvict = metrics.newCallbackMetric("caches/memory_eviction_count",
            Long.class, new Description("Memory eviction count").setGauge().setUnit("evicted entries"), F_NAME);
    CallbackMetric1<String, Long> perDiskEnt = metrics.newCallbackMetric("caches/disk_cached", Long.class,
            new Description("Disk entries used by persistent cache").setGauge().setUnit("entries"), F_NAME);
    CallbackMetric1<String, Double> perDiskHit = metrics.newCallbackMetric("caches/disk_hit_ratio",
            Double.class, new Description("Disk hit ratio for persistent cache").setGauge().setUnit("percent"),
            F_NAME);

    Set<CallbackMetric<?>> cacheMetrics = ImmutableSet.<CallbackMetric<?>>of(memEnt, memHit, memEvict,
            perDiskEnt, perDiskHit);

    // The trigger re-samples all caches each time metrics are collected.
    metrics.newTrigger(cacheMetrics, () -> {
        for (DynamicMap.Entry<Cache<?, ?>> e : cacheMap) {
            Cache<?, ?> c = e.getProvider().get();
            String name = metricNameOf(e);
            CacheStats cstats = c.stats();
            memEnt.set(name, c.size()); // approximate entry count
            memHit.set(name, cstats.hitRate() * 100); // hitRate() is 0..1; exported as percent
            memEvict.set(name, cstats.evictionCount());
            if (c instanceof PersistentCache) {
                PersistentCache.DiskStats d = ((PersistentCache) c).diskStats();
                perDiskEnt.set(name, d.size());
                perDiskHit.set(name, hitRatio(d));
            }
        }
        // Drop values for caches that no longer exist.
        cacheMetrics.forEach(CallbackMetric::prune);
    });
}

From source file:com.antonjohansson.lprs.controller.spam.SpamController.java

/**
 * Spam check: allows the request when the user has made fewer than
 * {@code requestCount} requests within the configured expiry window.
 *
 * @param user the user to check.
 * @return {@code true} if the request is allowed, {@code false} if it is
 *         classified as spam.
 */
@Override
public boolean check(String user) {
    LOG.debug("Checking spam detection for user '{}'", user);
    if (!isSpamDetectionEnabled()) {
        LOG.debug("Spam detection is disabled");
        return true;
    }

    // One inner cache per user. Its entries expire after the configured
    // window, so its size approximates "requests in the last window".
    Cache<Date, Object> cache = getCache().asMap().computeIfAbsent(user,
            key -> newBuilder().expireAfterWrite(expireTime, timeUnit).build());

    // Force eviction of expired entries before counting.
    cache.cleanUp();
    if (cache.size() < requestCount) {
        LOG.debug("Spam detection OK!");
        // NOTE(review): new Date() has millisecond resolution, so two requests
        // in the same millisecond share a key and are counted once — confirm
        // whether that undercounting is acceptable here.
        cache.asMap().put(new Date(), EMPTY);
        return true;
    }

    LOG.debug("Spam detection not OK!");
    return false;
}

From source file:rickbw.incubator.cache.MultiCache.java

/**
 * Reports the combined (approximate) entry count across all delegate caches.
 */
@Override
public final long size() {
    return this.delegates.values().stream().mapToLong(Cache::size).sum();
}

From source file:com.toro.torod.cursors.DefaultCursorManager.java

/**
 * Discards old cursor caches that have become empty and resets the
 * eviction counter.
 */
private void removeUnusedCaches() {
    // removeIf performs the same iterator-based removal as the explicit
    // while/Iterator.remove() loop, without the manual cursor handling.
    oldCaches.removeIf(cache -> cache.size() == 0);
    counterToOldCacheEviction.set(0);
}

From source file:org.gradle.api.internal.changedetection.state.InMemoryTaskArtifactCache.java

/**
 * Returns the in-memory cache for {@code cacheId}, creating it on first use.
 * <p>
 * Lookup and creation are guarded by {@code lock} so concurrent callers
 * observe a single cache instance per id. Newly created caches record stats
 * and log evictions.
 *
 * @param cacheId unique id of the cache instance.
 * @param cacheName logical cache name used to look up the capacity cap.
 * @return the existing or newly created cache.
 * @throws IllegalStateException if no capacity is configured for {@code cacheName}.
 */
private Cache<Object, Object> loadData(String cacheId, String cacheName) {
    Cache<Object, Object> theData;
    synchronized (lock) {
        theData = this.cache.getIfPresent(cacheId);
        if (theData != null) {
            // size() is approximate; stats() is populated because the cache
            // is built with recordStats() below.
            LOG.info("In-memory cache of {}: Size{{}}, {}", cacheId, theData.size(), theData.stats());
        } else {
            Integer maxSize = CACHE_CAPS.get(cacheName);
            // Explicit check instead of `assert`: asserts are disabled by
            // default at runtime, which would surface this misconfiguration
            // later as an uninformative NullPointerException.
            if (maxSize == null) {
                throw new IllegalStateException("Unknown cache: " + cacheName);
            }
            LOG.info("Creating In-memory cache of {}: MaxSize{{}}", cacheId, maxSize);
            LoggingEvictionListener evictionListener = new LoggingEvictionListener(cacheId, maxSize);
            theData = CacheBuilder.newBuilder().maximumSize(maxSize).recordStats()
                    .removalListener(evictionListener).build();
            evictionListener.setCache(theData);
            this.cache.put(cacheId, theData);
        }
    }
    return theData;
}

From source file:com.google.gerrit.sshd.commands.ShowCaches.java

/**
 * Prints a table row for each in-memory cache; disk-backed (H2) caches are
 * instead collected into {@code disks} for separate reporting.
 *
 * @param disks receives disk-backed caches encountered in {@code caches}.
 * @param caches all caches to report, keyed by name.
 */
private void printMemoryCaches(Map<String, H2CacheImpl<?, ?>> disks, Map<String, Cache<?, ?>> caches) {
    for (Map.Entry<String, Cache<?, ?>> entry : caches.entrySet()) {
        final Cache<?, ?> cache = entry.getValue();
        if (cache instanceof H2CacheImpl) {
            // Disk-backed cache: defer to the disk report.
            disks.put(entry.getKey(), (H2CacheImpl<?, ?>) cache);
            continue;
        }
        final CacheStats stats = cache.stats();
        final String row = String.format("  %-" + nw + "s|%6s %6s %7s| %7s |%4s %4s|\n", entry.getKey(),
                count(cache.size()), "", "", duration(stats.averageLoadPenalty()),
                percent(stats.hitCount(), stats.requestCount()), "");
        stdout.print(row);
    }
}

From source file:org.eclipse.emf.compare.ide.ui.internal.logical.EMFModelProvider.java

/**
 * Retrieve the logical model associated with the given IFile from our
 * {@link #contextToResourceMappingCache cache}, compute and store it if we do not have it yet (or no
 * longer).
 * <p>
 * The returned traversal will only reflect the local state of the logical model if this is passed a
 * {@link ResourceMappingContext#LOCAL_CONTEXT local mapping context}. Since computing the whole
 * traversal, including remote resources and links, can be a costly operation which involves I/O calls
 * over remote repositories, using a local context is advisable when such an accurate traversal is not
 * needed.
 * </p>
 * 
 * @param file
 *            The IFile for which we are currently seeking a logical model. Does not require to exist
 *            locally, but in this case we may only retrieve its model from the cache and will be unable
 *            to compute it.
 * @param context
 *            The context we'll use to compute this file's model.
 * @param monitor
 *            Monitor on which to report progress to the user.
 * @return The resolved synchronization model for this file.
 * @throws CoreException
 *             if we cannot retrieve the content of a resource for some reason.
 * @throws InterruptedException
 *             If the user interrupts the resolving.
 */
SynchronizationModel getOrComputeLogicalModel(IFile file, ResourceMappingContext context,
        IProgressMonitor monitor) throws CoreException, InterruptedException {
    SynchronizationModel syncModel;
    // Guard both the cache lookup and the (potentially long) computation so
    // concurrent callers do not resolve the same model twice.
    synchronized (contextToResourceMappingCache) {
        final Cache<IResource, SynchronizationModel> resourceMappingCache = contextToResourceMappingCache
                .getUnchecked(context);
        if (LOGGER.isDebugEnabled()) {
            // Cache#size() is approximate, hence the "~" in the message.
            LOGGER.debug("Retrieved cache with ~ " + resourceMappingCache.size() //$NON-NLS-1$
                    + " entries  for context " + context); //$NON-NLS-1$ 
        }
        syncModel = resourceMappingCache.getIfPresent(file);
        if (syncModel == null) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Cache MISSED for " + file); //$NON-NLS-1$
            }
            syncModel = computeLogicalModel(file, context, monitor);
            if (syncModel != null) {
                // Register the resolved model under every resource it spans so
                // that any of them can later serve as a cache key.
                for (IResource res : syncModel.getResources()) {
                    resourceMappingCache.put(res, syncModel);
                }
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("EMFModelProvider - Minimizing model"); //$NON-NLS-1$
                }
                new IdenticalResourceMinimizer().minimize(syncModel, monitor);
            }
        } else if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Cache FOUND entry for " + file); //$NON-NLS-1$
        }
    }
    return syncModel;
}

From source file:org.jasig.portal.portlets.search.SearchPortletController.java

/**
 * Handles a search action: rate-limits searches per session, publishes a
 * search event for providers, and stores a shared results holder keyed by a
 * random query id.
 *
 * @param query the raw search terms entered by the user.
 * @param request current portlet action request.
 * @param response current portlet action response.
 */
@ActionMapping
public void performSearch(@RequestParam(value = "query") String query, ActionRequest request,
        ActionResponse response) {
    final PortletSession session = request.getPortletSession();

    // Random id namespaces this query's results within the session.
    final String queryId = RandomStringUtils.randomAlphanumeric(32);

    // Lazily create the per-session rate-limit cache; entries expire a
    // minute after last access, so its size approximates searches/minute.
    Cache<String, Boolean> searchCounterCache;
    synchronized (org.springframework.web.portlet.util.PortletUtils.getSessionMutex(session)) {
        // NOTE(review): unchecked cast — the attribute is assumed to have been
        // stored only by this method; verify no other writer exists.
        searchCounterCache = (Cache<String, Boolean>) session.getAttribute(SEARCH_COUNTER_NAME);
        if (searchCounterCache == null) {
            searchCounterCache = CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.MINUTES)
                    .<String, Boolean>build();
            session.setAttribute(SEARCH_COUNTER_NAME, searchCounterCache);
        }
    }

    //Store the query id to track number of searches/minute
    searchCounterCache.put(queryId, Boolean.TRUE);
    if (searchCounterCache.size() > this.maximumSearchesPerMinute) {
        //Make sure old data is expired
        searchCounterCache.cleanUp();

        //Too many searches in the last minute, fail the search
        // (re-checked after cleanUp() because size() may include expired entries)
        if (searchCounterCache.size() > this.maximumSearchesPerMinute) {
            response.setRenderParameter("hitMaxQueries", Boolean.TRUE.toString());
            response.setRenderParameter("query", query);
            return;
        }
    }

    // construct a new search query object from the string query
    final SearchRequest queryObj = new SearchRequest();
    queryObj.setQueryId(queryId);
    queryObj.setSearchTerms(query);

    // Create the session-shared results object
    final PortalSearchResults results = new PortalSearchResults(defaultTabKey, resultTypeMappings);

    // place the portal search results object in the session using the queryId to namespace it
    Cache<String, PortalSearchResults> searchResultsCache;
    synchronized (org.springframework.web.portlet.util.PortletUtils.getSessionMutex(session)) {
        searchResultsCache = (Cache<String, PortalSearchResults>) session
                .getAttribute(SEARCH_RESULTS_CACHE_NAME);
        if (searchResultsCache == null) {
            searchResultsCache = CacheBuilder.newBuilder().maximumSize(20)
                    .expireAfterAccess(5, TimeUnit.MINUTES).<String, PortalSearchResults>build();
            session.setAttribute(SEARCH_RESULTS_CACHE_NAME, searchResultsCache);
        }
    }
    searchResultsCache.put(queryId, results);

    // send a search query event
    response.setEvent(SearchConstants.SEARCH_REQUEST_QNAME, queryObj);
    response.setRenderParameter("queryId", queryId);
    response.setRenderParameter("query", query);
}