Example usage for com.google.common.cache CacheBuilder expireAfterWrite

Introduction

On this page you can find example usages of com.google.common.cache CacheBuilder expireAfterWrite, collected from open-source projects.

Prototype

public CacheBuilder<K, V> expireAfterWrite(long duration, TimeUnit unit) 

Document

Specifies that each entry should be automatically removed from the cache once a fixed duration has elapsed after the entry's creation, or the most recent replacement of its value.
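As a quick orientation before the project examples below, here is a minimal, self-contained sketch of the call; the ten-minute TTL and the size limit are arbitrary illustrative values, not defaults.

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

public class ExpireAfterWriteSketch {
    public static void main(String[] args) {
        // Entries are evicted 10 minutes after creation or after the last replacement of their value.
        Cache<String, String> cache = CacheBuilder.newBuilder()
                .expireAfterWrite(10, TimeUnit.MINUTES)
                .maximumSize(1_000)
                .build();

        cache.put("key", "value");
        System.out.println(cache.getIfPresent("key")); // "value" until the TTL elapses
    }
}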

Usage

From source file: com.addthis.hydra.job.store.AvailableCache.java

/**
 * Make a cache using the specified cache parameters
 *
 * @param refreshMillis How frequently values should be refreshed in milliseconds (if <= 0, no refresh)
 * @param expireMillis  How old values must be before they expire (if <= 0, they never expire)
 * @param maxSize       How many values should be stored in the cache (if <= 0, no explicit limit)
 * @param fetchThreads  How many threads to use to fetch values in the background (if <=0, use two threads)
 */
public AvailableCache(long refreshMillis, long expireMillis, int maxSize, int fetchThreads) {
    CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
    // Configure the cache for any parameters that are > 0
    if (expireMillis > 0) {
        cacheBuilder.expireAfterWrite(expireMillis, TimeUnit.MILLISECONDS);
    }
    if (refreshMillis > 0) {
        cacheBuilder.refreshAfterWrite(refreshMillis, TimeUnit.MILLISECONDS);
    }
    if (maxSize > 0) {
        cacheBuilder.maximumSize(maxSize);
    }
    if (fetchThreads <= 0) {
        fetchThreads = 2;
    }
    executor = new ThreadPoolExecutor(fetchThreads, fetchThreads, 1000L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<>(),
            new ThreadFactoryBuilder().setNameFormat("avail-cache-%d").setDaemon(true).build());
    //noinspection unchecked
    this.loadingCache = cacheBuilder.build(new CacheLoader<String, Optional<T>>() {
        @Override
        /**
         * If refreshAfterWrite is enabled, this method is called after returning the old value.
         * The new value will be inserted into the cache when the load() operation completes.
         */
        public ListenableFuture<Optional<T>> reload(final String key, Optional<T> oldValue) {
            ListenableFutureTask<Optional<T>> task = ListenableFutureTask.create(() -> load(key));
            executor.execute(task);
            return task;
        }

        @Override
        public Optional<T> load(String key) throws Exception {
            return Optional.fromNullable(fetchValue(key));
        }
    });
}
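A note on the combination above: refreshAfterWrite never evicts an entry on its own; it only marks the entry as eligible for reload on the next read, and the stale value keeps being served while reload() runs on the executor. Pairing it with a longer expireAfterWrite, as this constructor does when both parameters are positive, therefore puts a hard bound on staleness. The following is a condensed sketch of that pairing, not the original class; the durations and the length-based loader are illustrative only.

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.TimeUnit;

public class RefreshPlusExpireSketch {
    public static void main(String[] args) throws Exception {
        LoadingCache<String, Integer> lengths = CacheBuilder.newBuilder()
                .refreshAfterWrite(1, TimeUnit.MINUTES)   // reload lazily after one minute
                .expireAfterWrite(10, TimeUnit.MINUTES)   // hard upper bound on staleness
                .build(new CacheLoader<String, Integer>() {
                    @Override
                    public Integer load(String key) {
                        return key.length(); // stand-in for a real fetch
                    }
                });

        System.out.println(lengths.get("hydra")); // prints 5
    }
}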

From source file: org.apache.samza.table.caching.CachingTableProvider.java

private ReadWriteTable createDefaultCacheTable(String tableId, JavaTableConfig tableConfig) {
    long readTtlMs = Long.parseLong(tableConfig.getForTable(tableId, CachingTableDescriptor.READ_TTL_MS, "-1"));
    long writeTtlMs = Long
            .parseLong(tableConfig.getForTable(tableId, CachingTableDescriptor.WRITE_TTL_MS, "-1"));
    long cacheSize = Long.parseLong(tableConfig.getForTable(tableId, CachingTableDescriptor.CACHE_SIZE, "-1"));

    CacheBuilder cacheBuilder = CacheBuilder.newBuilder();
    if (readTtlMs != -1) {
        cacheBuilder.expireAfterAccess(readTtlMs, TimeUnit.MILLISECONDS);
    }
    if (writeTtlMs != -1) {
        cacheBuilder.expireAfterWrite(writeTtlMs, TimeUnit.MILLISECONDS);
    }
    if (cacheSize != -1) {
        cacheBuilder.maximumSize(cacheSize);
    }

    logger.info(String.format("Creating default cache with: readTtl=%d, writeTtl=%d, maxSize=%d", readTtlMs,
            writeTtlMs, cacheSize));

    GuavaCacheTable cacheTable = new GuavaCacheTable(tableId + "-def-cache", cacheBuilder.build());
    cacheTable.init(this.context);

    return cacheTable;
}
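The readTtlMs/writeTtlMs split above maps onto Guava's two expiry modes: expireAfterAccess restarts its timer on every read or write, while expireAfterWrite restarts it only on writes, and when both are configured an entry is evicted as soon as either deadline passes. Below is a small self-contained sketch of the distinction, with arbitrary durations.

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

public class ReadWriteTtlSketch {
    public static void main(String[] args) {
        Cache<String, byte[]> records = CacheBuilder.newBuilder()
                .expireAfterAccess(30, TimeUnit.SECONDS) // "read TTL": any access resets this timer
                .expireAfterWrite(5, TimeUnit.MINUTES)   // "write TTL": counted from the last write
                .build();

        records.put("record-1", new byte[] { 1, 2, 3 });
        System.out.println(records.getIfPresent("record-1") != null); // true until a TTL elapses
    }
}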

From source file: org.apache.samza.table.caching.descriptors.CachingTableProvider.java

private ReadWriteTable createDefaultCacheTable(String tableId) {
    long readTtlMs = Long.parseLong(tableSpec.getConfig().getOrDefault(READ_TTL_MS, "-1"));
    long writeTtlMs = Long.parseLong(tableSpec.getConfig().getOrDefault(WRITE_TTL_MS, "-1"));
    long cacheSize = Long.parseLong(tableSpec.getConfig().getOrDefault(CACHE_SIZE, "-1"));

    CacheBuilder cacheBuilder = CacheBuilder.newBuilder();
    if (readTtlMs != -1) {
        cacheBuilder.expireAfterAccess(readTtlMs, TimeUnit.MILLISECONDS);
    }
    if (writeTtlMs != -1) {
        cacheBuilder.expireAfterWrite(writeTtlMs, TimeUnit.MILLISECONDS);
    }
    if (cacheSize != -1) {
        cacheBuilder.maximumSize(cacheSize);
    }

    logger.info(String.format("Creating default cache with: readTtl=%d, writeTtl=%d, maxSize=%d", readTtlMs,
            writeTtlMs, cacheSize));

    GuavaCacheTable cacheTable = new GuavaCacheTable(tableId + "-def-cache", cacheBuilder.build());
    cacheTable.init(this.context);

    return cacheTable;
}

From source file: com.enitalk.configs.DateCache.java

@Bean(name = "skipCache")
public LoadingCache<String, ConcurrentSkipListSet<DateTime>> datesMap() {
    CacheBuilder<Object, Object> ccc = CacheBuilder.newBuilder();
    ccc.expireAfterWrite(2, TimeUnit.MINUTES);

    LoadingCache<String, ConcurrentSkipListSet<DateTime>> cache = ccc
            .build(new CacheLoader<String, ConcurrentSkipListSet<DateTime>>() {

                @Override
                public ConcurrentSkipListSet<DateTime> load(String key) throws Exception {
                    try {
                        HashMap teachers = mongo.findOne(Query.query(Criteria.where("i").is(key)),
                                HashMap.class, "teachers");
                        ObjectNode teacherJson = jackson.convertValue(teachers, ObjectNode.class);
                        String timeZone = teacherJson.at("/calendar/timeZone").asText();

                        NavigableSet<DateTime> set = days(teacherJson.path("schedule"), timeZone, teacherJson);

                        DateTimeZone dzz = DateTimeZone.forID(timeZone);
                        DateTimeFormatter df = ISODateTimeFormat.dateTimeNoMillis().withZone(dzz);

                        byte[] events = calendar.busyEvents(jackson.createObjectNode().put("id", key));
                        JsonNode evs = jackson.readTree(events);
                        Iterator<JsonNode> its = evs.iterator();
                        TreeSet<DateTime> dates = new TreeSet<>();
                        while (its.hasNext()) {
                            String date = its.next().asText();
                            DateTime av = df.parseDateTime(date).toDateTime(DateTimeZone.UTC);
                            dates.add(av);
                        }

                        set.removeAll(dates);

                        logger.info("Dates for i {} {}", key, set);

                        return new ConcurrentSkipListSet<>(set);

                    } catch (Exception e) {
                        logger.error(ExceptionUtils.getFullStackTrace(e));
                    }
                    // Guava's CacheLoader.load() must not return null (the cache would throw
                    // InvalidCacheLoadException), so fall back to an empty set instead.
                    return new ConcurrentSkipListSet<>();
                }

            });

    return cache;
}

From source file: org.geoserver.web.demo.PreviewLayerProvider.java

public PreviewLayerProvider() {
    super();
    // Inner cache used to avoid recomputing size() more than once
    // within a short interval (less than a second)
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();

    cache = builder.expireAfterWrite(DEFAULT_CACHE_TIME, TimeUnit.SECONDS).build();
    // Callable which internally calls the size method
    sizeCaller = new SizeCallable();
    // Callable which internally calls the fullSize() method
    fullSizeCaller = new FullSizeCallable();
}
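The cache built here has no CacheLoader, and the sizeCaller/fullSizeCaller Callables suggest that values are fetched on demand through Cache.get(key, Callable). Below is a minimal sketch of that pattern under that assumption; the key, the computeSize() method and the one-second TTL are invented for illustration.

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

public class SizeCacheSketch {
    private final Cache<String, Integer> cache = CacheBuilder.newBuilder()
            .expireAfterWrite(1, TimeUnit.SECONDS)
            .build();

    // Hypothetical expensive computation standing in for the provider's size() call.
    private int computeSize() {
        return 42;
    }

    public int cachedSize() throws Exception {
        // get(key, loader) runs the Callable at most once per expiry window;
        // concurrent callers block on, and share, the same computation.
        return cache.get("size", this::computeSize);
    }
}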

From source file: org.centralperf.service.RunStatisticsService.java

private LoadingCache<Long, SummaryGraph> getSummaryGraphCache() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    builder.expireAfterWrite(cacheRefreshDelay, TimeUnit.SECONDS);
    return builder.build(summaryGraphLoader);
}

From source file: org.centralperf.service.RunStatisticsService.java

private LoadingCache<Long, ResponseTimeGraph> getResponseTimeGraphCache() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    builder.expireAfterWrite(cacheRefreshDelay, TimeUnit.SECONDS);
    return builder.build(responseTimeGraphLoader);
}

From source file: org.centralperf.service.RunStatisticsService.java

private LoadingCache<Long, ResponseSizeGraph> getResponseSizeGraphCache() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    builder.expireAfterWrite(cacheRefreshDelay, TimeUnit.SECONDS);
    return builder.build(responseSizeGraphLoader);
}

From source file: org.centralperf.service.RunStatisticsService.java

private LoadingCache<Long, ErrorRateGraph> getErrorRateGraphCache() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    builder.expireAfterWrite(cacheRefreshDelay, TimeUnit.SECONDS);
    return builder.build(errorRateGraphLoader);
}

From source file: org.centralperf.service.RunStatisticsService.java

public LoadingCache<Long, RunStats> getRunStatsCache() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    builder.expireAfterWrite(cacheRefreshDelay, TimeUnit.SECONDS);
    return builder.build(runStatsLoader);
}