Example usage for java.util.concurrent TimeUnit DAYS

List of usage examples for java.util.concurrent TimeUnit DAYS

Introduction

In this page you can find the example usage for java.util.concurrent TimeUnit DAYS.

Prototype

TimeUnit DAYS

To view the source code for java.util.concurrent TimeUnit DAYS, click the Source Link below.

Click Source Link

Document

Time unit representing twenty-four hours.

Usage

From source file:org.apache.marmotta.ldcache.backend.infinispan.LDCachingInfinispanBackend.java

/**
 * Create a non-clustered instance of the infinispan cache.
 *///from w w  w. java 2 s . co m
public LDCachingInfinispanBackend() {
    globalConfiguration = new GlobalConfigurationBuilder()
            .classLoader(LDCachingInfinispanBackend.class.getClassLoader()).globalJmxStatistics()
            .jmxDomain("org.apache.marmotta.ldcache").allowDuplicateDomains(true).build();

    defaultConfiguration = new ConfigurationBuilder().clustering().cacheMode(CacheMode.LOCAL).eviction()
            .strategy(EvictionStrategy.LIRS).maxEntries(100000).expiration().lifespan(7, TimeUnit.DAYS)
            .maxIdle(1, TimeUnit.DAYS).build();

    clustered = false;

}

From source file:org.eclipse.orion.server.git.jobs.ListPullRequestsJob.java

/**
 * Creates a job that lists the pull requests of a git project hosted at the
 * given URL. The completed task record is kept for one week.
 */
public ListPullRequestsJob(String userRunningTask, String url, URI cloneLocation, String host, String hostUser,
        String project, String username, String password, Object cookie) {
    super(userRunningTask, true);
    // Remote repository identity and location.
    this.url = url;
    this.host = host;
    this.hostUser = hostUser;
    this.project = project;
    this.cloneLocation = cloneLocation;
    // The user running the task doubles as the remote name.
    this.remote = userRunningTask;
    // Credentials for the hosting service.
    this.username = username;
    this.password = password;
    this.cookie = (Cookie) cookie;
    setFinalMessage("Getting Pull Requests Complete.");
    // Expire the finished task record after 7 days.
    setTaskExpirationTime(TimeUnit.DAYS.toMillis(7));
}

From source file:org.schedoscope.metascope.service.MetascopeTableService.java

@PostConstruct
public void init() {
    // Bounded, time-expiring cache of table samples: at most 1000 entries,
    // each re-loaded one day after it was written.
    CacheBuilder<Object, Object> cacheSpec = CacheBuilder.newBuilder()
            .maximumSize(1000)
            .expireAfterWrite(1, TimeUnit.DAYS);
    this.sampleCache = cacheSpec.build(new SampleCacheLoader(this, hiveQueryExecutor));
}

From source file:org.whispersystems.textsecuregcm.storage.Account.java

/**
 * An account is active when its master device exists and is itself active,
 * and the account has been seen within the last 365 days.
 */
public boolean isActive() {
    if (!getMasterDevice().isPresent()) {
        return false;
    }
    if (!getMasterDevice().get().isActive()) {
        return false;
    }
    final long oneYearAgo = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(365);
    return getLastSeen() > oneYearAgo;
}

From source file:me.j360.trace.autoconfiguration.ui.ZipkinUiAutoConfiguration.java

@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
    // Serve the bundled Zipkin UI assets with a one-year browser cache period.
    final int oneYearInSeconds = (int) TimeUnit.DAYS.toSeconds(365);
    registry.addResourceHandler("/**")
            .addResourceLocations("classpath:/zipkin-ui/")
            .setCachePeriod(oneYearInSeconds);
}

From source file:com.linkedin.pinot.core.startree.TestStarTreeMetadata.java

/**
 * Builds a star-tree test segment from the bundled Avro resource into
 * {@code segmentDir}, wiping any previous contents first.
 *
 * @param segmentDir target directory for the generated segment
 * @throws Exception if resource lookup or segment generation fails
 */
private void setupSegment(File segmentDir) throws Exception {
    // Remove any stale segment left over from a previous run.
    if (segmentDir.exists()) {
        FileUtils.deleteQuietly(segmentDir);
    }

    final String avroPath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    final SegmentGeneratorConfig generatorConfig = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(avroPath), segmentDir, "time_day", TimeUnit.DAYS, TABLE_NAME);
    generatorConfig.setTableName(TABLE_NAME);
    generatorConfig.setSegmentName(SEGMENT_NAME);

    // Configure the star-tree index for this segment.
    final StarTreeIndexSpec indexSpec = new StarTreeIndexSpec();
    indexSpec.setDimensionsSplitOrder(DIMENSIONS_SPLIT_ORDER);
    indexSpec.setMaxLeafRecords(MAX_LEAF_RECORDS);
    indexSpec.setSkipMaterializationCardinalityThreshold(SKIP_CARDINALITY_THRESHOLD);
    indexSpec.setSkipStarNodeCreationForDimensions(SKIP_STAR_NODE_CREATION_DIMENSTIONS);
    indexSpec.setSkipMaterializationForDimensions(SKIP_MATERIALIZATION_DIMENSIONS);
    generatorConfig.setEnableStarTreeIndex(true);
    generatorConfig.setStarTreeIndexSpec(indexSpec);

    // Run the creation driver to materialize the segment on disk.
    final SegmentIndexCreationDriver creationDriver = SegmentCreationDriverFactory.get(null);
    creationDriver.init(generatorConfig);
    creationDriver.build();
}

From source file:org.eclipse.skalli.core.rest.admin.StatisticsQuery.java

/**
 * Parses a time-interval query parameter into milliseconds.
 *
 * The parameter may end in a recognized unit symbol (one of
 * {@code unitSymbols}): 'h'/'H' selects hours, 'm'/'M' minutes, any other
 * recognized symbol keeps the default of days. When a unit symbol is
 * present, a missing numeric part defaults to 1. Blank or unparsable
 * input yields 0.
 *
 * @param param raw parameter value, possibly blank
 * @return the interval in milliseconds, or 0 if none could be parsed
 */
private long getTimeInterval(String param) {
    if (StringUtils.isBlank(param)) {
        return 0;
    }
    TimeUnit unit = TimeUnit.DAYS;
    int fallback = 0;
    char suffix = param.charAt(param.length() - 1);
    if (unitSymbols.indexOf(suffix) >= 0) {
        char lower = Character.toLowerCase(suffix);
        if (lower == 'h') {
            unit = TimeUnit.HOURS;
        } else if (lower == 'm') {
            unit = TimeUnit.MINUTES;
        }
        // Strip the unit symbol; a bare symbol (no digits) means "1 unit".
        param = StringUtils.chop(param);
        fallback = 1;
    }
    int value = NumberUtils.toInt(param, fallback);
    return (value != 0) ? TimeUnit.MILLISECONDS.convert(value, unit) : 0;
}

From source file:org.pshdl.commandline.PSHDLCompiler.java

/**
 * Entry logic of the compiler front-end: parses the command line, optionally
 * runs a once-a-week background version check, then dispatches the first
 * positional argument to the matching {@code IOutputProvider}.
 *
 * Exit codes: 0 on success, 1 for an unknown provider, 2 when the provider
 * returns an error message.
 *
 * @param args raw command-line arguments
 * @throws Exception propagated from option parsing or provider invocation
 */
@SuppressWarnings("rawtypes")
private void run(String[] args) throws Exception {
    final MultiOption options = getOptions();
    final CommandLine parse = options.parse(args);
    final List argList = parse.getArgList();
    // No arguments at all, or explicit --help: print usage and stop.
    if (parse.hasOption("help") || (args.length == 0)) {
        options.printHelp(System.out);
        return;
    }
    if (parse.hasOption("version")) {
        System.out.println(PSHDLCompiler.class.getSimpleName() + " version: " + HDLCore.VERSION);
        return;
    }
    // Throttle the remote version check to at most once per week, tracked
    // via the LAST_CHECK preference (epoch millis; -1 = never checked).
    final long lastCheck = prefs.getLong("LAST_CHECK", -1);
    final long oneWeek = TimeUnit.MILLISECONDS.convert(7, TimeUnit.DAYS);
    final long oneWeekAgo = System.currentTimeMillis() - oneWeek;
    if ((oneWeekAgo > lastCheck) && !parse.hasOption("nocheck")) {
        // Fire-and-forget thread so compilation never blocks on the network.
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    // NOTE(review): this stream is never closed; leak is
                    // short-lived since the JVM exits below, but worth fixing.
                    final InputStream stream = new URL(
                            "http://api.pshdl.org/api/v0.1/compiler/version?localVersion=" + HDLCore.VERSION)
                                    .openStream();
                    final byte[] byteArray = ByteStreams.toByteArray(stream);
                    final String remoteVersion = new String(byteArray, StandardCharsets.UTF_8).trim();
                    if (!remoteVersion.equals(HDLCore.VERSION)) {
                        System.err.println("A new version of this compiler is available: " + remoteVersion
                                + " local version: " + HDLCore.VERSION);
                    } else {
                        // Only an up-to-date result refreshes the timestamp, so
                        // an outdated compiler keeps nagging on every run.
                        prefs.putLong("LAST_CHECK", System.currentTimeMillis());
                    }
                } catch (final Exception e) {
                    // Best-effort check: network failures are deliberately
                    // ignored so offline use keeps working.
                }
            }
        }).start();
    }
    // The first positional argument selects the output provider.
    final String arg = argList.get(0).toString();
    final IOutputProvider iop = implementations.get(arg);
    if (iop == null) {
        System.out.println(
                "No such provider: " + arg + " please try one of: " + implementations.keySet().toString());
        System.exit(1);
        return;
    }
    argList.remove(0);
    // A non-null result from the provider is an error message.
    final String result = iop.invoke(parse);
    if (result != null) {
        System.out.flush();
        System.err.println(result);
        System.exit(2);
        return;
    }
    System.exit(0);
}

From source file:com.linkedin.pinot.core.startree.hll.OffheapStarTreeBuilderWithHllFieldTest.java

/**
 * Core routine of the HLL star-tree builder tests. Builds a schema with one
 * member-id INT dimension, (numDimensions - 1) STRING dimensions (the last
 * numSkipMaterializationDimensions of them marked skip-materialization),
 * a daysSinceEpoch time column, (numMetrics - 1) plain INT metrics and one
 * derived HLL metric; feeds one row per entry of memberIdColumnValues into
 * an off-heap star-tree builder; then verifies that skipped dimensions
 * collapse to "ALL" in aggregate rows and that the final HLL cardinality
 * approximates preciseCardinality (0.1 tolerance, as interpreted by
 * assertApproximation).
 *
 * @param numDimensions total dimension count, including the member-id column
 * @param numMetrics total metric count, including the derived HLL metric
 * @param numSkipMaterializationDimensions trailing dimensions to skip
 * @param memberIdColumnValues one member id per generated row
 * @param preciseCardinality expected exact distinct-member count
 * @throws Exception on builder failure; output dir is cleaned up regardless
 */
private void testSimpleCore(int numDimensions, int numMetrics, int numSkipMaterializationDimensions,
        int[] memberIdColumnValues, long preciseCardinality) throws Exception {
    StarTreeBuilderConfig builderConfig = null;
    try {
        builderConfig = new StarTreeBuilderConfig();
        Schema schema = new Schema();
        builderConfig.dimensionsSplitOrder = new ArrayList<>();
        builderConfig.setSkipMaterializationForDimensions(new HashSet<String>());
        Set<String> skipMaterializationForDimensions = builderConfig.getSkipMaterializationForDimensions();

        // add member id dimension spec
        String dimName = memberIdFieldName;
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.INT, true);
        schema.addField(dimensionFieldSpec);
        // add other dimension specs; early ones join the split order, the
        // trailing numSkipMaterializationDimensions are skipped instead
        for (int i = 1; i < numDimensions; i++) {
            dimName = "d" + (i + 1);
            dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.STRING, true);
            schema.addField(dimensionFieldSpec);

            if (i < (numDimensions - numSkipMaterializationDimensions)) {
                builderConfig.dimensionsSplitOrder.add(dimName);
            } else {
                builderConfig.getSkipMaterializationForDimensions().add(dimName);
            }
        }

        schema.setTimeFieldSpec(new TimeFieldSpec("daysSinceEpoch", DataType.INT, TimeUnit.DAYS));
        // add other metric specs
        for (int i = 0; i < numMetrics - 1; i++) {
            String metricName = "m" + (i + 1);
            MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, DataType.INT);
            schema.addField(metricFieldSpec);
        }
        // add hll metric, derived from the member id column
        String hllMetricName = memberIdFieldName + hllDeriveFieldSuffix;
        MetricFieldSpec hllDerivedFieldSpec = new MetricFieldSpec(hllMetricName, FieldSpec.DataType.STRING,
                HllUtil.getHllFieldSizeFromLog2m(log2m), MetricFieldSpec.DerivedMetricType.HLL);
        schema.addField(hllDerivedFieldSpec);
        // builder configuration
        builderConfig.maxLeafRecords = 10;
        builderConfig.schema = schema;
        builderConfig.setOutDir(new File("/tmp/startree"));
        // initialize the off-heap builder with the assembled config
        OffHeapStarTreeBuilder builder = new OffHeapStarTreeBuilder();
        builder.init(builderConfig);
        // fill values, one generated row per member id
        HashMap<String, Object> map = new HashMap<>();
        for (int row = 0; row < memberIdColumnValues.length; row++) {
            // add member id column
            dimName = memberIdFieldName;
            map.put(dimName, memberIdColumnValues[row]);
            // add other dimensions (values cycle with period numDimensions - i)
            for (int i = 1; i < numDimensions; i++) {
                dimName = schema.getDimensionFieldSpecs().get(i).getName();
                map.put(dimName, dimName + "-v" + row % (numDimensions - i));
            }
            // add time column (constant; time is not under test here)
            map.put("daysSinceEpoch", 1);
            // add other metrics
            for (int i = 0; i < numMetrics - 1; i++) {
                String metName = schema.getMetricFieldSpecs().get(i).getName();
                map.put(metName, 1);
            }
            // add hll column value: a single-value HLL for this member id
            map.put(hllMetricName, HllUtil.singleValueHllAsString(log2m, memberIdColumnValues[row]));
            // append the assembled row to the builder
            GenericRow genericRow = new GenericRow();
            genericRow.init(map);
            builder.append(genericRow);
        }
        builder.build();

        // dump every row (raw + aggregate) for inspection in the test log
        int totalDocs = builder.getTotalRawDocumentCount() + builder.getTotalAggregateDocumentCount();
        Iterator<GenericRow> iterator = builder.iterator(0, totalDocs);
        while (iterator.hasNext()) {
            GenericRow row = iterator.next();
            LOGGER.info(HllUtil.inspectGenericRow(row, hllDeriveFieldSuffix));
        }

        // aggregate rows only: skipped dimensions must have collapsed to "ALL"
        iterator = builder.iterator(builder.getTotalRawDocumentCount(), totalDocs);
        GenericRow lastRow = null;
        while (iterator.hasNext()) {
            GenericRow row = iterator.next();
            for (String skipDimension : skipMaterializationForDimensions) {
                String rowValue = (String) row.getValue(skipDimension);
                assert (rowValue.equals("ALL"));
            }
            lastRow = row;
        }

        // the last aggregate row carries the fully-merged HLL estimate
        assertApproximation(HllUtil.convertStringToHll((String) lastRow.getValue(hllMetricName)).cardinality(),
                preciseCardinality, 0.1);
    } finally {
        if (builderConfig != null) {
            FileUtils.deleteDirectory(builderConfig.getOutDir());
        }
    }
}

From source file:ch.cyberduck.core.s3.S3UrlProvider.java

@Override
public DescriptiveUrlBag toUrl(final Path file) {
    final DescriptiveUrlBag list = new DescriptiveUrlBag();
    if (file.isFile()) {
        if (!session.getHost().isDefaultWebURL()) {
            list.addAll(new WebUrlProvider(session.getHost()).toUrl(file));
        }//  ww  w .j a va  2 s  . c o m
        // Publicly accessible URL of given object
        list.add(this.toUrl(file, session.getHost().getProtocol().getScheme()));
        list.add(this.toUrl(file, Scheme.http));
        if (!session.getHost().getCredentials().isAnonymousLogin()) {
            // X-Amz-Expires must be less than a week (in seconds); that is, the given X-Amz-Expires must be less
            // than 604800 seconds
            // In one hour
            list.add(this.sign(file, (int) TimeUnit.HOURS.toSeconds(1)));
            // Default signed URL expiring in 24 hours.
            list.add(this.sign(file, (int) TimeUnit.SECONDS
                    .toSeconds(PreferencesFactory.get().getInteger("s3.url.expire.seconds"))));
            // 1 Week
            list.add(this.sign(file, (int) TimeUnit.DAYS.toSeconds(7)));
            switch (session.getSignatureVersion()) {
            case AWS2:
                // 1 Month
                list.add(this.sign(file, (int) TimeUnit.DAYS.toSeconds(30)));
                // 1 Year
                list.add(this.sign(file, (int) TimeUnit.DAYS.toSeconds(365)));
                break;
            case AWS4HMACSHA256:
                break;
            }
        }
        // Torrent
        list.add(new DescriptiveUrl(
                URI.create(new S3TorrentUrlProvider(session.getHost())
                        .create(containerService.getContainer(file).getName(), containerService.getKey(file))),
                DescriptiveUrl.Type.torrent, MessageFormat.format(LocaleFactory.localizedString("{0} URL"),
                        LocaleFactory.localizedString("Torrent"))));
    }
    list.addAll(new DefaultUrlProvider(session.getHost()).toUrl(file));
    if (!file.isRoot()) {
        list.add(
                new DescriptiveUrl(
                        URI.create(String.format("s3://%s%s", containerService.getContainer(file).getName(),
                                containerService.isContainer(file) ? "/"
                                        : String.format("/%s",
                                                URIEncoder.encode(containerService.getKey(file))))),
                        DescriptiveUrl.Type.provider,
                        MessageFormat.format(LocaleFactory.localizedString("{0} URL"), "S3")));
    }
    return list;
}