Example usage for org.joda.time DateTimeZone forID

List of usage examples for org.joda.time DateTimeZone forID

Introduction

On this page you can find example usage of org.joda.time DateTimeZone forID.

Prototype

@FromString
public static DateTimeZone forID(String id) 

Document

Gets a time zone instance for the specified time zone id.
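
A minimal, self-contained sketch of the call (zone IDs chosen for illustration):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class ForIdDemo {
    public static void main(String[] args) {
        // Look up a zone by its tz database ID.
        DateTimeZone berlin = DateTimeZone.forID("Europe/Berlin");
        System.out.println(DateTime.now(berlin));

        // forID(null) returns the default zone; an unrecognized ID
        // throws IllegalArgumentException rather than falling back.
        try {
            DateTimeZone.forID("Not/AZone");
        } catch (IllegalArgumentException e) {
            System.out.println("Unknown zone id");
        }
    }
}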

Usage

From source file:org.akaza.openclinica.service.rule.expression.ExpressionService.java

License:LGPL

public HashMap<String, String> getSSDate(String ssZoneId, String serverZoneId) {
    HashMap<String, String> map = new HashMap<String, String>();
    // Treat a null or empty zone id as "use the JVM default"; the original
    // also compared with ==, which tests String reference equality, not content.
    if (ssZoneId == null || ssZoneId.isEmpty())
        ssZoneId = TimeZone.getDefault().getID();

    DateTimeZone ssZone = DateTimeZone.forID(ssZoneId);
    DateMidnight dm = new DateMidnight(ssZone);
    DateTimeFormatter fmt = ISODateTimeFormat.date();
    map.put("ssDate", fmt.print(dm));

    map.put("serverZoneId", serverZoneId);
    DateTimeZone serverZone = DateTimeZone.forID(serverZoneId);
    DateMidnight serverDate = new DateMidnight(serverZone);
    map.put("serverDate", fmt.print(serverDate));
    return map;
}
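
DateMidnight is deprecated in current Joda-Time releases; a minimal sketch of the same date formatting with LocalDate instead (zone ID assumed for illustration):

import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;
import org.joda.time.format.ISODateTimeFormat;

public class SsDateSketch {
    public static void main(String[] args) {
        DateTimeZone ssZone = DateTimeZone.forID("America/New_York");
        // LocalDate.now(zone) replaces the deprecated new DateMidnight(zone).
        String ssDate = ISODateTimeFormat.date().print(LocalDate.now(ssZone));
        System.out.println(ssDate); // e.g. 2024-06-01
    }
}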

From source file:org.apache.abdera2.common.date.DateTimes.java

License:Apache License

/**
 * Converts the given DateTime to the given TimeZone
 */
public static DateTime toTimeZone(DateTime dt, String id) {
    return dt.toDateTime(DateTimeZone.forID(id));
}

From source file:org.apache.abdera2.common.date.DateTimes.java

License:Apache License

public static DateTime now(String tz) {
    return DateTime.now(DateTimeZone.forID(tz));
}
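
Both helpers are thin wrappers over Joda-Time; for illustration, a hedged sketch of the same two operations written directly (zone IDs assumed):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class AbderaStyleSketch {
    public static void main(String[] args) {
        // Equivalent of DateTimes.now("Asia/Tokyo"):
        DateTime tokyoNow = DateTime.now(DateTimeZone.forID("Asia/Tokyo"));
        // Equivalent of DateTimes.toTimeZone(tokyoNow, "UTC"):
        // same instant, different wall-clock representation.
        DateTime utcView = tokyoNow.toDateTime(DateTimeZone.forID("UTC"));
        System.out.println(tokyoNow.isEqual(utcView)); // true
    }
}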

From source file:org.apache.druid.java.util.common.DateTimes.java

License:Apache License

@SuppressForbidden(reason = "DateTimeZone#forID")
public static DateTimeZone inferTzFromString(String tzId) {
    try {
        return DateTimeZone.forID(tzId);
    } catch (IllegalArgumentException e) {
        // also support Java timezone strings
        return DateTimeZone.forTimeZone(TimeZone.getTimeZone(tzId));
    }
}
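
The fallback matters because DateTimeZone.forID rejects legacy three-letter IDs that java.util.TimeZone still maps, while TimeZone.getTimeZone silently returns GMT for wholly unknown IDs. A minimal sketch of the contrast (IDs chosen for illustration):

import java.util.TimeZone;
import org.joda.time.DateTimeZone;

public class TzFallbackSketch {
    public static void main(String[] args) {
        try {
            DateTimeZone.forID("PST"); // Joda rejects the legacy short ID
        } catch (IllegalArgumentException e) {
            System.out.println("Joda: unknown id PST");
        }
        // The java.util bridge resolves PST to a region-based zone...
        System.out.println(DateTimeZone.forTimeZone(TimeZone.getTimeZone("PST")));
        // ...but degrades to GMT, without error, for truly unknown IDs.
        System.out.println(DateTimeZone.forTimeZone(TimeZone.getTimeZone("Not/AZone")));
    }
}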

From source file:org.apache.gobblin.compaction.parser.CompactionPathParser.java

License:Apache License

private DateTime getTime(String timeString) {
    DateTimeZone timeZone = DateTimeZone.forID(MRCompactor.DEFAULT_COMPACTION_TIMEZONE);
    int splits = StringUtils.countMatches(timeString, "/");
    String timePattern = "";
    if (splits == 3) {
        timePattern = "YYYY/MM/dd/HH";
    } else if (splits == 2) {
        timePattern = "YYYY/MM/dd";
    }
    DateTimeFormatter timeFormatter = DateTimeFormat.forPattern(timePattern).withZone(timeZone);
    return timeFormatter.parseDateTime(timeString);
}
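
A minimal sketch of the same zone-pinned parse (zone ID and path fragment assumed; note that in Joda-Time the uppercase Y pattern letter means year of era, unlike java.time where it is week-based year):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class PathTimeSketch {
    public static void main(String[] args) {
        DateTimeZone zone = DateTimeZone.forID("America/Los_Angeles");
        DateTimeFormatter fmt = DateTimeFormat.forPattern("YYYY/MM/dd/HH").withZone(zone);
        // The string carries no zone, so withZone decides which instant it names.
        DateTime t = fmt.parseDateTime("2023/06/01/05");
        System.out.println(t);
    }
}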

From source file:org.apache.gobblin.compaction.verify.CompactionTimeRangeVerifier.java

License:Apache License

public Result verify(FileSystemDataset dataset) {
    final DateTime earliest;
    final DateTime latest;
    try {//from  ww  w . j a  v a2s .com
        CompactionPathParser.CompactionParserResult result = new CompactionPathParser(state).parse(dataset);
        DateTime folderTime = result.getTime();
        DateTimeZone timeZone = DateTimeZone.forID(
                this.state.getProp(MRCompactor.COMPACTION_TIMEZONE, MRCompactor.DEFAULT_COMPACTION_TIMEZONE));
        DateTime compactionStartTime = new DateTime(
                this.state.getPropAsLong(CompactionSource.COMPACTION_INIT_TIME), timeZone);
        PeriodFormatter formatter = new PeriodFormatterBuilder().appendMonths().appendSuffix("m").appendDays()
                .appendSuffix("d").appendHours().appendSuffix("h").toFormatter();

        // Dataset name is like 'Identity/MemberAccount' or 'PageViewEvent'
        String datasetName = result.getDatasetName();

        // get earliest time
        String maxTimeAgoStrList = this.state.getProp(
                TimeBasedSubDirDatasetsFinder.COMPACTION_TIMEBASED_MAX_TIME_AGO,
                TimeBasedSubDirDatasetsFinder.DEFAULT_COMPACTION_TIMEBASED_MAX_TIME_AGO);
        String maxTimeAgoStr = getMachedLookbackTime(datasetName, maxTimeAgoStrList,
                TimeBasedSubDirDatasetsFinder.DEFAULT_COMPACTION_TIMEBASED_MAX_TIME_AGO);
        Period maxTimeAgo = formatter.parsePeriod(maxTimeAgoStr);
        earliest = compactionStartTime.minus(maxTimeAgo);

        // get latest time
        String minTimeAgoStrList = this.state.getProp(
                TimeBasedSubDirDatasetsFinder.COMPACTION_TIMEBASED_MIN_TIME_AGO,
                TimeBasedSubDirDatasetsFinder.DEFAULT_COMPACTION_TIMEBASED_MIN_TIME_AGO);
        String minTimeAgoStr = getMachedLookbackTime(datasetName, minTimeAgoStrList,
                TimeBasedSubDirDatasetsFinder.DEFAULT_COMPACTION_TIMEBASED_MIN_TIME_AGO);
        Period minTimeAgo = formatter.parsePeriod(minTimeAgoStr);
        latest = compactionStartTime.minus(minTimeAgo);

        if (earliest.isBefore(folderTime) && latest.isAfter(folderTime)) {
            log.debug("{} falls in the user defined time range", dataset.datasetRoot());
            return new Result(true, "");
        }
    } catch (Exception e) {
        log.error("{} cannot be verified because of {}", dataset.datasetRoot(),
                ExceptionUtils.getFullStackTrace(e));
        return new Result(false, e.toString());
    }
    return new Result(false, dataset.datasetRoot() + " is not in between " + earliest + " and " + latest);
}
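
The lookback strings are decoded by a suffix-based PeriodFormatter; a self-contained sketch of just that piece (input string assumed):

import org.joda.time.Period;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;

public class LookbackSketch {
    public static void main(String[] args) {
        PeriodFormatter formatter = new PeriodFormatterBuilder()
                .appendMonths().appendSuffix("m")
                .appendDays().appendSuffix("d")
                .appendHours().appendSuffix("h")
                .toFormatter();
        // "1m2d3h" -> one month, two days, three hours.
        Period lookback = formatter.parsePeriod("1m2d3h");
        System.out.println(lookback); // P1M2DT3H
    }
}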

From source file:org.apache.gobblin.data.management.copy.TimeAwareRecursiveCopyableDataset.java

License:Apache License

public TimeAwareRecursiveCopyableDataset(FileSystem fs, Path rootPath, Properties properties, Path glob) {
    super(fs, rootPath, properties, glob);
    this.lookbackTime = properties.getProperty(LOOKBACK_TIME_KEY);
    PeriodFormatter periodFormatter = new PeriodFormatterBuilder().appendDays().appendSuffix("d").appendHours()
            .appendSuffix("h").toFormatter();
    this.lookbackPeriod = periodFormatter.parsePeriod(lookbackTime);
    this.datePattern = properties.getProperty(DATE_PATTERN_KEY);
    this.isPatternHourly = isDatePatternHourly(datePattern);
    this.currentTime = properties.containsKey(DATE_PATTERN_TIMEZONE_KEY)
            // Pass the configured value to forID; passing the key constant
            // itself would throw IllegalArgumentException.
            ? LocalDateTime.now(DateTimeZone.forID(properties.getProperty(DATE_PATTERN_TIMEZONE_KEY)))
            : LocalDateTime.now(DateTimeZone.forID(DEFAULT_DATE_PATTERN_TIMEZONE));

    // Daily directories cannot use an hourly lookback pattern, but hourly
    // directories can accept a lookback pattern expressed in days.
    if (!this.isPatternHourly) {
        Assert.assertTrue(isLookbackTimeStringDaily(this.lookbackTime),
                "Expected day format for lookback time; found hourly format");
    }
}
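
LocalDateTime.now(zone) yields a zone-dependent wall-clock value; a minimal sketch (zone IDs assumed):

import org.joda.time.DateTimeZone;
import org.joda.time.LocalDateTime;

public class WallClockSketch {
    public static void main(String[] args) {
        // The same instant reads differently per zone once the zone is dropped.
        System.out.println(LocalDateTime.now(DateTimeZone.forID("UTC")));
        System.out.println(LocalDateTime.now(DateTimeZone.forID("Asia/Kolkata")));
    }
}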

From source file:org.apache.gobblin.source.DatePartitionedNestedRetriever.java

License:Apache License

@Override
public void init(SourceState state) {
    DateTimeZone.setDefault(DateTimeZone.forID(
            state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE)));

    initDatePartition(state);
    this.sourcePartitionPrefix = state
            .getProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_PREFIX, StringUtils.EMPTY);

    this.sourcePartitionSuffix = state
            .getProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_SUFFIX, StringUtils.EMPTY);
    this.sourceDir = new Path(state.getProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY));
    this.leadTimeDuration = PartitionAwareFileRetrieverUtils.getLeadTimeDurationFromConfig(state);
    this.helper = new HadoopFsHelper(state);
    this.schemaInSourceDir = state.getPropAsBoolean(ConfigurationKeys.SCHEMA_IN_SOURCE_DIR,
            ConfigurationKeys.DEFAULT_SCHEMA_IN_SOURCE_DIR);
    this.schemaFile = this.schemaInSourceDir
            ? state.getProp(ConfigurationKeys.SCHEMA_FILENAME, ConfigurationKeys.DEFAULT_SCHEMA_FILENAME)
            : "";
}
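
DateTimeZone.setDefault is process-global: every later Joda construction without an explicit zone inherits it. A hedged sketch of that side effect (zone ID assumed):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class DefaultZoneSketch {
    public static void main(String[] args) {
        DateTimeZone.setDefault(DateTimeZone.forID("America/Chicago"));
        // No explicit zone: picks up the default set above, not the JVM zone.
        System.out.println(new DateTime().getZone());
    }
}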

From source file:org.apache.phoenix.cache.JodaTimezoneCache.java

License:Apache License

private static LoadingCache<ByteBuffer, DateTimeZone> createTimezoneCache() {
    return CacheBuilder.newBuilder().expireAfterAccess(CACHE_EXPRIRE_TIME_MINUTES, TimeUnit.MINUTES)
            .build(new CacheLoader<ByteBuffer, DateTimeZone>() {

                @Override
                public DateTimeZone load(ByteBuffer timezone) throws Exception {
                    return DateTimeZone.forID(Bytes.toString(timezone.array()));
                }
            });
}
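
A hedged sketch of such a cache in isolation, with the ByteBuffer key and the HBase Bytes helper simplified to a String key for self-containment (the expiry value is assumed):

import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.joda.time.DateTimeZone;

public class TzCacheSketch {
    public static void main(String[] args) throws Exception {
        LoadingCache<String, DateTimeZone> cache = CacheBuilder.newBuilder()
                .expireAfterAccess(30, TimeUnit.MINUTES)
                .build(new CacheLoader<String, DateTimeZone>() {
                    @Override
                    public DateTimeZone load(String id) {
                        return DateTimeZone.forID(id); // parsed once per key
                    }
                });
        System.out.println(cache.get("Europe/Paris"));
        System.out.println(cache.get("Europe/Paris")); // second hit is cached
    }
}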

From source file:org.apache.pig.backend.hadoop.executionengine.fetch.FetchLauncher.java

License:Apache License

private void init(PhysicalPlan pp, POStore poStore) throws IOException {
    poStore.setStoreImpl(new FetchPOStoreImpl(pigContext));
    poStore.setUp();

    TaskAttemptID taskAttemptID = HadoopShims.getNewTaskAttemptID();
    HadoopShims.setTaskAttemptId(conf, taskAttemptID);

    if (!PlanHelper.getPhysicalOperators(pp, POStream.class).isEmpty()) {
        MapRedUtil.setupStreamingDirsConfSingle(poStore, pigContext, conf);
    }

    String currentTime = Long.toString(System.currentTimeMillis());
    conf.set("pig.script.submitted.timestamp", currentTime);
    conf.set("pig.job.submitted.timestamp", currentTime);

    PhysicalOperator.setReporter(new FetchProgressableReporter());
    SchemaTupleBackend.initialize(conf, pigContext);

    UDFContext udfContext = UDFContext.getUDFContext();
    udfContext.addJobConf(conf);
    udfContext.setClientSystemProps(pigContext.getProperties());
    udfContext.serialize(conf);

    PigMapReduce.sJobConfInternal.set(conf);
    String dtzStr = conf.get("pig.datetime.default.tz");
    if (dtzStr != null && dtzStr.length() > 0) {
        // ensure that the internal timezone is uniformly in UTC offset style
        DateTimeZone.setDefault(DateTimeZone.forOffsetMillis(DateTimeZone.forID(dtzStr).getOffset(null)));
    }

    boolean aggregateWarning = "true".equalsIgnoreCase(conf.get("aggregate.warning"));
    PigStatusReporter pigStatusReporter = PigStatusReporter.getInstance();
    pigStatusReporter.setContext(new FetchTaskContext(new FetchContext()));
    PigHadoopLogger pigHadoopLogger = PigHadoopLogger.getInstance();
    pigHadoopLogger.setReporter(pigStatusReporter);
    pigHadoopLogger.setAggregate(aggregateWarning);
    PhysicalOperator.setPigLogger(pigHadoopLogger);
}
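
The timezone block above freezes the default zone to the named zone's offset at the current instant (getOffset(null) evaluates "now"), discarding future DST transitions; a minimal sketch of that conversion (zone ID assumed):

import org.joda.time.DateTimeZone;

public class FixedOffsetSketch {
    public static void main(String[] args) {
        DateTimeZone named = DateTimeZone.forID("America/New_York");
        // The result is a fixed-offset zone such as -05:00 or -04:00,
        // depending on whether DST is in effect right now.
        DateTimeZone fixed = DateTimeZone.forOffsetMillis(named.getOffset(null));
        System.out.println(named + " -> " + fixed);
    }
}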