Example usage for org.joda.time DateTimeZone forID

List of usage examples for org.joda.time DateTimeZone forID

Introduction

On this page you can find usage examples for org.joda.time DateTimeZone.forID.

Prototype

@FromString
public static DateTimeZone forID(String id) 

Document

Gets a time zone instance for the specified time zone id.
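
As a quick orientation before the longer examples below, here is a minimal, self-contained sketch of the method (the zone ids are illustrative; an unrecognized id makes forID throw IllegalArgumentException):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class ForIDExample {
    public static void main(String[] args) {
        // Look up a named time zone from the tz database.
        DateTimeZone berlin = DateTimeZone.forID("Europe/Berlin");
        System.out.println(new DateTime(berlin)); // current time in Berlin

        // Fixed-offset ids such as "+05:30" are also accepted.
        DateTimeZone ist = DateTimeZone.forID("+05:30");
        System.out.println(ist.getID()); // +05:30

        // A null id returns the default zone rather than throwing.
        System.out.println(DateTimeZone.forID(null));
    }
}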

Usage

From source file:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapBase.java

License:Apache License

/**
 * Configures the mapper with the map plan and the
 * reporter thread.
 */
@SuppressWarnings("unchecked")
@Override
public void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);

    Configuration job = context.getConfiguration();
    SpillableMemoryManager.configure(ConfigurationUtil.toProperties(job));
    PigMapReduce.sJobContext = context;
    PigMapReduce.sJobConfInternal.set(context.getConfiguration());
    PigMapReduce.sJobConf = context.getConfiguration();
    inIllustrator = inIllustrator(context);

    PigContext
            .setPackageImportList((ArrayList<String>) ObjectSerializer.deserialize(job.get("udf.import.list")));
    pigContext = (PigContext) ObjectSerializer.deserialize(job.get("pig.pigContext"));

    // This attempts to fetch all of the generated code from the distributed cache, and resolve it
    SchemaTupleBackend.initialize(job, pigContext);

    if (pigContext.getLog4jProperties() != null)
        PropertyConfigurator.configure(pigContext.getLog4jProperties());

    if (mp == null)
        mp = (PhysicalPlan) ObjectSerializer.deserialize(job.get("pig.mapPlan"));
    stores = PlanHelper.getPhysicalOperators(mp, POStore.class);

    // To be removed
    if (mp.isEmpty())
        log.debug("Map Plan empty!");
    else {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        mp.explain(baos);
        log.debug(baos.toString());
    }
    keyType = ((byte[]) ObjectSerializer.deserialize(job.get("pig.map.keytype")))[0];
    // till here

    pigReporter = new ProgressableReporter();
    // Get the UDF specific context
    MapRedUtil.setupUDFContext(job);

    if (!(mp.isEmpty())) {

        PigSplit split = (PigSplit) context.getInputSplit();
        List<OperatorKey> targetOpKeys = split.getTargetOps();

        ArrayList<PhysicalOperator> targetOpsAsList = new ArrayList<PhysicalOperator>();
        for (OperatorKey targetKey : targetOpKeys) {
            targetOpsAsList.add(mp.getOperator(targetKey));
        }
        roots = targetOpsAsList.toArray(new PhysicalOperator[1]);
        leaf = mp.getLeaves().get(0);
    }

    PigStatusReporter pigStatusReporter = PigStatusReporter.getInstance();
    pigStatusReporter.setContext(new MRTaskContext(context));

    log.info(
            "Aliases being processed per job phase (AliasName[line,offset]): " + job.get("pig.alias.location"));

    String dtzStr = PigMapReduce.sJobConfInternal.get().get("pig.datetime.default.tz");
    if (dtzStr != null && dtzStr.length() > 0) {
        // ensure that the internal timezone is uniformly in UTC offset style
        DateTimeZone.setDefault(DateTimeZone.forOffsetMillis(DateTimeZone.forID(dtzStr).getOffset(null)));
    }
}
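
The last line above is worth unpacking: instead of installing the named zone directly, it pins the zone's current UTC offset as a fixed-offset default, so all tasks resolve date-times identically even if a DST transition occurs mid-job. A minimal sketch of the same normalization (the zone id is an illustrative assumption, not taken from the Pig configuration):

import org.joda.time.DateTimeZone;

public class FixedOffsetDefault {
    public static void main(String[] args) {
        // Resolve the configured id, then freeze its offset as of "now".
        DateTimeZone named = DateTimeZone.forID("America/Los_Angeles");
        // getOffset(null) means the offset at the current instant.
        int offsetMillis = named.getOffset(null);
        DateTimeZone.setDefault(DateTimeZone.forOffsetMillis(offsetMillis));
        System.out.println(DateTimeZone.getDefault()); // e.g. -08:00 or -07:00
    }
}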

From source file:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapBaseRollupSample.java

License:Apache License

/**
 * Configures the mapper with the map plan and the
 * reporter thread.
 */
@SuppressWarnings("unchecked")
@Override
public void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration job = context.getConfiguration();
    SpillableMemoryManager.configure(ConfigurationUtil.toProperties(job));
    PigMapReduce.sJobContext = context;
    PigMapReduce.sJobConfInternal.set(context.getConfiguration());
    PigMapReduce.sJobConf = context.getConfiguration();
    inIllustrator = inIllustrator(context);

    PigContext
            .setPackageImportList((ArrayList<String>) ObjectSerializer.deserialize(job.get("udf.import.list")));
    pigContext = (PigContext) ObjectSerializer.deserialize(job.get("pig.pigContext"));

    // This attempts to fetch all of the generated code from the distributed cache, and resolve it
    SchemaTupleBackend.initialize(job, pigContext);

    if (pigContext.getLog4jProperties() != null)
        PropertyConfigurator.configure(pigContext.getLog4jProperties());

    if (mp == null)
        mp = (PhysicalPlan) ObjectSerializer.deserialize(job.get("pig.mapPlan"));
    stores = PlanHelper.getPhysicalOperators(mp, POStore.class);

    // To be removed
    if (mp.isEmpty())
        log.debug("Map Plan empty!");
    else {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        mp.explain(baos);
        log.debug(baos.toString());
    }
    keyType = ((byte[]) ObjectSerializer.deserialize(job.get("pig.map.keytype")))[0];
    // till here

    pigReporter = new ProgressableReporter();
    // Get the UDF specific context
    MapRedUtil.setupUDFContext(job);

    if (!(mp.isEmpty())) {

        PigSplit split = (PigSplit) context.getInputSplit();
        List<OperatorKey> targetOpKeys = split.getTargetOps();

        ArrayList<PhysicalOperator> targetOpsAsList = new ArrayList<PhysicalOperator>();
        for (OperatorKey targetKey : targetOpKeys) {
            targetOpsAsList.add(mp.getOperator(targetKey));
        }
        roots = targetOpsAsList.toArray(new PhysicalOperator[1]);
        leaf = mp.getLeaves().get(0);
    }

    PigStatusReporter.setContext(context);

    log.info(
            "Aliases being processed per job phase (AliasName[line,offset]): " + job.get("pig.alias.location"));

    String dtzStr = PigMapReduce.sJobConfInternal.get().get("pig.datetime.default.tz");
    if (dtzStr != null && dtzStr.length() > 0) {
        // ensure that the internal timezone is uniformly in UTC offset style
        DateTimeZone.setDefault(DateTimeZone.forOffsetMillis(DateTimeZone.forID(dtzStr).getOffset(null)));
    }
}

From source file:org.apache.pig.builtin.ToDate3ARGS.java

License:Apache License

public DateTime exec(Tuple input) throws IOException {
    if (input == null || input.size() < 1 || input.get(0) == null) {
        return null;
    }
    DateTimeFormatter dtf = DateTimeFormat.forPattern(DataType.toString(input.get(1)));
    DateTimeZone dtz = DateTimeZone
            .forOffsetMillis(DateTimeZone.forID(DataType.toString(input.get(2))).getOffset(null));
    return dtf.withZone(dtz).parseDateTime(DataType.toString(input.get(0)));
}

From source file:org.apache.pig.impl.util.Utils.java

License:Apache License

public static void setDefaultTimeZone(Configuration conf) {
    String dtzStr = conf.get(PigConfiguration.PIG_DATETIME_DEFAULT_TIMEZONE);
    if (dtzStr != null && dtzStr.length() > 0) {
        // don't use offsets because it breaks across DST/Standard Time
        DateTimeZone.setDefault(DateTimeZone.forID(dtzStr));
    }
}
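
Note the contrast with the mapper setup earlier on this page: here the named zone is installed as-is, because (per the comment) a pinned offset drifts from wall-clock time across DST transitions. A small sketch of the difference (zone id and dates are illustrative assumptions):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class NamedVsFixedZone {
    public static void main(String[] args) {
        DateTimeZone named = DateTimeZone.forID("Europe/Berlin");
        // Pin Berlin's winter offset (+01:00) as a fixed zone.
        DateTimeZone fixed = DateTimeZone.forOffsetHours(1);

        DateTime winter = new DateTime(2023, 1, 15, 12, 0, named);
        DateTime summer = new DateTime(2023, 7, 15, 12, 0, named);

        // The named zone follows DST; the fixed offset does not.
        System.out.println(named.getOffset(winter)); // 3600000  (+01:00)
        System.out.println(named.getOffset(summer)); // 7200000  (+02:00)
        System.out.println(fixed.getOffset(summer)); // 3600000  (still +01:00)
    }
}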

From source file:org.apache.solr.update.processor.ParseDateFieldUpdateProcessorFactory.java

License:Apache License

@Override
public void init(NamedList args) {

    Locale locale = Locale.ROOT;

    String localeParam = (String) args.remove(LOCALE_PARAM);
    if (null != localeParam) {
        locale = LocaleUtils.toLocale(localeParam);
    }

    Object defaultTimeZoneParam = args.remove(DEFAULT_TIME_ZONE_PARAM);
    DateTimeZone defaultTimeZone = DateTimeZone.UTC;
    if (null != defaultTimeZoneParam) {
        defaultTimeZone = DateTimeZone.forID(defaultTimeZoneParam.toString());
    }

    Collection<String> formatsParam = args.removeConfigArgs(FORMATS_PARAM);
    if (null != formatsParam) {
        for (String value : formatsParam) {
            formats.put(value, DateTimeFormat.forPattern(value).withZone(defaultTimeZone).withLocale(locale));
        }
    }
    super.init(args);
}

From source file:org.apache.tamaya.jodatime.DateTimeZoneConverter.java

License:Apache License

@Override
public DateTimeZone convert(String value) {
    String trimmed = requireNonNull(value).trim();

    DateTimeZone result = null;

    try {
        if (isSingleIntegerValue(trimmed)) {
            int offset = Integer.parseInt(trimmed);
            result = DateTimeZone.forOffsetHours(offset);
        } else { // Let us assume a string id
            result = DateTimeZone.forID(trimmed);
        }

    } catch (RuntimeException e) {
        result = null; // Give the next converter a chance. Read the JavaDoc of convert.
    }

    return result;
}
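
A hypothetical driver for the converter above, assuming the single-argument convert(String) shown in this excerpt (the input strings are illustrative):

import org.apache.tamaya.jodatime.DateTimeZoneConverter;

public class ConvertDemo {
    public static void main(String[] args) {
        DateTimeZoneConverter converter = new DateTimeZoneConverter();
        System.out.println(converter.convert("2"));             // +02:00 (offset in hours)
        System.out.println(converter.convert("Europe/Berlin")); // Europe/Berlin (zone id)
        System.out.println(converter.convert("bogus/zone"));    // null: forID threw, swallowed above
    }
}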

From source file:org.basepom.mojo.propertyhelper.DateField.java

License:Apache License

@Override
public Optional<String> getPropertyValue() {
    final DateTimeZone timeZone = dateDefinition.getTimezone().isPresent()
            ? DateTimeZone.forID(dateDefinition.getTimezone().get())
            : DateTimeZone.getDefault();

    final Optional<String> format = dateDefinition.getFormat();
    final DateTimeFormatter formatter;
    if (format.isPresent()) {
        formatter = DateTimeFormat.forPattern(format.get());
    } else {
        formatter = null;
    }

    DateTime date = getDateTime(valueProvider.getValue(), formatter, timeZone);

    if (date == null && dateDefinition.getValue().isPresent()) {
        date = new DateTime(dateDefinition.getValue().get(), timeZone);
    }

    if (date == null) {
        date = new DateTime(timeZone);
    }

    String result;
    if (formatter != null) {
        result = formatter.print(date);
        valueProvider.setValue(result);
    } else {
        result = date.toString();
        valueProvider.setValue(Long.toString(date.getMillis()));
    }

    if (dateDefinition.getTransformers().isPresent()) {
        result = TransformerRegistry.applyTransformers(dateDefinition.getTransformers().get(), result);
    }
    return Optional.fromNullable(result);
}

From source file:org.bigloupe.web.scheduler.InitializeScheduler.java

License:Apache License

public InitializeScheduler(List<File> jobDirs, File logDir, File tempDir, MailerService mailerService,
        BigLoupeConfiguration configuration, boolean enableDevMode) throws IOException {
    this.jobDirs = Utils.nonNull(jobDirs);
    this.logsDir = Utils.nonNull(logDir);
    this.tempDir = Utils.nonNull(tempDir);
    this._mailerService = mailerService;

    if (!this.logsDir.exists())
        this.logsDir.mkdirs();

    if (!this.tempDir.exists())
        this.tempDir.mkdirs();

    for (File jobDir : jobDirs) {
        if (!jobDir.exists()) {
            logger.warn("Job directory " + jobDir + " does not exist. Creating.");
            jobDir.mkdirs();
        }
    }

    if (jobDirs.size() < 1)
        throw new IllegalArgumentException("No job directory given.");

    Props defaultProps = PropsUtils.loadPropsInDirs(jobDirs, ".properties", ".schema");

    _baseClassLoader = getBaseClassloader();

    String defaultTimezoneID = defaultProps.getString(DEFAULT_TIMEZONE_ID, null);
    if (defaultTimezoneID != null) {
        DateTimeZone.setDefault(DateTimeZone.forID(defaultTimezoneID));
        TimeZone.setDefault(TimeZone.getTimeZone(defaultTimezoneID));
    }

    NamedPermitManager permitManager = getNamedPermitManager(defaultProps);
    JobWrappingFactory factory = new JobWrappingFactory(permitManager, new ReadWriteLockManager(),
            logsDir.getAbsolutePath(), "java",
            new ImmutableMap.Builder<String, Class<? extends Job>>().put("java", JavaJob.class)
                    .put("command", ProcessJob.class).put("javaprocess", JavaProcessJob.class)
                    .put("map-reduce", MapReduceJob.class).put("pig", PigProcessJob.class)
                    .put("propertyPusher", NoopJob.class).put("python", PythonJob.class)
                    .put("ruby", RubyJob.class).put("script", ScriptJob.class)
                    .put("sqoop", SqoopProcessJob.class).put("indexfile", IndexFileProcessJob.class)
                    .put("noop", NoopJob.class).build());

    _hdfsUrl = defaultProps.getString("hdfs.instance.url", null);
    _jobManager = new JobManager(factory, logsDir.getAbsolutePath(), defaultProps, jobDirs, _baseClassLoader,
            configuration);

    String failureEmail = defaultProps.getString("job.failure.email", null);
    String successEmail = defaultProps.getString("job.success.email", null);
    int schedulerThreads = defaultProps.getInt("scheduler.threads", 50);
    _instanceName = defaultProps.getString(INSTANCE_NAME, "");

    final File initialJobDir = jobDirs.get(0);
    File schedule = getScheduleFile(defaultProps, initialJobDir);
    File backup = getBackupFile(defaultProps, initialJobDir);
    File executionsStorageDir = new File(defaultProps.getString("azkaban.executions.storage.dir",
            initialJobDir.getAbsolutePath() + "/executions"));
    if (!executionsStorageDir.exists())
        executionsStorageDir.mkdirs();
    long lastExecutionId = getLastExecutionId(executionsStorageDir);
    logger.info(String.format("Using path[%s] for storing executions.", executionsStorageDir));
    logger.info(String.format("Last known execution id was [%s]", lastExecutionId));

    final ExecutableFlowSerializer flowSerializer = new DefaultExecutableFlowSerializer();
    final ExecutableFlowDeserializer flowDeserializer = new DefaultExecutableFlowDeserializer(_jobManager,
            factory);

    FlowExecutionSerializer flowExecutionSerializer = new FlowExecutionSerializer(flowSerializer);
    FlowExecutionDeserializer flowExecutionDeserializer = new FlowExecutionDeserializer(flowDeserializer);

    _allFlows = new CachingFlowManager(
            new RefreshableFlowManager(_jobManager, flowExecutionSerializer, flowExecutionDeserializer,
                    executionsStorageDir, lastExecutionId),
            defaultProps.getInt("azkaban.flow.cache.size", 1000));
    _jobManager.setFlowManager(_allFlows);

    _jobExecutorManager = new JobExecutorManager(_allFlows, _jobManager, failureEmail, successEmail,
            schedulerThreads);

    this._schedulerManager = new ScheduleManager(_jobExecutorManager,
            new LocalFileScheduleLoader(schedule, backup));

    /* set predefined log url prefix */
    String server_url = defaultProps.getString("server.url", null);
    if (server_url != null) {
        if (server_url.endsWith("/"))
            _jobExecutorManager.setRuntimeProperty(BigLoupeConfiguration.DEFAULT_LOG_URL_PREFIX,
                    server_url + "logs?file=");
        else
            _jobExecutorManager.setRuntimeProperty(BigLoupeConfiguration.DEFAULT_LOG_URL_PREFIX,
                    server_url + "/logs?file=");
    }

}
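
One detail worth noting from the constructor: it sets the default zone for both Joda-Time and the JDK from the same id, since code that mixes DateTime with java.util.Date or Calendar would otherwise see two different defaults. A minimal sketch of the pairing (the "UTC" id is illustrative):

import java.util.TimeZone;
import org.joda.time.DateTimeZone;

public class SyncDefaults {
    public static void main(String[] args) {
        String id = "UTC";
        // Keep Joda-Time and the JDK in agreement about the default zone.
        DateTimeZone.setDefault(DateTimeZone.forID(id));
        TimeZone.setDefault(TimeZone.getTimeZone(id));
        System.out.println(DateTimeZone.getDefault() + " / " + TimeZone.getDefault().getID());
    }
}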

From source file:org.codehaus.httpcache4j.Conditionals.java

License:Open Source License

/**
 * You should use the server's time here. Otherwise you might get unexpected results.
 * The typical use case is: <br/>
 * <pre>
 *   HTTPResponse response = ....
 *   HTTPRequest request = createRequest();
 *   request = request.conditionals(new Conditionals().ifModifiedSince(response.getLastModified()));
 * </pre>
 *
 * @param time the time to check.
 * @return the conditionals with the If-Modified-Since date set.
 */
public Conditionals ifModifiedSince(DateTime time) {
    Validate.isTrue(match.isEmpty(),
            String.format(ERROR_MESSAGE, HeaderConstants.IF_MODIFIED_SINCE, HeaderConstants.IF_MATCH));
    Validate.isTrue(unModifiedSince == null, String.format(ERROR_MESSAGE, HeaderConstants.IF_MODIFIED_SINCE,
            HeaderConstants.IF_UNMODIFIED_SINCE));
    time = time.toDateTime(DateTimeZone.forID("UTC"));
    time = time.withMillisOfSecond(0);
    return new Conditionals(empty(), noneMatch, time, null);
}

From source file:org.codehaus.httpcache4j.Conditionals.java

License:Open Source License

/**
 * You should use the server's time here. Otherwise you might get unexpected results.
 * The typical use case is: <br/>
 * <pre>
 *   HTTPResponse response = ....
 *   HTTPRequest request = createRequest();
 *   request = request.conditionals(new Conditionals().ifUnModifiedSince(response.getLastModified()));
 * </pre>
 *
 * @param time the time to check.
 * @return the conditionals with the If-Unmodified-Since date set.
 */
public Conditionals ifUnModifiedSince(DateTime time) {
    Validate.isTrue(noneMatch.isEmpty(),
            String.format(ERROR_MESSAGE, HeaderConstants.IF_UNMODIFIED_SINCE, HeaderConstants.IF_NON_MATCH));
    Validate.isTrue(modifiedSince == null, String.format(ERROR_MESSAGE, HeaderConstants.IF_UNMODIFIED_SINCE,
            HeaderConstants.IF_MODIFIED_SINCE));
    time = time.toDateTime(DateTimeZone.forID("UTC"));
    time = time.withMillisOfSecond(0);
    return new Conditionals(match, empty(), null, time);
}
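
Both conditionals normalize the supplied DateTime the same way: convert it to UTC via forID("UTC") and zero the millisecond field, since HTTP date headers carry only second resolution. A standalone sketch of that normalization (the input date and zone are illustrative):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class HttpDateNormalize {
    public static void main(String[] args) {
        DateTime local = new DateTime(2023, 3, 10, 14, 30, 45, 123, DateTimeZone.forID("Europe/Oslo"));
        DateTime normalized = local.toDateTime(DateTimeZone.forID("UTC")).withMillisOfSecond(0);
        System.out.println(normalized); // 2023-03-10T13:30:45.000Z
    }
}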