Example usage for org.joda.time DateTimeZone setDefault

List of usage examples for org.joda.time DateTimeZone setDefault

Introduction

On this page you can find example usage for org.joda.time DateTimeZone setDefault.

Prototype

public static void setDefault(DateTimeZone zone) throws SecurityException 

Document

Sets the default time zone.
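
A minimal standalone sketch of what the call does (the class name SetDefaultZoneSketch is just for illustration): it changes the JVM-wide Joda-Time default zone, which new DateTime instances pick up when no zone is passed explicitly. Joda-Time and the JDK keep separate defaults, so many of the examples below also call java.util.TimeZone.setDefault.

import java.util.TimeZone;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class SetDefaultZoneSketch {
    public static void main(String[] args) {
        // Set the Joda-Time default zone for the whole JVM.
        DateTimeZone.setDefault(DateTimeZone.UTC);

        // The JDK default is independent; applications that want consistent
        // behaviour across both APIs typically set it as well.
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));

        // New instances created without an explicit zone now use UTC.
        System.out.println(new DateTime());            // current time in UTC
        System.out.println(DateTimeZone.getDefault()); // UTC
    }
}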

Usage

From source file:$.ServiceApplication.java

License:Apache License

public static void main(String[] args) throws Exception {
        DateTimeZone.setDefault(DateTimeZone.UTC);

        ServiceApplication serviceApplication = new ServiceApplication(new GuiceBundleProvider());

        try {
            serviceApplication.run(args);
        } catch (Throwable ex) {
            ex.printStackTrace();

            System.exit(1);
        }
    }

From source file:azkaban.app.AzkabanApplication.java

License:Apache License

public AzkabanApplication(final List<File> jobDirs, final File logDir, final File tempDir,
        final boolean enableDevMode) throws IOException {
    this._jobDirs = Utils.nonNull(jobDirs);
    this._logsDir = Utils.nonNull(logDir);
    this._tempDir = Utils.nonNull(tempDir);

    if (!this._logsDir.exists()) {
        this._logsDir.mkdirs();
    }

    if (!this._tempDir.exists()) {
        this._tempDir.mkdirs();
    }

    for (File jobDir : _jobDirs) {
        if (!jobDir.exists()) {
            logger.warn("Job directory " + jobDir + " does not exist. Creating.");
            jobDir.mkdirs();
        }
    }

    if (jobDirs.size() < 1) {
        throw new IllegalArgumentException("No job directory given.");
    }

    Props defaultProps = PropsUtils.loadPropsInDirs(_jobDirs, ".properties", ".schema");

    _baseClassLoader = getBaseClassloader();

    String defaultTimezoneID = defaultProps.getString(DEFAULT_TIMEZONE_ID, null);
    if (defaultTimezoneID != null) {
        DateTimeZone.setDefault(DateTimeZone.forID(defaultTimezoneID));
        TimeZone.setDefault(TimeZone.getTimeZone(defaultTimezoneID));
    }

    NamedPermitManager permitManager = getNamedPermitManager(defaultProps);
    JobWrappingFactory factory = new JobWrappingFactory(permitManager, new ReadWriteLockManager(),
            _logsDir.getAbsolutePath(), "java",
            new ImmutableMap.Builder<String, Class<? extends Job>>().put("java", JavaJob.class)
                    .put("command", ProcessJob.class).put("javaprocess", JavaProcessJob.class)
                    .put("pig", PigProcessJob.class).put("propertyPusher", NoopJob.class)
                    .put("python", PythonJob.class).put("ruby", RubyJob.class).put("script", ScriptJob.class)
                    .build());

    _hdfsUrl = defaultProps.getString("hdfs.instance.url", null);
    _jobManager = new JobManager(factory, _logsDir.getAbsolutePath(), defaultProps, _jobDirs, _baseClassLoader);

    _mailer = new Mailman(defaultProps.getString("mail.host", "localhost"),
            defaultProps.getString("mail.user", ""), defaultProps.getString("mail.password", ""));

    String failureEmail = defaultProps.getString("job.failure.email", null);
    String successEmail = defaultProps.getString("job.success.email", null);
    int schedulerThreads = defaultProps.getInt("scheduler.threads", 20);
    _instanceName = defaultProps.getString(INSTANCE_NAME, "");

    final File initialJobDir = _jobDirs.get(0);
    File schedule = getScheduleFile(defaultProps, initialJobDir);
    File backup = getBackupFile(defaultProps, initialJobDir);
    File executionsStorageDir = new File(defaultProps.getString("azkaban.executions.storage.dir",
            initialJobDir.getAbsolutePath() + "/executions"));
    if (!executionsStorageDir.exists()) {
        executionsStorageDir.mkdirs();
    }
    long lastExecutionId = getLastExecutionId(executionsStorageDir);
    logger.info(String.format("Using path[%s] for storing executions.", executionsStorageDir));
    logger.info(String.format("Last known execution id was [%s]", lastExecutionId));

    final ExecutableFlowSerializer flowSerializer = new DefaultExecutableFlowSerializer();
    final ExecutableFlowDeserializer flowDeserializer = new DefaultExecutableFlowDeserializer(_jobManager,
            factory);

    FlowExecutionSerializer flowExecutionSerializer = new FlowExecutionSerializer(flowSerializer);
    FlowExecutionDeserializer flowExecutionDeserializer = new FlowExecutionDeserializer(flowDeserializer);

    _monitor = MonitorImpl.getMonitor();

    _allFlows = new CachingFlowManager(
            new RefreshableFlowManager(_jobManager, flowExecutionSerializer, flowExecutionDeserializer,
                    executionsStorageDir, lastExecutionId),
            defaultProps.getInt("azkaban.flow.cache.size", 1000));
    _jobManager.setFlowManager(_allFlows);

    _jobExecutorManager = new JobExecutorManager(_allFlows, _jobManager, _mailer, failureEmail, successEmail,
            schedulerThreads);

    this._schedulerManager = new ScheduleManager(_jobExecutorManager,
            new LocalFileScheduleLoader(schedule, backup));

    /* set predefined log url prefix */
    String server_url = defaultProps.getString("server.url", null);
    if (server_url != null) {
        if (server_url.endsWith("/")) {
            _jobExecutorManager.setRuntimeProperty(AppCommon.DEFAULT_LOG_URL_PREFIX, server_url + "logs?file=");
        } else {
            _jobExecutorManager.setRuntimeProperty(AppCommon.DEFAULT_LOG_URL_PREFIX,
                    server_url + "/logs?file=");
        }
    }

    this._velocityEngine = configureVelocityEngine(enableDevMode);

    configureMBeanServer();
}

From source file:azkaban.execapp.AzkabanExecutorServer.java

License:Apache License

/**
 * Azkaban using Jetty
 *
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws Exception {
    logger.error("Starting Jetty Azkaban Executor...");
    Props azkabanSettings = AzkabanServer.loadProps(args);

    if (azkabanSettings == null) {
        logger.error("Azkaban Properties not loaded.");
        logger.error("Exiting Azkaban Executor Server...");
        return;
    }

    // Setup time zone
    if (azkabanSettings.containsKey(DEFAULT_TIMEZONE_ID)) {
        String timezone = azkabanSettings.getString(DEFAULT_TIMEZONE_ID);
        System.setProperty("user.timezone", timezone);
        TimeZone.setDefault(TimeZone.getTimeZone(timezone));
        DateTimeZone.setDefault(DateTimeZone.forID(timezone));

        logger.info("Setting timezone to " + timezone);
    }

    app = new AzkabanExecutorServer(azkabanSettings);

    Runtime.getRuntime().addShutdownHook(new Thread() {

        @Override
        public void run() {
            logger.info("Shutting down http server...");
            try {
                app.stopServer();
            } catch (Exception e) {
                logger.error("Error while shutting down http server.", e);
            }
            logger.info("kk thx bye.");
        }
    });
}

From source file:azkaban.webapp.AzkabanWebServer.java

License:Apache License

/**
 * Constructor
 */
public AzkabanWebServer(Server server, Props props) throws Exception {
    this.props = props;
    this.server = server;
    velocityEngine = configureVelocityEngine(props.getBoolean(VELOCITY_DEV_MODE_PARAM, false));
    sessionCache = new SessionCache(props);
    userManager = loadUserManager(props);

    alerters = loadAlerters(props);

    executorManager = loadExecutorManager(props);
    projectManager = loadProjectManager(props);

    triggerManager = loadTriggerManager(props);
    loadBuiltinCheckersAndActions();

    // load all trigger agents here
    scheduleManager = loadScheduleManager(triggerManager, props);

    String triggerPluginDir = props.getString("trigger.plugin.dir", "plugins/triggers");

    loadPluginCheckersAndActions(triggerPluginDir);

    baseClassLoader = this.getClassLoader();

    tempDir = new File(props.getString("azkaban.temp.dir", "temp"));

    // Setup time zone
    if (props.containsKey(DEFAULT_TIMEZONE_ID)) {
        String timezone = props.getString(DEFAULT_TIMEZONE_ID);
        System.setProperty("user.timezone", timezone);
        TimeZone.setDefault(TimeZone.getTimeZone(timezone));
        DateTimeZone.setDefault(DateTimeZone.forID(timezone));
        logger.info("Setting timezone to " + timezone);
    }

    configureMBeanServer();
}

From source file:com.alliander.osgp.acceptancetests.ScopedGivWenZenForSlim.java

License:Open Source License

private static InstantiationStrategy autowireStepDefinitionClassesWithSpring() {
    rootContext = new AnnotationConfigApplicationContext();

    // Force local timezone to UTC (like platform)
    DateTimeZone.setDefault(DateTimeZone.UTC);
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));

    // Set loglevel to INFO (instead of DEBUG)
    final Logger root = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
    root.setLevel(Level.INFO);

    rootContext.register(TestApplicationContext.class);
    try {
        rootContext.refresh();
    } catch (final Exception e) {
        // just for debugging...
        throw e;
    }
    return new SpringInstantiationStrategy(rootContext);
}

From source file:com.cloudera.gertrude.deploy.AvroSupport.java

License:Open Source License

public AvroSupport(boolean skipValidation, ConditionFactorySupport conditionFactorySupport,
        ExperimentFlagSupport experimentFlagSupport) {
    this.skipValidation = skipValidation;
    this.conditionFactorySupport = conditionFactorySupport;
    this.experimentFlagSupport = experimentFlagSupport;
    DateTimeZone.setDefault(DateTimeZone.forID(timeZoneId));
}

From source file:com.daemon.Master.java

License:Open Source License

public static void main(String[] args) {
    // Set the time zone for the daemon to UTC
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    DateTimeZone.setDefault(DateTimeZone.forTimeZone(TimeZone.getTimeZone("UTC")));

    System.out.println(Localization.now() + " Master started.");

    Master m = null;
    try {
        // Start the master
        m = new Master();
        m.run();
    } catch (Exception ex) {
        if (m != null) {
            m.getLogger().logStackTrace(ex);

            System.err.println(
                    Localization.now() + " An error occured. Consult Master.log for further information.");
        } else {
            System.err.println(Localization.now() + " An error occurred.");

            // Print stack trace to error stream, because there is no log file, yet.
            ex.printStackTrace();
        }
    }
}

From source file:com.daemon.Minion.java

License:Open Source License

public void run() {
    // Set the time zone for the minion to UTC
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    DateTimeZone.setDefault(DateTimeZone.forTimeZone(TimeZone.getTimeZone("UTC")));

    System.out.println(prependInfo("Started."));

    // Initialize error log
    _logger = LogManager.getLogger(_logFilename);

    // Minion logic

    // We do not want to work with the search term lists as given, but 
    // want to store a lot of meta data for each search term, so we work
    // with the meta data lists instead. So we need to create and initialize
    // the new lists.
    List<SearchTermMetaData> shortTermMetaData = null;
    List<SearchTermMetaData> longTermMetaData = null;

    // Initialize map holding all tweets
    Map<SearchTerm, List<Status>> allTweetsMap = new HashMap<SearchTerm, List<Status>>();
    for (SearchTerm term : _searchTerms) {
        allTweetsMap.put(term, new LinkedList<Status>());
    }

    try {
        // Get the twitter object for this profile
        _twitter = _twitterProfile.getTwitterObject();

        // Convert the search terms to meta data search terms
        shortTermMetaData = convertSearchTerms(_shortTerms);
        longTermMetaData = convertSearchTerms(_longTerms);

        // We expect to fetch all tweets with one search for each short search term, so there
                // is no need for an enclosing while loop.
        MapUtil.fillupMap(allTweetsMap, fetchTweetsForSearchTerms(shortTermMetaData));

        // We expect the long search terms to run more than once. For every loop iteration
        // each search term in the long list gets one search, except for the newer search terms,
        // which get more search requests per loop iteration. See Minion.fillUpList(...).
        while (countFilteredSearchTerms(longTermMetaData) < longTermMetaData.size()
                && _twitterProfile.getUsedRateLimit() < _props.maxRateLimit) {
            MapUtil.fillupMap(allTweetsMap, fetchTweetsForSearchTerms(longTermMetaData));
        }
    } catch (TwitterException te) {
        // If there is something wrong with twitter, we are unable to do anything about it
        _logger.logStackTrace(te, _twitterProfile.getScreenName());

        System.err.println(prependInfo("Error during communicating with Twitter. Consult " + _logFilename
                + " for further information."));
    } catch (Exception cnfe) {
        _logger.logStackTrace(cnfe, _twitterProfile.getScreenName());

        System.err.println(prependInfo(
                "Cannot load the JDBC driver. Consult " + _logFilename + " for further information."));
    } finally {
        int countTweetsTotal = 0;

        // Used to count new tweets for the same search term (which can be split across
        // many search term meta data objects)
        Map<SearchTerm, Integer> searchTermMap = new HashMap<SearchTerm, Integer>();

        for (SearchTerm term : _searchTerms) {
            searchTermMap.put(term, 0);
        }

        // At the end of the session update each search term's interval length
        // and the count of new fetched tweets

        // Short terms
        if (shortTermMetaData != null) {
            for (SearchTermMetaData metaData : shortTermMetaData) {
                updateIntervalLength(metaData);
                metaData.getSearchTerm().setLastFetchedTweetCount(metaData.getTweetCount());
                countTweetsTotal += metaData.getTweetCount();

                searchTermMap.put(metaData.getSearchTerm(),
                        searchTermMap.get(metaData.getSearchTerm()) + metaData.getTweetCount());
            }
        }

        // Long terms
        if (longTermMetaData != null) {
            for (SearchTermMetaData metaData : longTermMetaData) {
                updateIntervalLength(metaData);
                metaData.getSearchTerm().setLastFetchedTweetCount(metaData.getTweetCount());
                countTweetsTotal += metaData.getTweetCount();

                searchTermMap.put(metaData.getSearchTerm(),
                        searchTermMap.get(metaData.getSearchTerm()) + metaData.getTweetCount());
            }
        }

        // Output new tweets for search terms
        for (SearchTerm term : _searchTerms) {
            System.out.println(prependInfo("Fetched  " + searchTermMap.get(term)
                    + "  new tweet(s) since last search for term '" + term.getTerm() + "'."));
        }

        // Output for the user
        System.out.println(prependInfo("Fetched  " + countTweetsTotal + "  tweets in total"));
        System.out.println("                     for " + _searchTerms.size() + " search term(s),");
        System.out.println("                     in " + _numRequests + " requests.");

        // Inform master about finishing the work
        MessageType messageType = MessageType.MINION_FINISHED;

        // If this is a limited minion, the type changes
        if (_limitPerSearchTerm != _props.unlimitedRequestsPerSearchTerm) {
            messageType = MessageType.LIMITEDMINION_FINISHED;
        }

        // Create packages for each search term
        List<Package> tweetPackages = new LinkedList<Package>();
        for (Map.Entry<SearchTerm, List<Status>> entry : allTweetsMap.entrySet()) {
            // The date of the package is now
            tweetPackages.add(new Package(entry.getValue(), new SearchTerm(entry.getKey()), new DateTime()));
        }

        _master.update(this, new MinionData(messageType, _searchTerms, tweetPackages));
        // Clear the tweets map afterwards
        allTweetsMap.clear();
        System.out.println(prependInfo("Exited."));
    }
}

From source file:com.esofthead.mycollab.configuration.SiteConfiguration.java

License:Open Source License

public static void loadConfiguration() {
    TimeZone.setDefault(DateTimeZone.UTC.toTimeZone());
    DateTimeZone.setDefault(DateTimeZone.UTC);
    int serverPort = Integer.parseInt(System.getProperty(ApplicationProperties.MYCOLLAB_PORT, "8080"));
    ApplicationProperties.loadProps();
    instance = new SiteConfiguration();

    instance.sentErrorEmail = ApplicationProperties.getString(ERROR_SENDTO, "support@mycollab.com");
    instance.siteName = ApplicationProperties.getString(SITE_NAME, "MyCollab");
    instance.serverAddress = ApplicationProperties.getString(SERVER_ADDRESS, "localhost");
    instance.serverPort = serverPort;

    String pullMethodValue = ApplicationProperties.getString(ApplicationProperties.PULL_METHOD, "push");
    instance.pullMethod = PullMethod.valueOf(pullMethodValue);

    instance.cdnUrl = String.format(ApplicationProperties.getString(CDN_URL), instance.serverAddress,
            instance.serverPort);

    instance.appUrl = String.format(ApplicationProperties.getString(APP_URL), instance.serverAddress,
            instance.serverPort);
    if (!instance.appUrl.endsWith("/")) {
        instance.appUrl += "/";
    }

    instance.endecryptPassword = ApplicationProperties.getString(BI_ENDECRYPT_PASSWORD, "esofthead321");

    // load email
    String host = ApplicationProperties.getString(MAIL_SMTPHOST);
    String user = ApplicationProperties.getString(MAIL_USERNAME);
    String password = ApplicationProperties.getString(MAIL_PASSWORD);
    Integer port = Integer.parseInt(ApplicationProperties.getString(MAIL_PORT, "25"));
    Boolean isTls = Boolean.parseBoolean(ApplicationProperties.getString(MAIL_IS_TLS, "false"));
    Boolean isSsl = Boolean.parseBoolean(ApplicationProperties.getString(MAIL_IS_SSL, "false"));
    String noreplyEmail = ApplicationProperties.getString(MAIL_NOTIFY, "");
    instance.emailConfiguration = new EmailConfiguration(host, user, password, port, isTls, isSsl,
            noreplyEmail);

    // load database configuration
    String driverClass = ApplicationProperties.getString(DB_DRIVER_CLASS);
    String dbUrl = ApplicationProperties.getString(DB_URL);
    String dbUser = ApplicationProperties.getString(DB_USERNAME);
    String dbPassword = ApplicationProperties.getString(DB_PASSWORD);
    instance.databaseConfiguration = new DatabaseConfiguration(driverClass, dbUrl, dbUser, dbPassword);

    instance.resourceDownloadUrl = ApplicationProperties.getString(RESOURCE_DOWNLOAD_URL);
    if (!"".equals(instance.resourceDownloadUrl)) {
        instance.resourceDownloadUrl = String.format(instance.resourceDownloadUrl, instance.serverAddress,
                instance.serverPort);
    } else {
        instance.resourceDownloadUrl = instance.appUrl + "file/";
    }

    instance.dropboxCallbackUrl = ApplicationProperties.getString(DROPBOX_AUTH_LINK);
    instance.ggDriveCallbackUrl = ApplicationProperties.getString(GOOGLE_DRIVE_LINK);

    instance.facebookUrl = ApplicationProperties.getString(FACEBOOK_URL, "https://www.facebook.com/mycollab2");
    instance.twitterUrl = ApplicationProperties.getString(TWITTER_URL, "https://twitter.com/mycollabdotcom");
    instance.googleUrl = ApplicationProperties.getString(GOOGLE_URL,
            "https://plus.google.com/u/0/b/112053350736358775306/+Mycollab/about/p/pub");
    instance.linkedinUrl = ApplicationProperties.getString(LINKEDIN_URL,
            "http://www.linkedin.com/company/mycollab");

    Configuration configuration = new Configuration(Configuration.VERSION_2_3_24);
    configuration.setDefaultEncoding("UTF-8");
    try {
        List<TemplateLoader> loaders = new ArrayList<>();
        File i18nFolder = new File(FileUtils.getUserFolder(), "i18n");
        File confFolder1 = new File(FileUtils.getUserFolder(), "conf");
        File confFolder2 = new File(FileUtils.getUserFolder(), "src/main/conf");
        if (i18nFolder.exists()) {
            loaders.add(new FileTemplateLoader(i18nFolder));
        }
        if (confFolder1.exists()) {
            loaders.add(new FileTemplateLoader(confFolder1));
        }
        if (confFolder2.exists()) {
            loaders.add(new FileTemplateLoader(confFolder2));
        }
        loaders.add(new ClassTemplateLoader(SiteConfiguration.class.getClassLoader(), ""));
        configuration.setTemplateLoader(
                new MultiTemplateLoader(loaders.toArray(new TemplateLoader[loaders.size()])));
        instance.freemarkerConfiguration = configuration;
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(-1);
    }
}

From source file:com.esofthead.mycollab.test.rule.EssentialInitRule.java

License:Open Source License

@Override
public Statement apply(Statement base, Description description) {
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    DateTimeZone.setDefault(DateTimeZone.UTC);

    URL resourceUrl = IntergrationServiceTest.class.getClassLoader().getResource("log4j-test.properties");
    if (resourceUrl != null) {
        PropertyConfigurator.configure(resourceUrl);
    }
    return base;
}