Example usage for org.joda.time DateTimeZone setDefault

Introduction

This page collects usage examples for org.joda.time.DateTimeZone.setDefault from open-source projects.

Prototype

public static void setDefault(DateTimeZone zone) throws SecurityException 
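
The call throws SecurityException if a security manager denies the relevant Joda-Time permission. Note that it changes only the Joda-Time default zone; the java.util.TimeZone default is not affected.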

Document

Sets the default time zone.
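
Before the project examples, a minimal, self-contained sketch (not drawn from the sources below) of the usual save-and-restore pattern around this call; the class name is illustrative:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class SetDefaultSketch {
    public static void main(String[] args) {
        // Remember the current JVM-wide default so it can be restored later
        final DateTimeZone original = DateTimeZone.getDefault();
        try {
            DateTimeZone.setDefault(DateTimeZone.UTC);
            // Zone-free constructors now resolve against UTC
            System.out.println(new DateTime()); // offset prints as Z (UTC)
        } finally {
            // Undo the global change so other code is unaffected
            DateTimeZone.setDefault(original);
        }
    }
}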

Usage

From source file:de.rwth.idsg.xsharing.router.core.CoreBootstrapper.java

License:Open Source License

@PostConstruct
public void init() {

    // Just to be extra sure
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    DateTimeZone.setDefault(DateTimeZone.UTC);

    log.info("System going for boot. Server status: {}", this.serverStatus);

    // setServerStatus(ServerStatus.BOOTING);
    // statusChangeEvents.fire(ServerStatus.BOOTING);

    // use default granularity
    triggerDataLoad(0);
}
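
A note on the pattern above: TimeZone.setDefault and DateTimeZone.setDefault are independent defaults. Joda-Time consults the JDK default only when first resolving its own, so setting both up front (as this example does) keeps java.util and Joda-Time classes in agreement.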

From source file:divconq.hub.Clock.java

License:Open Source License

/**
 * Called from Hub.start, this method configures the clock features.
 *
 * @param or operation result used for logging during initialization
 * @param config xml holding the configuration
 */
public void init(OperationResult or, XElement config) {
    this.config = config;

    if (config != null) {
        String timeZone = config.getAttribute("TimeZone", "UTC");

        if (StringUtil.isNotEmpty(timeZone))
            DateTimeZone.setDefault(DateTimeZone.forID(timeZone));

        String coreDate = config.getAttribute("CoreDate");

        if (StringUtil.isNotEmpty(coreDate)) {
            this.setAppClock(TimeUtil.parseDateTime(coreDate));
        }

        this.speed = StringUtil.parseInt(config.getAttribute("Speed"), 0);

        String obclass = config.getAttribute("TimerClass");

        if (StringUtil.isNotEmpty(obclass)) {
            try {
                Class<?> obc = this.getClass().getClassLoader().loadClass(obclass);
                this.obfus = (ISettingsObfuscator) obc.newInstance();
            } catch (Exception x) {
                Logger.error("Unable to load custom Settings Obfuscator class: " + obclass, "Code", "207");
            }
        }
    } else {
        DateTimeZone.setDefault(DateTimeZone.UTC);
    }

    if (this.obfus == null)
        this.obfus = new BasicSettingsObfuscator();

    this.obfus.init(config);
}

From source file:druid.examples.flights.FlightsConverter.java

License:Open Source License

public static void main(String[] args) throws IOException {
    DateTimeZone.setDefault(DateTimeZone.UTC);
    ObjectMapper mapper = new DefaultObjectMapper();

    File flightsDataDirectory = new File(args[0]);
    File flightsOutputDirectory = new File(args[1]);
    flightsOutputDirectory.mkdirs();

    for (File flightsDataFile : flightsDataDirectory.listFiles()) {
        System.out.printf("Processing file[%s]%n", flightsDataFile);

        CSVParser parser = new CSVParser();
        BufferedReader in = null;
        BufferedWriter out = null;

        try {
            in = new BufferedReader(new FileReader(flightsDataFile));
            out = new BufferedWriter(new FileWriter(
                    new File(flightsOutputDirectory, flightsDataFile.getName().replace("csv", "json"))));

            int count = 0;
            long time = System.currentTimeMillis();
            parser.setFieldNames(in.readLine());
            String line = null;
            while ((line = in.readLine()) != null) {
                if (++count % 100000 == 0) {
                    System.out.printf("File[%s], processed %,d lines in %,d millis.%n",
                            flightsDataFile.getName(), count, System.currentTimeMillis() - time);
                    time = System.currentTimeMillis();
                }
                Map<String, Object> event = parser.parse(line);

                int year = Integer.parseInt(event.get("Year").toString());
                int month = Integer.parseInt(event.get("Month").toString());
                int dayOfMonth = Integer.parseInt(event.get("DayofMonth").toString());
                int departureTime = Integer.parseInt(event.get("CRSDepTime").toString());
                int hourOfDay = departureTime / 100;
                final int minuteOfHour = departureTime % 100;

                DateTime timestamp = new DateTime(String.format("%4d-%02d-%02d", year, month, dayOfMonth))
                        .plus(new Period(hourOfDay, minuteOfHour, 0, 0));

                event.put("timestamp", timestamp);

                for (String metricDimension : METRIC_DIMENSIONS) {
                    String value = event.get(metricDimension).toString();

                    if (value.equals("NA")) {
                        event.put(metricDimension, 0);
                    } else {
                        event.put(metricDimension, Integer.parseInt(value));
                    }
                }

                out.write(mapper.writeValueAsString(event));
                out.write("\n");
            }
        } finally {
            Closeables.closeQuietly(in);
            Closeables.closeQuietly(out);
        }
    }
}

From source file:gobblin.source.DatePartitionedAvroFileSource.java

License:Apache License

/**
 * Gobblin calls the {@link Source#getWorkunits(SourceState)} method after creating a {@link Source} object with a
 * blank constructor, so any custom initialization of the object needs to be done here.
 */
protected void init(SourceState state) {
    DateTimeZone.setDefault(DateTimeZone.forID(
            state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE)));

    initDatePartition(state);

    try {
        initFileSystemHelper(state);
    } catch (FileBasedHelperException e) {
        Throwables.propagate(e);
    }

    AvroFsHelper fsHelper = (AvroFsHelper) this.fsHelper;
    this.fs = fsHelper.getFileSystem();

    this.sourceState = state;

    this.lowWaterMark = getLowWaterMark(state.getPreviousWorkUnitStates(),
            state.getProp(DATE_PARTITIONED_SOURCE_MIN_WATERMARK_VALUE,
                    this.partitionPatternFormatter.print(DEFAULT_DATE_PARTITIONED_SOURCE_MIN_WATERMARK_VALUE)));

    this.maxFilesPerJob = state.getPropAsInt(DATE_PARTITIONED_SOURCE_MAX_FILES_PER_JOB,
            DEFAULT_DATE_PARTITIONED_SOURCE_MAX_FILES_PER_JOB);

    this.maxWorkUnitsPerJob = state.getPropAsInt(DATE_PARTITIONED_SOURCE_MAX_WORKUNITS_PER_JOB,
            DEFAULT_DATE_PARTITIONED_SOURCE_MAX_WORKUNITS_PER_JOB);

    this.tableType = TableType.valueOf(state.getProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY).toUpperCase());

    this.fileCount = 0;

    this.sourceDir = new Path(state.getProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY));

    this.sourcePartitionPrefix = state.getProp(DATE_PARTITIONED_SOURCE_PARTITION_PREFIX, StringUtils.EMPTY);

    this.sourcePartitionSuffix = state.getProp(DATE_PARTITIONED_SOURCE_PARTITION_SUFFIX, StringUtils.EMPTY);

}

From source file:gobblin.source.DatePartitionedDailyAvroSource.java

License:Open Source License

/**
 * Gobblin calls the {@link Source#getWorkunits(SourceState)} method after creating a {@link Source} object with a
 * blank constructor, so any custom initialization of the object needs to be done here.
 */
private void init(SourceState state) {
    DateTimeZone.setDefault(DateTimeZone.forID(
            state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE)));

    try {
        initFileSystemHelper(state);
    } catch (FileBasedHelperException e) {
        Throwables.propagate(e);
    }

    AvroFsHelper fsHelper = (AvroFsHelper) this.fsHelper;
    this.fs = fsHelper.getFileSystem();

    this.sourceState = state;

    this.lowWaterMark = getLowWaterMark(state.getPreviousWorkUnitStates(),
            state.getProp(DATE_PARTITIONED_SOURCE_MIN_WATERMARK_VALUE,
                    DAILY_FOLDER_FORMATTER.print(DEFAULT_DATE_PARTITIONED_SOURCE_MIN_WATERMARK_VALUE)));

    this.maxFilesPerJob = state.getPropAsInt(DATE_PARTITIONED_SOURCE_MAX_FILES_PER_JOB,
            DEFAULT_DATE_PARTITIONED_SOURCE_MAX_FILES_PER_JOB);

    this.maxWorkUnitsPerJob = state.getPropAsInt(DATE_PARTITIONED_SOURCE_MAX_WORKUNITS_PER_JOB,
            DEFAULT_DATE_PARTITIONED_SOURCE_MAX_WORKUNITS_PER_JOB);

    this.tableType = TableType.valueOf(state.getProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY).toUpperCase());

    this.fileCount = 0;

    this.sourceDir = new Path(state.getProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY));
}

From source file:gobblin.source.DatePartitionedNestedRetriever.java

License:Apache License

@Override
public void init(SourceState state) {
    DateTimeZone.setDefault(DateTimeZone.forID(
            state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE)));

    initDatePartition(state);
    this.sourcePartitionPrefix = state
            .getProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_PREFIX, StringUtils.EMPTY);

    this.sourcePartitionSuffix = state
            .getProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_SUFFIX, StringUtils.EMPTY);
    this.sourceDir = new Path(state.getProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY));
    this.helper = new HadoopFsHelper(state);
}

From source file:graphene.util.time.JodaTimeUtil.java

License:Apache License

public static void test_localDate_shift_joda_tz() {
    System.out.println("Test LocalDate with shifted JodaTime timezone");
    final DateTimeZone originalTZ = DateTimeZone.getDefault();
    final DateTimeZone losAngelesTZ = DateTimeZone.forID("America/Los_Angeles");

    DateTimeZone.setDefault(losAngelesTZ);
    final LocalDate ld0 = new LocalDate(losAngelesTZ);
    System.out.println(
            "ld0 LocalDate(losAngelesTZ) = " + ld0 + " when default TZ = " + DateTimeZone.getDefault());

    DateTimeZone.setDefault(losAngelesTZ);
    final LocalDate ld1 = new LocalDate();
    System.out.println(
            "ld1 LocalDate()             = " + ld1 + " when default TZ = " + DateTimeZone.getDefault());

    final java.sql.Date d0 = toSQLDate(ld0);
    System.out
            .println("d0 toSQLDate(ld0)           = " + d0 + " when default TZ = " + DateTimeZone.getDefault());
    final java.sql.Date d1 = toSQLDate(ld1);
    System.out
            .println("d1 toSQLDate(ld1)           = " + d1 + " when default TZ = " + DateTimeZone.getDefault());
    DateTimeZone.setDefault(originalTZ);
    System.out
            .println("d1 toSQLDate(ld1)           = " + d1 + " when default TZ = " + DateTimeZone.getDefault());

    DateTimeZone.setDefault(originalTZ);
    final LocalDate ld2 = toLocalDate(d1);
    System.out.println(
            "ld2 toLocalDate(d1)         = " + ld2 + " when default TZ = " + DateTimeZone.getDefault());

    DateTimeZone.setDefault(originalTZ);
    if (!ld2.equals(ld1)) {
        throw new IllegalStateException();
    }
}
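
The test above captures the original default before shifting it and restores it between steps; since setDefault is JVM-wide, failing to restore it would leak the America/Los_Angeles default into unrelated code.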

From source file:io.cassandrareaper.ReaperApplication.java

License:Apache License

@Override
public void run(ReaperApplicationConfiguration config, Environment environment) throws Exception {
    // Using UTC times everywhere as default. Affects only Joda-Time.
    DateTimeZone.setDefault(DateTimeZone.UTC);

    checkConfiguration(config);
    context.config = config;

    addSignalHandlers(); // SIGHUP, etc.

    context.metricRegistry = environment.metrics();
    CollectorRegistry.defaultRegistry.register(new DropwizardExports(environment.metrics()));

    environment.admin().addServlet("prometheusMetrics", new MetricsServlet(CollectorRegistry.defaultRegistry))
            .addMapping("/prometheusMetrics");

    int repairThreads = config.getRepairRunThreadCount();
    LOG.info("initializing runner thread pool with {} threads", repairThreads);

    context.repairManager = RepairManager.create(context,
            environment.lifecycle().scheduledExecutorService("RepairRunner").threads(repairThreads).build(),
            config.getHangingRepairTimeoutMins(), TimeUnit.MINUTES,
            config.getRepairManagerSchedulingIntervalSeconds(), TimeUnit.SECONDS);

    if (context.storage == null) {
        LOG.info("initializing storage of type: {}", config.getStorageType());
        context.storage = initializeStorage(config, environment);
    } else {
        LOG.info("storage already given in context, not initializing a new one");
    }

    if (context.jmxConnectionFactory == null) {
        LOG.info("no JMX connection factory given in context, creating default");
        context.jmxConnectionFactory = new JmxConnectionFactory(context.metricRegistry);

        // read jmx host/port mapping from config and provide to jmx con.factory
        Map<String, Integer> jmxPorts = config.getJmxPorts();
        if (jmxPorts != null) {
            LOG.debug("using JMX ports mapping: {}", jmxPorts);
            context.jmxConnectionFactory.setJmxPorts(jmxPorts);
        }

        if (config.useAddressTranslator()) {
            context.jmxConnectionFactory.setAddressTranslator(new EC2MultiRegionAddressTranslator());
        }
    }

    JmxCredentials jmxAuth = config.getJmxAuth();
    if (jmxAuth != null) {
        LOG.debug("using specified JMX credentials for authentication");
        context.jmxConnectionFactory.setJmxAuth(jmxAuth);
    }

    Map<String, JmxCredentials> jmxCredentials = config.getJmxCredentials();
    if (jmxCredentials != null) {
        LOG.debug("using specified JMX credentials per cluster for authentication");
        context.jmxConnectionFactory.setJmxCredentials(jmxCredentials);
    }

    // Enable cross-origin requests for using external GUI applications.
    if (config.isEnableCrossOrigin() || System.getProperty("enableCrossOrigin") != null) {
        final FilterRegistration.Dynamic cors = environment.servlets().addFilter("crossOriginRequests",
                CrossOriginFilter.class);
        cors.setInitParameter("allowedOrigins", "*");
        cors.setInitParameter("allowedHeaders", "X-Requested-With,Content-Type,Accept,Origin");
        cors.setInitParameter("allowedMethods", "OPTIONS,GET,PUT,POST,DELETE,HEAD");
        cors.addMappingForUrlPatterns(EnumSet.allOf(DispatcherType.class), true, "/*");
    }

    LOG.info("creating and registering health checks");
    // Notice that health checks are registered under the admin application on /healthcheck
    final ReaperHealthCheck healthCheck = new ReaperHealthCheck(context);
    environment.healthChecks().register("reaper", healthCheck);

    LOG.info("creating resources and registering endpoints");
    final PingResource pingResource = new PingResource(healthCheck);
    environment.jersey().register(pingResource);

    final ClusterResource addClusterResource = new ClusterResource(context,
            environment.lifecycle().executorService("SnapshotManager").minThreads(6).maxThreads(6).build());

    environment.jersey().register(addClusterResource);
    final RepairRunResource addRepairRunResource = new RepairRunResource(context);
    environment.jersey().register(addRepairRunResource);
    final RepairScheduleResource addRepairScheduleResource = new RepairScheduleResource(context);
    environment.jersey().register(addRepairScheduleResource);
    final SnapshotResource snapshotResource = new SnapshotResource(context, environment);
    environment.jersey().register(snapshotResource);

    final NodeStatsResource nodeStatsResource = new NodeStatsResource(context);
    environment.jersey().register(nodeStatsResource);

    if (config.isAccessControlEnabled()) {
        SessionHandler sessionHandler = new SessionHandler();
        sessionHandler.setMaxInactiveInterval((int) config.getAccessControl().getSessionTimeout().getSeconds());
        environment.getApplicationContext().setSessionHandler(sessionHandler);
        environment.servlets().setSessionHandler(sessionHandler);
        environment.jersey().register(new ShiroExceptionMapper());
        environment.jersey().register(new LoginResource(context));
    }

    Thread.sleep(1000);
    SchedulingManager.start(context);

    if (config.hasAutoSchedulingEnabled()) {
        LOG.debug("using specified configuration for auto scheduling: {}", config.getAutoScheduling());
        AutoSchedulingManager.start(context);
    }

    initializeJmxSeedsForAllClusters();
    LOG.info("resuming pending repair runs");

    Preconditions.checkState(
            context.storage instanceof IDistributedStorage
                    || DatacenterAvailability.EACH != context.config.getDatacenterAvailability(),
            "Cassandra backend storage is the only one allowing EACH datacenter availability modes.");

    ScheduledExecutorService scheduler = new InstrumentedScheduledExecutorService(
            environment.lifecycle().scheduledExecutorService("ReaperApplication-scheduler").threads(1).build(),
            context.metricRegistry);

    if (context.storage instanceof IDistributedStorage) {
        // Allowing multiple Reaper instances to work concurrently requires
        // us to poll the database for running repairs regularly
        // only with Cassandra storage
        scheduleRepairManager(scheduler);
    } else {
        // Storage is not Cassandra, so assume a single Reaper instance
        context.repairManager.resumeRunningRepairRuns();
    }

    schedulePurge(scheduler);

    LOG.info("Initialization complete!");
    LOG.warn("Reaper is ready to get things done!");
}

From source file:nab.detectors.htmjava.HTMModel.java

License:Open Source License

/**
 * Launch htm.java NAB detector
 *
 * Usage:
 *      As a standalone application (for debug purpose only):
 *
 *          java -jar htm.java-nab.jar "{\"modelParams\":{....}}" < nab_data.csv > anomalies.out
 *
 *      For complete list of command line options use:
 *
 *          java -jar htm.java-nab.jar --help
 *
 *      As a NAB detector (see 'htmjava_detector.py'):
 *
 *          python run.py --detect --score --normalize -d htmjava
 *
 *      Logging options, see "log4j.properties":
 *
 *          - "LOGLEVEL": Controls log output (default: "OFF")
 *          - "LOGGER": Either "CONSOLE" or "FILE" (default: "CONSOLE")
 *          - "LOGFILE": Log file destination (default: "htmjava.log")
 *
 *      For example:
 *
 *          java -DLOGLEVEL=TRACE -DLOGGER=FILE -jar htm.java-nab.jar "{\"modelParams\":{....}}" < nab_data.csv > anomalies.out
 *
 */
@SuppressWarnings("resource")
public static void main(String[] args) {
    try {
        LOGGER.trace("main({})", Arrays.asList(args));
        // Parse command line args
        OptionParser parser = new OptionParser();
        parser.nonOptions("OPF parameters object (JSON)");
        parser.acceptsAll(Arrays.asList("p", "params"),
                "OPF parameters file (JSON).\n(default: first non-option argument)").withOptionalArg()
                .ofType(File.class);
        parser.acceptsAll(Arrays.asList("i", "input"), "Input data file (csv).\n(default: stdin)")
                .withOptionalArg().ofType(File.class);
        parser.acceptsAll(Arrays.asList("o", "output"), "Output results file (csv).\n(default: stdout)")
                .withOptionalArg().ofType(File.class);
        parser.acceptsAll(Arrays.asList("s", "skip"), "Header lines to skip").withOptionalArg()
                .ofType(Integer.class).defaultsTo(0);
        parser.acceptsAll(Arrays.asList("h", "?", "help"), "Help");
        OptionSet options = parser.parse(args);
        if (args.length == 0 || options.has("h")) {
            parser.printHelpOn(System.out);
            return;
        }

        // Get in/out files
        final PrintStream output;
        final InputStream input;
        if (options.has("i")) {
            input = new FileInputStream((File) options.valueOf("i"));
        } else {
            input = System.in;
        }
        if (options.has("o")) {
            output = new PrintStream((File) options.valueOf("o"));
        } else {
            output = System.out;
        }

        // Parse OPF Model Parameters
        JsonNode params;
        ObjectMapper mapper = new ObjectMapper();
        if (options.has("p")) {
            params = mapper.readTree((File) options.valueOf("p"));
        } else if (options.nonOptionArguments().isEmpty()) {
            try {
                input.close();
            } catch (Exception ignore) {
            }
            if (options.has("o")) {
                try {
                    output.flush();
                    output.close();
                } catch (Exception ignore) {
                }
            }
            throw new IllegalArgumentException("Expecting OPF parameters. See 'help' for more information");
        } else {
            params = mapper.readTree((String) options.nonOptionArguments().get(0));
        }

        // Number of header lines to skip
        int skip = (int) options.valueOf("s");

        // Force timezone to UTC
        DateTimeZone.setDefault(DateTimeZone.UTC);

        // Create NAB Network Model
        HTMModel model = new HTMModel(params);
        Network network = model.getNetwork();
        network.observe().subscribe((inference) -> {
            double score = inference.getAnomalyScore();
            int record = inference.getRecordNum();
            LOGGER.trace("record = {}, score = {}", record, score);
            // Output raw anomaly score
            output.println(score);
        }, (error) -> {
            LOGGER.error("Error processing data", error);
        }, () -> {
            LOGGER.trace("Done processing data");
            if (LOGGER.isDebugEnabled()) {
                model.showDebugInfo();
            }
        });
        network.start();

        // Pipe data to network
        Publisher publisher = model.getPublisher();
        BufferedReader in = new BufferedReader(new InputStreamReader(input));
        String line;
        while ((line = in.readLine()) != null && line.trim().length() > 0) {
            // Skip header lines
            if (skip > 0) {
                skip--;
                continue;
            }
            publisher.onNext(line);
        }
        publisher.onComplete();
        in.close();
        LOGGER.trace("Done publishing data");
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:name.martingeisse.wicket.demo_app.InitializationContextListener.java

License:Open Source License

@Override
public void contextInitialized(ServletContextEvent event) {
    // TODO set default locale
    DateTimeZone.setDefault(DateTimeZone.UTC);
    Constants.timeZone = DateTimeZone.forID("Europe/Berlin");
    Constants.internalDateFormatter = DateTimeFormat.forPattern("YYYY-MM-dd").withZone(Constants.timeZone);
    Constants.internalDateTimeFormatter = DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss")
            .withZone(Constants.timeZone);
    Constants.loggingDateFormatter = DateTimeFormat.forPattern("dd.MM.YYYY").withZone(Constants.timeZone);
    Constants.loggingDateTimeFormatter = DateTimeFormat.forPattern("dd.MM.YYYY HH:mm:ss")
            .withZone(Constants.timeZone);
    Constants.uiDateFormatter = Constants.loggingDateFormatter;
    Constants.uiDateTimeFormatter = Constants.loggingDateTimeFormatter;
}