Example usage for org.joda.time DateTimeZone forID

List of usage examples for org.joda.time DateTimeZone forID

Introduction

This page lists example usages of org.joda.time.DateTimeZone.forID, collected from the open-source projects shown below.

Prototype

@FromString
public static DateTimeZone forID(String id) 

Document

Gets a time zone instance for the specified time zone id.
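
A minimal, self-contained sketch of the call, before the project examples below (the class name ForIdSketch is illustrative and not taken from any of the listed sources):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class ForIdSketch {
    public static void main(String[] args) {
        // Look up a zone by its tz database ID; an unrecognized ID throws IllegalArgumentException.
        DateTimeZone paris = DateTimeZone.forID("Europe/Paris");

        // Render the current instant in that zone.
        System.out.println(DateTime.now().withZone(paris));
    }
}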

Usage

From source file:com.enitalk.controllers.bots.TimeZoneTestr.java

public static void main(String[] args) {
    Set<String> ids = DateTimeZone.getAvailableIDs();
    TreeMultimap<Long, String> map = TreeMultimap.create();
    for (String id : ids) {
        DateTimeZone dz = DateTimeZone.forID(id);
        int offset = dz.getOffset(DateTime.now().withZone(DateTimeZone.UTC));

        map.put(TimeUnit.MILLISECONDS.toMinutes(offset), id);
    }

    ObjectMapper j = new ObjectMapper();
    ArrayNode a = j.createArrayNode();
    map.keySet().forEach((Long key) -> {
        a.addObject().set(key.toString(), j.convertValue(map.get(key), ArrayNode.class));
    });

    System.out.println(a);

    //        System.out.println(map);
}

From source file:com.enonic.cms.business.timezone.TimeZoneServiceImpl.java

License:Open Source License

@SuppressWarnings({ "unchecked" })
public void afterPropertiesSet() throws Exception {
    Set<String> ids = DateTimeZone.getAvailableIDs();
    timeZones.add(DateTimeZone.UTC);
    for (String id : ids) {
        if (!id.equals("UTC")) {
            timeZones.add(DateTimeZone.forID(id));
        }
    }
}

From source file:com.enonic.cms.core.timezone.TimeZoneServiceImpl.java

License:Open Source License

@SuppressWarnings({ "unchecked" })
public TimeZoneServiceImpl() {
    Set<String> ids = DateTimeZone.getAvailableIDs();
    this.timeZones.add(DateTimeZone.UTC);
    for (final String id : ids) {
        if (!id.equals("UTC")) {
            this.timeZones.add(DateTimeZone.forID(id));
        }
    }
}

From source file:com.ephesoft.dcma.nsi.NsiExporter.java

License:Open Source License

private void transformXmlAndExportFiles(String batchInstanceID, String exportFolder, String xmlTagStyle,
        boolean isZipSwitchOn, String baseDocsFolder, InputStream xslStream)
        throws TransformerFactoryConfigurationError, DCMAApplicationException {
    String batchXmlName = batchInstanceID + ICommonConstants.UNDERSCORE_BATCH_XML;
    String sourceXMLPath = baseDocsFolder + File.separator + batchInstanceID
            + ICommonConstants.UNDERSCORE_BATCH_XML;
    String targetXmlPath = exportFolder + File.separator + batchInstanceID + xmlTagStyle;
    LOGGER.debug("Transforming XML " + sourceXMLPath + " to " + targetXmlPath);
    try {
        TransformerFactory tFactory = TransformerFactory.newInstance();
        Transformer transformer = null;
        try {
            // NOTE, this needs to be fixed to use the InputStream xslStream object, not a hardcoded path to the file.
            transformer = tFactory.newTransformer(new StreamSource(xslStream));
        } finally {
            if (xslStream != null) {
                try {
                    xslStream.close();
                } catch (IOException e) {
                    LOGGER.info("Error closing input stream for :" + xslResource.toString());
                }
            }
        }
        if (transformer != null) {
            DateTimeZone zone = DateTimeZone.forID(NSIExportConstant.TIME_ZONE_ID);
            DateTime dateTime = new DateTime(zone);
            String date = Integer.toString(dateTime.getYear()) + NSIExportConstant.HYPEN
                    + Integer.toString(dateTime.getMonthOfYear()) + NSIExportConstant.HYPEN
                    + Integer.toString(dateTime.getDayOfMonth());
            String time = Integer.toString(dateTime.getHourOfDay()) + NSIExportConstant.COLON
                    + Integer.toString(dateTime.getMinuteOfHour()) + NSIExportConstant.COLON
                    + Integer.toString(dateTime.getSecondOfMinute());
            transformer.setParameter(NSIExportConstant.DATE, date);
            transformer.setParameter(NSIExportConstant.HOURS, time);
            transformer.setParameter(NSIExportConstant.BASE_DOC_FOLDER_PATH, baseDocsFolder + File.separator);
            transformer.setParameter(NSIExportConstant.EXPORT_FOLDER_PATH, exportFolder + File.separator);
            File file = new File(exportFolder);
            boolean isFileCreated = false;
            if (!file.exists()) {
                isFileCreated = file.mkdir();
            } else {
                isFileCreated = true;
            }
            if (isFileCreated) {
                String imageFolderPath = exportFolder + File.separator + NSIExportConstant.IMAGE_FOLDER_NAME;
                File imageFolder = new File(imageFolderPath);
                boolean isImageFolderCreated = false;
                if (!imageFolder.exists()) {
                    isImageFolderCreated = imageFolder.mkdir();
                } else {
                    isImageFolderCreated = true;
                }
                if (isImageFolderCreated) {
                    LOGGER.info(exportFolder + " folder created");
                    Batch batch = batchSchemaService.getBatch(batchInstanceID);
                    List<Document> documentList = batch.getDocuments().getDocument();

                    transformXML(isZipSwitchOn, batchXmlName, sourceXMLPath, targetXmlPath, transformer);

                    File baseDocFolder = new File(baseDocsFolder);
                    for (Document document : documentList) {
                        if (document != null && document.getMultiPageTiffFile() != null
                                && !document.getMultiPageTiffFile().isEmpty()) {
                            String multipageTiffName = document.getMultiPageTiffFile();
                            String filePath = baseDocFolder.getAbsolutePath() + File.separator
                                    + multipageTiffName;
                            String exportFileName = multipageTiffName.replace(
                                    NSIExportConstant.TIF_WITH_DOT_EXTENSION,
                                    NSIExportConstant.DAT_WITH_DOT_EXTENSION);
                            String exportFilePath = imageFolderPath + File.separator + exportFileName;
                            File oldFile = new File(filePath);
                            File newFile = new File(exportFilePath);
                            try {
                                FileUtils.copyFile(oldFile, newFile);
                            } catch (Exception e) {
                                LOGGER.error("Error creating in file: " + newFile + "is" + e.getMessage(), e);
                            }
                        }
                    }
                }
            } else {
                LOGGER.error("Access is denied for creating: " + file.getName());
            }
        } else {
            LOGGER.error("Transformer is null due to Invalid xsl file.");
        }
    } catch (FileNotFoundException e1) {
        LOGGER.error("Could not find NSITransform.xsl file : " + e1.getMessage(), e1);
        throw new DCMAApplicationException("Could not find nsiTransform.xsl file : " + e1.getMessage(), e1);
    } catch (TransformerException e1) {
        LOGGER.error(
                "Problem occurred in transforming " + sourceXMLPath + " to " + targetXmlPath + e1.getMessage(),
                e1);
        throw new DCMAApplicationException("Could not find nsiTransform.xsl file : ", e1);
    } catch (IOException ioe) {
        LOGGER.error(
                "Problem occurred in transforming " + sourceXMLPath + " to " + targetXmlPath + ioe.getMessage(),
                ioe);
        throw new DCMAApplicationException("Could not transform ibmCMTransform.xsl file : " + ioe.getMessage(),
                ioe);
    }
}

From source file:com.example.geomesa.accumulo.AccumuloQuickStart.java

License:Open Source License

static FeatureCollection createNewFeatures(SimpleFeatureType simpleFeatureType, int numNewFeatures) {
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    String id;
    Object[] NO_VALUES = {};
    String[] PEOPLE_NAMES = { "Addams", "Bierce", "Clemens" };
    Long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    Random random = new Random(5771);
    DateTime MIN_DATE = new DateTime(2014, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC"));
    Double MIN_X = -78.0;
    Double MIN_Y = -39.0;
    Double DX = 2.0;
    Double DY = 2.0;

    for (int i = 0; i < numNewFeatures; i++) {
        // create the new (unique) identifier and empty feature shell
        id = "Observation." + Integer.toString(i);
        SimpleFeature simpleFeature = SimpleFeatureBuilder.build(simpleFeatureType, NO_VALUES, id);

        // be sure to tell GeoTools explicitly that you want to use the ID you provided
        simpleFeature.getUserData().put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE);

        // populate the new feature's attributes

        // string value
        simpleFeature.setAttribute("Who", PEOPLE_NAMES[i % PEOPLE_NAMES.length]);

        // long value
        simpleFeature.setAttribute("What", i);

        // location:  construct a random point within a 2-degree-per-side square
        double x = MIN_X + random.nextDouble() * DX;
        double y = MIN_Y + random.nextDouble() * DY;
        Geometry geometry = WKTUtils.read("POINT(" + x + " " + y + ")");

        // date-time:  construct a random instant within a year
        simpleFeature.setAttribute("Where", geometry);
        DateTime dateTime = MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR));
        simpleFeature.setAttribute("When", dateTime.toDate());

        // another string value
        // "Why"; left empty, showing that not all attributes need values

        // accumulate this new feature in the collection
        featureCollection.add(simpleFeature);
    }

    return featureCollection;
}

From source file:com.example.geomesa.accumulo.FeatureLevelVisibility.java

License:Open Source License

static FeatureCollection createNewFeatures(SimpleFeatureType simpleFeatureType, int numNewFeatures) {
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    String id;
    Object[] NO_VALUES = {};
    String[] PEOPLE_NAMES = { "Addams", "Bierce", "Clemens" };
    Long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    Random random = new Random(5771);
    DateTime MIN_DATE = new DateTime(2014, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC"));
    Double MIN_X = -78.0;
    Double MIN_Y = -39.0;
    Double DX = 2.0;
    Double DY = 2.0;

    for (int i = 0; i < numNewFeatures; i++) {
        // create the new (unique) identifier and empty feature shell
        id = "Observation." + Integer.toString(i);
        SimpleFeature simpleFeature = SimpleFeatureBuilder.build(simpleFeatureType, NO_VALUES, id);

        // be sure to tell GeoTools explicitly that you want to use the ID you provided
        simpleFeature.getUserData().put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE);

        // populate the new feature's attributes

        // string value
        simpleFeature.setAttribute("Who", PEOPLE_NAMES[i % PEOPLE_NAMES.length]);

        // long value
        simpleFeature.setAttribute("What", i);

        // location:  construct a random point within a 2-degree-per-side square
        double x = MIN_X + random.nextDouble() * DX;
        double y = MIN_Y + random.nextDouble() * DY;
        Geometry geometry = WKTUtils.read("POINT(" + x + " " + y + ")");

        // date-time:  construct a random instant within a year
        simpleFeature.setAttribute("Where", geometry);
        DateTime dateTime = MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR));
        simpleFeature.setAttribute("When", dateTime.toDate());

        // another string value
        // "Why"; left empty, showing that not all attributes need values

        // set visibility on each feature, and attribute for display
        if (i % 2 == 0) {
            simpleFeature.setAttribute("Visibility", "admin");
            SecurityUtils.setFeatureVisibility(simpleFeature, "admin");
        } else {
            simpleFeature.setAttribute("Visibility", "user|admin");
            SecurityUtils.setFeatureVisibility(simpleFeature, "user|admin");
        }
        // accumulate this new feature in the collection
        featureCollection.add(simpleFeature);
    }

    return featureCollection;
}

From source file:com.example.geomesa.hbase.HBaseQuickStart.java

License:Apache License

static FeatureCollection createNewFeatures(SimpleFeatureType simpleFeatureType, int numNewFeatures) {
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    String id;
    Object[] NO_VALUES = {};
    String[] PEOPLE_NAMES = { "Addams", "Bierce", "Clemens" };
    Long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    Random random = new Random(5771);
    DateTime MIN_DATE = new DateTime(2014, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC"));
    Double MIN_X = -79.5;
    Double MIN_Y = 37.0;
    Double DX = 2.0;
    Double DY = 2.0;

    for (int i = 0; i < numNewFeatures; i++) {
        // create the new (unique) identifier and empty feature shell
        id = "Observation." + Integer.toString(i);
        SimpleFeature simpleFeature = SimpleFeatureBuilder.build(simpleFeatureType, NO_VALUES, id);

        // be sure to tell GeoTools explicitly that you want to use the ID you provided
        simpleFeature.getUserData().put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE);

        // populate the new feature's attributes

        // Who: string value
        simpleFeature.setAttribute("Who", PEOPLE_NAMES[i % PEOPLE_NAMES.length]);

        // What: long value
        simpleFeature.setAttribute("What", i);

        // Where: location: construct a random point within a 2-degree-per-side square
        double x = MIN_X + random.nextDouble() * DX;
        double y = MIN_Y + random.nextDouble() * DY;
        Geometry geometry = WKTUtils$.MODULE$.read("POINT(" + x + " " + y + ")");
        simpleFeature.setAttribute("Where", geometry);

        // When: date-time:  construct a random instant within a year
        DateTime dateTime = MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR));
        simpleFeature.setAttribute("When", dateTime.toDate());

        // Why: another string value
        // left empty, showing that not all attributes need values

        // accumulate this new feature in the collection
        featureCollection.add(simpleFeature);
    }

    return featureCollection;
}

From source file:com.example.geomesa.kafka.KafkaQuickStart.java

License:Open Source License

public static void addSimpleFeatures(SimpleFeatureType sft, FeatureStore producerFS)
        throws InterruptedException, IOException {
    final int MIN_X = -180;
    final int MAX_X = 180;
    final int MIN_Y = -90;
    final int MAX_Y = 90;
    final int DX = 2;
    final int DY = 1;
    final String[] PEOPLE_NAMES = { "James", "John", "Peter", "Hannah", "Claire", "Gabriel" };
    final long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    final Random random = new Random();
    final DateTime MIN_DATE = new DateTime(2015, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC"));

    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    // creates and updates two SimpleFeatures.
    // the first time this for loop runs the two SimpleFeatures are created.
    // in the subsequent iterations of the for loop, the two SimpleFeatures are updated.
    int numFeatures = (MAX_X - MIN_X) / DX;
    for (int i = 1; i <= numFeatures; i++) {
        builder.add(PEOPLE_NAMES[i % PEOPLE_NAMES.length]); // name
        builder.add((int) Math.round(random.nextDouble() * 110)); // age
        builder.add(MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toDate()); // dtg
        builder.add(WKTUtils$.MODULE$.read("POINT(" + (MIN_X + DX * i) + " " + (MIN_Y + DY * i) + ")")); // geom
        SimpleFeature feature1 = builder.buildFeature("1");

        builder.add(PEOPLE_NAMES[(i + 1) % PEOPLE_NAMES.length]); // name
        builder.add((int) Math.round(random.nextDouble() * 110)); // age
        builder.add(MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toDate()); // dtg
        builder.add(WKTUtils$.MODULE$.read("POINT(" + (MIN_X + DX * i) + " " + (MAX_Y - DY * i) + ")")); // geom
        SimpleFeature feature2 = builder.buildFeature("2");

        // write the SimpleFeatures to Kafka
        featureCollection.add(feature1);
        featureCollection.add(feature2);
        producerFS.addFeatures(featureCollection);
        featureCollection.clear();

        // wait 100 ms in between updating SimpleFeatures to simulate a stream of data
        Thread.sleep(100);
    }
}

From source file:com.example.geomesa.kafka08.KafkaQuickStart.java

License:Open Source License

public static void addSimpleFeatures(SimpleFeatureType sft, FeatureStore producerFS, String visibility)
        throws InterruptedException, IOException {
    final int MIN_X = -180;
    final int MAX_X = 180;
    final int MIN_Y = -90;
    final int MAX_Y = 90;
    final int DX = 2;
    final int DY = 1;
    final String[] PEOPLE_NAMES = { "James", "John", "Peter", "Hannah", "Claire", "Gabriel" };
    final long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    final Random random = new Random();
    final DateTime MIN_DATE = new DateTime(2015, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC"));

    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    // creates and updates two SimpleFeatures.
    // the first time this for loop runs the two SimpleFeatures are created.
    // in the subsequent iterations of the for loop, the two SimpleFeatures are updated.
    int numFeatures = (MAX_X - MIN_X) / DX;
    for (int i = 1; i <= numFeatures; i++) {
        builder.add(PEOPLE_NAMES[i % PEOPLE_NAMES.length]); // name
        builder.add((int) Math.round(random.nextDouble() * 110)); // age
        builder.add(MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toDate()); // dtg
        builder.add(WKTUtils$.MODULE$.read("POINT(" + (MIN_X + DX * i) + " " + (MIN_Y + DY * i) + ")")); // geom
        SimpleFeature feature1 = builder.buildFeature("1");

        builder.add(PEOPLE_NAMES[(i + 1) % PEOPLE_NAMES.length]); // name
        builder.add((int) Math.round(random.nextDouble() * 110)); // age
        builder.add(MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toDate()); // dtg
        builder.add(WKTUtils$.MODULE$.read("POINT(" + (MIN_X + DX * i) + " " + (MAX_Y - DY * i) + ")")); // geom
        SimpleFeature feature2 = builder.buildFeature("2");

        if (visibility != null) {
            feature1.getUserData().put("geomesa.feature.visibility", visibility);
            feature2.getUserData().put("geomesa.feature.visibility", visibility);
        }

        // write the SimpleFeatures to Kafka
        featureCollection.add(feature1);
        featureCollection.add(feature2);
        producerFS.addFeatures(featureCollection);
        featureCollection.clear();

        // wait 100 ms in between updating SimpleFeatures to simulate a stream of data
        Thread.sleep(100);
    }
}

From source file:com.facebook.presto.hive.benchmark.FileFormat.java

License:Apache License

private static ConnectorPageSource createPageSource(HiveRecordCursorProvider cursorProvider,
        ConnectorSession session, File targetFile, List<String> columnNames, List<Type> columnTypes,
        HiveStorageFormat format) {
    List<HiveColumnHandle> columnHandles = new ArrayList<>(columnNames.size());
    TypeTranslator typeTranslator = new HiveTypeTranslator();
    for (int i = 0; i < columnNames.size(); i++) {
        String columnName = columnNames.get(i);
        Type columnType = columnTypes.get(i);
        columnHandles
                .add(new HiveColumnHandle("test", columnName, HiveType.toHiveType(typeTranslator, columnType),
                        columnType.getTypeSignature(), i, REGULAR, Optional.empty()));
    }

    RecordCursor recordCursor = cursorProvider
            .createRecordCursor("test", conf, session, new Path(targetFile.getAbsolutePath()), 0,
                    targetFile.length(), createSchema(format, columnNames, columnTypes), columnHandles,
                    TupleDomain.all(), DateTimeZone.forID(session.getTimeZoneKey().getId()), TYPE_MANAGER)
            .get();
    return new RecordPageSource(columnTypes, recordCursor);
}