Example usage for org.joda.time DateTimeZone UTC

List of usage examples for org.joda.time DateTimeZone UTC

Introduction

In this page you can find the example usage for org.joda.time DateTimeZone UTC.

Prototype

DateTimeZone UTC

To view the source code for org.joda.time DateTimeZone UTC, click the Source Link below.

Click Source Link

Document

The time zone for Coordinated Universal Time (UTC).

Usage

From source file:com.evinceframework.membership.authentication.UserUpdater.java

License:Apache License

/** Returns the current instant as a Joda {@link DateTime} in the UTC zone. */
protected DateTime utcNow() {
    // new DateTime(zone) is equivalent to DateTime.now(zone) in Joda-Time.
    return new DateTime(DateTimeZone.UTC);
}

From source file:com.example.bigquery.QueryParametersSample.java

License:Apache License

/**
 * Runs a parameterized BigQuery standard-SQL query that adds one hour to a named
 * TIMESTAMP parameter, then prints each result row as an ISO-8601 string (UTC).
 *
 * @throws InterruptedException if interrupted while polling for job completion
 * @throws RuntimeException if the query finished with execution errors
 */
private static void runTimestamp() throws InterruptedException {
    BigQuery bigquery = new BigQueryOptions.DefaultBigqueryFactory()
            .create(BigQueryOptions.getDefaultInstance());

    DateTime timestamp = new DateTime(2016, 12, 7, 8, 0, 0, DateTimeZone.UTC);

    String queryString = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);";
    QueryRequest queryRequest = QueryRequest.newBuilder(queryString)
            .addNamedParameter("ts_value", QueryParameterValue.timestamp(
                    // Timestamp takes microseconds since 1970-01-01T00:00:00 UTC
                    timestamp.getMillis() * 1000))
            // Standard SQL syntax is required for parameterized queries.
            // See: https://cloud.google.com/bigquery/sql-reference/
            .setUseLegacySql(false).build();

    // Execute the query.
    QueryResponse response = bigquery.query(queryRequest);

    // Wait for the job to finish (if the query takes more than 10 seconds to complete).
    while (!response.jobCompleted()) {
        Thread.sleep(1000);
        response = bigquery.getQueryResults(response.getJobId());
    }

    // Surface all execution errors as a single exception message.
    if (response.hasErrors()) {
        throw new RuntimeException(response.getExecutionErrors().stream().<String>map(err -> err.getMessage())
                .collect(Collectors.joining("\n")));
    }

    QueryResult result = response.getResult();
    Iterator<List<FieldValue>> iter = result.iterateAll();

    DateTimeFormatter formatter = ISODateTimeFormat.dateTimeNoMillis().withZoneUTC();
    while (iter.hasNext()) {
        List<FieldValue> row = iter.next();
        System.out.printf("%s\n", formatter.print(new DateTime(
                // Timestamp values are returned in microseconds since 1970-01-01T00:00:00 UTC,
                // but org.joda.time.DateTime constructor accepts times in milliseconds.
                row.get(0).getTimestampValue() / 1000, DateTimeZone.UTC)));
    }
}

From source file:com.example.getstarted.util.CloudStorageHelper.java

License:Apache License

/**
 * Uploads a file to Google Cloud Storage into the given bucket, appending a UTC
 * timestamp to the end of the uploaded filename so object names stay unique.
 *
 * @param fileStream the multipart file stream to upload
 * @param bucketName the target Cloud Storage bucket
 * @return the public media (download) link for the uploaded blob
 * @throws ServletException if the file name fails the extension check
 * @throws IOException if reading the stream or the upload fails
 */
public String uploadFile(FileItemStream fileStream, final String bucketName)
        throws IOException, ServletException {
    checkFileExtension(fileStream.getName());

    DateTimeFormatter dtf = DateTimeFormat.forPattern("-YYYY-MM-dd-HHmmssSSS");
    DateTime dt = DateTime.now(DateTimeZone.UTC);
    String dtString = dt.toString(dtf);
    final String fileName = fileStream.getName() + dtString;

    // the inputstream is closed by default, so we don't need to close it here
    BlobInfo blobInfo = storage.create(BlobInfo.newBuilder(bucketName, fileName)
            // Modify access list to allow all users with link to read file
            .setAcl(new ArrayList<>(Arrays.asList(Acl.of(User.ofAllUsers(), Role.READER)))).build(),
            fileStream.openStream());
    logger.log(Level.INFO, "Uploaded file {0} as {1}", new Object[] { fileStream.getName(), fileName });
    // return the public download link
    return blobInfo.getMediaLink();
}

From source file:com.example.getstarted.util.DatastoreSessionFilter.java

License:Apache License

@Override
public void init(FilterConfig config) throws ServletException {
    // Initialize the datastore handle used by this filter.
    datastore = DatastoreServiceFactory.getDatastoreService();

    // Purge stale state: delete every session entity whose "lastModified"
    // timestamp (stored as a DTF-formatted string, UTC) is over two days old.
    DateTime dt = DateTime.now(DateTimeZone.UTC);
    Query query = new Query(SESSION_KIND).setFilter(new FilterPredicate("lastModified",
            FilterOperator.LESS_THAN_OR_EQUAL, dt.minusDays(2).toString(DTF)));
    Iterator<Entity> results = datastore.prepare(query).asIterator();
    while (results.hasNext()) {
        Entity stateEntity = results.next();
        datastore.delete(stateEntity.getKey());
    }
}

From source file:com.example.getstarted.util.DatastoreSessionFilter.java

License:Apache License

/**
 * Stores each key-value pair from {@code setMap} as a property on the datastore
 * entity for the given session, and refreshes its "lastModified" timestamp (UTC).
 * No-op when {@code sessionId} is empty. Runs inside a transaction that is rolled
 * back if the commit does not complete.
 *
 * @param sessionId the session identifier used as the entity key name
 * @param setMap session variable names mapped to the values to store
 */
protected void setSessionVariables(String sessionId, Map<String, String> setMap) {
    if (sessionId.equals("")) {
        return;
    }
    Key key = KeyFactory.createKey(SESSION_KIND, sessionId);
    Transaction transaction = datastore.beginTransaction();
    DateTime dt = DateTime.now(DateTimeZone.UTC);
    try {
        Entity stateEntity;
        try {
            stateEntity = datastore.get(transaction, key);
        } catch (EntityNotFoundException e) {
            // No entity for this session yet; start a fresh one.
            stateEntity = new Entity(key);
        }
        for (Map.Entry<String, String> entry : setMap.entrySet()) {
            stateEntity.setProperty(entry.getKey(), entry.getValue());
        }
        stateEntity.setProperty("lastModified", dt.toString(DTF));
        datastore.put(transaction, stateEntity);
        transaction.commit();
    } finally {
        // Roll back if the commit above did not succeed.
        if (transaction.isActive()) {
            transaction.rollback();
        }
    }
}

From source file:com.example.managedvms.gettingstartedjava.util.CloudStorageHelper.java

License:Open Source License

/**
 * Uploads a file to Google Cloud Storage into the given bucket, appending a UTC
 * timestamp to the end of the uploaded filename so object names stay unique.
 *
 * @param filePart the multipart request part containing the file to upload
 * @param bucketName the target Cloud Storage bucket
 * @return the public media (download) link for the uploaded blob
 * @throws IOException if reading the part or the upload fails
 */
public String uploadFile(Part filePart, final String bucketName) throws IOException {
    DateTimeFormatter dtf = DateTimeFormat.forPattern("-YYYY-MM-dd-HHmmssSSS");
    DateTime dt = DateTime.now(DateTimeZone.UTC);
    String dtString = dt.toString(dtf);
    final String fileName = filePart.getSubmittedFileName() + dtString;

    // the inputstream is closed by default, so we don't need to close it here
    BlobInfo blobInfo = storage.create(BlobInfo.builder(bucketName, fileName)
            // Modify access list to allow all users with link to read file
            .acl(new ArrayList<>(Arrays.asList(Acl.of(User.ofAllUsers(), Role.READER)))).build(),
            filePart.getInputStream());
    logger.log(Level.INFO, "Uploaded file {0} as {1}",
            new Object[] { filePart.getSubmittedFileName(), fileName });
    // return the public download link
    return blobInfo.mediaLink();
}

From source file:com.F8Full.bixhistorique.backend.ParseCronServlet.java

/**
 * Assigns each transient station-properties record a datastore key of the form
 * {@code <stationId>_<todayUtcMillis>} (today at start of day, UTC) and persists
 * all records via JDO, closing the PersistenceManager afterwards.
 *
 * @param curNetwork the network whose transient station properties are persisted
 */
private void processProperties(Network curNetwork) {
    DateTime today = new DateTime(DateTimeZone.UTC).withTimeAtStartOfDay();

    for (int stationId : curNetwork.stationPropertieTransientMap.keySet()) {
        curNetwork.stationPropertieTransientMap.get(stationId).setKey(KeyFactory
                .createKey(StationProperties.class.getSimpleName(), stationId + "_" + today.getMillis()));
    }

    PersistenceManager pm = PMF.get().getPersistenceManager();
    addStationToParsingStatus();

    try {
        pm.makePersistentAll(curNetwork.stationPropertieTransientMap.values());
    } finally {
        // Always release the PersistenceManager, even if persistence fails.
        pm.close();
    }
}

From source file:com.facebook.presto.AbstractTestQueries.java

License:Apache License

/**
 * Verifies TIME, TIMESTAMP, DATE and INTERVAL literals by comparing against
 * the expected epoch-second values computed with Joda-Time in UTC.
 */
@Test
public void testTimeLiterals() throws Exception {
    assertQuery("SELECT TIME '3:04', TIMESTAMP '1960-01-22 3:04', DATE '2013-03-22', INTERVAL '123' DAY\n",
            "SELECT "
                    // TIME '3:04' -> seconds since midnight on the epoch day.
                    + MILLISECONDS.toSeconds(new DateTime(1970, 1, 1, 3, 4, 0, 0, DateTimeZone.UTC).getMillis())
                    + ",  "
                    + MILLISECONDS
                            .toSeconds(new DateTime(1960, 1, 22, 3, 4, 0, 0, DateTimeZone.UTC).getMillis())
                    + ",  "
                    + MILLISECONDS
                            .toSeconds(new DateTime(2013, 3, 22, 0, 0, 0, 0, DateTimeZone.UTC).getMillis())
                    + ",  " + String.valueOf(TimeUnit.DAYS.toSeconds(123)));
}

From source file:com.facebook.presto.accumulo.serializers.AbstractTestAccumuloRowSerializer.java

License:Apache License

/**
 * Round-trips a DATE value through the serializer and checks the day matches.
 * Comparison is done at day granularity so time-of-day differences introduced
 * by the serializer do not affect the result.
 */
@Test
public void testDate() throws Exception {
    Date expected = new Date(new DateTime(2001, 2, 3, 4, 5, 6, DateTimeZone.UTC).getMillis());
    AccumuloRowSerializer serializer = serializerClass.getConstructor().newInstance();
    byte[] data = serializer.encode(DATE, expected);

    deserializeData(serializer, data);
    Date actual = serializer.getDate(COLUMN_NAME);

    // Convert milliseconds to days so they can be compared regardless of the time of day
    assertEquals(MILLISECONDS.toDays(actual.getTime()), MILLISECONDS.toDays(expected.getTime()));
}

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

/**
 * Benchmarks a Hive record cursor by repeatedly scanning the given split with
 * no projected columns, advancing row by row. Repeats LOOPS times and returns
 * the row count observed on the final pass.
 *
 * @param fileSplit the file split to read
 * @param partitionProperties the Hive partition/table properties for the split
 * @param hiveRecordCursorProvider provider used to create the cursor per pass
 * @return the number of rows counted in the last benchmark pass
 */
private static long benchmarkReadNone(FileSplit fileSplit, Properties partitionProperties,
        HiveRecordCursorProvider hiveRecordCursorProvider) throws Exception {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    long count = 0;
    for (int i = 0; i < LOOPS; i++) {
        // Reset each pass so the return value reflects a single scan.
        count = 0;

        HiveRecordCursor recordCursor = hiveRecordCursorProvider.createHiveRecordCursor(split.getClientId(),
                new Configuration(), SESSION, new Path(split.getPath()), split.getStart(), split.getLength(),
                split.getSchema(), ImmutableList.<HiveColumnHandle>of(), split.getPartitionKeys(),
                TupleDomain.<HiveColumnHandle>all(), DateTimeZone.UTC, TYPE_MANAGER).get();

        while (recordCursor.advanceNextPosition()) {
            count++;
        }
        recordCursor.close();
    }
    return count;
}