Example usage for org.joda.time DateTimeZone forID

Introduction

This page collects usage examples for org.joda.time.DateTimeZone.forID.

Prototype

@FromString
public static DateTimeZone forID(String id) 

Document

Gets a time zone instance for the specified time zone id.
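
Before the project examples, here is a minimal, hypothetical sketch of the call in isolation (the class name and sample IDs are illustrative, not taken from any project below): forID resolves a region-based ID such as "America/New_York" or a fixed-offset ID such as "+05:30", and throws IllegalArgumentException if the ID is not recognized.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class ForIdExample {
    public static void main(String[] args) {
        // Resolve a region-based zone by its ID.
        DateTimeZone newYork = DateTimeZone.forID("America/New_York");

        // Fixed-offset IDs are also accepted.
        DateTimeZone offset = DateTimeZone.forID("+05:30");

        // Interpret the current instant in the resolved zone.
        System.out.println(new DateTime(newYork) + " / " + offset);

        // An unrecognized ID fails fast with IllegalArgumentException.
        try {
            DateTimeZone.forID("Not/AZone");
        } catch (IllegalArgumentException e) {
            System.out.println("Unknown zone ID: " + e.getMessage());
        }
    }
}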

Usage

From source file: com.facebook.presto.hive.benchmark.FileFormat.java

License: Apache License

private static ConnectorPageSource createPageSource(HivePageSourceFactory pageSourceFactory,
        ConnectorSession session, File targetFile, List<String> columnNames, List<Type> columnTypes,
        HiveStorageFormat format) {
    List<HiveColumnHandle> columnHandles = new ArrayList<>(columnNames.size());
    TypeTranslator typeTranslator = new HiveTypeTranslator();
    for (int i = 0; i < columnNames.size(); i++) {
        String columnName = columnNames.get(i);
        Type columnType = columnTypes.get(i);
        columnHandles
                .add(new HiveColumnHandle("test", columnName, HiveType.toHiveType(typeTranslator, columnType),
                        columnType.getTypeSignature(), i, REGULAR, Optional.empty()));
    }

    return pageSourceFactory.createPageSource(conf, session, new Path(targetFile.getAbsolutePath()), 0,
            targetFile.length(), createSchema(format, columnNames, columnTypes), columnHandles,
            TupleDomain.all(), DateTimeZone.forID(session.getTimeZoneKey().getId())).get();
}

From source file: com.facebook.presto.hive.ColumnarBinaryHiveRecordCursorProvider.java

License: Apache License

@Override
public Optional<HiveRecordCursor> createHiveRecordCursor(HiveSplit split, RecordReader<?, ?> recordReader,
        List<HiveColumnHandle> columns, DateTimeZone hiveStorageTimeZone) {
    if (usesColumnarBinarySerDe(split)) {
        return Optional.<HiveRecordCursor>of(new ColumnarBinaryHiveRecordCursor<>(
                bytesRecordReader(recordReader), split.getLength(), split.getSchema(), split.getPartitionKeys(),
                columns, DateTimeZone.forID(split.getSession().getTimeZoneKey().getId())));
    }
    return Optional.absent();
}

From source file: com.facebook.presto.hive.ColumnarTextHiveRecordCursorProvider.java

License: Apache License

@Override
public Optional<HiveRecordCursor> createHiveRecordCursor(HiveSplit split, RecordReader<?, ?> recordReader,
        List<HiveColumnHandle> columns, DateTimeZone hiveStorageTimeZone) {
    if (usesColumnarTextSerDe(split)) {
        return Optional
                .<HiveRecordCursor>of(new ColumnarTextHiveRecordCursor<>(columnarTextRecordReader(recordReader),
                        split.getLength(), split.getSchema(), split.getPartitionKeys(), columns,
                        hiveStorageTimeZone, DateTimeZone.forID(split.getSession().getTimeZoneKey().getId())));
    }
    return Optional.absent();
}

From source file: com.facebook.presto.hive.DwrfRecordCursorProvider.java

License: Apache License

@Override
public Optional<HiveRecordCursor> createHiveRecordCursor(String clientId, Configuration configuration,
        ConnectorSession session, Path path, long start, long length, Properties schema,
        List<HiveColumnHandle> columns, List<HivePartitionKey> partitionKeys,
        TupleDomain<HiveColumnHandle> tupleDomain, DateTimeZone hiveStorageTimeZone, TypeManager typeManager) {
    @SuppressWarnings("deprecation")
    Deserializer deserializer = getDeserializer(schema);
    if (!(deserializer instanceof OrcSerde)) {
        return Optional.absent();
    }

    StructObjectInspector rowInspector = getTableObjectInspector(schema);
    if (!all(rowInspector.getAllStructFieldRefs(), isSupportedDwrfType())) {
        throw new IllegalArgumentException("DWRF does not support DATE type");
    }

    ReaderWriterProfiler.setProfilerOptions(configuration);

    RecordReader recordReader;
    try {
        FileSystem fileSystem = path.getFileSystem(configuration);
        Reader reader = OrcFile.createReader(fileSystem, path, new JobConf(configuration));
        boolean[] include = findIncludedColumns(reader.getTypes(), columns);
        recordReader = reader.rows(start, length, include);
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }

    return Optional.<HiveRecordCursor>of(new DwrfHiveRecordCursor(recordReader, length, schema, partitionKeys,
            columns, hiveStorageTimeZone, DateTimeZone.forID(session.getTimeZoneKey().getId()), typeManager));
}

From source file: com.facebook.presto.hive.GenericHiveRecordCursorProvider.java

License: Apache License

@Override
public Optional<HiveRecordCursor> createHiveRecordCursor(HiveSplit split, RecordReader<?, ?> recordReader,
        List<HiveColumnHandle> columns, DateTimeZone hiveStorageTimeZone) {
    return Optional.<HiveRecordCursor>of(new GenericHiveRecordCursor<>(genericRecordReader(recordReader),
            split.getLength(), split.getSchema(), split.getPartitionKeys(), columns, hiveStorageTimeZone,
            DateTimeZone.forID(split.getSession().getTimeZoneKey().getId())));
}

From source file: com.facebook.presto.hive.OrcRecordCursorProvider.java

License: Apache License

@Override
public Optional<HiveRecordCursor> createHiveRecordCursor(String clientId, Configuration configuration,
        ConnectorSession session, Path path, long start, long length, Properties schema,
        List<HiveColumnHandle> columns, List<HivePartitionKey> partitionKeys,
        TupleDomain<HiveColumnHandle> tupleDomain, DateTimeZone hiveStorageTimeZone, TypeManager typeManager) {
    @SuppressWarnings("deprecation")
    Deserializer deserializer = getDeserializer(schema);
    if (!(deserializer instanceof OrcSerde)) {
        return Optional.absent();
    }

    RecordReader recordReader;
    try {
        FileSystem fileSystem = path.getFileSystem(configuration);
        Reader reader = OrcFile.createReader(fileSystem, path);
        boolean[] include = findIncludedColumns(reader.getTypes(), columns);
        recordReader = reader.rows(start, length, include);
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }

    return Optional.<HiveRecordCursor>of(new OrcHiveRecordCursor(recordReader, length, schema, partitionKeys,
            columns, hiveStorageTimeZone, DateTimeZone.forID(session.getTimeZoneKey().getId()), typeManager));
}

From source file: com.facebook.presto.jdbc.ext.PrestoResultSet.java

License: Apache License

PrestoResultSet(StatementClient client, Consumer<QueryStats> progressCallback) throws SQLException {
    this.client = requireNonNull(client, "client is null");
    requireNonNull(progressCallback, "progressCallback is null");

    this.sessionTimeZone = DateTimeZone.forID(client.getTimeZoneId());
    this.queryId = client.current().getId();

    List<Column> columns = getColumns(client, progressCallback);
    this.fieldMap = getFieldMap(columns);
    this.columnInfoList = getColumnInfo(columns);
    this.resultSetMetaData = new PrestoResultSetMetaData(columnInfoList);

    this.results = flatten(new ResultsPageIterator(client, progressCallback));
}

From source file: com.facebook.presto.jdbc.PrestoResultSet.java

License: Apache License

PrestoResultSet(UniversalStatementClient client) throws SQLException {
    this.client = checkNotNull(client, "client is null");
    this.sessionTimeZone = DateTimeZone.forID(client.getTimeZoneId());
    this.queryId = client.current().getId();

    List<Column> columns = getColumns(client);
    this.fieldMap = getFieldMap(columns);
    this.columnInfoList = getColumnInfo(columns);
    this.resultSetMetaData = new PrestoResultSetMetaData(columnInfoList);

    this.results = flatten(new ResultsPageIterable(client));
}

From source file: com.foundationdb.server.types.mcompat.mfuncs.MFromUnixtimeOneArg.java

License: Open Source License

@Override
protected void doEvaluate(TExecutionContext context, LazyList<? extends ValueSource> inputs,
        ValueTarget output) {
    // unixtime is in seconds; convert it to milliseconds
    long millis = inputs.get(0).getInt64() * 1000L;
    output.putInt64(MDateAndTime.encodeDateTime(MDateAndTime
            .fromJodaDateTime(new DateTime(millis, DateTimeZone.forID(context.getCurrentTimezone())))));
}

From source file: com.foundationdb.server.types.mcompat.mfuncs.MFromUnixtimeTwoArgs.java

License: Open Source License

private static Object[] computeResult(long unix, String format, String tz) {
    String st = null;
    InvalidOperationException error = null;

    try {
        st = DateTimeField.getFormatted(new MutableDateTime(unix * 1000L, DateTimeZone.forID(tz)), format);
    } catch (InvalidParameterValueException e) {
        st = null;
        error = e;
    }

    return new Object[] { st, error };
}