Example usage for org.joda.time DateTimeZone UTC


Introduction

On this page you can find example usage for org.joda.time DateTimeZone UTC.

Prototype

public static final DateTimeZone UTC

To view the source code for org.joda.time DateTimeZone UTC, click the Source Link.

Document

The time zone for Universal Coordinated Time (UTC).
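
DateTimeZone.UTC is the constant Joda-Time instance for the UTC time zone: a fixed zero offset with no daylight saving. Before the usage examples below, here is a minimal, self-contained sketch of typical uses (constructing an instant in UTC, converting it to another zone, and parsing/formatting with an explicit zone). The class name and the "America/New_York" zone ID are illustrative choices, not part of the library.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public class DateTimeZoneUtcExample {
    public static void main(String[] args) {
        // Construct an instant directly in UTC instead of the JVM default zone
        DateTime utcNow = new DateTime(DateTimeZone.UTC);

        // Reinterpret the same instant in another zone; the underlying millis do not change
        DateTime newYork = utcNow.withZone(DateTimeZone.forID("America/New_York"));
        System.out.println(newYork.getMillis() == utcNow.getMillis()); // true

        // Parse and print with an explicit UTC zone so the result does not depend on the JVM default
        DateTimeFormatter format = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
        long millis = format.parseMillis("2015-01-01T00:00:00.000Z");
        System.out.println(format.print(millis)); // 2015-01-01T00:00:00.000Z
    }
}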

Usage

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

private static double benchmarkReadTpch1(FileSplit fileSplit, Properties partitionProperties,
        HivePageSourceFactory pageSourceFactory) throws IOException {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    double sum = 0;
    for (int i = 0; i < LOOPS; i++) {
        sum = 0;

        ConnectorPageSource pageSource = pageSourceFactory.createPageSource(new Configuration(), SESSION,
                new Path(split.getPath()), split.getStart(), split.getLength(), split.getSchema(),
                TPCH_1_COLUMNS, split.getPartitionKeys(), TupleDomain.<HiveColumnHandle>all(), DateTimeZone.UTC)
                .get();

        while (!pageSource.isFinished()) {
            Page page = pageSource.getNextPage();
            if (page == null) {
                continue;
            }

            Block block0 = page.getBlock(0);
            Block block1 = page.getBlock(1);
            Block block2 = page.getBlock(2);
            Block block3 = page.getBlock(3);
            Block block4 = page.getBlock(4);
            Block block5 = page.getBlock(5);
            Block block6 = page.getBlock(6);

            for (int position = 0; position < page.getPositionCount(); position++) {
                if (!block0.isNull(position)) {
                    sum += BIGINT.getLong(block0, position);
                }
                if (!block1.isNull(position)) {
                    sum += DOUBLE.getDouble(block1, position);
                }
                if (!block2.isNull(position)) {
                    sum += DOUBLE.getDouble(block2, position);
                }
                if (!block3.isNull(position)) {
                    sum += DOUBLE.getDouble(block3, position);
                }
                if (!block4.isNull(position)) {
                    sum += VARCHAR.getSlice(block4, position).length();
                }
                if (!block5.isNull(position)) {
                    sum += VARCHAR.getSlice(block5, position).length();
                }
                if (!block6.isNull(position)) {
                    sum += VARCHAR.getSlice(block6, position).length();
                }
            }
        }
        pageSource.close();
    }
    return sum;
}

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

private static double benchmarkReadAll(FileSplit fileSplit, Properties partitionProperties,
        HiveRecordCursorProvider hiveRecordCursorProvider) throws IOException {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    double sum = 0;
    for (int i = 0; i < LOOPS; i++) {
        sum = 0;

        HiveRecordCursor recordCursor = hiveRecordCursorProvider.createHiveRecordCursor(split.getClientId(),
                new Configuration(), SESSION, new Path(split.getPath()), split.getStart(), split.getLength(),
                split.getSchema(), ALL_COLUMNS, split.getPartitionKeys(), TupleDomain.<HiveColumnHandle>all(),
                DateTimeZone.UTC, TYPE_MANAGER).get();

        while (recordCursor.advanceNextPosition()) {
            if (!recordCursor.isNull(0)) {
                sum += recordCursor.getLong(0);
            }
            if (!recordCursor.isNull(1)) {
                sum += recordCursor.getLong(1);
            }
            if (!recordCursor.isNull(2)) {
                sum += recordCursor.getLong(2);
            }
            if (!recordCursor.isNull(3)) {
                sum += recordCursor.getLong(3);
            }
            if (!recordCursor.isNull(4)) {
                sum += recordCursor.getLong(4);
            }
            if (!recordCursor.isNull(5)) {
                sum += recordCursor.getDouble(5);
            }
            if (!recordCursor.isNull(6)) {
                sum += recordCursor.getDouble(6);
            }
            if (!recordCursor.isNull(7)) {
                sum += recordCursor.getDouble(7);
            }
            if (!recordCursor.isNull(8)) {
                sum += recordCursor.getSlice(8).length();
            }
            if (!recordCursor.isNull(9)) {
                sum += recordCursor.getSlice(9).length();
            }
            if (!recordCursor.isNull(10)) {
                sum += recordCursor.getSlice(10).length();
            }
            if (!recordCursor.isNull(11)) {
                sum += recordCursor.getSlice(11).length();
            }
            if (!recordCursor.isNull(12)) {
                sum += recordCursor.getSlice(12).length();
            }
            if (!recordCursor.isNull(13)) {
                sum += recordCursor.getSlice(13).length();
            }
            if (!recordCursor.isNull(14)) {
                sum += recordCursor.getSlice(14).length();
            }
            if (!recordCursor.isNull(15)) {
                sum += recordCursor.getSlice(15).length();
            }
        }
        recordCursor.close();
    }
    return sum;
}

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

private static double benchmarkReadAll(FileSplit fileSplit, Properties partitionProperties,
        HivePageSourceFactory pageSourceFactory) throws IOException {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    double sum = 0;
    for (int i = 0; i < LOOPS; i++) {
        sum = 0;

        ConnectorPageSource pageSource = pageSourceFactory.createPageSource(new Configuration(), SESSION,
                new Path(split.getPath()), split.getStart(), split.getLength(), split.getSchema(), ALL_COLUMNS,
                split.getPartitionKeys(), TupleDomain.<HiveColumnHandle>all(), DateTimeZone.UTC).get();

        while (!pageSource.isFinished()) {
            Page page = pageSource.getNextPage();
            if (page == null) {
                continue;
            }

            Block block0 = page.getBlock(0);
            Block block1 = page.getBlock(1);
            Block block2 = page.getBlock(2);
            Block block3 = page.getBlock(3);
            Block block4 = page.getBlock(4);
            Block block5 = page.getBlock(5);
            Block block6 = page.getBlock(6);
            Block block7 = page.getBlock(7);
            Block block8 = page.getBlock(8);
            Block block9 = page.getBlock(9);
            Block block10 = page.getBlock(10);
            Block block11 = page.getBlock(11);
            Block block12 = page.getBlock(12);
            Block block13 = page.getBlock(13);
            Block block14 = page.getBlock(14);
            Block block15 = page.getBlock(15);

            for (int position = 0; position < page.getPositionCount(); position++) {
                if (!block0.isNull(position)) {
                    sum += BIGINT.getLong(block0, position);
                }
                if (!block1.isNull(position)) {
                    sum += BIGINT.getLong(block1, position);
                }
                if (!block2.isNull(position)) {
                    sum += BIGINT.getLong(block2, position);
                }
                if (!block3.isNull(position)) {
                    sum += BIGINT.getLong(block3, position);
                }
                if (!block4.isNull(position)) {
                    sum += BIGINT.getLong(block4, position);
                }
                if (!block5.isNull(position)) {
                    sum += DOUBLE.getDouble(block5, position);
                }
                if (!block6.isNull(position)) {
                    sum += DOUBLE.getDouble(block6, position);
                }
                if (!block7.isNull(position)) {
                    sum += DOUBLE.getDouble(block7, position);
                }
                if (!block8.isNull(position)) {
                    sum += VARCHAR.getSlice(block8, position).length();
                }
                if (!block9.isNull(position)) {
                    sum += VARCHAR.getSlice(block9, position).length();
                }
                if (!block10.isNull(position)) {
                    sum += VARCHAR.getSlice(block10, position).length();
                }
                if (!block11.isNull(position)) {
                    sum += VARCHAR.getSlice(block11, position).length();
                }
                if (!block12.isNull(position)) {
                    sum += VARCHAR.getSlice(block12, position).length();
                }
                if (!block13.isNull(position)) {
                    sum += VARCHAR.getSlice(block13, position).length();
                }
                if (!block14.isNull(position)) {
                    sum += VARCHAR.getSlice(block14, position).length();
                }
                if (!block15.isNull(position)) {
                    sum += VARCHAR.getSlice(block15, position).length();
                }
            }
        }
        pageSource.close();
    }
    return sum;
}

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

private static double benchmarkLoadAllReadOne(FileSplit fileSplit, Properties partitionProperties,
        HiveRecordCursorProvider hiveRecordCursorProvider) throws IOException {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    double sum = 0;
    for (int i = 0; i < LOOPS; i++) {
        sum = 0;

        HiveRecordCursor recordCursor = hiveRecordCursorProvider.createHiveRecordCursor(split.getClientId(),
                new Configuration(), SESSION, new Path(split.getPath()), split.getStart(), split.getLength(),
                split.getSchema(), ALL_COLUMNS, split.getPartitionKeys(), TupleDomain.<HiveColumnHandle>all(),
                DateTimeZone.UTC, TYPE_MANAGER).get();

        while (recordCursor.advanceNextPosition()) {
            if (!recordCursor.isNull(0)) {
                sum += recordCursor.getLong(0);
            }
        }
        recordCursor.close();
    }
    return sum;
}

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

private static double benchmarkLoadAllReadOne(FileSplit fileSplit, Properties partitionProperties,
        HivePageSourceFactory pageSourceFactory) throws IOException {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    double sum = 0;
    for (int i = 0; i < LOOPS; i++) {
        sum = 0;

        ConnectorPageSource pageSource = pageSourceFactory.createPageSource(new Configuration(), SESSION,
                new Path(split.getPath()), split.getStart(), split.getLength(), split.getSchema(), ALL_COLUMNS,
                split.getPartitionKeys(), TupleDomain.<HiveColumnHandle>all(), DateTimeZone.UTC).get();

        while (!pageSource.isFinished()) {
            Page page = pageSource.getNextPage();
            if (page == null) {
                continue;
            }
            Block block = page.getBlock(0);
            for (int position = 0; position < block.getPositionCount(); position++) {
                if (!block.isNull(position)) {
                    sum += BIGINT.getLong(block, position);
                }
            }
        }
        pageSource.close();
    }
    return sum;
}

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

private static double benchmarkPredicatePushDown(FileSplit fileSplit, Properties partitionProperties,
        HiveRecordCursorProvider hiveRecordCursorProvider) throws IOException {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    double sum = 0;
    for (int i = 0; i < LOOPS; i++) {
        sum = 0;

        HiveRecordCursor recordCursor = hiveRecordCursorProvider.createHiveRecordCursor(split.getClientId(),
                new Configuration(), SESSION, new Path(split.getPath()), split.getStart(), split.getLength(),
                split.getSchema(), ALL_COLUMNS, split.getPartitionKeys(), TupleDomain.<HiveColumnHandle>all(),
                DateTimeZone.UTC, TYPE_MANAGER).get();

        while (recordCursor.advanceNextPosition()) {
            if (!recordCursor.isNull(0)) {
                long orderKey = recordCursor.getLong(0);
                if (orderKey != FILTER_ORDER_KEY_ID) {
                    continue;
                }
                sum += orderKey;
            }
            if (!recordCursor.isNull(1)) {
                sum += recordCursor.getLong(1);
            }
            if (!recordCursor.isNull(2)) {
                sum += recordCursor.getLong(2);
            }
            if (!recordCursor.isNull(3)) {
                sum += recordCursor.getLong(3);
            }
            if (!recordCursor.isNull(4)) {
                sum += recordCursor.getLong(4);
            }
            if (!recordCursor.isNull(5)) {
                sum += recordCursor.getDouble(5);
            }
            if (!recordCursor.isNull(6)) {
                sum += recordCursor.getDouble(6);
            }
            if (!recordCursor.isNull(7)) {
                sum += recordCursor.getDouble(7);
            }
            if (!recordCursor.isNull(8)) {
                sum += recordCursor.getSlice(8).length();
            }
            if (!recordCursor.isNull(9)) {
                sum += recordCursor.getSlice(9).length();
            }
            if (!recordCursor.isNull(10)) {
                sum += recordCursor.getSlice(10).length();
            }
            if (!recordCursor.isNull(11)) {
                sum += recordCursor.getSlice(11).length();
            }
            if (!recordCursor.isNull(12)) {
                sum += recordCursor.getSlice(12).length();
            }
            if (!recordCursor.isNull(13)) {
                sum += recordCursor.getSlice(13).length();
            }
            if (!recordCursor.isNull(14)) {
                sum += recordCursor.getSlice(14).length();
            }
            if (!recordCursor.isNull(15)) {
                sum += recordCursor.getSlice(15).length();
            }
        }
        recordCursor.close();
    }
    return sum;
}

From source file:com.facebook.presto.hive.BenchmarkHiveFileFormats.java

License:Apache License

private static double benchmarkPredicatePushDown(FileSplit fileSplit, Properties partitionProperties,
        HivePageSourceFactory pageSourceFactory) throws IOException {
    HiveSplit split = createHiveSplit(fileSplit, partitionProperties);

    double sum = 0;
    for (int i = 0; i < LOOPS; i++) {
        sum = 0;

        ConnectorPageSource pageSource = pageSourceFactory
                .createPageSource(new Configuration(), SESSION, new Path(split.getPath()), split.getStart(),
                        split.getLength(), split.getSchema(), ALL_COLUMNS, split.getPartitionKeys(),
                        TupleDomain.fromFixedValues(
                                ImmutableMap.of(Iterables.getOnlyElement(getHiveColumnHandles(ORDER_KEY)),
                                        NullableValue.of(BIGINT, FILTER_ORDER_KEY_ID))),
                        DateTimeZone.UTC)
                .get();

        while (!pageSource.isFinished()) {
            Page page = pageSource.getNextPage();
            if (page == null) {
                continue;
            }

            Block block0 = page.getBlock(0);
            Block block1 = page.getBlock(1);
            Block block2 = page.getBlock(2);
            Block block3 = page.getBlock(3);
            Block block4 = page.getBlock(4);
            Block block5 = page.getBlock(5);
            Block block6 = page.getBlock(6);
            Block block7 = page.getBlock(7);
            Block block8 = page.getBlock(8);
            Block block9 = page.getBlock(9);
            Block block10 = page.getBlock(10);
            Block block11 = page.getBlock(11);
            Block block12 = page.getBlock(12);
            Block block13 = page.getBlock(13);
            Block block14 = page.getBlock(14);
            Block block15 = page.getBlock(15);

            for (int position = 0; position < page.getPositionCount(); position++) {
                if (!block0.isNull(position)) {
                    long orderKey = BIGINT.getLong(block0, position);
                    if (orderKey != FILTER_ORDER_KEY_ID) {
                        continue;
                    }
                    sum += orderKey;
                }
                if (!block1.isNull(position)) {
                    sum += BIGINT.getLong(block1, position);
                }
                if (!block2.isNull(position)) {
                    sum += BIGINT.getLong(block2, position);
                }
                if (!block3.isNull(position)) {
                    sum += BIGINT.getLong(block3, position);
                }
                if (!block4.isNull(position)) {
                    sum += BIGINT.getLong(block4, position);
                }
                if (!block5.isNull(position)) {
                    sum += DOUBLE.getDouble(block5, position);
                }
                if (!block6.isNull(position)) {
                    sum += DOUBLE.getDouble(block6, position);
                }
                if (!block7.isNull(position)) {
                    sum += DOUBLE.getDouble(block7, position);
                }
                if (!block8.isNull(position)) {
                    sum += VARCHAR.getSlice(block8, position).length();
                }
                if (!block9.isNull(position)) {
                    sum += VARCHAR.getSlice(block9, position).length();
                }
                if (!block10.isNull(position)) {
                    sum += VARCHAR.getSlice(block10, position).length();
                }
                if (!block11.isNull(position)) {
                    sum += VARCHAR.getSlice(block11, position).length();
                }
                if (!block12.isNull(position)) {
                    sum += VARCHAR.getSlice(block12, position).length();
                }
                if (!block13.isNull(position)) {
                    sum += VARCHAR.getSlice(block13, position).length();
                }
                if (!block14.isNull(position)) {
                    sum += VARCHAR.getSlice(block14, position).length();
                }
                if (!block15.isNull(position)) {
                    sum += VARCHAR.getSlice(block15, position).length();
                }
            }
        }
        pageSource.close();
    }
    return sum;
}

From source file:com.facebook.presto.hive.DwrfHiveRecordCursor.java

License:Apache License

public DwrfHiveRecordCursor(RecordReader recordReader, long totalBytes, Properties splitSchema,
        List<HivePartitionKey> partitionKeys, List<HiveColumnHandle> columns, DateTimeZone hiveStorageTimeZone,
        DateTimeZone sessionTimeZone, TypeManager typeManager) {
    checkNotNull(recordReader, "recordReader is null");
    checkArgument(totalBytes >= 0, "totalBytes is negative");
    checkNotNull(splitSchema, "splitSchema is null");
    checkNotNull(partitionKeys, "partitionKeys is null");
    checkNotNull(columns, "columns is null");
    checkNotNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
    checkNotNull(sessionTimeZone, "sessionTimeZone is null");

    this.recordReader = recordReader;
    this.totalBytes = totalBytes;
    this.sessionTimeZone = sessionTimeZone;

    int size = columns.size();

    this.names = new String[size];
    this.types = new Type[size];
    this.hiveTypes = new HiveType[size];

    this.fieldInspectors = new ObjectInspector[size];

    this.hiveColumnIndexes = new int[size];

    this.isPartitionColumn = new boolean[size];

    this.loaded = new boolean[size];
    this.booleans = new boolean[size];
    this.longs = new long[size];
    this.doubles = new double[size];
    this.slices = new Slice[size];
    this.nulls = new boolean[size];

    // DWRF uses an epoch sensitive to the JVM default timezone, so we need to correct for this
    long hiveStorageCorrection = new DateTime(2015, 1, 1, 0, 0, hiveStorageTimeZone).getMillis()
            - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
    long jvmCorrection = new DateTime(2015, 1, 1, 0, 0).getMillis()
            - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
    timeZoneCorrection = hiveStorageCorrection - jvmCorrection;

    // initialize data columns
    StructObjectInspector rowInspector = getTableObjectInspector(splitSchema);

    for (int i = 0; i < columns.size(); i++) {
        HiveColumnHandle column = columns.get(i);

        names[i] = column.getName();
        types[i] = typeManager.getType(column.getTypeName());
        hiveTypes[i] = column.getHiveType();

        if (!column.isPartitionKey()) {
            fieldInspectors[i] = rowInspector.getStructFieldRef(column.getName()).getFieldObjectInspector();
        }

        hiveColumnIndexes[i] = column.getHiveColumnIndex();
        isPartitionColumn[i] = column.isPartitionKey();
    }

    // parse requested partition columns
    Map<String, HivePartitionKey> partitionKeysByName = uniqueIndex(partitionKeys,
            HivePartitionKey.nameGetter());
    for (int columnIndex = 0; columnIndex < columns.size(); columnIndex++) {
        HiveColumnHandle column = columns.get(columnIndex);
        if (column.isPartitionKey()) {
            HivePartitionKey partitionKey = partitionKeysByName.get(column.getName());
            checkArgument(partitionKey != null, "Unknown partition key %s", column.getName());

            byte[] bytes = partitionKey.getValue().getBytes(Charsets.UTF_8);

            Type type = types[columnIndex];
            if (HiveUtil.isHiveNull(bytes)) {
                nulls[columnIndex] = true;
            } else if (BOOLEAN.equals(type)) {
                if (isTrue(bytes, 0, bytes.length)) {
                    booleans[columnIndex] = true;
                } else if (isFalse(bytes, 0, bytes.length)) {
                    booleans[columnIndex] = false;
                } else {
                    String valueString = new String(bytes, Charsets.UTF_8);
                    throw new IllegalArgumentException(
                            String.format("Invalid partition value '%s' for BOOLEAN partition key %s",
                                    valueString, names[columnIndex]));
                }
            } else if (BIGINT.equals(type)) {
                if (bytes.length == 0) {
                    throw new IllegalArgumentException(String.format(
                            "Invalid partition value '' for BIGINT partition key %s", names[columnIndex]));
                }
                longs[columnIndex] = parseLong(bytes, 0, bytes.length);
            } else if (DOUBLE.equals(type)) {
                if (bytes.length == 0) {
                    throw new IllegalArgumentException(String.format(
                            "Invalid partition value '' for DOUBLE partition key %s", names[columnIndex]));
                }
                doubles[columnIndex] = parseDouble(bytes, 0, bytes.length);
            } else if (VARCHAR.equals(type)) {
                slices[columnIndex] = Slices.wrappedBuffer(Arrays.copyOf(bytes, bytes.length));
            } else if (DATE.equals(type)) {
                longs[columnIndex] = ISODateTimeFormat.date().withZone(DateTimeZone.UTC)
                        .parseMillis(partitionKey.getValue());
            } else if (TIMESTAMP.equals(type)) {
                longs[columnIndex] = parseHiveTimestamp(partitionKey.getValue(), hiveStorageTimeZone);
            } else {
                throw new UnsupportedOperationException("Unsupported column type: " + type);
            }
        }
    }
}
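
The timezone correction computed in the constructor above can be seen in isolation. The following is a standalone sketch (the class and method names are hypothetical, not part of Presto) that reproduces the arithmetic: the offset of the Hive storage zone from UTC at the wall-clock time 2015-01-01 00:00, minus the offset of the JVM default zone from UTC at the same wall-clock time.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

// Hypothetical helper that isolates the correction used by DwrfHiveRecordCursor above
public class TimeZoneCorrectionExample {
    static long timeZoneCorrection(DateTimeZone hiveStorageTimeZone) {
        // Millis difference between 2015-01-01 00:00 in the storage zone and the same wall time in UTC
        long hiveStorageCorrection = new DateTime(2015, 1, 1, 0, 0, hiveStorageTimeZone).getMillis()
                - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
        // Millis difference between 2015-01-01 00:00 in the JVM default zone and the same wall time in UTC
        long jvmCorrection = new DateTime(2015, 1, 1, 0, 0).getMillis()
                - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
        // Positive when the JVM default zone is ahead of the storage zone
        return hiveStorageCorrection - jvmCorrection;
    }

    public static void main(String[] args) {
        // Example: data written in UTC, read on a machine whose default zone may differ
        System.out.println(timeZoneCorrection(DateTimeZone.UTC));
    }
}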

From source file:com.facebook.presto.hive.orc.DwrfHiveRecordCursor.java

License:Apache License

public DwrfHiveRecordCursor(RecordReader recordReader, long totalBytes, Properties splitSchema,
        List<HivePartitionKey> partitionKeys, List<HiveColumnHandle> columns, DateTimeZone hiveStorageTimeZone,
        TypeManager typeManager) {
    requireNonNull(recordReader, "recordReader is null");
    checkArgument(totalBytes >= 0, "totalBytes is negative");
    requireNonNull(splitSchema, "splitSchema is null");
    requireNonNull(partitionKeys, "partitionKeys is null");
    requireNonNull(columns, "columns is null");
    requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");

    this.recordReader = recordReader;
    this.totalBytes = totalBytes;

    int size = columns.size();

    this.names = new String[size];
    this.types = new Type[size];
    this.hiveTypes = new HiveType[size];

    this.fieldInspectors = new ObjectInspector[size];

    this.hiveColumnIndexes = new int[size];

    this.isPartitionColumn = new boolean[size];

    this.loaded = new boolean[size];
    this.booleans = new boolean[size];
    this.longs = new long[size];
    this.doubles = new double[size];
    this.slices = new Slice[size];
    this.objects = new Object[size];
    this.nulls = new boolean[size];

    // DWRF uses an epoch sensitive to the JVM default timezone, so we need to correct for this
    long hiveStorageCorrection = new DateTime(2015, 1, 1, 0, 0, hiveStorageTimeZone).getMillis()
            - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
    long jvmCorrection = new DateTime(2015, 1, 1, 0, 0).getMillis()
            - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
    timeZoneCorrection = hiveStorageCorrection - jvmCorrection;

    // initialize data columns
    StructObjectInspector rowInspector = getTableObjectInspector(splitSchema);

    for (int i = 0; i < columns.size(); i++) {
        HiveColumnHandle column = columns.get(i);

        names[i] = column.getName();
        types[i] = typeManager.getType(column.getTypeSignature());
        hiveTypes[i] = column.getHiveType();

        if (!column.isPartitionKey()) {
            fieldInspectors[i] = rowInspector.getStructFieldRef(column.getName()).getFieldObjectInspector();
        }

        hiveColumnIndexes[i] = column.getHiveColumnIndex();
        isPartitionColumn[i] = column.isPartitionKey();
    }

    // parse requested partition columns
    Map<String, HivePartitionKey> partitionKeysByName = uniqueIndex(partitionKeys, HivePartitionKey::getName);
    for (int columnIndex = 0; columnIndex < columns.size(); columnIndex++) {
        HiveColumnHandle column = columns.get(columnIndex);
        if (column.isPartitionKey()) {
            HivePartitionKey partitionKey = partitionKeysByName.get(column.getName());
            checkArgument(partitionKey != null, "Unknown partition key %s", column.getName());

            byte[] bytes = partitionKey.getValue().getBytes(UTF_8);

            String name = names[columnIndex];
            Type type = types[columnIndex];
            if (HiveUtil.isHiveNull(bytes)) {
                nulls[columnIndex] = true;
            } else if (BOOLEAN.equals(type)) {
                booleans[columnIndex] = booleanPartitionKey(partitionKey.getValue(), name);
            } else if (BIGINT.equals(type)) {
                longs[columnIndex] = bigintPartitionKey(partitionKey.getValue(), name);
            } else if (DOUBLE.equals(type)) {
                doubles[columnIndex] = doublePartitionKey(partitionKey.getValue(), name);
            } else if (VARCHAR.equals(type)) {
                slices[columnIndex] = Slices.wrappedBuffer(Arrays.copyOf(bytes, bytes.length));
            } else if (DATE.equals(type)) {
                longs[columnIndex] = datePartitionKey(partitionKey.getValue(), name);
            } else if (TIMESTAMP.equals(type)) {
                longs[columnIndex] = timestampPartitionKey(partitionKey.getValue(), hiveStorageTimeZone, name);
            } else {
                throw new PrestoException(NOT_SUPPORTED, format(
                        "Unsupported column type %s for partition key: %s", type.getDisplayName(), name));
            }
        }
    }
}

From source file:com.facebook.presto.hive.orc.OrcHiveRecordCursor.java

License:Apache License

public OrcHiveRecordCursor(RecordReader recordReader, long totalBytes, Properties splitSchema,
        List<HivePartitionKey> partitionKeys, List<HiveColumnHandle> columns, DateTimeZone hiveStorageTimeZone,
        TypeManager typeManager) {
    requireNonNull(recordReader, "recordReader is null");
    checkArgument(totalBytes >= 0, "totalBytes is negative");
    requireNonNull(splitSchema, "splitSchema is null");
    requireNonNull(partitionKeys, "partitionKeys is null");
    requireNonNull(columns, "columns is null");
    requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");

    this.recordReader = recordReader;
    this.totalBytes = totalBytes;

    int size = columns.size();

    this.names = new String[size];
    this.types = new Type[size];
    this.hiveTypes = new HiveType[size];

    this.fieldInspectors = new ObjectInspector[size];

    this.hiveColumnIndexes = new int[size];

    this.isPartitionColumn = new boolean[size];

    this.loaded = new boolean[size];
    this.booleans = new boolean[size];
    this.longs = new long[size];
    this.doubles = new double[size];
    this.slices = new Slice[size];
    this.objects = new Object[size];
    this.nulls = new boolean[size];

    // ORC stores timestamps relative to 2015-01-01 00:00:00 but in the timezone of the writer
    // When reading back a timestamp, the Hive ORC reader will return an epoch in this machine's timezone
    // We must correct for the difference between the writer's timezone and this machine's
    // timezone (on 2015-01-01)
    long hiveStorageCorrection = new DateTime(2015, 1, 1, 0, 0, hiveStorageTimeZone).getMillis()
            - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
    long jvmCorrection = new DateTime(2015, 1, 1, 0, 0).getMillis()
            - new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).getMillis();
    timeZoneCorrection = hiveStorageCorrection - jvmCorrection;

    // initialize data columns
    StructObjectInspector rowInspector = getTableObjectInspector(splitSchema);

    for (int i = 0; i < columns.size(); i++) {
        HiveColumnHandle column = columns.get(i);

        names[i] = column.getName();
        types[i] = typeManager.getType(column.getTypeSignature());
        hiveTypes[i] = column.getHiveType();

        if (!column.isPartitionKey()) {
            fieldInspectors[i] = rowInspector.getStructFieldRef(column.getName()).getFieldObjectInspector();
        }

        hiveColumnIndexes[i] = column.getHiveColumnIndex();
        isPartitionColumn[i] = column.isPartitionKey();
    }

    // parse requested partition columns
    Map<String, HivePartitionKey> partitionKeysByName = uniqueIndex(partitionKeys, HivePartitionKey::getName);
    for (int columnIndex = 0; columnIndex < columns.size(); columnIndex++) {
        HiveColumnHandle column = columns.get(columnIndex);
        if (column.isPartitionKey()) {
            HivePartitionKey partitionKey = partitionKeysByName.get(column.getName());
            checkArgument(partitionKey != null, "Unknown partition key %s", column.getName());

            byte[] bytes = partitionKey.getValue().getBytes(UTF_8);

            String name = names[columnIndex];
            Type type = types[columnIndex];
            if (HiveUtil.isHiveNull(bytes)) {
                nulls[columnIndex] = true;
            } else if (type.equals(BOOLEAN)) {
                booleans[columnIndex] = booleanPartitionKey(partitionKey.getValue(), name);
            } else if (type.equals(BIGINT)) {
                longs[columnIndex] = bigintPartitionKey(partitionKey.getValue(), name);
            } else if (type.equals(DOUBLE)) {
                doubles[columnIndex] = doublePartitionKey(partitionKey.getValue(), name);
            } else if (type.equals(VARCHAR)) {
                slices[columnIndex] = Slices.wrappedBuffer(bytes);
            } else if (type.equals(DATE)) {
                longs[columnIndex] = datePartitionKey(partitionKey.getValue(), name);
            } else if (type.equals(TIMESTAMP)) {
                longs[columnIndex] = timestampPartitionKey(partitionKey.getValue(), hiveStorageTimeZone, name);
            } else {
                throw new PrestoException(NOT_SUPPORTED, format(
                        "Unsupported column type %s for partition key: %s", type.getDisplayName(), name));
            }
        }
    }
}