Example usage for org.apache.spark.memory MemoryMode ON_HEAP

List of usage examples for org.apache.spark.memory MemoryMode ON_HEAP

Introduction

In this page you can find the example usage for org.apache.spark.memory MemoryMode ON_HEAP.

Prototype

MemoryMode ON_HEAP

Click the Source Link below to view the source code for org.apache.spark.memory MemoryMode ON_HEAP.

Usage

From source file: com.splicemachine.orc.block.AbstractColumnBlock.java

License: Open Source License

@Override
public void setPartitionNull(int size) {
    // A missing partition value is materialized as `size` null entries
    // in a freshly allocated on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendNulls(size);
}

From source file: com.splicemachine.orc.block.BooleanColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    // Expand the single partition string into `size` repeated boolean cells
    // backed by a new on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendBooleans(size, Boolean.parseBoolean(value));
}

From source file: com.splicemachine.orc.block.ByteColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    // Expand the single partition string into `size` repeated byte cells
    // backed by a new on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendBytes(size, Byte.parseByte(value));
}

From source file: com.splicemachine.orc.block.DoubleColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    // Expand the single partition string into `size` repeated double cells
    // backed by a new on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendDoubles(size, Double.parseDouble(value));
}

From source file: com.splicemachine.orc.block.FloatColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    // Expand the single partition string into `size` repeated float cells
    // backed by a new on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendFloats(size, Float.parseFloat(value));
}

From source file: com.splicemachine.orc.block.IntegerColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    // Expand the single partition string into `size` repeated int cells
    // backed by a new on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendInts(size, Integer.parseInt(value));
}

From source file: com.splicemachine.orc.block.LongColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    // Expand the single partition string into `size` repeated long cells
    // backed by a new on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendLongs(size, Long.parseLong(value));
}

From source file: com.splicemachine.orc.block.ShortColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    // Expand the single partition string into `size` repeated short cells
    // backed by a new on-heap vector.
    ColumnVector freshVector = ColumnVector.allocate(size, dataType, MemoryMode.ON_HEAP);
    columnVector = freshVector;
    freshVector.appendShorts(size, Short.parseShort(value));
}

From source file: com.splicemachine.orc.block.StringColumnBlock.java

License: Open Source License

@Override
public void setPartitionValue(String value, int size) {
    try {
        // Dictionary-encode the repeated partition string: the ids vector holds
        // `size` zeros that all reference the single dictionary entry built from
        // the UTF-8 bytes of `value`.
        columnVector = ColumnVector.allocate(size, DataTypes.IntegerType, MemoryMode.ON_HEAP);
        // StandardCharsets.UTF_8 replaces getBytes("UTF-8"): no runtime charset-name
        // lookup and no checked UnsupportedEncodingException.
        SliceDictionary dictionary = new SliceDictionary(
                new Slice[] { Slices.wrappedBuffer(value.getBytes(java.nio.charset.StandardCharsets.UTF_8)) });
        columnVector.setDictionary(dictionary);
        columnVector.reserveDictionaryIds(size);
        columnVector.getDictionaryIds().appendInts(size, 0);
    } catch (Exception e) {
        // Preserve the original contract: any failure surfaces as an unchecked exception.
        throw new RuntimeException(e);
    }
}

From source file: com.splicemachine.orc.OrcRecordReader.java

License: Open Source License

/**
 * Builds a Spark {@code ColumnarBatch} for the current batch of rows, wiring each
 * schema field to a lazily loaded column block.
 *
 * <p>Column order is: present (included) columns first, then partition columns
 * (partition columns must be declared last in the schema).
 *
 * @param schema the Spark schema describing the columns to populate
 * @return an on-heap batch of {@code currentBatchSize} rows
 * @throws IOException declared for callers; no I/O is performed directly here —
 *         column data is loaded lazily via the loader implementations
 */
public ColumnarBatch getColumnarBatch(StructType schema) throws IOException {
    ColumnarBatch columnarBatch = ColumnarBatch.allocate(schema, MemoryMode.ON_HEAP, currentBatchSize);

    // This is the place for predicate evaluation on the column level.

    // Count(*) optimization: an empty schema needs only the row count, no columns.
    if (schema.fields().length == 0) {
        columnarBatch.setNumRows(currentBatchSize);
        return columnarBatch;
    }
    StructField[] fields = schema.fields();
    ColumnBlock[] columnBlocks = new ColumnBlock[fields.length];

    // Populate present columns: each included column gets a lazy block that reads
    // from its stream reader only when the column vector is actually requested.
    int i = 0;
    for (int column : presentColumns) {
        columnBlocks[i] = new LazyColumnBlock(
                new LazyIncludedColumnBlockLoaderImpl(streamReaders[column], fields[i].dataType()));
        i++;
    }

    // Populate possibly missing columns (TODO - JL): currently disabled.
    /*        int j = 0;
            while(i+partitionValues.size() + j != fields.length) {
    columnBlocks[i+j] = new LazyColumnBlock(new LazyNullColumnBlockLoaderImpl(fields[i+j].dataType(),currentBatchSize));
    j++;
            }
    */
    // Populate partition columns (partition columns have to be declared last).
    // k walks all partition ids; m counts only the included ones, so blocks land
    // immediately after the i present columns with no gaps.
    for (int k = 0, m = 0; k < partitionIds.size(); k++) {
        if (includedColumns.containsKey(partitionIds.get(k))) {
            columnBlocks[i + m] = new LazyColumnBlock(new LazyPartitionColumnBlockLoaderImpl(
                    fields[i + m].dataType(), currentBatchSize, partitionValues.get(k)));
            m++;
        }
    }

    // Transfer every assembled block's vector into the batch, in column order.
    int l = 0;
    for (ColumnBlock columnBlock : columnBlocks) {
        columnarBatch.setColumn(l, columnBlock.getColumnVector());
        l++;
    }
    columnarBatch.setNumRows(currentBatchSize);
    return columnarBatch;
}