Example usage for com.google.common.primitives Doubles BYTES

List of usage examples for com.google.common.primitives Doubles BYTES

Introduction

On this page you can find example usages of com.google.common.primitives Doubles.BYTES.

Prototype

int BYTES

To view the source code for com.google.common.primitives Doubles BYTES, click the Source link below.

Document

The number of bytes required to represent a primitive double value. Note: Guava has deprecated this constant in favor of the JDK's Double.BYTES (available since Java 8).

Usage

From source file:io.druid.segment.data.BlockLayoutIndexedDoubleSupplier.java

public BlockLayoutIndexedDoubleSupplier(int totalSize, int sizePer, ByteBuffer fromBuffer, ByteOrder byteOrder,
        CompressedObjectStrategy.CompressionStrategy strategy, SmooshedFileMapper fileMapper) {

    baseDoubleBuffers = GenericIndexed.read(fromBuffer,
            VSizeCompressedObjectStrategy.getBufferForOrder(byteOrder, strategy, sizePer * Doubles.BYTES),
            fileMapper);/*from  w  w  w  .ja  v a2  s  . co  m*/

    this.totalSize = totalSize;
    this.sizePer = sizePer;
}

From source file:io.druid.segment.data.CompressedDoubleBufferObjectStrategy.java

private CompressedDoubleBufferObjectStrategy(ByteOrder order, CompressionStrategy compression, int sizePer) {
    // Delegates to the superclass with an anonymous converter that bridges
    // raw ByteBuffers and DoubleBuffers for this strategy's (de)serialization.
    super(order, new BufferConverter<DoubleBuffer>() {
        @Override
        public DoubleBuffer convert(ByteBuffer buf) {
            // View the raw bytes as doubles; no copy is made.
            return buf.asDoubleBuffer();
        }

        @Override
        public int compare(DoubleBuffer lhs, DoubleBuffer rhs) {
            // Natural ordering, with nulls sorting first.
            return Comparators.<DoubleBuffer>naturalNullsFirst().compare(lhs, rhs);
        }

        @Override
        public int sizeOf(int count) {
            // Raw byte footprint of `count` doubles.
            return count * Doubles.BYTES;
        }

        @Override
        public DoubleBuffer combine(ByteBuffer into, DoubleBuffer from) {
            // Copy `from`'s remaining doubles into the destination buffer.
            return into.asDoubleBuffer().put(from);
        }
    }, compression, sizePer);
}

From source file:io.druid.segment.data.EntireLayoutDoubleSupplierSerializer.java

public EntireLayoutDoubleSupplierSerializer(IOPeon ioPeon, String filenameBase, ByteOrder order) {
    this.ioPeon = ioPeon;
    this.metaFile = filenameBase + ".format";
    this.valueFile = filenameBase + ".value";

    // Scratch buffer sized for a single double, honoring the requested byte order.
    this.orderBuffer = ByteBuffer.allocate(Doubles.BYTES);
    this.orderBuffer.order(order);
}

From source file:io.druid.segment.data.BlockLayoutDoubleSupplierSerializer.java

public BlockLayoutDoubleSupplierSerializer(IOPeon ioPeon, String filenameBase, ByteOrder order,
        CompressedObjectStrategy.CompressionStrategy compression) {
    this.ioPeon = ioPeon;
    this.sizePer = CompressedPools.BUFFER_SIZE / Doubles.BYTES;
    this.flattener = new GenericIndexedWriter<>(ioPeon, filenameBase,
            CompressedDoubleBufferObjectStrategy.getBufferForOrder(order, compression, sizePer));
    this.metaFile = filenameBase + ".format";
    this.compression = compression;

    endBuffer = DoubleBuffer.allocate(sizePer);
    endBuffer.mark();/*www  .j ava  2 s. c  o m*/
}

From source file:io.druid.query.extraction.BucketExtractionFn.java

@Override
public byte[] getCacheKey() {
    // Cache key layout: [type id byte][size double][offset double].
    final ByteBuffer key = ByteBuffer.allocate(1 + 2 * Doubles.BYTES);
    key.put(ExtractionCacheHelper.CACHE_TYPE_ID_BUCKET);
    key.putDouble(size);
    key.putDouble(offset);
    return key.array();
}

From source file:io.druid.query.aggregation.variance.VarianceAggregatorCollector.java

static int getMaxIntermediateSize() {
    // Worst-case serialized footprint of the collector state: one long (count)
    // followed by two doubles (sum, nvariance) — see toByteBuffer().
    // Long.BYTES / Double.BYTES (JDK 8+) replace Guava's deprecated
    // Longs.BYTES / Doubles.BYTES; all of them evaluate to 8.
    return Long.BYTES + 2 * Double.BYTES;
}

From source file:com.metamx.druid.aggregation.DoubleSumAggregatorFactory.java

@Override
public int getMaxIntermediateSize() {
    // Intermediate state is a single running double sum (8 bytes).
    // Double.BYTES (JDK 8+) replaces Guava's deprecated Doubles.BYTES.
    return Double.BYTES;
}

From source file:co.cask.tigon.sql.io.GDATDecoder.java

@Override
public void skipDouble() throws IOException {
    // Advance the read cursor past one encoded double without decoding it.
    final int newPosition = dataRecord.position() + Doubles.BYTES;
    dataRecord.position(newPosition);
}

From source file:co.cask.tigon.sql.io.GDATEncoder.java

@Override
public Encoder writeDouble(double v) throws IOException {
    // Write all 8 bytes of the double through a reused scratch ByteBuffer.
    sharedByteBuffer.clear();
    sharedByteBuffer.putDouble(v);
    sharedByteBuffer.flip();
    // Only the first Doubles.BYTES bytes of the backing array are valid output.
    buffer.write(sharedByteBuffer.array(), 0, Doubles.BYTES);
    return this;
}

From source file:io.druid.query.aggregation.variance.VarianceAggregatorCollector.java

public ByteBuffer toByteBuffer() {
    // Serialize collector state as [count:long][sum:double][nvariance:double].
    // NOTE(review): the buffer is returned with its position at the limit
    // (no flip/rewind here) — presumably callers rewind before reading; verify.
    return ByteBuffer.allocate(Longs.BYTES + Doubles.BYTES + Doubles.BYTES).putLong(count).putDouble(sum)
            .putDouble(nvariance);
}