Example usage for org.apache.hadoop.io DataInputBuffer reset

List of usage examples for org.apache.hadoop.io DataInputBuffer reset

Introduction

On this page you can find usage examples for org.apache.hadoop.io DataInputBuffer reset.

Prototype

public void reset(byte[] input, int start, int length) 

Document

Resets the data that the buffer reads.
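
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of the typical pattern: serialize a few values into a DataOutputBuffer, then hand its backing array to reset(byte[], int, int) and read the values back. Class and variable names are illustrative only.

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class DataInputBufferResetSketch {
    public static void main(String[] args) throws IOException {
        // Serialize a few values into an in-memory output buffer.
        DataOutputBuffer out = new DataOutputBuffer();
        out.writeInt(42);
        out.writeLong(123L);

        // reset(byte[] input, int start, int length) points the reader at the
        // valid region of the backing array without copying it.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), 0, out.getLength());

        System.out.println(in.readInt());  // prints 42
        System.out.println(in.readLong()); // prints 123

        in.close();
        out.close();
    }
}

The same buffer can be reused by calling reset again with a different array or a different window, which is what most of the examples below do.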

Usage

From source file:FormatStorageBasicTest.java

License:Open Source License

public void testChunkToRecord() {
    try {
        String fileName = prefix + "testChunkToRecord";
        Path path = new Path(fileName);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataOutputStream out = fs.create(path);

        short fieldNum = 3;
        Record record = new Record(fieldNum);

        byte[] lb = new byte[ConstVar.Sizeof_Long];
        long l = 4;
        Util.long2bytes(lb, l);
        FieldValue fieldValue4 = new FieldValue(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, lb, (short) 13);
        record.addValue(fieldValue4);

        byte[] fb = new byte[ConstVar.Sizeof_Float];
        float f = (float) 5.5;
        Util.float2bytes(fb, f);
        FieldValue fieldValue5 = new FieldValue(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, fb,
                (short) 14);
        record.addValue(fieldValue5);

        String str = "hello konten";
        FieldValue fieldValue7 = new FieldValue(ConstVar.FieldType_String, (short) str.length(), str.getBytes(),
                (short) 16);
        record.addValue(fieldValue7);

        DataChunk chunk = new DataChunk(record);

        out.write(chunk.values, 0, (int) chunk.len);

        if (out.getPos() != chunk.len) {
            fail("error pos:" + out.getPos() + "chunk.len:" + chunk.len);
        }
        out.close();

        FSDataInputStream in = fs.open(path);

        FixedBitSet bitSet = new FixedBitSet(fieldNum);
        in.read(bitSet.bytes(), 0, bitSet.size());
        for (int i = 0; i < fieldNum; i++) {
            if (!bitSet.get(i)) {
                fail("should set:" + i);
            }
        }

        byte[] value = new byte[8];
        in.readFully(value);
        long lv = Util.bytes2long(value, 0, 8);
        if (lv != 4) {
            fail("error long value:" + lv);
        }

        value = new byte[4];
        in.readFully(value);
        float fv = Util.bytes2float(value, 0);
        if (fv != 5.5) {
            fail("error float value:" + fv);
        }

        short strLen = in.readShort();
        if (strLen != str.length()) {
            fail("error strLen:" + strLen);
        }
        value = new byte[strLen];
        in.readFully(value);
        String strv = new String(value);
        if (!strv.equals(str)) {
            fail("error strv:" + strv);
        }

        FieldMap fieldMap = new FieldMap();
        fieldMap.addField(new Field(ConstVar.FieldType_Long, 8, (short) 13));
        fieldMap.addField(new Field(ConstVar.FieldType_Float, 4, (short) 14));
        fieldMap.addField(new Field(ConstVar.FieldType_String, 8, (short) 16));

        in.seek(0);
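        // 1 bitset byte + 8 (long) + 4 (float) + 2 (string length) + 12 (string bytes)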
        int valuelen = 1 + 8 + 4 + 2 + 12;
        DataChunk chunk2 = new DataChunk(fieldNum);

        ArrayList<byte[]> arrayList = new ArrayList<byte[]>(64);
        DataInputBuffer inputBuffer = new DataInputBuffer();
        byte[] buf = new byte[valuelen];
        in.read(buf, 0, valuelen);
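        // reset() points inputBuffer at the bytes just read, without copying,
        // so DataChunk.unpersistent can decode the chunk through DataInput.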
        inputBuffer.reset(buf, 0, valuelen);
        chunk2.unpersistent(0, valuelen, inputBuffer);
        Record record2 = chunk2.toRecord(fieldMap, true, arrayList);

        bitSet = chunk2.fixedBitSet;
        if (bitSet.length() != (fieldNum / 8 + 1) * 8) {
            fail("bitSet.len:" + bitSet.length());
        }

        for (int i = 0; i < fieldNum; i++) {
            if (!bitSet.get(i)) {
                fail("bitSet should set:" + i);
            }
        }
        record = record2;

        int index = 0;
        byte type = record2.fieldValues().get(index).type;
        int len = record2.fieldValues().get(index).len;
        short idx = record2.fieldValues().get(index).idx;
        value = record2.fieldValues().get(index).value;
        if (len != ConstVar.Sizeof_Long) {
            fail("error len:" + len);
        }
        if (type != ConstVar.FieldType_Long) {
            fail("error fieldType:" + type);
        }
        if (idx != 13) {
            fail("error idx:" + idx);
        }
        if (value == null) {
            fail("error value null");
        }

        lv = Util.bytes2long(value, 0, len);
        if (lv != 4) {
            fail("error long value:" + lv);
        }

        index = 1;
        type = record.fieldValues().get(index).type;
        len = record.fieldValues().get(index).len;
        idx = record.fieldValues().get(index).idx;
        value = record.fieldValues().get(index).value;

        if (len != ConstVar.Sizeof_Float) {
            fail("error len:" + len);
        }
        if (type != ConstVar.FieldType_Float) {
            fail("error fieldType:" + type);
        }
        if (idx != 14) {
            fail("error idx:" + idx);
        }
        if (value == null) {
            fail("error value null");
        }
        fv = Util.bytes2float(value, 0);
        if (fv != 5.5) {
            fail("error float value:" + fv);
        }

        index = 2;
        type = record.fieldValues().get(index).type;
        len = record.fieldValues().get(index).len;
        idx = record.fieldValues().get(index).idx;
        value = record.fieldValues().get(index).value;

        str = "hello konten";
        if (len != str.length()) {
            fail("error len:" + len);
        }
        if (type != ConstVar.FieldType_String) {
            fail("error fieldType:" + type);
        }
        if (idx != 16) {
            fail("error idx:" + idx);
        }
        if (value == null) {
            fail("error value null");
        }
        String sv = new String(value, 0, len);
        if (!str.equals(sv)) {
            fail("error string value:" + sv);
        }

    } catch (Exception e) {
        fail("should not exception:" + e.getMessage());
    }
}

From source file:FormatStorageBasicTest.java

License:Open Source License

public void testChunkToRecordNull() {
    try {
        String fileName = prefix + "testChunkToRecord2";
        Path path = new Path(fileName);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataOutputStream out = fs.create(path);

        short fieldNum = 3;
        Record record = new Record(fieldNum);

        byte[] lb = new byte[ConstVar.Sizeof_Long];
        long l = 4;
        Util.long2bytes(lb, l);
        FieldValue fieldValue4 = new FieldValue(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, lb, (short) 13);
        record.addValue(fieldValue4);

        FieldValue fieldValue5 = new FieldValue(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, null,
                (short) 14);
        record.addValue(fieldValue5);

        String str = "hello konten";
        FieldValue fieldValue7 = new FieldValue(ConstVar.FieldType_String, (short) str.length(), str.getBytes(),
                (short) 16);
        record.addValue(fieldValue7);

        DataChunk chunk = new DataChunk(record);

        out.write(chunk.values, 0, (int) chunk.len);

        if (out.getPos() != chunk.len) {
            fail("error pos:" + out.getPos() + "chunk.len:" + chunk.len);
        }
        out.close();

        FSDataInputStream in = fs.open(path);

        FixedBitSet bitSet = new FixedBitSet(fieldNum);
        in.read(bitSet.bytes(), 0, bitSet.size());

        for (int i = 0; i < fieldNum; i++) {
            if (bitSet.get(1)) {
                fail("shoud not set");
            }

            if (!bitSet.get(i) && i != 1) {
                fail("should set:" + i);
            }
        }

        byte[] value = new byte[8];
        in.readFully(value);
        long lv = Util.bytes2long(value, 0, 8);
        if (lv != 4) {
            fail("error long value:" + lv);
        }

        in.readFloat();

        short strLen = in.readShort();
        if (strLen != str.length()) {
            fail("error strLen:" + strLen);
        }
        value = new byte[strLen];
        in.readFully(value);
        String strv = new String(value, 0, strLen);
        if (!strv.equals(str)) {
            fail("error strv:" + strv);
        }

        FieldMap fieldMap = new FieldMap();
        fieldMap.addField(new Field(ConstVar.FieldType_Long, 8, (short) 13));
        fieldMap.addField(new Field(ConstVar.FieldType_Float, 4, (short) 14));
        fieldMap.addField(new Field(ConstVar.FieldType_String, 8, (short) 16));

        in.seek(0);
        int valuelen = 1 + 8 + 4 + 2 + 12;
        DataChunk chunk2 = new DataChunk(fieldNum);

        ArrayList<byte[]> arrayList = new ArrayList<byte[]>(64);

        DataInputBuffer inputBuffer = new DataInputBuffer();
        byte[] buf = new byte[valuelen];
        in.read(buf, 0, valuelen);
        inputBuffer.reset(buf, 0, valuelen);
        chunk2.unpersistent(0, valuelen, inputBuffer);
        Record record2 = chunk2.toRecord(fieldMap, true, arrayList);

        bitSet = chunk2.fixedBitSet;

        for (int i = 0; i < fieldNum; i++) {
            if (bitSet.get(1)) {
                fail("shoud not set");
            }

            if (!bitSet.get(i) && i != 1) {
                fail("should set:" + i);
            }
        }
        record = record2;

        int index = 0;
        byte type = record2.fieldValues().get(index).type;
        int len = record2.fieldValues().get(index).len;
        short idx = record2.fieldValues().get(index).idx;
        value = record2.fieldValues().get(index).value;
        if (len != ConstVar.Sizeof_Long) {
            fail("error len:" + len);
        }
        if (type != ConstVar.FieldType_Long) {
            fail("error fieldType:" + type);
        }
        if (idx != 13) {
            fail("error idx:" + idx);
        }
        if (value == null) {
            fail("error value null");
        }
        lv = Util.bytes2long(value, 0, 8);
        if (lv != 4) {
            fail("error long value:" + lv);
        }

        index = 1;
        type = record.fieldValues().get(index).type;
        len = record.fieldValues().get(index).len;
        idx = record.fieldValues().get(index).idx;
        value = record.fieldValues().get(index).value;

        if (len != ConstVar.Sizeof_Float) {
            fail("error len:" + len);
        }
        if (type != ConstVar.FieldType_Float) {
            fail("error fieldType:" + type);
        }
        if (idx != 14) {
            fail("error idx:" + idx);
        }
        if (value != null) {
            fail("error value not null");
        }

        index = 2;
        type = record.fieldValues().get(index).type;
        len = record.fieldValues().get(index).len;
        idx = record.fieldValues().get(index).idx;
        value = record.fieldValues().get(index).value;

        str = "hello konten";
        if (len != str.length()) {
            fail("error len:" + len);
        }
        if (type != ConstVar.FieldType_String) {
            fail("error fieldType:" + type);
        }
        if (idx != 16) {
            fail("error idx:" + idx);
        }
        if (value == null) {
            fail("error value null");
        }
        String sv = new String(value, 0, len);
        if (!str.equals(sv)) {
            fail("error string value:" + sv);
        }

    } catch (Exception e) {
        e.printStackTrace();
        fail("should not exception:" + e.getMessage());
    }
}

From source file:FormatStorageBasicTest.java

License:Open Source License

public void testUnpersistenUnitVar() {
    try {
        FieldMap fieldMap = new FieldMap();
        fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
        fieldMap.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
        fieldMap.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
        fieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
        fieldMap.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
        fieldMap.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));
        fieldMap.addField(new Field(ConstVar.FieldType_String, 0, (short) 6));

        Head head = new Head();
        head.setFieldMap(fieldMap);

        Configuration conf = new Configuration();
        FormatDataFile fd = new FormatDataFile(conf);
        fd.setWorkStatus(ConstVar.WS_Read);
        fd.head = head;

        IndexInfo info = new IndexInfo();
        info.offset = 0;
        info.len = 100 * full7chunkLen + 100 * 8 + ConstVar.DataChunkMetaOffset;
        Segment seg = new Segment(info, fd);
        Unit unit = new Unit(info, seg);

        String file = prefix + "testPersistentUnitVar";
        Path path = new Path(file);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataInputStream in = fs.open(path);

        byte[] buffer = unit.loadUnitBuffer(in);

        unit.loadDataMeta(buffer, true);

        if (unit.recordNum() != 100) {
            fail("error recordNum:" + unit.recordNum());
        }

        if (unit.offsetArray() == null) {
            fail("error offsetArray, null");
        }

        if (unit.offsetArray().length != 100) {
            fail("error offsetArray len:" + unit.offsetArray().length);
        }

        ArrayList<byte[]> arrayList = new ArrayList<byte[]>(64);

        ByteArrayInputStream stream1 = new ByteArrayInputStream(buffer);
        DataInputStream stream = new DataInputStream(stream1);
        DataInputBuffer inputBuffer = new DataInputBuffer();
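        // Wrap the whole unit buffer once; each chunk is then decoded in place
        // at the per-record offsets taken from offsetArray().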
        inputBuffer.reset(buffer, 0, buffer.length);
        for (int i = 0; i < unit.offsetArray().length; i++) {
            if (unit.offsetArray()[i] != full7chunkLen * i) {
                fail("error meta offset:" + unit.offsetArray()[i] + "i:" + i);
            }

            DataChunk chunk = new DataChunk((short) 7);
            chunk.unpersistent(unit.offsetArray()[i], full7chunkLen, inputBuffer);

            Record record = chunk.toRecord(fieldMap, true, arrayList);
            judgeFixedRecord(record);
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("get IOException:" + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("get Exception:" + e.getMessage());
    }
}

From source file:FormatStorageBasicTest.java

License:Open Source License

public void testUnpersistenUnitNotVar() {
    try {
        FieldMap fieldMap = new FieldMap();
        fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
        fieldMap.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
        fieldMap.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
        fieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
        fieldMap.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
        fieldMap.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));

        Head head = new Head();
        head.setFieldMap(fieldMap);

        Configuration conf = new Configuration();
        FormatDataFile fd = new FormatDataFile(conf);
        fd.setWorkStatus(ConstVar.WS_Read);
        fd.head = head;

        IndexInfo info = new IndexInfo();
        info.offset = 0;
        info.len = 100 * full6chunkLen + ConstVar.DataChunkMetaOffset;
        Segment seg = new Segment(info, fd);
        Unit unit = new Unit(info, seg);

        String file = prefix + "testPersistentUnitNotVar";
        Path path = new Path(file);
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataInputStream in = fs.open(path);

        byte[] buffer = unit.loadUnitBuffer(in);

        unit.loadDataMeta(buffer, false);

        if (unit.recordNum() != 100) {
            fail("error recordNum:" + unit.recordNum());
        }

        ArrayList<byte[]> arrayList = new ArrayList<byte[]>(64);

        ByteArrayInputStream stream1 = new ByteArrayInputStream(buffer);
        DataInputStream stream = new DataInputStream(stream1);
        DataInputBuffer inputBuffer = new DataInputBuffer();
        inputBuffer.reset(buffer, 0, buffer.length);
        for (int i = 0; i < 100; i++) {
            DataChunk chunk = new DataChunk((short) 6);
            chunk.unpersistent(i * 29, full6chunkLen, inputBuffer);

            Record record = chunk.toRecord(fieldMap, true, arrayList);
            judgeFixedRecord(record);
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("get IOException:" + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("get Exception:" + e.getMessage());
    }
}

From source file:FormatStorageBasicTest.java

License:Open Source License

public void testTransferUnit() {
    try {
        Head head = new Head();
        head.setVar((byte) 1);
        Configuration conf = new Configuration();
        FormatDataFile fd = new FormatDataFile(conf);
        fd.create(prefix + "testTransferUnitOneRecord_tmp", head);

        IndexInfo info = new IndexInfo();
        info.offset = 123;
        Segment seg = new Segment(info, fd);
        Unit unit = new Unit(info, seg);

        Record record = new Record(7);
        record.addValue(new FieldValue((byte) 1, (short) 0));
        record.addValue(new FieldValue((short) 2, (short) 1));
        record.addValue(new FieldValue((int) 3, (short) 2));
        record.addValue(new FieldValue((long) 4, (short) 3));
        record.addValue(new FieldValue((float) 5.5, (short) 4));
        record.addValue(new FieldValue((double) 6.6, (short) 5));
        record.addValue(new FieldValue("hello konten", (short) 6));

        for (int i = 0; i < 100; i++) {
            unit.addRecord(record);
        }

        if (unit.offset() != 123) {
            fail("error offset1:" + unit.offset());
        }

        DataInputBuffer inputBuffer = new DataInputBuffer();
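        // Re-read the recorded chunk offsets straight from the unit's
        // DataOutputBuffer backing array, without an intermediate copy.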
        inputBuffer.reset(((DataOutputBuffer) unit.metasBuffer).getData(), 0,
                ((DataOutputBuffer) unit.metasBuffer).getLength());
        for (int i = 0; i < 100; i++) {
            long value = inputBuffer.readLong();
            if (value != 123 + i * full7chunkLen) {
                fail("error data offset1:" + value + "i:" + i);
            }
        }

        if (unit.metaOffset() != 123 + full7chunkLen * 100) {
            fail("error metaOffset1:" + unit.metaOffset());
        }

        unit.transfer(2000);

        if (unit.offset() != 2000) {
            fail("error offset2:" + unit.offset());
        }

        inputBuffer.reset(((DataOutputBuffer) unit.metasBuffer).getData(), 0,
                ((DataOutputBuffer) unit.metasBuffer).getLength());
        for (int i = 0; i < 100; i++) {
            long value = inputBuffer.readLong();
            if (value != 2000 + i * full7chunkLen) {
                fail("error data offset2:" + value + "i:" + i);
            }
        }
        if (unit.metaOffset() != 2000 + full7chunkLen * 100) {
            fail("error metaOffset2:" + unit.metaOffset());
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("get IOException:" + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("get Exception:" + e.getMessage());
    }
}

From source file:cn.ac.ncic.mastiff.io.coding.DictionaryBitPackingRLEByteReader.java

License:Apache License

public byte[] CompressensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength());
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(byteBuf.array(), 0, byteBuf.array().length);
    int dictionarySize = dib.readInt();
    int OnlydictionarySize = dib.readInt();
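    // Successive reset() calls re-window the same backing array; note that
    // getData() returns the full array, not just the current window.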
    dib.reset(byteBuf.array(), 8, dictionarySize - 4);
    byte[] dictionaryBuffer = dib.getData();
    dib.reset(byteBuf.array(), 4 + dictionarySize, (byteBuf.array().length - dictionarySize - 4));
    byte[] dictionaryId = dib.getData();
    dib.close();
    DictionaryValuesReader cr = initDicReader(OnlydictionarySize, dictionaryBuffer,
            PrimitiveType.PrimitiveTypeName.BINARY);
    cr.initFromPage(numPairs, dictionaryId, 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = Byte.parseByte(cr.readBytes().toStringUsingUTF8());
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:cn.ac.ncic.mastiff.io.coding.DictionaryBitPackingRLEByteReader.java

License:Apache License

@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(byteBuf.array(), 0, byteBuf.array().length);
    int dictionarySize = dib.readInt();
    int OnlydictionarySize = dib.readInt();
    dib.reset(byteBuf.array(), 8, dictionarySize - 4);
    byte[] dictionaryBuffer = dib.getData();
    dib.reset(byteBuf.array(), 4 + dictionarySize, (byteBuf.array().length - dictionarySize - 4));
    byte[] dictionaryId = dib.getData();
    dib.close();
    DictionaryValuesReader cr = initDicReader(OnlydictionarySize, dictionaryBuffer,
            PrimitiveType.PrimitiveTypeName.BINARY);
    cr.initFromPage(numPairs, dictionaryId, 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = Byte.parseByte(cr.readBytes().toStringUsingUTF8());
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();

}

From source file:cn.ac.ncic.mastiff.io.coding.DictionaryBitPackingRLEIntReader.java

License:Apache License

public byte[] CompressensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength());
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(byteBuf.array(), 0, byteBuf.array().length);
    int dictionarySize = dib.readInt();
    int OnlydictionarySize = dib.readInt();
    dib.reset(byteBuf.array(), 8, dictionarySize - 4);
    byte[] dictionaryBuffer = dib.getData();
    dib.reset(byteBuf.array(), 4 + dictionarySize, (byteBuf.array().length - dictionarySize - 4));
    byte[] dictionaryId = dib.getData();
    dib.close();
    DictionaryValuesReader cr = initDicReader(OnlydictionarySize, dictionaryBuffer,
            PrimitiveType.PrimitiveTypeName.INT32);
    cr.initFromPage(numPairs, dictionaryId, 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        int tmp = cr.readInteger();
        decoding.writeInt(tmp);
    }
    byteBuf.clear();

    inBuf.close();
    return decoding.getData();

}

From source file:cn.ac.ncic.mastiff.io.coding.DictionaryBitPackingRLEIntReader.java

License:Apache License

@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(byteBuf.array(), 0, byteBuf.array().length);
    int dictionarySize = dib.readInt();
    int OnlydictionarySize = dib.readInt();
    dib.reset(byteBuf.array(), 8, dictionarySize - 4);
    byte[] dictionaryBuffer = dib.getData();
    dib.reset(byteBuf.array(), 4 + dictionarySize, (byteBuf.array().length - dictionarySize - 4));
    byte[] dictionaryId = dib.getData();
    dib.close();
    DictionaryValuesReader cr = initDicReader(OnlydictionarySize, dictionaryBuffer,
            PrimitiveType.PrimitiveTypeName.INT32);
    cr.initFromPage(numPairs, dictionaryId, 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        int tmp = cr.readInteger();
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:cn.ac.ncic.mastiff.io.coding.RedBlackTreeStringReader.java

License:Apache License

@Override
public byte[] ensureDecompressed() throws IOException {
    DataOutputBuffer transfer = new DataOutputBuffer();
    transfer.write(inBuf.getData(), 12, inBuf.getLength() - 12);
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(transfer.getData(), 0, transfer.getLength());
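    // The same DataInputBuffer is re-pointed at successive (offset, length)
    // windows of the decoded bytes as each stream section is parsed.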
    int dictionarySize = dib.readInt();
    int length1 = dib.readInt();
    byte[] data = transfer.getData();
    transfer.close();
    dib.reset(data, Integer.SIZE + Integer.SIZE, length1);
    FlexibleEncoding.ORC.StreamName name = new FlexibleEncoding.ORC.StreamName(0,
            OrcProto.Stream.Kind.DICTIONARY_DATA);
    ByteBuffer inBuf1 = ByteBuffer.allocate(length1);
    inBuf1.put(dib.getData(), 0, dib.getLength());
    inBuf1.flip();
    InStream in = InStream.create("test1", inBuf1, null, dictionarySize);
    if (in.available() > 0) {
        dictionaryBuffer = new DynamicByteArray(64, in.available());
        dictionaryBuffer.readAll(in);
        in.close();
        // read the lengths    google  proto buffer
        name = new StreamName(1, OrcProto.Stream.Kind.LENGTH);
        dib.reset(data, 4 + 4 + length1, 4);
        int length2 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4, length2);
        //  in = streams.get(name);
        ByteBuffer inBuf2 = ByteBuffer.allocate(length2);
        inBuf2.put(dib.getData(), 0, length2);
        inBuf2.flip();
        in = InStream.create("test2", inBuf2, null, dictionarySize);
        //    IntegerReader lenReader = createIntegerReader(encodings.get(columnId)
        //        .getKind(), in, false);
        IntegerReader lenReader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
        int offset = 0;
        dictionaryOffsets = new int[dictionarySize + 1];
        for (int i = 0; i < dictionarySize; ++i) {
            dictionaryOffsets[i] = offset;
            offset += (int) lenReader.next();
        }
        dictionaryOffsets[dictionarySize] = offset;
        in.close();
        name = new FlexibleEncoding.ORC.StreamName(2, OrcProto.Stream.Kind.DATA);
        dib.reset(data, 4 + 4 + length1 + 4 + length2, 4);
        int length3 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4 + length2 + 4, length3);
        ByteBuffer inBuf3 = ByteBuffer.allocate(length3);
        inBuf3.put(dib.getData(), 0, length3);
        inBuf3.flip();
        in = InStream.create("test3", inBuf3, null, dictionarySize);
        reader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
    }
    inBuf.close();
    DataOutputBuffer decoding = new DataOutputBuffer();
    DataOutputBuffer offsets = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    int dataoffset = 12;
    String str;
    for (int i = 0; i < numPairs; i++) {
        str = readEachValue(null);
        decoding.writeUTF(str);
        //      if(i<5){
        //        System.out.println("304  bin[i]  "+str+"  decoding    "+ decoding.size());
        //      }
        dataoffset = decoding.size();
        offsets.writeInt(dataoffset);
    }
    System.out.println("315  offset.size() " + offsets.size() + "  decoding.szie   " + decoding.size());
    System.out.println("316  dataoffet   " + dataoffset);
    decoding.write(offsets.getData(), 0, offsets.size());
    inBuf.close();
    offsets.close();
    dib.close();
    System.out.println("316   decoding   " + decoding.size() + decoding.getLength() + " decoding.getData()   "
            + decoding.getData().length);
    inBuf1.clear();
    return decoding.getData();
}