Example usage for org.apache.hadoop.io DataInputBuffer DataInputBuffer

Introduction

This page shows example usage of the org.apache.hadoop.io.DataInputBuffer() constructor, collected from open-source projects.

Prototype

public DataInputBuffer() 

Document

Constructs a new empty buffer.
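
A DataInputBuffer starts out empty; you point it at an existing byte array with reset() and then read through the DataInput interface. A minimal round-trip sketch, pairing it with its DataOutputBuffer counterpart (the class name and the use of Text as a stand-in Writable are illustrative):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataInputBufferExample {
    public static void main(String[] args) throws IOException {
        // Serialize a Writable into an in-memory output buffer.
        DataOutputBuffer out = new DataOutputBuffer();
        new Text("hello").write(out);

        // Construct an empty DataInputBuffer and point it at the bytes.
        DataInputBuffer in = new DataInputBuffer();
        // Pass out.getLength(), not out.getData().length: the backing
        // array is usually larger than the valid data it holds.
        in.reset(out.getData(), out.getLength());

        // Deserialize into a fresh instance.
        Text copy = new Text();
        copy.readFields(in);
        System.out.println(copy); // prints "hello"
    }
}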

Usage

From source file:com.netflix.suro.input.thrift.MessageSetSerDe.java

License:Apache License

@Override
public TMessageSet deserialize(byte[] payload) {
    DataInputBuffer inBuffer = new DataInputBuffer();
    inBuffer.reset(payload, payload.length);

    try {
        String app = inBuffer.readUTF();
        int numMessages = inBuffer.readInt();
        byte compression = inBuffer.readByte();
        long crc = inBuffer.readLong();
        byte[] messages = new byte[inBuffer.readInt()];
        inBuffer.readFully(messages); // readFully guarantees the array is completely filled

        return new TMessageSet(app, numMessages, compression, crc, ByteBuffer.wrap(messages));
    } catch (Exception e) {
        throw new RuntimeException("Failed to de-serialize payload into TMessageSet: " + e.getMessage(), e);
    } finally {
        Closeables.closeQuietly(inBuffer);
    }
}

From source file:com.salesforce.phoenix.client.TestClientKeyValueLocal.java

License:Apache License

private void validate(KeyValue kv, byte[] row, byte[] family, byte[] qualifier, long ts, Type type,
        byte[] value) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    kv.write(out);
    out.close();
    byte[] data = out.getData();
    // read it back in
    KeyValue read = new KeyValue();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(data, data.length);
    read.readFields(in);
    in.close();

    // validate that it's the same
    assertTrue("Row didn't match!", Bytes.equals(row, read.getRow()));
    assertTrue("Family didn't match!", Bytes.equals(family, read.getFamily()));
    assertTrue("Qualifier didn't match!", Bytes.equals(qualifier, read.getQualifier()));
    assertTrue("Value didn't match!", Bytes.equals(value, read.getValue()));
    assertEquals("Timestamp didn't match", ts, read.getTimestamp());
    assertEquals("Type didn't match", type.getCode(), read.getType());
}

From source file:com.scaleoutsoftware.soss.hserver.ObjectPrefetcher.java

License:Apache License

@SuppressWarnings("unchecked")
void startPrefetching() {
    new Thread(new Runnable() {

        public void run() {
            try {
                ObjectReader reader = BucketStore.getObjectReader(initialSize);
                DataInputBuffer buffer = new DataInputBuffer();
                for (StateServerKey id : keys) {
                    reader.read(id);

                    ObjectDescriptor<T> objectDescriptor = unused.take();
                    objectDescriptor.key = new CachedObjectId<T>(id);

                    if (isWritable) {
                        buffer.reset(reader.getBuffer(), reader.getLength());
                        if (objectDescriptor.object == null) {
                            objectDescriptor.object = ReflectionUtils.newInstance(objectClass, configuration);
                        }
                        ((Writable) objectDescriptor.object).readFields(buffer);
                    } else {
                        objectDescriptor.object = (T) ObjectArray.deserialize(reader.getBuffer(), 0,
                                reader.getLength(), serializer);
                    }
                    readyToBeServed.put(objectDescriptor);
                }
                readyToBeServed.put(endToken);
            } catch (Exception e) {
                // Save the exception to be rethrown later by next()
                exception = e;
            }
        }
    }).start();
}

From source file:com.scaleunlimited.classify.datum.ModelDatum.java

License:Apache License

public BaseModel getModel() throws Exception {
    String className = _tupleEntry.getString(MODEL_FN);
    BytesWritable modelData = (BytesWritable) (_tupleEntry.getObject(MODEL_DATA_FN));
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(modelData.getBytes(), modelData.getLength());
    BaseModel model = (BaseModel) Class.forName(className).newInstance();
    model.readFields(dib);
    return model;
}

From source file:com.sequoiadb.hadoop.io.BSONWritable.java

License:Apache License

protected synchronized void copy(Writable other) {
    if (other != null) {
        try {
            DataOutputBuffer out = new DataOutputBuffer();
            other.write(out);
            DataInputBuffer in = new DataInputBuffer();
            in.reset(out.getData(), out.getLength());
            readFields(in);

        } catch (IOException e) {
            throw new IllegalArgumentException("map cannot be copied: " + e.getMessage());
        }

    } else {
        throw new IllegalArgumentException("source map cannot be null");
    }
}

From source file:com.vertica.hadoop.VerticaConfiguration.java

License:Apache License

/**
 * Returns static input parameters, if set.
 *
 * @return collection of lists of objects representing input parameters
 * @throws IOException
 */
public Collection<List<Object>> getInputParameters() throws IOException {
    Collection<List<Object>> values = null;
    String[] query_params = conf.getStrings(QUERY_PARAMS_PROP);
    if (query_params != null) {
        values = new ArrayList<List<Object>>();
        for (String str_params : query_params) {
            DataInputBuffer in = new DataInputBuffer();
            byte[] bytes = StringUtils.hexStringToByte(str_params);
            // pass the decoded byte count, not the hex string length (which is twice as long)
            in.reset(bytes, bytes.length);
            int sz = in.readInt();
            ArrayList<Object> params = new ArrayList<Object>();
            for (int count = 0; count < sz; count++) {
                int type = in.readInt();
                params.add(VerticaRecord.readField(type, in));
            }
            values.add(params);
        }
    }
    return values;
}

From source file:Compress.TestLZO.java

License:Open Source License

public static void main(String[] argv) throws IOException {
    System.out.println(System.getProperty("java.library.path"));

    Configuration conf = new Configuration();

    conf.setInt("io.compression.codec.lzo.buffersize", 64 * 1024);

    LzoCodec codec = new LzoCodec();
    codec.setConf(conf);

    DataOutputBuffer out = new DataOutputBuffer();
    CompressionOutputStream out2 = codec.createOutputStream(out);

    byte[] str2 = new byte[20];

    int num = 10000;
    for (long i = 0; i < num; i++) {
        Util.long2bytes(str2, i);
        out2.write(str2, 0, 8);

    }
    out2.finish();

    System.out.println("org len:" + num * 8 + ", compressed len:" + out.getLength());

    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());

    CompressionInputStream in2 = codec.createInputStream(in);

    byte[] buf = new byte[100];
    for (long i = 0; i < num; i++) {
        int count = 0;
        count = in2.read(buf, 0, 8);
        if (count > 0) {
            long value = Util.bytes2long(buf, 0, 8);
            if (value != i) {
                System.out.println(i + ",count:" + count + ",value:" + value);
            } else if (i > (num - 20)) {
                System.out.println(i + ",value:" + value);
            }

        } else {
            System.out.println("count:" + count + ", string " + i);
            break;
        }
    }

    in2.close();

    System.out.println("test compress array...");

    DataOutputBuffer out3 = new DataOutputBuffer();
    CompressionOutputStream out4 = codec.createOutputStream(out3);

    for (long i = 0; i < num; i++) {
        Util.long2bytes(str2, i);
        out4.write(str2, 0, 8);
    }
    out4.finish();

    System.out.println("org len:" + num * 8 + ", compressed len:" + out3.getLength());

    DataInputBuffer in3 = new DataInputBuffer();
    in3.reset(out3.getData(), 0, out3.getLength());

    CompressionInputStream in4 = codec.createInputStream(in3);

    for (long i = 0; i < num; i++) {
        int count = 0;
        count = in4.read(buf, 0, 8);
        if (count > 0) {
            long value = Util.bytes2long(buf, 0, 8);
            if (value != i) {
                System.out.println(i + ",count:" + count + ",value:" + value);
            }

            if (i > (num - 20)) {
                System.out.println(i + ",value:" + value);
            }

        } else {
            System.out.println("count:" + count + ", string " + i);
            break;
        }
    }

    in4.close();

}

From source file:cosmos.impl.KeyValueToMultimapQueryResult.java

License:Apache License

public MultimapRecord apply(Entry<Key, Value> input) {
    DataInputBuffer buf = new DataInputBuffer();
    buf.reset(input.getValue().get(), input.getValue().getSize());

    try {
        return MultimapRecord.recreate(buf);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:cosmos.results.MultimapQueryResultTest.java

License:Apache License

@Test
public void identityWritableEquality() throws Exception {
    Multimap<Column, RecordValue<?>> data = HashMultimap.create();

    data.put(Column.create("TEXT"), RecordValue.create("foo", VIZ));
    data.put(Column.create("TEXT"), RecordValue.create("bar", VIZ));

    MultimapRecord mqr = new MultimapRecord(data, "1", VIZ);

    DataOutputBuffer out = new DataOutputBuffer();
    mqr.write(out);

    DataInputBuffer in = new DataInputBuffer();

    byte[] bytes = out.getData();
    in.reset(bytes, out.getLength());

    MultimapRecord mqr2 = MultimapRecord.recreate(in);

    Assert.assertEquals(mqr, mqr2);
}

From source file:edu.indiana.d2i.htrc.io.mem.HadoopWritableTranscoder.java

License:Apache License

@Override
public T decode(CachedData data) {
    try {
        DataInputBuffer decodeBuffer = new DataInputBuffer();
        byte[] bytes = data.getData();
        decodeBuffer.reset(bytes, 0, bytes.length);
        writable.readFields(decodeBuffer);
    } catch (IOException e) {
        e.printStackTrace();
    }

    return writable;
}