Example usage for org.apache.hadoop.io DataInputBuffer reset

List of usage examples for org.apache.hadoop.io DataInputBuffer reset

Introduction

On this page you can find example usages of org.apache.hadoop.io DataInputBuffer reset.

Prototype

public void reset(byte[] input, int start, int length) 

Document

Resets the data that the buffer reads: after the call, reads start at offset start of input and at most length bytes are available.
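
The examples below share a common pattern: a Writable is first serialized into a DataOutputBuffer (or another byte array), and reset(byte[], int, int) then points a DataInputBuffer at those bytes so that readFields can deserialize them without copying. The following is a minimal, self-contained sketch of that round trip; it is not taken from the projects listed under Usage, and it uses IntWritable only as a stand-in for any Writable.

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;

public class DataInputBufferResetSketch {
    public static void main(String[] args) throws IOException {
        // Serialize a Writable into an in-memory output buffer.
        DataOutputBuffer out = new DataOutputBuffer();
        new IntWritable(42).write(out);

        // Point a DataInputBuffer at the serialized bytes and deserialize.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), 0, out.getLength());

        IntWritable copy = new IntWritable();
        copy.readFields(in);

        System.out.println(copy.get()); // prints 42
    }
}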

Usage

From source file: com.ibm.jaql.json.type.JsonJavaObject.java

License: Apache License

@Override
public JsonJavaObject getCopy(JsonValue target) throws Exception {
    if (target == this)
        target = null;

    JsonJavaObject t;
    if (target instanceof JsonJavaObject) {
        t = (JsonJavaObject) target;
    } else {
        t = new JsonJavaObject();
    }

    if (value == null) {
        t.value = null;
    } else {
        if (t.value == null || t.value.getClass() != this.value.getClass()) {
            t.value = this.value.getClass().newInstance();
        }

        RandomAccessBuffer copyBuffer = new RandomAccessBuffer();
        DataOutputStream copyOutput = new DataOutputStream(copyBuffer);
        this.value.write(copyOutput);
        copyOutput.flush();

        DataInputBuffer copyInput = new DataInputBuffer(); // TODO: cache
        copyInput.reset(copyBuffer.getBuffer(), 0, copyBuffer.size());
        t.value.readFields(copyInput);
    }

    return t;
}

From source file: com.ibm.jaql.lang.expr.io.ReadSplitFn.java

License: Apache License

@Override
public JsonIterator iter(Context context) throws Exception {
    // Close the previous adapter, if still open:
    if (adapter != null) {
        adapter.close();
        adapter = null;
    }

    // evaluate the arguments
    JsonValue args = exprs[0].eval(context);
    JsonRecord splitRec = (JsonRecord) exprs[1].eval(context);

    if (splitRec == null) {
        return JsonIterator.EMPTY;
    }

    // get the InputAdapter according to the type
    HadoopInputAdapter hia = (HadoopInputAdapter) JaqlUtil.getAdapterStore().input.getAdapter(args);
    adapter = hia;
    JobConf conf = new JobConf(); // TODO: allow configuration
    hia.setParallel(conf); // right thing to do?

    JsonString jsplitClassName = (JsonString) splitRec.get(InputSplitsFn.CLASS_TAG);
    Class<? extends InputSplit> splitCls = (Class<? extends InputSplit>) ClassLoaderMgr
            .resolveClass(jsplitClassName.toString());
    InputSplit split = (InputSplit) ReflectionUtils.newInstance(splitCls, conf);

    DataInputBuffer in = new DataInputBuffer();
    JsonBinary rawSplit = (JsonBinary) splitRec.get(InputSplitsFn.SPLIT_TAG);
    in.reset(rawSplit.getInternalBytes(), rawSplit.bytesOffset(), rawSplit.bytesLength());
    split.readFields(in);

    RecordReader<JsonHolder, JsonHolder> rr = hia.getRecordReader(split, conf, Reporter.NULL);
    return new RecordReaderValueIter(rr);
}

From source file: edu.indiana.d2i.htrc.io.mem.HadoopWritableTranscoder.java

License: Apache License

@Override
public T decode(CachedData data) {
    try {
        DataInputBuffer decodeBuffer = new DataInputBuffer();
        byte[] bytes = data.getData();
        decodeBuffer.reset(bytes, 0, bytes.length);
        writable.readFields(decodeBuffer);
    } catch (IOException e) {
        e.printStackTrace();
    }

    return writable;
}

From source file: FormatStorage.Unit.java

License: Open Source License

public void transfer(long newOffset) throws Exception {
    long adjust = newOffset - offset;

    boolean VAR = segment.formatData.isVar();
    if (VAR) {
        if (!compressed) {
            int tnum = ((DataOutputBuffer) metasBuffer).getLength() / ConstVar.Sizeof_Long;
            if (tnum != recordNum) {
                throw new SEException.InnerException("tnum != recordNum");
            }

            DataOutputBuffer tmpOuputBuffer = new DataOutputBuffer();
            DataInputBuffer tmpinput = new DataInputBuffer();
            tmpinput.reset(((DataOutputBuffer) metasBuffer).getData(), 0,
                    ((DataOutputBuffer) metasBuffer).getLength());
            for (int i = 0; i < recordNum; i++) {
                long value = tmpinput.readLong() + adjust;
                tmpOuputBuffer.writeLong(value);
            }

            tmpinput.reset(tmpOuputBuffer.getData(), 0, tmpOuputBuffer.getLength());
            ((DataOutputBuffer) metasBuffer).reset();
            for (int i = 0; i < recordNum; i++) {
                ((DataOutputBuffer) metasBuffer).writeLong(tmpinput.readLong());
            }

            tmpOuputBuffer = null;
            tmpinput = null;
        } else {
            compressedMetasOutput.finish();

            InputStream tmpMetasInputStream = new DataInputBuffer();
            ((DataInputBuffer) tmpMetasInputStream).reset(((DataOutputBuffer) metasBuffer).getData(), 0,
                    ((DataOutputBuffer) metasBuffer).getLength());
            CompressionInputStream tmpCompressedMetasInput = codec.createInputStream(tmpMetasInputStream);

            DataOutputBuffer tmpOutputBuffer = new DataOutputBuffer();
            for (int i = 0; i < recordNum; i++) {
                int count = 0;
                try {
                    count = tmpCompressedMetasInput.read(metaOffsetBytes, 0, ConstVar.Sizeof_Long);
                    long meta = Util.bytes2long(metaOffsetBytes, 0, ConstVar.Sizeof_Long) + adjust;

                    tmpOutputBuffer.writeLong(meta);

                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("i:" + i + ",count:" + count);

                    throw e;
                }
            }

            ((DataOutputBuffer) metasBuffer).reset();
            compressedMetasOutput.resetState();

            DataInputBuffer tmpInputBuffer = new DataInputBuffer();
            tmpInputBuffer.reset(tmpOutputBuffer.getData(), 0, tmpOutputBuffer.getLength());
            for (int i = 0; i < recordNum; i++) {
                long newMeta = tmpInputBuffer.readLong();
                Util.long2bytes(metaOffsetBytes, newMeta);
                compressedMetasOutput.write(metaOffsetBytes, 0, ConstVar.Sizeof_Long);
            }
        }
    }

    metaOffset += adjust;
    setOffset(newOffset);
}

From source file: io.github.dlmarion.clowncar.hdfs.TestBloscCompressorDecompressor.java

License: Apache License

@Test
public void testCompressorDecompressorLogicWithCompressionStreams() {
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int BYTE_SIZE = 1024 * 100;
    byte[] bytes = generate(BYTE_SIZE);
    int bufferSize = 262144;
    int compressionOverhead = (bufferSize / 6) + 32;
    try {
        Configuration conf = new Configuration(false);
        conf.set(BloscCompressor.COMPRESSOR_NAME_KEY, compressor);
        conf.set(BloscCompressor.COMPRESSION_LEVEL_KEY, Integer.toString(level));
        conf.set(BloscCompressor.BYTES_FOR_TYPE_KEY, Integer.toString(Integer.BYTES));
        conf.set(BloscCompressor.SHUFFLE_TYPE_KEY, Integer.toString(shuffle));
        conf.set(BloscCompressor.NUM_THREADS_KEY, Integer.toString(threads));
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new BlockCompressorStream(compressedDataBuffer,
                new BloscCompressor(bufferSize, conf), bufferSize, compressionOverhead);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();

        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());

        CompressionInputStream inflateFilter = new BlockDecompressorStream(deCompressedDataBuffer,
                new BloscDecompressor(bufferSize), bufferSize);

        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));

        byte[] result = new byte[BYTE_SIZE];
        inflateIn.read(result);

        assertArrayEquals("original array not equals compress/decompressed array", result, bytes);
    } catch (IOException e) {
        e.printStackTrace();
        fail("testBloscCompressorDecopressorLogicWithCompressionStreams ex error !!!");
    } finally {
        try {
            if (deflateOut != null)
                deflateOut.close();
            if (inflateIn != null)
                inflateIn.close();
        } catch (Exception e) {
        }
    }
}

From source file: org.apache.accumulo.master.state.MergeInfoTest.java

License: Apache License

MergeInfo readWrite(MergeInfo info) throws Exception {
    DataOutputBuffer buffer = new DataOutputBuffer();
    info.write(buffer);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(buffer.getData(), 0, buffer.getLength());
    MergeInfo info2 = new MergeInfo();
    info2.readFields(in);
    Assert.assertEquals(info.getExtent(), info2.getExtent());
    Assert.assertEquals(info.getState(), info2.getState());
    Assert.assertEquals(info.getOperation(), info2.getOperation());
    return info2;
}

From source file: org.apache.accumulo.server.master.state.MergeInfoTest.java

License: Apache License

private static MergeInfo readWrite(MergeInfo info) throws Exception {
    DataOutputBuffer buffer = new DataOutputBuffer();
    info.write(buffer);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(buffer.getData(), 0, buffer.getLength());
    MergeInfo info2 = new MergeInfo();
    info2.readFields(in);
    assertEquals(info.getExtent(), info2.getExtent());
    assertEquals(info.getState(), info2.getState());
    assertEquals(info.getOperation(), info2.getOperation());
    return info2;
}

From source file: org.apache.blur.lucene.serializer.ProtoSerializer.java

License: Apache License

public static void main(String[] args) throws ParseException, IOException {

    QueryParser parser = new QueryParser(Version.LUCENE_40, "", new StandardAnalyzer(Version.LUCENE_40));

    Query query = parser.parse("a:v1 b:v2 c:v3~ c:asda*asda");

    SuperQuery superQuery = new SuperQuery(query, ScoreType.SUPER, new Term("_primedoc_"));

    QueryWritable queryWritable = new QueryWritable(superQuery);
    DataOutputBuffer buffer = new DataOutputBuffer();
    queryWritable.write(buffer);
    buffer.close();

    System.out.println(new String(buffer.getData(), 0, buffer.getLength()));

    QueryWritable qw = new QueryWritable();

    DataInputBuffer in = new DataInputBuffer();
    in.reset(buffer.getData(), 0, buffer.getLength());
    qw.readFields(in);

    System.out.println("------------");

    System.out.println(qw.getQuery());

    System.out.println("------------");

    while (true) {
        run(superQuery);
    }
}

From source file: org.apache.blur.lucene.serializer.ProtoSerializer.java

License: Apache License

private static void run(Query query) throws IOException {

    DataOutputBuffer buffer = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    QueryWritable outQw = new QueryWritable();
    QueryWritable inQw = new QueryWritable();

    long s = System.nanoTime();
    int count = 100000;
    for (int i = 0; i < count; i++) {
        outQw.setQuery(query);
        outQw.write(buffer);

        in.reset(buffer.getData(), 0, buffer.getLength());
        inQw.readFields(in);

        buffer.reset();
    }
    long e = System.nanoTime();
    System.out.println((e - s) / 1000000.0 / (double) count);
    // System.out.println((e - s) / (double) count);
}

From source file: org.apache.hama.bsp.BSPPeerImpl.java

License: Apache License

@SuppressWarnings("unchecked")
public final void initInput() throws IOException {
    InputSplit inputSplit = null;
    // reinstantiate the split
    try {
        if (splitClass != null) {
            inputSplit = (InputSplit) ReflectionUtils.newInstance(conf.getClassByName(splitClass), conf);
        }
    } catch (ClassNotFoundException exp) {
        IOException wrap = new IOException("Split class " + splitClass + " not found");
        wrap.initCause(exp);
        throw wrap;
    }

    if (inputSplit != null) {
        DataInputBuffer splitBuffer = new DataInputBuffer();
        splitBuffer.reset(split.getBytes(), 0, split.getLength());
        inputSplit.readFields(splitBuffer);
        if (in != null) {
            in.close();
        }
        in = new TrackedRecordReader<K1, V1>(bspJob.getInputFormat().getRecordReader(inputSplit, bspJob),
                getCounter(BSPPeerImpl.PeerCounter.TASK_INPUT_RECORDS),
                getCounter(BSPPeerImpl.PeerCounter.IO_BYTES_READ));
        this.splitSize = inputSplit.getLength();
    }
}