Example usage for org.apache.hadoop.fs FSDataInputStream FSDataInputStream

Introduction

This page lists example usages of the org.apache.hadoop.fs.FSDataInputStream constructor FSDataInputStream(InputStream in), drawn from the Apache Parquet test suite.

Prototype

public FSDataInputStream(InputStream in) 
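
A minimal sketch of calling this constructor directly, assuming a recent Hadoop release: the wrapped InputStream is expected to also implement Hadoop's Seekable and PositionedReadable interfaces, otherwise the constructor rejects it with an IllegalArgumentException. The SeekableByteArrayInputStream helper below is hypothetical and written only for illustration; the examples on this page instead wrap the Parquet test suite's own MockInputStream.

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.fs.Seekable;

public class FSDataInputStreamExample {

    // Hypothetical in-memory stream that satisfies the constructor's requirements.
    static class SeekableByteArrayInputStream extends ByteArrayInputStream
            implements Seekable, PositionedReadable {

        SeekableByteArrayInputStream(byte[] data) {
            super(data);
        }

        @Override
        public void seek(long newPos) {
            pos = (int) newPos; // ByteArrayInputStream exposes pos/buf/count as protected fields
        }

        @Override
        public long getPos() {
            return pos;
        }

        @Override
        public boolean seekToNewSource(long targetPos) {
            return false; // single in-memory copy, no alternate source
        }

        @Override
        public int read(long position, byte[] buffer, int offset, int length) {
            // Read from an absolute position without moving the stream's own position.
            if (position >= count) {
                return -1;
            }
            int n = Math.min(length, count - (int) position);
            System.arraycopy(buf, (int) position, buffer, offset, n);
            return n;
        }

        @Override
        public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
            if (read(position, buffer, offset, length) < length) {
                throw new IOException("Reached end of stream before reading fully");
            }
        }

        @Override
        public void readFully(long position, byte[] buffer) throws IOException {
            readFully(position, buffer, 0, buffer.length);
        }
    }

    public static void main(String[] args) throws IOException {
        byte[] data = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
        try (FSDataInputStream in = new FSDataInputStream(new SeekableByteArrayInputStream(data))) {
            in.seek(5);                    // delegated to the wrapped stream's Seekable.seek
            System.out.println(in.read()); // prints 5
        }
    }
}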

Usage

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testDirectReadFullyPosition() throws Exception {
    final ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    readBuffer.position(3);
    readBuffer.mark();

    final FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testDirectReadFullyLimit() throws Exception {
    final ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    readBuffer.limit(7);

    final FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);

    readBuffer.position(7);
    readBuffer.limit(10);
    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testDirectReadFullyPositionAndLimit() throws Exception {
    final ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    readBuffer.position(3);
    readBuffer.limit(7);
    readBuffer.mark();

    final FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 4), readBuffer);

    readBuffer.position(7);
    readBuffer.limit(10);
    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, TEMP.get());
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testDirectReadFullySmallTempBufferWithPositionAndLimit() throws Exception {
    byte[] temp = new byte[2]; // this will cause readFully to loop

    final ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    readBuffer.position(3);
    readBuffer.limit(7);
    readBuffer.mark();

    final FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, temp);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, temp);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 4), readBuffer);

    readBuffer.position(7);
    readBuffer.limit(10);
    H1SeekableInputStream.readFullyDirectBuffer(hadoopStream, readBuffer, temp);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java

License: Apache License

@Test
public void testHeapReadFullySmallBuffer() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(8);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(8, readBuffer.position());
    Assert.assertEquals(8, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(8, readBuffer.position());
    Assert.assertEquals(8, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 8), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java

License: Apache License

@Test
public void testHeapReadFullyLargeBuffer() throws Exception {
    final ByteBuffer readBuffer = ByteBuffer.allocate(20);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());
    final MockBufferReader reader = new MockBufferReader(hadoopStream);

    TestUtils.assertThrows("Should throw EOFException", EOFException.class, new Callable() {
        @Override
        public Object call() throws Exception {
            H2SeekableInputStream.readFully(reader, readBuffer);
            return null;
        }
    });

    // NOTE: This behavior differs from readFullyHeapBuffer because direct uses
    // several read operations that will read up to the end of the input. This
    // is a correct value because the bytes in the buffer are valid. This
    // behavior can't be implemented for the heap buffer without using the read
    // method instead of the readFully method on the underlying
    // FSDataInputStream.
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(20, readBuffer.limit());
}

From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java

License: Apache License

@Test
public void testHeapReadFullyJustRight() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(10);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    // reads all of the bytes available without EOFException
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    // trying to read 0 more bytes doesn't result in EOFException
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java

License: Apache License

@Test
public void testHeapReadFullySmallReads() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(10);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java

License: Apache License

@Test
public void testHeapReadFullyPosition() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(10);
    readBuffer.position(3);
    readBuffer.mark();

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java

License: Apache License

@Test
public void testHeapReadFullyLimit() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(10);
    readBuffer.limit(7);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);

    readBuffer.position(7);
    readBuffer.limit(10);
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}