Example usage for org.apache.hadoop.fs FSDataInputStream FSDataInputStream

List of usage examples for org.apache.hadoop.fs FSDataInputStream FSDataInputStream

Introduction

On this page you can find an example of how to use the org.apache.hadoop.fs FSDataInputStream(InputStream) constructor.

Prototype

public FSDataInputStream(InputStream in) 

Source Link

Usage

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testHeapReadFullyLargeBuffer() throws Exception {
    // A 20-byte buffer asks for more than the stream contains, so the
    // heap read-fully path must fail with EOFException.
    final ByteBuffer buffer = ByteBuffer.allocate(20);

    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream());

    TestUtils.assertThrows("Should throw EOFException", EOFException.class, new Callable() {
        @Override
        public Object call() throws Exception {
            H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
            return null;
        }
    });

    // On failure the buffer must be left untouched: nothing consumed,
    // limit unchanged.
    Assert.assertEquals(0, buffer.position());
    Assert.assertEquals(20, buffer.limit());
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testHeapReadFullyJustRight() throws Exception {
    // Buffer capacity exactly matches the number of bytes the stream holds.
    final ByteBuffer buffer = ByteBuffer.allocate(10);

    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream());

    // First call fills the buffer completely without hitting EOF.
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    // With zero bytes remaining in the buffer, a second call is a no-op
    // and must not raise EOFException.
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    buffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), buffer);
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testHeapReadFullySmallReads() throws Exception {
    final ByteBuffer buffer = ByteBuffer.allocate(10);

    // MockInputStream(2, 3, 3) presumably delivers short reads of those
    // sizes — TODO confirm; readFully must loop until the buffer is full.
    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    // Repeating with a full buffer reads nothing and must not throw.
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    buffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), buffer);
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testHeapReadFullyPosition() throws Exception {
    // Start reading into the middle of the buffer: only 7 of the 10
    // bytes of capacity remain to be filled.
    final ByteBuffer buffer = ByteBuffer.allocate(10);
    buffer.position(3);
    buffer.mark();

    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    // A second call with no space remaining must not throw.
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    // Rewind to the marked start position and verify the 7 bytes read.
    buffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), buffer);
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testHeapReadFullyLimit() throws Exception {
    // A reduced limit caps the first read at 7 bytes even though the
    // buffer's capacity is 10.
    final ByteBuffer buffer = ByteBuffer.allocate(10);
    buffer.limit(7);

    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(7, buffer.position());
    Assert.assertEquals(7, buffer.limit());

    // Buffer is full up to its limit; another call must not throw.
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(7, buffer.position());
    Assert.assertEquals(7, buffer.limit());

    buffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), buffer);

    // Raise the limit and read the remaining 3 bytes from the stream.
    buffer.position(7);
    buffer.limit(10);
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    buffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), buffer);
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testHeapReadFullyPositionAndLimit() throws Exception {
    // Constrain both ends of the window: start at position 3 with the
    // limit at 7, so only 4 bytes may be read initially.
    final ByteBuffer buffer = ByteBuffer.allocate(10);
    buffer.position(3);
    buffer.limit(7);
    buffer.mark();

    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(7, buffer.position());
    Assert.assertEquals(7, buffer.limit());

    // No space left within the limit; a repeat call must not throw.
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(7, buffer.position());
    Assert.assertEquals(7, buffer.limit());

    // Rewind to the mark and check the 4 bytes that were read.
    buffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 4), buffer);

    // Widen the window and read 3 more bytes from the stream.
    buffer.position(7);
    buffer.limit(10);
    H1SeekableInputStream.readFullyHeapBuffer(stream, buffer);
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    buffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), buffer);
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testDirectReadFullySmallBuffer() throws Exception {
    // A direct buffer smaller than the stream: only the first 8 bytes
    // should be transferred via the temporary heap buffer from TEMP.
    ByteBuffer buffer = ByteBuffer.allocateDirect(8);

    FSDataInputStream stream = new FSDataInputStream(new MockInputStream());

    H1SeekableInputStream.readFullyDirectBuffer(stream, buffer, TEMP.get());
    Assert.assertEquals(8, buffer.position());
    Assert.assertEquals(8, buffer.limit());

    // A second call with a full buffer reads nothing and must not throw.
    H1SeekableInputStream.readFullyDirectBuffer(stream, buffer, TEMP.get());
    Assert.assertEquals(8, buffer.position());
    Assert.assertEquals(8, buffer.limit());

    buffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 8), buffer);
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testDirectReadFullyLargeBuffer() throws Exception {
    // A 20-byte direct buffer exceeds the stream's 10 bytes, so the
    // read must end in EOFException.
    final ByteBuffer buffer = ByteBuffer.allocateDirect(20);

    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream());

    TestUtils.assertThrows("Should throw EOFException", EOFException.class, new Callable() {
        @Override
        public Object call() throws Exception {
            H1SeekableInputStream.readFullyDirectBuffer(stream, buffer, TEMP.get());
            return null;
        }
    });

    // NOTE: Unlike readFullyHeapBuffer, the direct path performs multiple
    // read operations and copies everything available before hitting EOF,
    // so position ends at 10 — the bytes already in the buffer are valid.
    // The heap path can't match this without switching from readFully to
    // read on the underlying FSDataInputStream.
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(20, buffer.limit());
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testDirectReadFullyJustRight() throws Exception {
    // Direct buffer capacity exactly matches the stream length.
    final ByteBuffer buffer = ByteBuffer.allocateDirect(10);

    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream());

    // First call consumes all available bytes without EOFException.
    H1SeekableInputStream.readFullyDirectBuffer(stream, buffer, TEMP.get());
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    // Requesting zero additional bytes must not raise EOFException.
    H1SeekableInputStream.readFullyDirectBuffer(stream, buffer, TEMP.get());
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    buffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), buffer);
}

From source file:org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License:Apache License

@Test
public void testDirectReadFullySmallReads() throws Exception {
    final ByteBuffer buffer = ByteBuffer.allocateDirect(10);

    // MockInputStream(2, 3, 3) presumably yields short reads of those
    // sizes — TODO confirm; the direct path must loop until full.
    final FSDataInputStream stream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    H1SeekableInputStream.readFullyDirectBuffer(stream, buffer, TEMP.get());
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    // A repeat call against a full buffer must not throw.
    H1SeekableInputStream.readFullyDirectBuffer(stream, buffer, TEMP.get());
    Assert.assertEquals(10, buffer.position());
    Assert.assertEquals(10, buffer.limit());

    buffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), buffer);
}