List of usage examples for the org.apache.hadoop.fs.FSDataInputStream constructor
public FSDataInputStream(InputStream in)
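All of the examples below pass a mock or adapter stream to this constructor. As a minimal sketch of why that is necessary: the wrapped InputStream is expected to also implement Seekable and PositionedReadable, otherwise the constructor rejects it. The SeekableByteArrayInputStream class below is a hypothetical in-memory helper written only for illustration; it is not part of any of the source files listed on this page, which use their own wrappers (MockInputStream, VecDataInputStream) for the same purpose.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.fs.Seekable;

// Hypothetical in-memory stream for this sketch only.
public class SeekableByteArrayInputStream extends ByteArrayInputStream
        implements Seekable, PositionedReadable {

    public SeekableByteArrayInputStream(byte[] data) {
        super(data);
    }

    @Override
    public void seek(long newPos) {
        pos = (int) newPos; // ByteArrayInputStream keeps its offset in the protected field 'pos'
    }

    @Override
    public long getPos() {
        return pos;
    }

    @Override
    public boolean seekToNewSource(long targetPos) {
        return false; // single in-memory copy, no alternate sources
    }

    @Override
    public int read(long position, byte[] buffer, int offset, int length) {
        int saved = pos;          // positioned reads must not move the stream pointer
        seek(position);
        int n = read(buffer, offset, length);
        pos = saved;
        return n;
    }

    @Override
    public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
        if (read(position, buffer, offset, length) < length) {
            throw new IOException("Reached end of stream before reading fully");
        }
    }

    @Override
    public void readFully(long position, byte[] buffer) throws IOException {
        readFully(position, buffer, 0, buffer.length);
    }

    public static void main(String[] args) throws IOException {
        // Usage: wrap the seekable stream, then seek and read as with any FSDataInputStream.
        FSDataInputStream in =
                new FSDataInputStream(new SeekableByteArrayInputStream(new byte[] {1, 2, 3, 4}));
        in.seek(2);
        System.out.println(in.read()); // prints 3
        in.close();
    }
}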
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testHeapReadFullyPositionAndLimit() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(10);
    readBuffer.position(3);
    readBuffer.limit(7);
    readBuffer.mark();

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 4), readBuffer);

    readBuffer.position(7);
    readBuffer.limit(10);
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);
}
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testDirectReadFullySmallBuffer() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocateDirect(8);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(8, readBuffer.position());
    Assert.assertEquals(8, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(8, readBuffer.position());
    Assert.assertEquals(8, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 8), readBuffer);
}
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testDirectReadFullyLargeBuffer() throws Exception {
    final ByteBuffer readBuffer = ByteBuffer.allocateDirect(20);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());
    final MockBufferReader reader = new MockBufferReader(hadoopStream);

    TestUtils.assertThrows("Should throw EOFException", EOFException.class, new Callable() {
        @Override
        public Object call() throws Exception {
            H2SeekableInputStream.readFully(reader, readBuffer);
            return null;
        }
    });

    // NOTE: This behavior differs from readFullyHeapBuffer because direct uses
    // several read operations that will read up to the end of the input. This
    // is a correct value because the bytes in the buffer are valid. This
    // behavior can't be implemented for the heap buffer without using the read
    // method instead of the readFully method on the underlying
    // FSDataInputStream.
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(20, readBuffer.limit());
}
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testDirectReadFullyJustRight() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    // reads all of the bytes available without EOFException
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    // trying to read 0 more bytes doesn't result in EOFException
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testDirectReadFullySmallReads() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testDirectReadFullyPosition() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    readBuffer.position(3);
    readBuffer.mark();

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);
}
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testDirectReadFullyLimit() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    readBuffer.limit(7);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    H2SeekableInputStream.Reader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);

    readBuffer.position(7);
    readBuffer.limit(10);
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}
From source file: org.apache.parquet.hadoop.util.TestHadoop2ByteBufferReads.java
License: Apache License

@Test
public void testDirectReadFullyPositionAndLimit() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocateDirect(10);
    readBuffer.position(3);
    readBuffer.limit(7);
    readBuffer.mark();

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));
    MockBufferReader reader = new MockBufferReader(hadoopStream);

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(7, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 4), readBuffer);

    readBuffer.position(7);
    readBuffer.limit(10);
    H2SeekableInputStream.readFully(reader, readBuffer);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 7), readBuffer);
}
From source file: org.apache.parquet.hadoop.VecParquetReader.java
License: Apache License

public static byte[] readFooterAsBytes(Vec vec) {
    FSDataInputStream f = null;
    try {
        f = new FSDataInputStream(new VecDataInputStream(vec));
        final int FOOTER_LENGTH_SIZE = 4;
        // layout: MAGIC + data + footer + footerIndex + MAGIC
        if (vec.length() < MAGIC.length + FOOTER_LENGTH_SIZE + MAGIC.length) {
            throw new RuntimeException("Vec doesn't represent a Parquet data (too short)");
        }
        long footerLengthIndex = vec.length() - FOOTER_LENGTH_SIZE - MAGIC.length;
        f.seek(footerLengthIndex);
        int footerLength = readIntLittleEndian(f);
        byte[] magic = new byte[MAGIC.length];
        f.readFully(magic);
        if (!Arrays.equals(MAGIC, magic)) {
            throw new RuntimeException("Vec is not a Parquet file. expected magic number at tail "
                    + Arrays.toString(MAGIC) + " but found " + Arrays.toString(magic));
        }
        long footerIndex = footerLengthIndex - footerLength;
        if (footerIndex < MAGIC.length || footerIndex >= footerLengthIndex) {
            throw new RuntimeException("corrupted file: the footer index is not within the Vec");
        }
        f.seek(footerIndex);
        byte[] metadataBytes = new byte[footerLength];
        f.readFully(metadataBytes);
        return metadataBytes;
    } catch (IOException e) {
        throw new RuntimeException("Failed to read Parquet metadata", e);
    } finally {
        try {
            if (f != null) f.close();
        } catch (Exception e) {
            Log.warn("Failed to close Vec data input stream", e);
        }
    }
}
From source file: org.apache.tajo.util.FileUtil.java
License: Apache License

public static Message loadProto(FileSystem fs, Path path, Message proto) throws IOException {
    FSDataInputStream in = null;
    try {
        in = new FSDataInputStream(fs.open(path));
        Message.Builder builder = proto.newBuilderForType().mergeFrom(in);
        return builder.build();
    } finally {
        IOUtils.closeStream(in);
    }
}