Example usage for org.apache.hadoop.fs FSDataInputStream FSDataInputStream

Introduction

On this page you can find example usages of the org.apache.hadoop.fs.FSDataInputStream constructor FSDataInputStream(InputStream in).

Prototype

public FSDataInputStream(InputStream in) 
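
Before the project-specific examples below, here is a minimal sketch of the constructor in use. The class name and file path are placeholders: the sketch wraps the stream returned by FileSystem.open(), which is itself already an FSDataInputStream, so the extra wrapping is redundant in practice but mirrors how the Mahout examples below use the constructor.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FSDataInputStreamExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical path; point this at a real file before running.
        Path path = new Path("/tmp/example.bin");
        FileSystem fs = path.getFileSystem(new Configuration());
        // Wrap the opened stream; FSDataInputStream adds seek(), getPos()
        // and positioned reads on top of the plain InputStream API.
        try (FSDataInputStream in = new FSDataInputStream(fs.open(path))) {
            in.seek(0);
            int firstByte = in.read();
            System.out.println("first byte: " + firstByte);
        }
    }
}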

Usage

From source file: org.apache.mahout.classifier.mlp.NeuralNetwork.java

License: Apache License

/**
 * Read the model meta-data from the specified location.
 *
 * @throws IOException
 */
protected void readFromModel() throws IOException {
    log.info("Load model from {}", modelPath);
    Preconditions.checkArgument(modelPath != null, "Model path has not been set.");
    FSDataInputStream is = null;
    try {
        Path path = new Path(modelPath);
        FileSystem fs = path.getFileSystem(new Configuration());
        is = new FSDataInputStream(fs.open(path));
        readFields(is);
    } finally {
        Closeables.close(is, true);
    }
}

From source file: org.apache.mahout.df.DecisionForest.java

License: Apache License

/**
 * Load the forest from a single file or a directory of files
 */
public static DecisionForest load(Configuration conf, Path forestPath) throws IOException {
    FileSystem fs = forestPath.getFileSystem(conf);
    Path[] files;
    if (fs.getFileStatus(forestPath).isDir()) {
        files = DFUtils.listOutputFiles(fs, forestPath);
    } else {
        files = new Path[] { forestPath };
    }

    DecisionForest forest = null;
    for (Path path : files) {
        FSDataInputStream dataInput = new FSDataInputStream(fs.open(path));
        try {
            if (forest == null) {
                forest = read(dataInput);
            } else {
                forest.readFields(dataInput);
            }
        } finally {
            dataInput.close();
        }
    }

    return forest;

}

From source file: org.apache.orc.bench.core.NullFileSystem.java

License: Apache License

@Override
public FSDataInputStream open(Path path, int i) {
    return new FSDataInputStream(new InputStream() {
    @Override
        public int read() {
            return -1;
        }
    });
}

From source file: org.apache.orc.impl.TestReaderImpl.java

License: Apache License

private void prepareTestCase(byte[] bytes) {
    buffer = ByteBuffer.wrap(bytes);
    psLen = buffer.get(bytes.length - 1) & 0xff;
    in = new FSDataInputStream(new SeekableByteArrayInputStream(bytes));
}

From source file: org.apache.orc.impl.TestRecordReaderImpl.java

License: Apache License

@Test
public void testMaxLengthToReader() throws Exception {
    Configuration conf = new Configuration();
    OrcProto.Type rowType = OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRUCT).build();
    OrcProto.Footer footer = OrcProto.Footer.newBuilder().setHeaderLength(0).setContentLength(0)
            .setNumberOfRows(0).setRowIndexStride(0).addTypes(rowType).build();
    OrcProto.PostScript ps = OrcProto.PostScript.newBuilder().setCompression(OrcProto.CompressionKind.NONE)
            .setFooterLength(footer.getSerializedSize()).setMagic("ORC").addVersion(0).addVersion(11).build();
    DataOutputBuffer buffer = new DataOutputBuffer();
    footer.writeTo(buffer);
    ps.writeTo(buffer);
    buffer.write(ps.getSerializedSize());
    FileSystem fs = mock(FileSystem.class, settings);
    FSDataInputStream file = new FSDataInputStream(new BufferInStream(buffer.getData(), buffer.getLength()));
    Path p = new Path("/dir/file.orc");
    when(fs.open(p)).thenReturn(file);
    OrcFile.ReaderOptions options = OrcFile.readerOptions(conf);
    options.filesystem(fs);
    options.maxLength(buffer.getLength());
    when(fs.getFileStatus(p)).thenReturn(new FileStatus(10, false, 3, 3000, 0, p));
    Reader reader = OrcFile.createReader(p, options);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testHeapRead() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(20);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());

    int len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(10, len);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(20, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(-1, len);

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testHeapSmallBuffer() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(5);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream());

    int len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(5, len);
    Assert.assertEquals(5, readBuffer.position());
    Assert.assertEquals(5, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(0, len);

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 5), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testHeapSmallReads() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(10);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(2, 3, 3));

    int len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(2, len);
    Assert.assertEquals(2, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(3, len);
    Assert.assertEquals(5, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(3, len);
    Assert.assertEquals(8, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(2, len);
    Assert.assertEquals(10, readBuffer.position());
    Assert.assertEquals(10, readBuffer.limit());

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testHeapPosition() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(20);
    readBuffer.position(10);
    readBuffer.mark();

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(8));

    int len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(8, len);
    Assert.assertEquals(18, readBuffer.position());
    Assert.assertEquals(20, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(2, len);
    Assert.assertEquals(20, readBuffer.position());
    Assert.assertEquals(20, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(-1, len);

    readBuffer.reset();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY), readBuffer);
}

From source file: org.apache.parquet.hadoop.util.TestHadoop1ByteBufferReads.java

License: Apache License

@Test
public void testHeapLimit() throws Exception {
    ByteBuffer readBuffer = ByteBuffer.allocate(20);
    readBuffer.limit(8);

    FSDataInputStream hadoopStream = new FSDataInputStream(new MockInputStream(7));

    int len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(7, len);
    Assert.assertEquals(7, readBuffer.position());
    Assert.assertEquals(8, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(1, len);
    Assert.assertEquals(8, readBuffer.position());
    Assert.assertEquals(8, readBuffer.limit());

    len = H1SeekableInputStream.readHeapBuffer(hadoopStream, readBuffer);
    Assert.assertEquals(0, len);

    readBuffer.flip();
    Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 8), readBuffer);
}