Example usage for org.apache.hadoop.fs Seekable getPos

Introduction

On this page you can find example usages of org.apache.hadoop.fs.Seekable.getPos().

Prototype

long getPos() throws IOException;


Document

Return the current offset from the start of the file.
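
A minimal sketch of the method in use (the helper name, path, and configuration are assumptions, not part of the prototype above): the FSDataInputStream returned by FileSystem#open implements Seekable, so getPos() reports the offset of the next byte to be read.

public static void printOffsets(Configuration conf, Path path) throws IOException {
    FileSystem fs = path.getFileSystem(conf);
    try (FSDataInputStream in = fs.open(path)) {
        // A freshly opened stream starts at offset 0.
        System.out.println("start: " + in.getPos());
        in.readByte();
        // getPos() reflects every byte consumed so far.
        System.out.println("after one byte: " + in.getPos());
    }
}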

Usage

From source file: alluxio.client.hadoop.HdfsFileInputStreamIntegrationTest.java

License: Apache License

private void seekTest(Seekable stream) throws Exception {
    stream.seek(0);
    Assert.assertEquals(0, stream.getPos());

    stream.seek(FILE_LEN / 2);
    Assert.assertEquals(FILE_LEN / 2, stream.getPos());

    stream.seek(1);
    Assert.assertEquals(1, stream.getPos());
}

From source file: alluxio.hadoop.HdfsFileInputStreamIntegrationTest.java

License: Apache License

private void seekTest(Seekable stream) throws IOException {
    stream.seek(0);
    Assert.assertEquals(0, stream.getPos());

    stream.seek(FILE_LEN / 2);
    Assert.assertEquals(FILE_LEN / 2, stream.getPos());

    stream.seek(1);
    Assert.assertEquals(1, stream.getPos());
}
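
Both tests exercise the same contract: after seek(n), getPos() must return exactly n, whether the seek moves forward or backward. A hedged sketch of a driver for such a helper (the local path and the pre-written test file are assumptions):

@Test
public void seekReportsExactPosition() throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    // Assumes a test file of FILE_LEN bytes was written beforehand.
    try (FSDataInputStream stream = fs.open(new Path("/tmp/seekable-test-file"))) {
        seekTest(stream); // FSDataInputStream implements Seekable
    }
}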

From source file: co.cask.tigon.io.SeekableInputStream.java

License: Apache License

/**
 * Creates a {@link SeekableInputStream} from the given {@link java.io.InputStream}. An exception
 * will be thrown if it fails to do so.
 *
 * @throws java.io.IOException If the given input stream is not seekable.
 */
public static SeekableInputStream create(InputStream input) throws IOException {
    if (input instanceof SeekableInputStream) {
        return (SeekableInputStream) input;
    }
    if (input instanceof FileInputStream) {
        return create((FileInputStream) input);
    }
    if (input instanceof Seekable) {
        final Seekable seekable = (Seekable) input;
        return new SeekableInputStream(input) {
            @Override
            public void seek(long pos) throws IOException {
                seekable.seek(pos);
            }

            @Override
            public long getPos() throws IOException {
                return seekable.getPos();
            }

            @Override
            public boolean seekToNewSource(long targetPos) throws IOException {
                return seekable.seekToNewSource(targetPos);
            }
        };
    }

    throw new IOException("Failed to create SeekableInputStream from " + input.getClass());
}
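
A hedged usage sketch (the path is an assumption): an FSDataInputStream is Seekable but not a FileInputStream, so it falls through to the Seekable branch above, and getPos() on the wrapper delegates directly to the underlying stream.

FSDataInputStream hdfsStream = fs.open(new Path("/data/input.bin"));
SeekableInputStream seekable = SeekableInputStream.create(hdfsStream);
seekable.seek(128L);
long pos = seekable.getPos(); // delegates to hdfsStream.getPos(); returns 128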

From source file: de.l3s.streamcorpus.terrier.ThriftFileCollectionRecordReader.java

License: Apache License

private static long getFilePosition(boolean isCompressedInput, Seekable filePosition, long pos)
        throws IOException {
    long retVal;
    if (isCompressedInput && null != filePosition) {
        retVal = filePosition.getPos();
    } else {
        retVal = pos;
    }
    return retVal;
}
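
The branch matters because a consumer of compressed input cannot track file offsets itself: record readers count decompressed bytes, while only the underlying stream knows how far it has advanced in the compressed file. A sketch of how such a helper is typically used to report split progress (start, end, pos, isCompressedInput, and filePosition are assumed record-reader fields):

public float getProgress() throws IOException {
    if (start == end) {
        return 0.0f;
    }
    long current = getFilePosition(isCompressedInput, filePosition, pos);
    return Math.min(1.0f, (current - start) / (float) (end - start));
}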

From source file: org.apache.sqoop.connector.hdfs.HdfsExtractor.java

License: Apache License

/**
 * Extracts a text file.
 *
 * @param file path of the file to extract
 * @param start byte offset at which this split begins
 * @param length number of bytes in this split
 * @throws IOException
 */
@SuppressWarnings("resource")
private void extractTextFile(Path file, long start, long length) throws IOException {
    LOG.info("Extracting text file");
    long end = start + length;
    FileSystem fs = file.getFileSystem(conf);
    FSDataInputStream filestream = fs.open(file);
    CompressionCodec codec = (new CompressionCodecFactory(conf)).getCodec(file);
    LineReader filereader;
    Seekable fileseeker = filestream;

    // Hadoop 1.0 does not have support for custom record delimiters, so we
    // support only the default one. We might add another "else if" case for
    // SplittableCompressionCodec once we drop support for Hadoop 1.0.
    if (codec == null) {
        filestream.seek(start);
        filereader = new LineReader(filestream);
    } else {
        filereader = new LineReader(codec.createInputStream(filestream, codec.createDecompressor()), conf);
        fileseeker = filestream;
    }
    if (start != 0) {
        // always throw away first record because
        // one extra line is read in previous split
        start += filereader.readLine(new Text(), 0);
    }
    int size;
    LOG.info("Start position: " + String.valueOf(start));
    long next = start;
    while (next <= end) {
        Text line = new Text();
        size = filereader.readLine(line, Integer.MAX_VALUE);
        if (size == 0) {
            break;
        }
        if (codec == null) {
            next += size;
        } else {
            next = fileseeker.getPos();
        }
        rowRead++;
        dataWriter.writeStringRecord(line.toString());
    }
    LOG.info("Extracting ended on position: " + fileseeker.getPos());
    filestream.close();
}
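
The loop advances next differently per branch for the same reason as above: readLine returns decompressed byte counts, which cannot be compared against offsets in a compressed file, so the compressed position is taken from fileseeker.getPos() instead. A hedged sketch of that rule in isolation (the helper name is an assumption):

private static long advance(long next, int bytesRead, CompressionCodec codec, Seekable fileseeker)
        throws IOException {
    if (codec == null) {
        // Plain text: file offsets advance by exactly the bytes readLine returned.
        return next + bytesRead;
    }
    // Compressed: only the underlying stream knows the real file offset.
    return fileseeker.getPos();
}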