Example usage for org.apache.hadoop.io.compress CodecPool returnDecompressor

Introduction

On this page you can find example usage of org.apache.hadoop.io.compress CodecPool returnDecompressor.

Prototype

public static void returnDecompressor(Decompressor decompressor) 

Document

Return the Decompressor to the pool.
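
Before the project-specific examples below, here is a minimal, self-contained sketch of the usual lifecycle: borrow a Decompressor with CodecPool.getDecompressor, let the codec wrap the raw stream, and hand the instance back with CodecPool.returnDecompressor in a finally block once the stream is closed. The class name ReturnDecompressorExample and the countLines method are illustrative only and are not taken from any of the projects listed under Usage.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;

public class ReturnDecompressorExample {

    /** Counts the lines of a possibly compressed file and always returns the pooled Decompressor. */
    public static long countLines(Configuration conf, Path file) throws IOException {
        FileSystem fs = file.getFileSystem(conf);
        CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(file);

        InputStream in = fs.open(file);
        Decompressor decompressor = null;
        try {
            if (codec != null) {
                // Borrow a reusable Decompressor from the pool instead of allocating one per file.
                decompressor = CodecPool.getDecompressor(codec);
                in = codec.createInputStream(in, decompressor);
            }
            BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
            long lines = 0;
            while (reader.readLine() != null) {
                lines++;
            }
            return lines;
        } finally {
            try {
                in.close();
            } finally {
                if (decompressor != null) {
                    // Hand the instance back so other readers can reuse it; it must not be touched afterwards.
                    CodecPool.returnDecompressor(decompressor);
                }
            }
        }
    }
}

All of the examples below follow the same rule: the Decompressor is returned in a finally clause so that it goes back to the pool even if closing the underlying stream fails.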

Usage

From source file: ca.sparkera.adapters.mapreduce.MainframeVBRecordReader.java

License: Apache License

@Override
public synchronized void close() throws IOException {
    try {
        if (inputStream != null) {
            inputStream.close();
            inputStream = null;
        }
    } finally {
        if (decompressor != null) {
            CodecPool.returnDecompressor(decompressor);
            decompressor = null;
        }
    }
}

From source file: cascading.tuple.hadoop.collect.HadoopSpillableTupleList.java

License: Open Source License

@Override
protected TupleInputStream createTupleInputStream(File file) {
    try {
        InputStream inputStream;

        inputStream = new FileInputStream(file);

        Decompressor decompressor = null;

        if (codec != null) {
            decompressor = getDecompressor();
            inputStream = codec.createInputStream(inputStream, decompressor);
        }

        final Decompressor finalDecompressor = decompressor;
        return new HadoopTupleInputStream(inputStream, tupleSerialization.getElementReader()) {
            @Override
            public void close() throws IOException {
                try {
                    super.close();
                } finally {
                    if (finalDecompressor != null)
                        CodecPool.returnDecompressor(finalDecompressor);
                }
            }
        };
    } catch (IOException exception) {
        throw new TupleException("unable to create temporary file output stream", exception);
    }
}

From source file: cn.uc.hadoop.mapreduce.lib.input.FileNameLineRecordReader.java

License: Apache License

public synchronized void close() throws IOException {
    try {
        if (in != null) {
            in.close();
        }
    } finally {
        if (decompressor != null) {
            CodecPool.returnDecompressor(decompressor);
        }
    }
}

From source file: com.aliyun.fs.oss.common.OssRecordReader.java

License: Apache License

public void close() throws IOException {
    try {
        if (in != null) {
            in.close();
        }
    } finally {
        if (decompressor != null) {
            CodecPool.returnDecompressor(decompressor);
        }
    }
}

From source file: com.ashishpaliwal.hadoop.utils.inputformat.CsvRecordReader.java

License: Apache License

public synchronized void close() throws IOException {
    try {
        if (this.in != null)
            this.in.close();
    } finally {
        if (this.decompressor != null)
            CodecPool.returnDecompressor(this.decompressor);
    }
}

From source file: com.facebook.presto.rcfile.HadoopDecompressor.java

License: Apache License

@Override
public void destroy() {
    if (destroyed) {
        return;
    }
    destroyed = true;
    CodecPool.returnDecompressor(decompressor);
}

From source file: com.hadoop.mapreduce.TestLzoLazyLoading.java

License: Open Source License

public static String readFile(String name) throws IOException {
    Path file = new Path(TEST_ROOT_DIR + "/" + name);
    CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(file);
    InputStream f;
    Decompressor decompressor = null;
    if (codec == null) {
        f = localFs.open(file);
    } else {
        decompressor = CodecPool.getDecompressor(codec);
        f = codec.createInputStream(localFs.open(file), decompressor);
    }
    BufferedReader b = new BufferedReader(new InputStreamReader(f));
    StringBuilder result = new StringBuilder();
    String line = b.readLine();
    while (line != null) {
        result.append(line);
        result.append('\n');
        line = b.readLine();
    }
    b.close();
    if (decompressor != null) {
        CodecPool.returnDecompressor(decompressor);
    }
    return result.toString();
}

From source file: com.ricemap.spateDB.core.SpatialSite.java

License: Apache License

public static boolean isRTree(FileSystem fs, Path path) throws IOException {
    FileStatus file = fs.getFileStatus(path);
    Path fileToCheck;
    if (file.isDir()) {
        // Check any cell (e.g., first cell)
        GlobalIndex<Partition> gIndex = getGlobalIndex(fs, path);
        if (gIndex == null)
            return false;
        fileToCheck = new Path(path, gIndex.iterator().next().filename);
    } else {
        fileToCheck = file.getPath();
    }
    InputStream fileIn = fs.open(fileToCheck);

    // Check if file is compressed
    CompressionCodec codec = compressionCodecs.getCodec(fileToCheck);
    Decompressor decompressor = null;
    if (codec != null) {
        decompressor = CodecPool.getDecompressor(codec);
        fileIn = codec.createInputStream(fileIn, decompressor);
    }
    byte[] signature = new byte[RTreeFileMarkerB.length];
    fileIn.read(signature);
    fileIn.close();
    if (decompressor != null) {
        CodecPool.returnDecompressor(decompressor);
    }
    return Arrays.equals(signature, SpatialSite.RTreeFileMarkerB);
}

From source file: com.ricemap.spateDB.mapred.SpatialRecordReader.java

License: Apache License

@Override
public void close() throws IOException {
    try {
        if (lineReader != null) {
            lineReader.close();
        } else if (in != null) {
            in.close();
        }
        lineReader = null;
        in = null;
    } finally {
        if (decompressor != null) {
            CodecPool.returnDecompressor(decompressor);
        }
    }
}

From source file: de.l3s.streamcorpus.terrier.ThriftFileCollectionRecordReader.java

License: Apache License

/** 
 * Reading a bunch of lines of file paths in a list.
 * The code in this method is redistributed from Hadoop LineRecordReader
 * @throws IOException 
 */
private void loadPathsFromInputSplit(InputSplit split, Configuration conf) throws IOException {
    FileSplit fileSplit = (FileSplit) split;
    Path path = fileSplit.getPath();

    long begin = fileSplit.getStart();
    long end = begin + fileSplit.getLength();

    LOG.info("Reading paths in file " + path.getName());

    // First check the compression codec
    CompressionCodecFactory compressionCodec = new CompressionCodecFactory(conf);
    CompressionCodec codec = compressionCodec.getCodec(path);
    FSDataInputStream fis = fs.open(path);
    SplitLineReader in;

    Seekable filePosition;

    boolean compressed = false;
    Decompressor decompressor = null;
    if (null != codec) {
        compressed = true;
        decompressor = CodecPool.getDecompressor(codec);
        if (codec instanceof SplittableCompressionCodec) {
            final SplitCompressionInputStream cIn = ((SplittableCompressionCodec) codec).createInputStream(fis,
                    decompressor, begin, end, SplittableCompressionCodec.READ_MODE.BYBLOCK);
            in = new CompressedSplitLineReader(cIn, conf, (byte[]) null);
            begin = cIn.getAdjustedStart();
            end = cIn.getAdjustedEnd();
            filePosition = cIn;
        } else {
            in = new SplitLineReader(codec.createInputStream(fis, decompressor), conf, null);
            filePosition = fis;
        }
    } else {
        fis.seek(begin);
        in = new SplitLineReader(fis, conf, (byte[]) null);
        filePosition = fis;
    }
    // If this is not the first split, we always throw away first record
    // because we always (except the last split) read one extra line in
    // next() method.
    if (begin != 0) {
        begin += in.readLine(new Text(), 0, maxBytesToConsume(compressed, begin, end));
    }
    long pos = begin;

    int newSize = 0;
    final Text nextLine = new Text();
    paths = new ArrayList<>();
    while (getFilePosition(compressed, filePosition, pos) <= end || in.needAdditionalRecordAfterSplit()) {

        if (pos == 0) {
            // Strip BOM(Byte Order Mark)
            // Text only support UTF-8, we only need to check UTF-8 BOM
            // (0xEF,0xBB,0xBF) at the start of the text stream.
            newSize = in.readLine(nextLine, Integer.MAX_VALUE, Integer.MAX_VALUE);
            pos += newSize;
            int textLength = nextLine.getLength();
            byte[] textBytes = nextLine.getBytes();
            if ((textLength >= 3) && (textBytes[0] == (byte) 0xEF) && (textBytes[1] == (byte) 0xBB)
                    && (textBytes[2] == (byte) 0xBF)) {
                // find UTF-8 BOM, strip it.
                LOG.info("Found UTF-8 BOM and skipped it");
                textLength -= 3;
                newSize -= 3;
                if (textLength > 0) {
                    // It may work to use the same buffer and 
                    // not do the copyBytes
                    textBytes = nextLine.copyBytes();
                    nextLine.set(textBytes, 3, textLength);
                } else {
                    nextLine.clear();
                }
            }
        } else {
            newSize = in.readLine(nextLine, Integer.MAX_VALUE, maxBytesToConsume(compressed, pos, end));
            pos += newSize;
        }

        paths.add(nextLine.toString());
        // line too long. try again
        LOG.info("Skipped line of size " + newSize + " at pos " + (pos - newSize));
    }

    try {
        if (in != null) {
            in.close();
        }
        if (fis != null) {
            fis.close();
        }
    } finally {
        if (decompressor != null) {
            CodecPool.returnDecompressor(decompressor);
        }
    }
}