Example usage for org.apache.hadoop.io.compress CompressionOutputStream close

List of usage examples for org.apache.hadoop.io.compress CompressionOutputStream close

Introduction

In this page you can find the example usage for org.apache.hadoop.io.compress CompressionOutputStream close.

Prototype

@Override
    public void close() throws IOException;

Source Link

Usage

From source file:org.apache.carbondata.hadoop.csv.CSVInputFormatTest.java

License:Apache License

/**
 * generate compressed files, no need to call this method.
 * @throws Exception/*w  ww  . j  a v  a  2 s .  c o m*/
 */
public void generateCompressFiles() throws Exception {
    String pwd = new File("src/test/resources").getCanonicalPath();
    String inputFile = pwd + "/data.csv";
    FileInputStream input = new FileInputStream(inputFile);
    Configuration conf = new Configuration();

    // .gz
    String outputFile = pwd + "/data.csv.gz";
    FileOutputStream output = new FileOutputStream(outputFile);
    GzipCodec gzip = new GzipCodec();
    gzip.setConf(conf);
    CompressionOutputStream outputStream = gzip.createOutputStream(output);
    int i = -1;
    while ((i = input.read()) != -1) {
        outputStream.write(i);
    }
    outputStream.close();
    input.close();

    // .bz2
    input = new FileInputStream(inputFile);
    outputFile = pwd + "/data.csv.bz2";
    output = new FileOutputStream(outputFile);
    BZip2Codec bzip2 = new BZip2Codec();
    bzip2.setConf(conf);
    outputStream = bzip2.createOutputStream(output);
    i = -1;
    while ((i = input.read()) != -1) {
        outputStream.write(i);
    }
    outputStream.close();
    input.close();

    // .snappy
    input = new FileInputStream(inputFile);
    outputFile = pwd + "/data.csv.snappy";
    output = new FileOutputStream(outputFile);
    SnappyCodec snappy = new SnappyCodec();
    snappy.setConf(conf);
    outputStream = snappy.createOutputStream(output);
    i = -1;
    while ((i = input.read()) != -1) {
        outputStream.write(i);
    }
    outputStream.close();
    input.close();

    //.lz4
    input = new FileInputStream(inputFile);
    outputFile = pwd + "/data.csv.lz4";
    output = new FileOutputStream(outputFile);
    Lz4Codec lz4 = new Lz4Codec();
    lz4.setConf(conf);
    outputStream = lz4.createOutputStream(output);
    i = -1;
    while ((i = input.read()) != -1) {
        outputStream.write(i);
    }
    outputStream.close();
    input.close();

}

From source file:org.apache.carbondata.processing.csvload.CSVInputFormatTest.java

License:Apache License

/**
 * Generates compressed copies (.gz, .bz2, .snappy, .lz4) of data.csv under
 * src/test/resources/csv. Utility for regenerating test fixtures; no need to
 * call this method during normal test runs.
 *
 * @throws Exception if the input cannot be read or any output cannot be written
 */
public void generateCompressFiles() throws Exception {
    String pwd = new File("src/test/resources/csv").getCanonicalPath();
    String inputFile = pwd + "/data.csv";
    Configuration conf = new Configuration();
    // Buffered copy instead of the old byte-at-a-time read()/write() loop.
    byte[] buffer = new byte[4096];
    int bytesRead;

    // .gz
    GzipCodec gzip = new GzipCodec();
    gzip.setConf(conf);
    // try-with-resources closes all streams even if a read/write throws
    // (the old code leaked them on failure); closing the
    // CompressionOutputStream also finishes the compressed trailer and
    // closes the underlying FileOutputStream.
    try (FileInputStream input = new FileInputStream(inputFile);
            FileOutputStream output = new FileOutputStream(pwd + "/data.csv.gz");
            CompressionOutputStream outputStream = gzip.createOutputStream(output)) {
        while ((bytesRead = input.read(buffer)) != -1) {
            outputStream.write(buffer, 0, bytesRead);
        }
    }

    // .bz2
    BZip2Codec bzip2 = new BZip2Codec();
    bzip2.setConf(conf);
    try (FileInputStream input = new FileInputStream(inputFile);
            FileOutputStream output = new FileOutputStream(pwd + "/data.csv.bz2");
            CompressionOutputStream outputStream = bzip2.createOutputStream(output)) {
        while ((bytesRead = input.read(buffer)) != -1) {
            outputStream.write(buffer, 0, bytesRead);
        }
    }

    // .snappy
    SnappyCodec snappy = new SnappyCodec();
    snappy.setConf(conf);
    try (FileInputStream input = new FileInputStream(inputFile);
            FileOutputStream output = new FileOutputStream(pwd + "/data.csv.snappy");
            CompressionOutputStream outputStream = snappy.createOutputStream(output)) {
        while ((bytesRead = input.read(buffer)) != -1) {
            outputStream.write(buffer, 0, bytesRead);
        }
    }

    // .lz4
    Lz4Codec lz4 = new Lz4Codec();
    lz4.setConf(conf);
    try (FileInputStream input = new FileInputStream(inputFile);
            FileOutputStream output = new FileOutputStream(pwd + "/data.csv.lz4");
            CompressionOutputStream outputStream = lz4.createOutputStream(output)) {
        while ((bytesRead = input.read(buffer)) != -1) {
            outputStream.write(buffer, 0, bytesRead);
        }
    }
}

From source file:org.bgi.flexlab.gaea.tools.bamqualtiycontrol.report.CNVDepthReport.java

License:Open Source License

/**
 * Writes per-lane, per-chromosome CNV depth reports as gzip-compressed files
 * under {@code <outputPath>/cnvDepth/<sampleName>-lane<i>/<chrName>.dep.gz}.
 * Each output line is {@code chrName \t position \t depth}.
 *
 * @param options    provides the report output root path
 * @param fs         filesystem used to create the output files
 * @param conf       Hadoop configuration for codec lookup
 * @param sampleName sample identifier embedded in the output directory name
 * @throws IOException if a report file cannot be created or written
 */
public void toReport(BamQualityControlOptions options, FileSystem fs, Configuration conf, String sampleName)
        throws IOException {
    // The factory depends only on conf, so build it once instead of once per
    // chromosome inside the loops (loop-invariant hoisting).
    CompressionCodecFactory codecFactory = new CompressionCodecFactory(conf);
    for (int i = 0; i < depths.length; i++) {
        Map<String, WrappedIntArray> sampleDepth = depths[i].laneDepth;
        for (String chrName : sampleDepth.keySet()) {
            // StringBuilder: no synchronization needed for this local buffer.
            StringBuilder cnvDepthFilePath = new StringBuilder();
            cnvDepthFilePath.append(options.getOutputPath())
                    .append("/cnvDepth/")
                    .append(sampleName)
                    .append("-lane")
                    .append(i)
                    .append("/")
                    .append(chrName)
                    .append(".dep.gz");
            Path cnvDepthPath = new Path(cnvDepthFilePath.toString());
            CompressionCodec codec = codecFactory.getCodec(cnvDepthPath);
            if (codec == null) {
                // ".dep.gz" should always resolve to a gzip codec; fail loudly
                // instead of NPE-ing if codec lookup is misconfigured.
                throw new IOException("No compression codec found for " + cnvDepthPath);
            }
            // try-with-resources closes both streams even if the write throws
            // (the old code leaked them on failure). Resources are closed in
            // reverse order: compressed stream first, then the FS stream.
            try (FSDataOutputStream cnvDepthStream = fs.create(cnvDepthPath);
                    CompressionOutputStream compressedOutput = codec.createOutputStream(cnvDepthStream)) {
                // The array appears to hold flattened (position, depth) pairs:
                // even index = 0-based position (written 1-based), odd index =
                // depth value — TODO confirm against WrappedIntArray's producer.
                int[] depth = sampleDepth.get(chrName).getArray();
                StringBuilder sb = new StringBuilder();
                for (int j = 0; j < depth.length; j += 2) {
                    sb.append(chrName);
                    sb.append('\t');
                    sb.append(depth[j] + 1);
                    sb.append('\t');
                    sb.append(depth[j + 1]);
                    sb.append('\n');
                }
                // Explicit charset: the no-arg getBytes() uses the platform
                // default, which makes the report encoding machine-dependent.
                compressedOutput.write(sb.toString().getBytes("UTF-8"));
            }
        }
    }
}

From source file:tv.icntv.log.crawl.TestWriterHdfs.java

License:Apache License

/**
 * Demonstrates gzip-compressing a local file to stdout via a reflectively
 * loaded Hadoop codec.
 *
 * @throws ClassNotFoundException if the codec class cannot be loaded
 * @throws IOException            if the input file cannot be read or written
 */
public static void test1() throws ClassNotFoundException, IOException {
    // BUG FIX: the package is "org.apache.hadoop.io.compress", not
    // "io.compression" — the old name made Class.forName always throw.
    String codecClassName = "org.apache.hadoop.io.compress.GzipCodec";
    Class<?> codecClass = Class.forName(codecClassName);
    Configuration config = new Configuration();
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, config);
    CompressionOutputStream out = codec.createOutputStream(System.out);
    // try-with-resources closes the input even if the copy throws
    // (the old code never closed it at all).
    try (FileInputStream in = new FileInputStream(new File("d:\\11.txt"))) {
        // BUG FIX: copy into the compressing stream; the old code wrote the
        // raw bytes straight to System.out, so the codec was never used.
        IOUtils.copyBytes(in, out, 4096, false);
    }
    // finish() flushes the gzip trailer without closing System.out
    // (out.close() would close stdout for the rest of the process).
    out.finish();
    out.flush();
}

From source file:tv.icntv.log.crawl.TestWriterHdfs.java

License:Apache License

/**
 * Demonstrates gzip-compressing a local file to stdout via a reflectively
 * loaded Hadoop codec.
 *
 * @throws ClassNotFoundException if the codec class cannot be loaded
 * @throws IOException            if the input file cannot be read or written
 */
public static void test2() throws ClassNotFoundException, IOException {
    // BUG FIX: the package is "org.apache.hadoop.io.compress", not
    // "io.compression" — the old name made Class.forName always throw.
    String codecClassName = "org.apache.hadoop.io.compress.GzipCodec";
    Class<?> codecClass = Class.forName(codecClassName);
    Configuration config = new Configuration();
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, config);
    CompressionOutputStream out = codec.createOutputStream(System.out);
    // try-with-resources closes the input even if the copy throws
    // (the old code never closed it at all).
    try (FileInputStream in = new FileInputStream(new File("d:\\11.txt"))) {
        // BUG FIX: copy into the compressing stream; the old code wrote the
        // raw bytes straight to System.out, so the codec was never used.
        IOUtils.copyBytes(in, out, 4096, false);
    }
    // finish() flushes the gzip trailer without closing System.out
    // (out.close() would close stdout for the rest of the process).
    out.finish();
    out.flush();
}