Example usage for org.apache.hadoop.io IOUtils closeStream

List of usage examples for org.apache.hadoop.io IOUtils closeStream

Introduction

On this page you can find example usage for org.apache.hadoop.io IOUtils closeStream.

Prototype

public static void closeStream(java.io.Closeable stream) 

Document

Closes the stream, ignoring Throwable.
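
All of the examples below follow the same idiom: declare the stream outside a try block and call closeStream in the finally clause, so an exception on the read or write path cannot leak the handle. Here is a minimal, self-contained sketch of that pattern; the file path is a placeholder and the FileSystem comes from the default configuration, so adjust both for your environment.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CloseStreamExample {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataInputStream in = null;
        try {
            in = fs.open(new Path("/tmp/example.txt")); // placeholder path
            // copy the file contents to stdout without closing either stream
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            // closeStream ignores any Throwable and accepts a null argument,
            // so it is safe to call unconditionally during cleanup
            IOUtils.closeStream(in);
        }
    }
}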

Usage

From source file:org.hadoop.tdg.TestPseudoHadoop.java

License:Apache License

private void printStream(InputStream is) throws IOException {
    File f1 = new File(HOME_FILE);
    File f2 = new File(HOME + "/test.cpy");
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(f2);
        IOUtils.copyBytes(is, fos, 4096, false);
        Files.equal(f1, f2);
    } finally {
        IOUtils.closeStream(is);
        IOUtils.closeStream(fos);
    }
}

From source file:org.hadoop.tdg.TestPseudoHadoop.java

License:Apache License

@Test
public void sequenceFileIO() throws IOException {
    IntWritable key = new IntWritable();
    Text value = new Text();
    // write
    SequenceFile.Writer writer = null;
    try {
        writer = SequenceFile.createWriter(fs, fs.getConf(), p, key.getClass(), value.getClass());
        for (int i = 0; i < 100; i++) {
            key.set(100 - i);
            value.set(DATA[i % DATA.length]);
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }
    //read
    SequenceFile.Reader reader = null;
    try {
        reader = new SequenceFile.Reader(fs, p, fs.getConf());
        Writable readerKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), fs.getConf());
        Writable readerValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), fs.getConf());
        long pos = reader.getPosition();
        while (reader.next(readerKey, readerValue)) {
            String syncSeen = reader.syncSeen() ? "*" : "";
            System.out.printf("[%s%s]\t%s\t%s\n", pos, syncSeen, readerKey, readerValue);
            pos = reader.getPosition();
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}

From source file:org.hadoop.tdg.TestPseudoHadoop.java

License:Apache License

/**
 * sorted sequence file
 *
 * @throws IOException
 */
@Test
public void mapFileIO() throws IOException {
    LongWritable key = new LongWritable();
    Text value = new Text();
    MapFile.Writer writer = null;
    try {
        writer = new MapFile.Writer(fs.getConf(), fs, DST, key.getClass(), value.getClass());
        for (int i = 0; i < 100; i++) {
            key.set(i);
            value.set(DATA[i % DATA.length]);
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }

    MapFile.Reader reader = null;
    try {
        reader = new MapFile.Reader(fs, DST, fs.getConf());
        LongWritable readerKey = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), fs.getConf());
        Text readerValue = (Text) ReflectionUtils.newInstance(reader.getValueClass(), fs.getConf());
        while (reader.next(readerKey, readerValue)) {
            System.out.printf("%s\t%s\n", readerKey, readerValue);
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}

From source file:org.htuple.examples.SecondarySort.java

License:Apache License

/**
 * Writes the contents of {@link #EXAMPLE_NAMES} into a file in the job input directory in HDFS.
 *
 * @param conf     the Hadoop config
 * @param inputDir the HDFS input directory where we'll write a file
 * @throws IOException if something goes wrong
 */
public static void writeInput(Configuration conf, Path inputDir) throws IOException {
    FileSystem fs = FileSystem.get(conf);

    if (fs.exists(inputDir)) {
        throw new IOException(
                String.format("Input directory '%s' exists - please remove and rerun this example", inputDir));
    }

    OutputStreamWriter writer = new OutputStreamWriter(fs.create(new Path(inputDir, "input.txt")));
    for (String name : EXAMPLE_NAMES) {
        writer.write(name);
    }
    IOUtils.closeStream(writer);
}

From source file:org.interactiverobotics.source_code_crawler.step6.TextFileRecordReader.java

License:Open Source License

@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    if (!processed) {
        final byte[] contents = new byte[(int) fileSplit.getLength()];
        final Path file = fileSplit.getPath();
        final FileSystem fileSystem = file.getFileSystem(configuration);
        FSDataInputStream in = null;
        try {
            in = fileSystem.open(file);
            IOUtils.readFully(in, contents, 0, contents.length);
            key.set(file.toString());
            value.set(contents, 0, contents.length);
        } finally {
            IOUtils.closeStream(in);
        }
        processed = true;
        return true;
    }
    return false;
}

From source file:org.kitesdk.cli.commands.TarImportCommand.java

License:Apache License

@Override
public int run() throws IOException {
    Preconditions.checkArgument(targets != null && targets.size() == 2,
            "Tar path and target dataset URI are required.");

    Preconditions.checkArgument(SUPPORTED_TAR_COMPRESSION_TYPES.contains(compressionType),
            "Compression type " + compressionType + " is not supported");

    String source = targets.get(0);
    String datasetUri = targets.get(1);

    long blockSize = getConf().getLong("dfs.blocksize", DEFAULT_BLOCK_SIZE);

    int success = 0;

    View<TarFileEntry> targetDataset;
    if (Datasets.exists(datasetUri)) {
        console.debug("Using existing dataset: {}", datasetUri);
        targetDataset = Datasets.load(datasetUri, TarFileEntry.class);
    } else {
        console.info("Creating new dataset: {}", datasetUri);
        DatasetDescriptor.Builder descriptorBuilder = new DatasetDescriptor.Builder();
        descriptorBuilder.format(Formats.AVRO);
        descriptorBuilder.schema(TarFileEntry.class);
        targetDataset = Datasets.create(datasetUri, descriptorBuilder.build(), TarFileEntry.class);
    }

    DatasetWriter<TarFileEntry> writer = targetDataset.newWriter();

    // Create a Tar input stream wrapped in appropriate decompressor
    // TODO: Enhancement would be to use native compression libs
    TarArchiveInputStream tis;
    CompressionType tarCompressionType = CompressionType.NONE;

    if (compressionType.isEmpty()) {
        if (source.endsWith(".tar")) {
            tarCompressionType = CompressionType.NONE;
        } else if (source.endsWith(".tar.gz")) {
            tarCompressionType = CompressionType.GZIP;
        } else if (source.endsWith(".tar.bz2")) {
            tarCompressionType = CompressionType.BZIP2;
        }
    } else if (compressionType.equals("gzip")) {
        tarCompressionType = CompressionType.GZIP;
    } else if (compressionType.equals("bzip2")) {
        tarCompressionType = CompressionType.BZIP2;
    } else {
        tarCompressionType = CompressionType.NONE;
    }

    console.info("Using {} compression", tarCompressionType);

    switch (tarCompressionType) {
    case GZIP:
        tis = new TarArchiveInputStream(new GzipCompressorInputStream(open(source)));
        break;
    case BZIP2:
        tis = new TarArchiveInputStream(new BZip2CompressorInputStream(open(source)));
        break;
    case NONE:
    default:
        tis = new TarArchiveInputStream(open(source));
    }

    TarArchiveEntry entry;

    try {
        int count = 0;
        while ((entry = tis.getNextTarEntry()) != null) {
            if (!entry.isDirectory()) {
                long size = entry.getSize();
                if (size >= blockSize) {
                    console.warn(
                            "Entry \"{}\" (size {}) is larger than the "
                                    + "HDFS block size of {}. This may result in remote block reads",
                            new Object[] { entry.getName(), size, blockSize });
                }

                byte[] buf = new byte[(int) size];
                try {
                    IOUtils.readFully(tis, buf, 0, (int) size);
                } catch (IOException e) {
                    console.error("Did not read entry {} successfully (entry size {})", entry.getName(), size);
                    success = 1;
                    throw e;
                }
                writer.write(TarFileEntry.newBuilder().setFilename(entry.getName())
                        .setFilecontent(ByteBuffer.wrap(buf)).build());
                count++;
            }
        }
        console.info("Added {} records to \"{}\"", count, targetDataset.getDataset().getName());
    } finally {
        IOUtils.closeStream(writer);
        IOUtils.closeStream(tis);
    }

    return success;
}

From source file:org.kitesdk.cli.commands.TestTarImportCommand.java

License:Apache License

@BeforeClass
public static void createTestInputFiles() throws IOException {
    TestTarImportCommand.cleanup();

    Path testData = new Path(TEST_DATA_DIR);
    FileSystem testFS = testData.getFileSystem(new Configuration());

    datasetUri = "dataset:file:" + System.getProperty("user.dir") + "/" + TEST_DATASET_DIR + "/"
            + TEST_DATASET_NAME;

    TarArchiveOutputStream tosNoCompression = null;
    TarArchiveOutputStream tosGzipCompression = null;
    TarArchiveOutputStream tosBzip2Compression = null;
    TarArchiveOutputStream tosLargeEntry = null;
    TarArchiveEntry tarArchiveEntry = null;
    try {
        // No compression
        tosNoCompression = new TarArchiveOutputStream(testFS.create(new Path(TAR_TEST_FILE), true));
        writeToTarFile(tosNoCompression, TAR_TEST_ROOT_PREFIX + "/", null);

        // Gzip compression
        tosGzipCompression = new TarArchiveOutputStream(
                new GzipCompressorOutputStream(testFS.create(new Path(TAR_TEST_GZIP_FILE), true)));
        writeToTarFile(tosGzipCompression, TAR_TEST_GZIP_ROOT_PREFIX + "/", null);

        // BZip2 compression
        tosBzip2Compression = new TarArchiveOutputStream(
                new BZip2CompressorOutputStream(testFS.create(new Path(TAR_TEST_BZIP2_FILE), true)));
        writeToTarFile(tosBzip2Compression, TAR_TEST_BZIP2_ROOT_PREFIX + "/", null);

        // "Large" entry file (10000 bytes)
        tosLargeEntry = new TarArchiveOutputStream(testFS.create(new Path(TAR_TEST_LARGE_ENTRY_FILE), true));
        String largeEntry = RandomStringUtils.randomAscii(10000);
        writeToTarFile(tosLargeEntry, "largeEntry", largeEntry);

        // Generate test files with random names and content
        Random random = new Random(1);
        for (int i = 0; i < NUM_TEST_FILES; ++i) {
            // Create random file and data
            int fNameLength = random.nextInt(MAX_FILENAME_LENGTH);
            int fContentLength = random.nextInt(MAX_FILECONTENT_LENGTH);
            String fName = RandomStringUtils.randomAlphanumeric(fNameLength);
            String fContent = RandomStringUtils.randomAscii(fContentLength);

            // Write the file to tarball
            writeToTarFile(tosNoCompression, TAR_TEST_ROOT_PREFIX + "/" + fName, fContent);
            writeToTarFile(tosGzipCompression, TAR_TEST_GZIP_ROOT_PREFIX + "/" + fName, fContent);
            writeToTarFile(tosBzip2Compression, TAR_TEST_BZIP2_ROOT_PREFIX + "/" + fName, fContent);

            System.out.println("Wrote " + fName + " [" + fContentLength + "]");
        }
    } finally {
        IOUtils.closeStream(tosNoCompression);
        IOUtils.closeStream(tosGzipCompression);
        IOUtils.closeStream(tosBzip2Compression);
        IOUtils.closeStream(tosLargeEntry);
    }
}

From source file:org.mrgeo.hdfs.utils.HadoopFileUtils.java

License:Apache License

@SuppressWarnings("squid:S2095") // hadoop FileSystem cannot be closed, or else subsequent uses will fail
public static void copyFileToHdfs(final String fromFile, final String toFile, final boolean overwrite)
        throws IOException {
    final Path toPath = new Path(toFile);
    final Path fromPath = new Path(fromFile);
    final FileSystem srcFS = HadoopFileUtils.getFileSystem(fromPath);
    final FileSystem dstFS = HadoopFileUtils.getFileSystem(toPath);

    final Configuration conf = HadoopUtils.createConfiguration();
    InputStream in = null;
    OutputStream out = null;
    try {
        in = srcFS.open(fromPath);
        out = dstFS.create(toPath, overwrite);

        IOUtils.copyBytes(in, out, conf, true);
    } catch (final IOException e) {
        IOUtils.closeStream(out);
        IOUtils.closeStream(in);
        throw e;
    }
}

From source file:org.mrgeo.pdf.PdfFactory.java

License:Apache License

private static PdfCurve _loadPdf(Path[] files, Path pdfPath, Configuration conf)
        throws IOException, ClassNotFoundException, SecurityException, NoSuchMethodException,
        IllegalArgumentException, IllegalAccessException, InvocationTargetException {
    Vector<Path> pdfFiles = new Vector<Path>();
    Path metadataPath = null;
    for (Path f : files) {
        String name = f.getName();
        if (name.equals("metadata")) {
            metadataPath = f;
        } else if (name.startsWith("part")) {
            pdfFiles.add(f);
        }
    }
    if (metadataPath == null) {
        throw new IOException("Cannot load PDF " + pdfPath.toString() + ". Missing metadata file.");
    }

    FSDataInputStream is = null;
    try {
        FileSystem dfs = metadataPath.getFileSystem(conf);
        is = dfs.open(metadataPath);
        String className = is.readUTF();
        Class<?> c = Class.forName(className);
        Path[] passPdfFiles = new Path[pdfFiles.size()];
        pdfFiles.toArray(passPdfFiles);
        Method m = c.getMethod("load", DataInput.class, Path[].class, Configuration.class);
        Object result = m.invoke(null, is, passPdfFiles, conf);
        if (result instanceof PdfCurve) {
            return (PdfCurve) result;
        }
        throw new IllegalArgumentException(
                "PdfFactory expected an instance of PdfCurve, not: " + result.getClass().getName());
    } finally {
        IOUtils.closeStream(is);
    }
}

From source file:org.mrgeo.pdf.TriangularDistributionPdfCurve.java

License:Apache License

public void writeMetadata(Path output, Configuration conf) throws IOException {
    FSDataOutputStream os = null;
    try {
        FileSystem dfs = output.getFileSystem(conf);
        os = dfs.create(new Path(output, "metadata"));
        os.writeUTF(TriangularDistributionPdfCurve.class.getName());

        os.writeInt(1); // version
        os.writeDouble(_min);
        os.writeDouble(_max);
        os.writeDouble(_bin);
        os.writeDouble(_mode);
    } finally {
        IOUtils.closeStream(os);
    }
}