Example usage for org.apache.hadoop.io IOUtils closeStream

List of usage examples for org.apache.hadoop.io IOUtils closeStream

Introduction

On this page you can find example usage for org.apache.hadoop.io IOUtils closeStream.

Prototype

public static void closeStream(java.io.Closeable stream) 

Document

Closes the stream, ignoring any Throwable.
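
A minimal sketch of the typical pattern, for orientation (the HDFS path is a placeholder and the surrounding class is hypothetical): the stream is opened inside a try block and closed in the finally block, where closeStream swallows anything thrown during close and also tolerates a null argument.

import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CloseStreamSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        InputStream in = null;
        try {
            // open an HDFS file (placeholder path)
            in = fs.open(new Path("/tmp/example.txt"));
            // copy the file to stdout; 'false' keeps both streams open for us to close
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            // safe in a finally block: close failures are ignored and a null stream is a no-op
            IOUtils.closeStream(in);
        }
    }
}

The usage examples below follow the same pattern: acquire the stream in a try block, do the work, and let closeStream handle cleanup in finally.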

Usage

From source file:org.mrgeo.pdf.TriangularDistributionPdfCurve.java

License:Apache License

private void _computeCurve(Path[] pdfFiles, Configuration conf) throws IOException {
    SequenceFile.Reader r = null;
    _likelihoods = new double[(int) _bin];
    int index = 0;
    try {
        // Loop through each of the output files from the reduce to process all of
        // the PDF histogram bins
        for (Path pdfFile : pdfFiles) {
            // ignore all the non-part files
            if (!pdfFile.getName().startsWith("part")) {
                continue;
            }
            r = new SequenceFile.Reader(pdfFile.getFileSystem(conf), pdfFile, conf);
            DoubleWritable key = new DoubleWritable();
            DoubleWritable value = new DoubleWritable();
            while (r.next(key, value)) {
                _likelihoods[index] = value.get();
                index++;
            }
            // close this part file's reader before opening the next one
            IOUtils.closeStream(r);
            r = null;
        }
    } finally {
        IOUtils.closeStream(r);
    }
}

From source file:org.shaf.core.io.hadoop.WholeFileRecordReader.java

License:Apache License

@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    if (fileProcessed) {
        return false;
    }

    int fileLength = (int) split.getLength();
    byte[] result = new byte[fileLength];

    FileSystem fs = FileSystem.get(config);
    FSDataInputStream in = null;
    try {
        this.key.set(split.getPath().toString());
        in = fs.open(split.getPath());
        IOUtils.readFully(in, result, 0, fileLength);
        value.set(new String(result, 0, fileLength));

    } finally {
        IOUtils.closeStream(in);
    }
    this.fileProcessed = true;
    return true;
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

public Collection<String> text(String... uris) {
    Collection<String> texts = new PrettyPrintList<String>(new ListPrinter<String>() {

        @Override
        public String toString(String e) throws Exception {
            return e + "\n";
        }
    });

    for (String uri : uris) {

        InputStream in = null;
        FSDataInputStream i = null;

        try {
            Path srcPat = new Path(uri);
            FileSystem srcFs = getFS(srcPat);

            for (Path src : FileUtil.stat2Paths(srcFs.globStatus(srcPat), srcPat)) {
                Assert.isTrue(srcFs.isFile(src), "Source must be a file");
                i = srcFs.open(src);
                switch (i.readShort()) {
                case 0x1f8b: // RFC 1952
                    i.seek(0);
                    in = new GZIPInputStream(i);
                    break;
                case 0x5345: // 'S' 'E'
                    if (i.readByte() == 'Q') {
                        i.close();
                        in = new TextRecordInputStream(src, srcFs, configuration);
                    }
                    break;
                default:
                    in = i;
                    break;
                }
                i.seek(0);
                texts.add(getContent(in));
            }
        } catch (IOException ex) {
            throw new HadoopException("Cannot read " + uri + ";" + ex.getMessage(), ex);
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(i);
        }
    }
    return texts;
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

public void touchz(String... uris) {
    for (String uri : uris) {
        try {
            Path src = new Path(uri);
            FileSystem srcFs = getFS(src);
            FileStatus st;
            if (srcFs.exists(src)) {
                st = srcFs.getFileStatus(src);
                if (st.isDir()) {
                    // TODO: handle this
                    throw new IllegalArgumentException(src + " is a directory");
                } else if (st.getLen() != 0)
                    throw new IllegalArgumentException(src + " must be a zero-length file");
            } else {
                IOUtils.closeStream(srcFs.create(src));
            }
        } catch (IOException ex) {
            throw new HadoopException("Cannot touchz " + uri + ";" + ex.getMessage(), ex);
        }
    }
}

From source file:org.springframework.data.hadoop.hive.HiveUtils.java

License:Apache License

/**
 * Runs (or executes) the given script with its parameters. Note that, in order to support those
 * parameters, the utility executes extra commands (so the returned result reflects that).
 * As these are client variables, they are bound to the hiveconf namespace. That means other scripts do
 * not see them and they need to be accessed using the ${hiveconf:XXX} syntax.
 *
 * @param hive hive client
 * @param script script to run
 * @return the script results
 * @throws Exception
 */
private static List<String> run(HiveClient hive, HiveScript script) throws Exception {
    BufferedReader reader;
    InputStream stream;
    try {
        stream = script.getResource().getInputStream();
        reader = new BufferedReader(new InputStreamReader(stream));
    } catch (Exception ex) {
        throw new IllegalArgumentException("Cannot open script [" + script.getResource() + "]", ex);
    }

    List<String> results = new ArrayList<String>();

    Map<String, String> params = script.getArguments();
    // process params first
    if (params != null) {
        for (Map.Entry<String, String> entry : params.entrySet()) {
            results.addAll(runCommand(hive, "SET hiveconf:" + entry.getKey() + "=" + entry.getValue()));
        }
    }

    String line = null;
    try {
        String command = "";
        while ((line = reader.readLine()) != null) {
            // strip whitespace
            line = line.trim();
            // ignore comments
            if (!line.startsWith("--")) {
                int nrCmds = StringUtils.countOccurrencesOf(line, ";");
                for (String token : line.split(";")) {
                    token = token.trim();
                    // skip empty lines 
                    if (StringUtils.hasText(token)) {
                        command += token.concat(" ");
                        if (nrCmds > 0) {
                            results.addAll(runCommand(hive, command));
                            nrCmds--;
                            command = "";
                        }
                    }
                }
            }
        }
        // make sure to flush any command left (w/o ;)
        if (StringUtils.hasText(command)) {
            results.addAll(runCommand(hive, command));
        }
    } catch (IOException ex) {
        throw new IllegalArgumentException("Cannot read scripts", ex);
    } finally {
        IOUtils.closeStream(reader);
    }

    return results;
}

From source file:org.springframework.data.hadoop.mapreduce.ExecutionUtils.java

License:Apache License

private static boolean isLegacyJar(Resource jar) throws IOException {
    JarInputStream jis = new JarInputStream(jar.getInputStream());
    JarEntry entry = null;
    try {
        while ((entry = jis.getNextJarEntry()) != null) {
            String name = entry.getName();
            if (name.startsWith("lib/") //|| name.startsWith("classes/")
            ) {
                return true;
            }
        }
    } finally {
        IOUtils.closeStream(jis);
    }
    return false;
}

From source file:org.springframework.data.hadoop.mapreduce.ExecutionUtils.java

License:Apache License

private static void unjar(Resource jar, File baseDir) throws IOException {
    JarInputStream jis = new JarInputStream(jar.getInputStream());
    JarEntry entry = null;
    try {
        while ((entry = jis.getNextJarEntry()) != null) {
            if (!entry.isDirectory()) {
                File file = new File(baseDir, entry.getName());
                if (!file.getParentFile().mkdirs()) {
                    if (!file.getParentFile().isDirectory()) {
                        throw new IOException("Mkdirs failed to create " + file.getParentFile().toString());
                    }
                }
                OutputStream out = new FileOutputStream(file);
                try {
                    byte[] buffer = new byte[8192];
                    int i;
                    while ((i = jis.read(buffer)) != -1) {
                        out.write(buffer, 0, i);
                    }
                } finally {
                    IOUtils.closeStream(out);
                }
            }
        }
    } finally {
        IOUtils.closeStream(jis);
    }
}

From source file:org.springframework.data.hadoop.mapreduce.ExecutionUtils.java

License:Apache License

static String mainClass(Resource jar) throws IOException {
    JarInputStream jis = new JarInputStream(jar.getInputStream());
    try {
        Manifest mf = jis.getManifest();
        if (mf != null) {
            String main = mf.getMainAttributes().getValue("Main-Class");
            if (StringUtils.hasText(main)) {
                return main.replace("/", ".");
            }
        }
        return null;
    } finally {
        IOUtils.closeStream(jis);
    }
}

From source file:org.springframework.data.hadoop.pig.PigUtils.java

License:Apache License

static List<ExecJob> run(PigServer pig, Iterable<PigScript> scripts) throws ExecException, IOException {
    Assert.notNull(scripts, "at least one script is required");

    if (!pig.isBatchOn()) {
        pig.setBatchOn();
    }

    List<ExecJob> jobs = new ArrayList<ExecJob>();

    pig.getPigContext().connect();

    InputStream in = null;
    try {
        for (PigScript script : scripts) {
            try {
                in = script.getResource().getInputStream();
            } catch (IOException ex) {
                throw new IllegalArgumentException("Cannot open script [" + script.getResource() + "]", ex);
            }

            // register the script (with fallback for old Pig versions)
            registerScript(pig, in, script.getArguments());
            jobs.addAll(pig.executeBatch());
        }
    } finally {
        IOUtils.closeStream(in);
    }
    return jobs;
}

From source file:org.springframework.data.hadoop.serialization.ResourceSerializationFormat.java

License:Apache License

/**
 * Writes the content of Spring {@link Resource}s to a single HDFS location.
 */
@Override
protected SerializationWriterSupport createWriter(final OutputStream output) {
    // Extend and customize Serialization Writer template
    return new SerializationWriterSupport() {

        private OutputStream outputStream = output;

        private InputStream resourceSeparatorInputStream;

        @Override
        protected Closeable doOpen() throws IOException {

            resourceSeparatorInputStream = null;

            CompressionCodec codec = CompressionUtils.getHadoopCompression(getConfiguration(),
                    getCompressionAlias());

            // If a compression is not specified and if passed stream does have compression capabilities...
            if (codec == null || CompressionOutputStream.class.isInstance(outputStream)) {
                // ...just return original stream untouched
                return outputStream;
            }

            // Eventually re-use Compressor from underlying CodecPool
            final Compressor compressor = CodecPool.getCompressor(codec);

            // Create compression stream wrapping passed stream
            outputStream = codec.createOutputStream(outputStream, compressor);

            return new Closeable() {

                @Override
                public void close() throws IOException {
                    resourceSeparatorInputStream = null;
                    IOUtils.closeStream(outputStream);
                    CodecPool.returnCompressor(compressor);
                }
            };
        }

        @Override
        protected void doWrite(Resource source) throws IOException {
            InputStream inputStream = null;
            try {
                writeSeparator();

                inputStream = source.getInputStream();

                // Write source to HDFS destination
                copyBytes(inputStream, outputStream, getConfiguration(), /* close */false);

            } finally {
                closeStream(inputStream);
            }
        }

        protected void writeSeparator() throws IOException {
            if (getResourceSeparator() == null) {
                return;
            }

            if (resourceSeparatorInputStream == null) {

                // First call inits 'resourceSeparatorInputStream' and does not write anything

                resourceSeparatorInputStream = toInputStream(getResourceSeparator(), "UTF-8");

                return;
            }

            resourceSeparatorInputStream.reset();

            // Write resource separator to HDFS destination
            copyBytes(resourceSeparatorInputStream, outputStream, getConfiguration(), /* close */false);
        }
    };
}