Example usage for org.apache.hadoop.io IOUtils closeStream

Introduction

On this page you can find example usage for org.apache.hadoop.io IOUtils closeStream.

Prototype

public static void closeStream(java.io.Closeable stream) 

Document

Closes the stream, ignoring Throwable.
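
closeStream swallows anything thrown while closing and tolerates a null argument, which makes it suitable for finally blocks where the stream may never have been opened and where a failure during close must not mask the original exception. A minimal sketch of the pattern the examples below share (the class name and input path are illustrative, not taken from any of the sources):

import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CloseStreamSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path(args[0]); // hypothetical input path
        FileSystem fs = path.getFileSystem(conf);

        InputStream in = null;
        try {
            in = fs.open(path);
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            // ignores any error raised by close() and accepts a null stream
            IOUtils.closeStream(in);
        }
    }
}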

Usage

From source file:explain.ExplainTask.java

License:Apache License
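
This Hive explain task writes the stage plans and the originating SQL to a PrintStream built over the caller's OutputStream, closing it with closeStream in the finally block.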

public int explain(String stageid, Task<? extends Serializable> rootTask, OutputStream outS, JobConf jobconf) {

    this.stageid = stageid;
    this.jobconf = jobconf;
    this.mrBlock = new MRBlockInfo();

    String sql = jobconf.get("hive.query.string", "").replace("\n", " ");

    if (!sql.equals(parseredSQL)) { // re-parse only when the query text has changed
        try {
            queryBlock = parser.getQueryBlock(sql);
            parseredSQL = sql;
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }

    if (rootTask instanceof MapRedTask) { // instanceof is false for null
        mapredwork = ((MapRedTask) rootTask).getWork();
    }
    PrintStream out = null;
    try {
        out = new PrintStream(outS);
        List<Task<? extends Serializable>> rootTasks = new ArrayList<Task<? extends Serializable>>();
        rootTasks.add(rootTask);
        // Go over this task and dump out the plan
        outputStagePlans(out, rootTasks, 0);

        //output the sql this task will execute
        outputSQL(out);

        return (0);
    } catch (Exception e) {
        e.printStackTrace();
        return (1);
    } finally {
        IOUtils.closeStream(out);
    }
}

From source file:fire.util.fileformats.combineimagefileinputformat.CombineFileImageRecordReader.java

License:Apache License
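
This record reader loads a whole image file into a single BytesWritable value; the FSDataInputStream is closed in the finally block whether or not the read succeeds.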

public boolean nextKeyValue() throws IOException {
    if (numRecordsRead > 0)
        return false;

    if (key == null) {
        key = new Text(path.getName());
    }
    if (value == null) {
        value = new BytesWritable();
    }

    FSDataInputStream in = null;
    try {
        in = fs.open(path);
        // in.available() is not a reliable length for an HDFS file; size the
        // buffer from the FileStatus and read fully so a short read cannot
        // truncate the image (assumes the file fits in one byte array)
        byte[] buffer = new byte[(int) fs.getFileStatus(path).getLen()];
        IOUtils.readFully(in, buffer, 0, buffer.length);
        value = new BytesWritable(buffer);
    } finally {
        IOUtils.closeStream(in);
    }

    numRecordsRead++;

    return true;
}

From source file:fire.util.fileformats.pdf.PdfRecordReader.java

License:Apache License
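
This record reader parses an entire PDF with PDFBox and emits its text as one key/value pair, closing both the parsed document and the input stream once the text has been stripped.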

@Override
public boolean nextKeyValue() throws IOException, InterruptedException {

    if (!processed) {
        Path file = fileSplit.getPath();
        FileSystem fs = file.getFileSystem(conf);
        FSDataInputStream in = null;
        PDDocument pdf = null;
        String parsedText = null;
        PDFTextStripper stripper;
        try {
            in = fs.open(file);
            pdf = PDDocument.load(in);
            stripper = new PDFTextStripper();
            parsedText = stripper.getText(pdf);
            this.key = new Text(file.getName());
            this.value = new Text(parsedText);
        } finally {
            // close the parsed document as well as the underlying stream
            if (pdf != null) {
                pdf.close();
            }
            IOUtils.closeStream(in);
        }
        processed = true;
        return true;
    }
    return false;
}

From source file:fm.last.pigtail.test.TestTypedBytesSequenceFileLoader.java

License:Apache License
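
This test helper writes alternating key/value objects into a temporary SequenceFile; closeStream releases the writer even if an append fails.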

private String createSequenceFile(Object[] data) throws IOException {
    File tmpFile = File.createTempFile("test", ".tbseq");
    String tmpFileName = tmpFile.getAbsolutePath();

    System.err.println("fileName: " + tmpFileName);
    Path path = new Path("file:///" + tmpFileName);
    JobConf conf = new JobConf();
    FileSystem fs = FileSystem.get(path.toUri(), conf);

    TypedBytesWritable key = new TypedBytesWritable();
    TypedBytesWritable value = new TypedBytesWritable();
    SequenceFile.Writer writer = null;
    try {
        writer = SequenceFile.createWriter(fs, conf, path, key.getClass(), value.getClass());
        for (int i = 0; i < data.length; i += 2) {
            key.setValue(data[i]);
            value.setValue(data[i + 1]);
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }

    // fix the file path string on Windows by doubling each backslash
    String regex = "\\\\";
    String replacement = quoteReplacement("\\\\");
    return tmpFileName.replaceAll(regex, replacement);
}

From source file:gaffer.accumulo.utils.AccumuloConfig.java

License:Apache License
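
This loader looks for a properties file first on the classpath and then on the local file system, closing each stream in a finally block.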

private void readProperties() {
    InputStream accIs = getClass().getResourceAsStream(propFileLocation);
    try {
        if (accIs != null) {
            props = new Properties();
            props.load(accIs);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeStream(accIs);
    }

    if (props == null) {
        // Properties file was not found in the jar, check if it is
        // present in the local file system
        FileInputStream inputStream = null;
        try {
            inputStream = new FileInputStream(propFileLocation);
            props = new Properties();
            props.load(inputStream);
        } catch (FileNotFoundException e) {
            throw new RuntimeException(String.format("Could not find properties file %s", propFileLocation), e);
        } catch (IOException e) {
            throw new RuntimeException(String.format("Could not read properties file %s", propFileLocation), e);
        } catch (Exception e) {
            throw new RuntimeException(
                    String.format("Error occurred while trying to read properties file %s", propFileLocation),
                    e);
        } finally {
            if (inputStream != null) {
                IOUtils.closeStream(inputStream);
            }
        }
    }
}

From source file:gaffer.accumulo.utils.IngestUtils.java

License:Apache License
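
This utility writes Base64-encoded split points to a file in HDFS, again delegating cleanup of the PrintStream to closeStream.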

/**
 * Given some split points, write a Base64 encoded splits file.
 *
 * @param splits  The split points
 * @param fs  The FileSystem in which to create the splits file
 * @param splitsFile  The location of the output splits file
 * @throws IOException  if the splits file cannot be written
 */
public static void writeSplitsFile(Collection<Text> splits, FileSystem fs, Path splitsFile) throws IOException {
    PrintStream out = null;
    try {
        out = new PrintStream(new BufferedOutputStream(fs.create(splitsFile, true)));
        for (Text split : splits) {
            out.println(new String(Base64.encodeBase64(split.getBytes())));
        }
    } finally {
        IOUtils.closeStream(out);
    }
}

From source file:hadoop.examples.hdfs.FileSystemCat.java

License:Open Source License
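
This example prints an HDFS file to standard output three times: once through a plain InputStream, then twice through an FSDataInputStream using seek(0) to rewind.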

public static void main(String[] args) throws Exception {
    String uri = "hdfs://exoplatform:9000/user/haint/input-0/test.txt";
    FileSystem fs = FileSystem.get(URI.create(uri), new Configuration());
    InputStream in = null;
    try {
        in = fs.open(new Path(uri));
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }
    System.out.println("---------------------------------------------------");
    FSDataInputStream fsDataInputStream = null;
    try {
        fsDataInputStream = fs.open(new Path(uri));
        IOUtils.copyBytes(fsDataInputStream, System.out, 256, false);
        System.out.println("---------------------------------------------------");
        fsDataInputStream.seek(0);
        IOUtils.copyBytes(fsDataInputStream, System.out, 256, false);
    } finally {
        IOUtils.closeStream(fsDataInputStream);
    }
}

From source file:hadoop.examples.hdfs.SequenceFileReadDemo.java

License:Open Source License
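
This example iterates over a SequenceFile, printing each record's start position and marking records that fall on a sync point with an asterisk.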

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String uri = "hdfs://exoplatform:9000/user/haint/temp.file";
    Path path = new Path(uri);
    FileSystem fs = FileSystem.get(URI.create(uri), conf);

    SequenceFile.Reader reader = null;
    try {
        reader = new SequenceFile.Reader(fs, path, conf);
        Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
        Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
        long position = reader.getPosition();

        // print each record with its start offset; '*' marks records at a sync point
        while (reader.next(key, value)) {
            String syncSeen = reader.syncSeen() ? "*" : "";
            System.out.printf("[%s%s]\t%s\t%s\n", position, syncSeen, key, value);
            position = reader.getPosition();
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}

From source file:hadoop.examples.hdfs.SequenceFileWriteDemo.java

License:Open Source License
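
This example appends one hundred key/value pairs to a SequenceFile, printing the writer's byte offset before each append.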

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String uri = "hdfs://exoplatform:9000/user/haint/temp.file";
    Path path = new Path(uri);
    FileSystem fs = FileSystem.get(URI.create(uri), conf);

    IntWritable key = new IntWritable();
    Text value = new Text();
    SequenceFile.Writer writer = null;
    try {
        writer = SequenceFile.createWriter(fs, conf, path, key.getClass(), value.getClass());
        for (int i = 0; i < 100; i++) {
            key.set(100 - i);
            value.set(DATA[i % DATA.length]);
            System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }
}

From source file:hadoop.examples.hdfs.URLCat.java

License:Open Source License
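
This example reads an HDFS file through the java.net.URL API, which requires Hadoop's FsUrlStreamHandlerFactory to be registered first; the stream is closed in a finally block.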

// java.net.URL cannot open hdfs:// URLs until Hadoop's stream handler factory is
// registered; this may be done at most once per JVM
static {
    URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
}

public static void main(String[] args) {
    InputStream is = null;
    try {
        is = new URL("hdfs://192.168.56.1:9000/user/haint/input-0/test.txt").openStream();
        BufferedInputStream bis = new BufferedInputStream(is);
        byte[] buff = new byte[256];
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        for (int length = bis.read(buff); length != -1; length = bis.read(buff)) {
            baos.write(buff, 0, length);
        }
        System.out.println(new String(baos.toByteArray()));
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // close in finally so the stream is released on success as well as on error
        IOUtils.closeStream(is);
    }
}