Example usage for org.apache.hadoop.io IOUtils closeStream

List of usage examples for org.apache.hadoop.io IOUtils closeStream

Introduction

In this page you can find the example usage for org.apache.hadoop.io IOUtils closeStream.

Prototype

public static void closeStream(java.io.Closeable stream) 

Source Link

Document

Closes the stream, ignoring any Throwable.

Usage

From source file:crunch.MaxTemperature.java

License:Apache License

/**
 * Reads a SequenceFile and prints each record's start offset, key, and value.
 * Records read immediately after a sync marker are flagged with "*".
 *
 * @param args args[0] is the URI of the SequenceFile to read
 * @throws IOException if the file cannot be opened or read
 */
public static void main(String[] args) throws IOException {
    String uri = args[0];
    Configuration conf = new Configuration();
    Path path = new Path(uri);

    SequenceFile.Reader reader = null;
    try {
        // Option-based constructor; Reader(fs, path, conf) is deprecated.
        // The filesystem is resolved from the path's URI scheme and conf.
        reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
        // Key/value classes are recorded in the file header, so instantiate them reflectively.
        Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
        Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
        long position = reader.getPosition();
        while (reader.next(key, value)) {
            String syncSeen = reader.syncSeen() ? "*" : "";
            System.out.printf("[%s%s]\t%s\t%s\n", position, syncSeen, key, value);
            position = reader.getPosition(); // beginning of next record
        }
    } finally {
        // closeStream swallows any Throwable and tolerates a null reader.
        IOUtils.closeStream(reader);
    }
}

From source file:crunch.MaxTemperature.java

License:Apache License

/**
 * Writes 100 records to a SequenceFile: keys descend from 100 to 1 and
 * values cycle through the DATA array. The byte offset of each record is
 * printed before it is appended.
 *
 * @param args args[0] is the URI of the SequenceFile to create
 * @throws IOException if the file cannot be created or written
 */
public static void main(String[] args) throws IOException {
    String uri = args[0];
    Configuration conf = new Configuration();
    Path path = new Path(uri);

    IntWritable key = new IntWritable();
    Text value = new Text();
    SequenceFile.Writer writer = null;
    try {
        // Option-based factory; createWriter(fs, conf, path, keyCls, valCls) is deprecated.
        writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(key.getClass()),
                SequenceFile.Writer.valueClass(value.getClass()));

        for (int i = 0; i < 100; i++) {
            key.set(100 - i);
            value.set(DATA[i % DATA.length]);
            // getLength() is the current file size, i.e. this record's start offset.
            System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
            writer.append(key, value);
        }
    } finally {
        // closeStream swallows any Throwable and tolerates a null writer.
        IOUtils.closeStream(writer);
    }
}

From source file:crunch.MaxTemperature.java

License:Apache License

@Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        // Whole-file reader: the entire split is exposed as a single record,
        // so after the first successful read there is nothing left to emit.
        if (processed) {
            return false;
        }
        Path file = fileSplit.getPath();
        FileSystem fs = file.getFileSystem(conf);
        byte[] bytes = new byte[(int) fileSplit.getLength()];
        FSDataInputStream stream = null;
        try {
            stream = fs.open(file);
            // Read exactly split-length bytes into the buffer.
            IOUtils.readFully(stream, bytes, 0, bytes.length);
            value.set(bytes, 0, bytes.length);
        } finally {
            IOUtils.closeStream(stream);
        }
        processed = true;
        return true;
    }

From source file:crunch.MaxTemperature.java

License:Apache License

@Override
    public boolean next(NullWritable key, BytesWritable value) throws IOException {
        // Whole-file reader (old mapred API): one record per split.
        if (processed) {
            return false;
        }
        Path file = fileSplit.getPath();
        FileSystem fs = file.getFileSystem(conf);
        byte[] bytes = new byte[(int) fileSplit.getLength()];
        FSDataInputStream stream = null;
        try {
            stream = fs.open(file);
            // Read exactly split-length bytes into the buffer.
            IOUtils.readFully(stream, bytes, 0, bytes.length);
            value.set(bytes, 0, bytes.length);
        } finally {
            IOUtils.closeStream(stream);
        }
        processed = true;
        return true;
    }

From source file:crunch.MaxTemperature.java

License:Apache License

/**
     * Reads the Avro schema embedded in the first map output file
     * ("part-m-00000.avro") of the given input directory.
     *
     * @param inputDir directory containing the Avro container files
     * @param conf Hadoop configuration used to open the file
     * @return the schema recorded in the Avro container file header
     * @throws IOException if the file cannot be opened or read
     */
    private Schema readSchema(Path inputDir, Configuration conf) throws IOException {
        FsInput fsInput = null;
        FileReader<Object> reader = null;
        try {
            fsInput = new FsInput(new Path(inputDir, "part-m-00000.avro"), conf);
            reader = DataFileReader.openReader(fsInput, new GenericDatumReader<Object>());
            return reader.getSchema();
        } finally {
            // Close in reverse order of acquisition: the reader wraps fsInput,
            // so closing fsInput first would pull the stream out from under it.
            IOUtils.closeStream(reader);
            IOUtils.closeStream(fsInput);
        }
    }

From source file:crunch.MaxTemperature.java

License:Apache License

/**
 * Populates the station-id -> station-name map from an NCDC station
 * metadata file, one record per line. Lines the parser rejects are skipped.
 *
 * @param file the NCDC station metadata file to load
 * @throws IOException if the file cannot be opened or read
 */
public void initialize(File file) throws IOException {
        BufferedReader in = null;
        try {
            // Pin the charset explicitly; relying on the platform default
            // makes the parse result vary across machines.
            in = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
            NcdcStationMetadataParser parser = new NcdcStationMetadataParser();
            String line;
            while ((line = in.readLine()) != null) {
                // parse() returns false for lines that are not station records.
                if (parser.parse(line)) {
                    stationIdToName.put(parser.getStationId(), parser.getStationName());
                }
            }
        } finally {
            IOUtils.closeStream(in);
        }
    }

From source file:de.l3s.common.hadoop.WholeFileRecordReader.java

License:Apache License

@Override
public boolean nextKeyValue() throws IOException {
    // A whole-file record reader produces a single record per split;
    // once that record has been emitted there is nothing more to read.
    if (processed) {
        return false;
    }
    Path file = fileSplit.getPath();
    FileSystem fs = file.getFileSystem(conf);
    byte[] bytes = new byte[(int) fileSplit.getLength()];
    FSDataInputStream stream = null;
    try {
        stream = fs.open(file);
        // Fill the buffer with the full contents of the split.
        IOUtils.readFully(stream, bytes, 0, bytes.length);
        value.set(bytes, 0, bytes.length);
    } finally {
        IOUtils.closeStream(stream);
    }
    processed = true;
    return true;
}

From source file:dz.lab.hdfs.SeekReadFile.java

/**
 * @param args/*  w w  w .  j  a  v  a2s  . co  m*/
 */
public static void main(String[] args) throws IOException {
    Path fileToRead = new Path("/tmp/quotes.csv");
    // read configuration from core-site.xml available in the classpath (under /resources)
    FileSystem fs = FileSystem.get(new Configuration());

    FSDataInputStream input = null;
    try {
        // start at position 0
        input = fs.open(fileToRead);
        System.out.print("start position=" + input.getPos() + ":");
        IOUtils.copyBytes(input, System.out, 4096, false);

        // seek to position 11
        input.seek(11);
        System.out.print("start position=" + input.getPos() + ":");
        IOUtils.copyBytes(input, System.out, 4096, false);

        // seek back to position 0
        input.seek(11);
        System.out.print("start position=" + input.getPos() + ":");
        IOUtils.copyBytes(input, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(input);
    }
}

From source file:edu.bigdata.training.fileformats.compress.MapFileWriter.java

/**
 * Builds a MapFile (directory "mapfile" in the working directory) from the
 * comma-separated key,value lines of the bundled sample.txt resource.
 *
 * @param args unused
 * @throws IOException if reading the input or writing the MapFile fails
 * @throws URISyntaxException if the sample.txt resource URL is malformed
 */
@SuppressWarnings("deprecation")
public static void main(String[] args) throws IOException, URISyntaxException {

    Configuration conf = new Configuration();
    FileSystem fs;

    try {
        fs = FileSystem.get(conf);

        URI inputURI = MapFileWriter.class.getClassLoader().getResource("sample.txt").toURI();
        Path inputFile = new Path(inputURI);
        Path outputFile = new Path("mapfile");

        Text txtKey = new Text();
        Text txtValue = new Text();

        MapFile.Writer writer = null;
        BufferedReader reader = null;

        FSDataInputStream inputStream = fs.open(inputFile);

        try {
            System.out.println(outputFile.toString());
            // BUG FIX: the value class was passed as txtKey.getClass();
            // values are Text too, but pass the value instance's class.
            writer = new MapFile.Writer(conf, fs, outputFile.toString(), txtKey.getClass(),
                    txtValue.getClass());
            writer.setIndexInterval(1);//Need this as the default is 128, and my data is just 9 records
            reader = new BufferedReader(new InputStreamReader(new DataInputStream(inputStream)));
            String strLineInInputFile;
            while ((strLineInInputFile = reader.readLine()) != null) {
                String lstKeyValuePair[] = strLineInInputFile.split(",");
                txtKey.set(lstKeyValuePair[0]);
                txtValue.set(lstKeyValuePair[1]);
                writer.append(txtKey, txtValue);
            }
            // Report success only after every record was appended, not in finally.
            System.out.println("Map file created successfully!!");
        } finally {
            // Close the writer and the input chain; closeStream ignores errors/nulls.
            IOUtils.closeStream(writer);
            IOUtils.closeStream(reader);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:edu.bigdata.training.fileformats.compress.SequenceFileReader.java

/**
 * Dumps every record of the local "output" SequenceFile to stdout, flagging
 * records read immediately after a sync marker with "*".
 *
 * @param args unused
 * @throws IOException if the file cannot be opened or read
 */
public static void main(String[] args) throws IOException {
    String uri = "output";
    Configuration conf = new Configuration();
    Path path = new Path(uri);
    SequenceFile.Reader reader = null;
    try {
        reader = new SequenceFile.Reader(conf, Reader.file(path), Reader.bufferSize(4096), Reader.start(0));
        // The key/value classes come from the file header; build instances reflectively.
        Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
        Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
        while (reader.next(key, value)) {
            String marker = reader.syncSeen() ? "*" : "";
            System.out.printf("[%s]\t%s\t%s\n", marker, key, value);
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}