Example usage for org.apache.hadoop.io IOUtils closeStream

Introduction

On this page you can find example usages of org.apache.hadoop.io.IOUtils.closeStream.

Prototype

public static void closeStream(java.io.Closeable stream) 

Document

Closes the stream, ignoring any Throwable.
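
Before the per-project examples below, here is a minimal sketch of the typical pattern: closeStream is called from a finally block so that cleanup always runs and never masks the primary exception. The path "/tmp/example.txt" is a placeholder, not taken from any of the projects listed here.

import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CloseStreamSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        InputStream in = null;
        try {
            // "/tmp/example.txt" is a placeholder path
            in = fs.open(new Path("/tmp/example.txt"));
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            // Swallows any Throwable and accepts null, so it is safe in cleanup code
            IOUtils.closeStream(in);
        }
    }
}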

Usage

From source file:com.taobao.datax.plugins.writer.hdfswriter.HdfsWriter.java

License:Open Source License

private void closeAll() {
    try {
        IOUtils.closeStream(fs);
    } catch (Exception e) {
        throw new DataExchangeException(
                String.format("HdfsWriter closing filesystem failed: %s,%s", e.getMessage(), e.getCause()));
    }
}

From source file:com.tomslabs.grid.avro.HadoopTestBase.java

License:Apache License

protected Path localResourceToPath(String path, String target) throws IOException {
    try {
        final InputStream resource = this.getClass().getResourceAsStream(path);
        if (resource == null)
            throw new IllegalArgumentException(path + " not found");
        final Path targetPath = new Path(localFs.getWorkingDirectory(), "target/hadoop-test/imported/" + target)
                .makeQualified(localFs);
        localFs.delete(targetPath, true);
        OutputStream out = null;
        try {
            out = localFs.create(targetPath, true);
            IOUtils.copyBytes(resource, out, localConf, true);
        } catch (IOException e) {
            IOUtils.closeStream(out);
            IOUtils.closeStream(resource);
            throw e;
        }
        return targetPath;
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file:com.twitter.hraven.mapreduce.JobFileRawLoaderMapper.java

License:Apache License

/**
 * Get the raw bytes and the last modification millis for this JobFile
 *
 * @return the contents of the job file.
 * @throws IOException
 *           when bad things happen during reading
 */
private byte[] readJobFile(FileStatus fileStatus) throws IOException {
    byte[] rawBytes = null;
    FSDataInputStream fsdis = null;
    try {
        long fileLength = fileStatus.getLen();
        int fileLengthInt = (int) fileLength;
        rawBytes = new byte[fileLengthInt];
        fsdis = hdfs.open(fileStatus.getPath());
        IOUtils.readFully(fsdis, rawBytes, 0, fileLengthInt);
    } finally {
        IOUtils.closeStream(fsdis);
    }
    return rawBytes;
}

From source file:com.uber.hoodie.common.table.HoodieTableMetaClientTest.java

License:Apache License

@Test
public void checkArchiveCommitTimeline() throws IOException {
    Path archiveLogPath = HoodieArchivedTimeline.getArchiveLogPath(metaClient.getArchivePath());
    SequenceFile.Writer writer = SequenceFile.createWriter(metaClient.getHadoopConf(),
            SequenceFile.Writer.file(archiveLogPath), SequenceFile.Writer.keyClass(Text.class),
            SequenceFile.Writer.valueClass(Text.class));

    writer.append(new Text("1"), new Text("data1"));
    writer.append(new Text("2"), new Text("data2"));
    writer.append(new Text("3"), new Text("data3"));

    IOUtils.closeStream(writer);

    HoodieArchivedTimeline archivedTimeline = metaClient.getArchivedTimeline();

    HoodieInstant instant1 = new HoodieInstant(false, HoodieTimeline.COMMIT_ACTION, "1");
    HoodieInstant instant2 = new HoodieInstant(false, HoodieTimeline.COMMIT_ACTION, "2");
    HoodieInstant instant3 = new HoodieInstant(false, HoodieTimeline.COMMIT_ACTION, "3");

    assertEquals(Lists.newArrayList(instant1, instant2, instant3),
            archivedTimeline.getInstants().collect(Collectors.toList()));

    assertArrayEquals(new Text("data1").getBytes(), archivedTimeline.getInstantDetails(instant1).get());
    assertArrayEquals(new Text("data2").getBytes(), archivedTimeline.getInstantDetails(instant2).get());
    assertArrayEquals(new Text("data3").getBytes(), archivedTimeline.getInstantDetails(instant3).get());
}

From source file:com.yahoo.labs.samoa.streams.fs.HDFSFileStreamSource.java

License:Apache License

private void closeFileStream() {
    IOUtils.closeStream(fileStream);
}

From source file:crunch.MaxTemperature.java

License:Apache License

public static void main(String[] args) throws Exception {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    InputStream in = null;
    try {
        in = fs.open(new Path(uri));
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }
}

From source file:crunch.MaxTemperature.java

License:Apache License

public static void main(String[] args) throws Exception {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    FSDataInputStream in = null; // FSDataInputStream is Seekable; previous examples used InputStream (no seeking was done)
    try {
        in = fs.open(new Path(uri));
        IOUtils.copyBytes(in, System.out, 4096, false);
        in.seek(0); // go back to the start of the file
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }
}

From source file:crunch.MaxTemperature.java

License:Apache License

public static void main(String[] args) throws Exception {
    InputStream in = null;
    try {
        in = new URL(args[0]).openStream();
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }
}

From source file:crunch.MaxTemperature.java

License:Apache License

public static void main(String[] args) throws Exception {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);

    Path inputPath = new Path(uri);
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(inputPath);
    if (codec == null) {
        System.err.println("No codec found for " + uri);
        System.exit(1);
    }

    String outputUri = CompressionCodecFactory.removeSuffix(uri, codec.getDefaultExtension());

    InputStream in = null;
    OutputStream out = null;
    try {
        in = codec.createInputStream(fs.open(inputPath));
        out = fs.create(new Path(outputUri));
        IOUtils.copyBytes(in, out, conf);
    } finally {
        IOUtils.closeStream(in);
        IOUtils.closeStream(out);
    }
}

From source file:crunch.MaxTemperature.java

License:Apache License

public static void main(String[] args) throws IOException {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);

    IntWritable key = new IntWritable();
    Text value = new Text();
    MapFile.Writer writer = null;
    try {
        writer = new MapFile.Writer(conf, fs, uri, key.getClass(), value.getClass());

        for (int i = 0; i < 1024; i++) {
            key.set(i + 1);
            value.set(DATA[i % DATA.length]);
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }
}