List of usage examples for org.apache.hadoop.io IOUtils closeStream
public static void closeStream(java.io.Closeable stream)
From source file:com.aliyun.fs.oss.TestAliyunOSSInputStream.java
License:Apache License
@Test public void testSeekFile() throws Exception { Path smallSeekFile = setPath("/test/smallSeekFile.txt"); long size = 5 * 1024 * 1024; ContractTestUtils.generateTestFile(this.fs, smallSeekFile, size, 256, 255); LOG.info("5MB file created: smallSeekFile.txt"); FSDataInputStream instream = this.fs.open(smallSeekFile); int seekTimes = 5; LOG.info("multiple fold position seeking test...:"); for (int i = 0; i < seekTimes; i++) { long pos = size / (seekTimes - i) - 1; LOG.info("begin seeking for pos: " + pos); instream.seek(pos);//w w w .j av a 2 s . c om assertTrue("expected position at:" + pos + ", but got:" + instream.getPos(), instream.getPos() == pos); LOG.info("completed seeking at pos: " + instream.getPos()); } LOG.info("random position seeking test...:"); Random rand = new Random(); for (int i = 0; i < seekTimes; i++) { long pos = Math.abs(rand.nextLong()) % size; LOG.info("begin seeking for pos: " + pos); instream.seek(pos); assertTrue("expected position at:" + pos + ", but got:" + instream.getPos(), instream.getPos() == pos); LOG.info("completed seeking at pos: " + instream.getPos()); } IOUtils.closeStream(instream); }
From source file:com.aliyun.fs.oss.TestAliyunOSSInputStream.java
License:Apache License
/**
 * Reads a generated 5MB file in 256-byte chunks and checks that available()
 * tracks the remaining byte count at every 1MB boundary.
 */
@Test
public void testReadFile() throws Exception {
    final int bufLen = 256;
    final int sizeFlag = 5;
    String filename = "readTestFile_" + sizeFlag + ".txt";
    Path readTestFile = setPath("/test/" + filename);
    long size = sizeFlag * 1024 * 1024;

    ContractTestUtils.generateTestFile(this.fs, readTestFile, size, 256, 255);
    LOG.info(sizeFlag + "MB file created: /test/" + filename);

    FSDataInputStream instream = this.fs.open(readTestFile);
    // try/finally so the stream is released even when an assertion fails;
    // the original leaked it on any assert failure.
    try {
        byte[] buf = new byte[bufLen];
        long bytesRead = 0;
        while (bytesRead < size) {
            int toRead = (int) Math.min(bufLen, size - bytesRead);
            int bytes = instream.read(buf, 0, toRead);
            // read() returns -1 at EOF; the original added -1 to bytesRead,
            // which would spin this loop forever on a truncated file.
            assertTrue("unexpected EOF after " + bytesRead + " bytes", bytes >= 0);
            bytesRead += bytes;
            if (bytesRead % (1024 * 1024) == 0) {
                int available = instream.available();
                int remaining = (int) (size - bytesRead);
                assertTrue("expected remaining:" + remaining + ", but got:" + available,
                        remaining == available);
                LOG.info("Bytes read: " + Math.round((double) bytesRead / (1024 * 1024)) + " MB");
            }
        }
        assertTrue(instream.available() == 0);
    } finally {
        IOUtils.closeStream(instream);
    }
}
From source file:com.avira.couchdoop.imp.PageFileWriter.java
License:Apache License
/**
 * Closes the page's output stream. Uses Hadoop's IOUtils.closeStream, which
 * is null-safe and swallows any IOException raised while closing.
 */
@Override
public void close() throws IOException {
    IOUtils.closeStream(outputStream);
}
From source file:com.bizosys.hsearch.kv.indexing.MapFileSizeReader.java
License:Apache License
public static void main(String[] args) { if (args.length < 1) { System.out.println("Usage: " + MapFileSizeReader.class + " <<hdfs-filepath>> <<key>>"); System.exit(1);// w ww . j a v a 2 s . c om } String hdfsFilePath = args[0].trim(); String askedKey = null; if (args.length == 2) askedKey = (args[1].trim()); MapFile.Reader reader = null; try { Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(URI.create(hdfsFilePath), conf); reader = new MapFile.Reader(fs, hdfsFilePath, conf); if (null == askedKey) { Text key = (Text) ReflectionUtils.newInstance(reader.getKeyClass(), conf); BytesWritable value = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf); while (reader.next(key, value)) { if (null == value) System.out.println(key.toString() + "\t0"); System.out.println(key.toString() + "\t" + value.getLength()); } } else { Text key = (Text) ReflectionUtils.newInstance(reader.getKeyClass(), conf); key.set(askedKey.getBytes()); BytesWritable value = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf); reader.get(key, value); System.out.println(key.toString() + "\t" + value.getLength()); } } catch (Exception e) { System.err.println("Error in reading from HDFSFilepath:" + hdfsFilePath); e.printStackTrace(System.out); } finally { IOUtils.closeStream(reader); } }
From source file:com.ceph.rados.fs.INode.java
License:Apache License
public InputStream serialize() throws IOException { ByteArrayOutputStream bytes = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(bytes); try {//from w w w . j av a 2s.c om out.writeByte(fileType.ordinal()); if (isFile()) { if (blocks != null) { out.writeInt(blocks.length); for (int i = 0; i < blocks.length; i++) { out.writeLong(blocks[i].getId()); out.writeLong(blocks[i].getLength()); } } } else { out.writeInt(0); } out.close(); out = null; } finally { IOUtils.closeStream(out); } return new ByteArrayInputStream(bytes.toByteArray()); }
From source file:com.cloudera.sa.ExcelRecordReader.java
License:Apache License
/**
 * Closes the record reader's underlying input stream. IOUtils.closeStream is
 * null-safe and swallows any IOException raised while closing.
 */
@Override
public void close() throws IOException {
    IOUtils.closeStream(in);
}
From source file:com.cloudera.sqoop.manager.CubridManagerImportTest.java
License:Apache License
private void doImportAndVerify(String tableName, String[] expectedResults, String... extraArgs) throws IOException { Path warehousePath = new Path(this.getWarehouseDir()); Path tablePath = new Path(warehousePath, tableName); Path filePath = new Path(tablePath, "part-m-00000"); File tableFile = new File(tablePath.toString()); if (tableFile.exists() && tableFile.isDirectory()) { // remove the directory before running the import. FileListing.recursiveDeleteDir(tableFile); }/*from w w w . ja v a 2 s .c om*/ String[] argv = getArgv(tableName, extraArgs); try { runImport(argv); } catch (IOException ioe) { LOG.error("Got IOException during import: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } File f = new File(filePath.toString()); assertTrue("Could not find imported data file", f.exists()); BufferedReader r = null; try { // Read through the file and make sure it's all there. r = new BufferedReader(new InputStreamReader(new FileInputStream(f))); for (String expectedLine : expectedResults) { assertEquals(expectedLine, r.readLine()); } } catch (IOException ioe) { LOG.error("Got IOException verifying results: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } finally { IOUtils.closeStream(r); } }
From source file:com.cloudera.sqoop.manager.DB2ManagerImportManualTest.java
License:Apache License
private void runDb2Test(String[] expectedResults) throws IOException { Path warehousePath = new Path(this.getWarehouseDir()); Path tablePath = new Path(warehousePath, getTableName()); Path filePath = new Path(tablePath, "part-m-00000"); File tableFile = new File(tablePath.toString()); if (tableFile.exists() && tableFile.isDirectory()) { // remove the directory before running the import. FileListing.recursiveDeleteDir(tableFile); }/* w w w . ja v a 2s. c o m*/ String[] argv = getArgv(); try { runImport(argv); } catch (IOException ioe) { LOG.error("Got IOException during import: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } File f = new File(filePath.toString()); assertTrue("Could not find imported data file", f.exists()); BufferedReader r = null; try { // Read through the file and make sure it's all there. r = new BufferedReader(new InputStreamReader(new FileInputStream(f))); for (String expectedLine : expectedResults) { assertEquals(expectedLine, r.readLine()); } } catch (IOException ioe) { LOG.error("Got IOException verifying results: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } finally { IOUtils.closeStream(r); } }
From source file:com.cloudera.sqoop.manager.DirectMySQLTest.java
License:Apache License
private void doImport(boolean mysqlOutputDelims, boolean isDirect, String tableName, String[] expectedResults, String[] extraArgs) throws IOException { Path warehousePath = new Path(this.getWarehouseDir()); Path tablePath = new Path(warehousePath, tableName); Path filePath = new Path(tablePath, "part-m-00000"); File tableFile = new File(tablePath.toString()); if (tableFile.exists() && tableFile.isDirectory()) { // remove the directory before running the import. FileListing.recursiveDeleteDir(tableFile); }//from w ww . ja va2 s . c om String[] argv = getArgv(mysqlOutputDelims, isDirect, tableName, extraArgs); try { runImport(argv); } catch (IOException ioe) { LOG.error("Got IOException during import: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } File f = new File(filePath.toString()); assertTrue("Could not find imported data file: " + f, f.exists()); BufferedReader r = null; try { // Read through the file and make sure it's all there. r = new BufferedReader(new InputStreamReader(new FileInputStream(f))); for (String expectedLine : expectedResults) { assertEquals(expectedLine, r.readLine()); } } catch (IOException ioe) { LOG.error("Got IOException verifying results: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } finally { IOUtils.closeStream(r); } }
From source file:com.cloudera.sqoop.manager.MySQLAuthTest.java
License:Apache License
/** * Connect to a db and ensure that password-based authentication * succeeds.//from w w w . java 2 s. c o m */ @Test public void testAuthAccess() { String[] argv = getArgv(true, true, AUTH_CONNECT_STRING, AUTH_TABLE_NAME); try { runImport(argv); } catch (IOException ioe) { LOG.error("Got IOException during import: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } Path warehousePath = new Path(this.getWarehouseDir()); Path tablePath = new Path(warehousePath, AUTH_TABLE_NAME); Path filePath = new Path(tablePath, "part-m-00000"); File f = new File(filePath.toString()); assertTrue("Could not find imported data file", f.exists()); BufferedReader r = null; try { // Read through the file and make sure it's all there. r = new BufferedReader(new InputStreamReader(new FileInputStream(f))); assertEquals("1,'Aaron'", r.readLine()); } catch (IOException ioe) { LOG.error("Got IOException verifying results: " + ioe.toString()); ioe.printStackTrace(); fail(ioe.toString()); } finally { IOUtils.closeStream(r); } }