Example usage for org.apache.hadoop.hdfs TestFileCreation writeFile

List of usage examples for org.apache.hadoop.hdfs TestFileCreation writeFile

Introduction

In this page you can find the example usage for org.apache.hadoop.hdfs TestFileCreation writeFile.

Prototype

public static void writeFile(FSDataOutputStream stm, int size) throws IOException 

Source Link

Usage

From source file: com.wandisco.s3hdfs.rewrite.filter.TestBasicUsage.java

License: Apache License

/**
 * Round-trip smoke test: writes an object's data and (empty) metadata files
 * directly into HDFS, then fetches and lists the object through the S3
 * gateway, verifying bucket name, key, and byte-for-byte content.
 *
 * @throws IOException              on HDFS read/write failure
 * @throws URISyntaxException       if a path cannot be parsed
 * @throws ServiceException         on S3 service errors
 * @throws NoSuchAlgorithmException if the digest used for comparison is unavailable
 */
@Test
public void testBasicPutGet()
        throws IOException, URISyntaxException, ServiceException, NoSuchAlgorithmException {
    S3HdfsPath s3HdfsPath = testUtil.setUpS3HdfsPath("rewrite", "readme.txt");

    // Create file and blank metadata in HDFS (but not s3).
    // try-with-resources guarantees the streams are closed even if a write throws.
    Path path = new Path(s3HdfsPath.getFullHdfsObjPath());
    try (FSDataOutputStream out = TestFileCreation.createFile(hdfs, path, 3)) {
        TestFileCreation.writeFile(out, 128);
    }

    Path pathMeta = new Path(s3HdfsPath.getFullHdfsMetaPath());
    try (FSDataOutputStream outMeta = hdfs.create(pathMeta)) {
        // Intentionally empty: the metadata file is created with no content.
    }

    // Get the object through the S3 interface.
    S3Bucket bucket = new S3Bucket(s3HdfsPath.getBucketName());
    String objectKey = s3HdfsPath.getObjectName();

    S3Object returnedObject1 = s3Service.getObject(bucket.getName(), objectKey);
    System.out.println("RETURNED_OBJECT_1");
    System.out.println(returnedObject1); // returned has dataInputStream!

    // Verify the object identity.
    assertEquals(bucket.getName(), returnedObject1.getBucketName());
    assertEquals(objectKey, returnedObject1.getKey());

    // Verify returned data matches what was written into HDFS.
    testUtil.compareS3ObjectWithHdfsFile(returnedObject1.getDataInputStream(), path);

    // List objects: exactly the one object we created should be visible.
    S3Object[] ls = s3Service.listObjects(bucket.getName());
    assertEquals("Should be one object", 1, ls.length);
    System.out.println("LISTED_OBJECTS_1");
    System.out.println(ls[0]);
}