Example usage for org.apache.hadoop.hdfs TestFileCreation createFile

List of usage examples for org.apache.hadoop.hdfs TestFileCreation createFile

Introduction

In this page you can find the example usage for org.apache.hadoop.hdfs TestFileCreation createFile.

Prototype

public static FSDataOutputStream createFile(FileSystem fileSys, Path name, int repl) throws IOException 

Source Link

Usage

From source file: com.wandisco.s3hdfs.rewrite.filter.TestBasicUsage.java

License: Apache License

@Test
public void testBasicPutGet()
        throws IOException, URISyntaxException, ServiceException, NoSuchAlgorithmException {
    // Resolve the S3HDFS path for a small object in the "rewrite" bucket.
    S3HdfsPath s3HdfsPath = testUtil.setUpS3HdfsPath("rewrite", "readme.txt");

    // Write the object's data file directly into HDFS, bypassing the S3 layer.
    Path objectPath = new Path(s3HdfsPath.getFullHdfsObjPath());
    FSDataOutputStream dataStream = TestFileCreation.createFile(hdfs, objectPath, 3);
    TestFileCreation.writeFile(dataStream, 128);
    dataStream.close();

    // Create the empty companion metadata file in HDFS.
    Path metadataPath = new Path(s3HdfsPath.getFullHdfsMetaPath());
    hdfs.create(metadataPath).close();

    // Fetch the object back through the S3 interface.
    S3Bucket bucket = new S3Bucket(s3HdfsPath.getBucketName());
    String objectKey = s3HdfsPath.getObjectName();

    S3Object returnedObject1 = s3Service.getObject(bucket.getName(), objectKey);
    System.out.println("RETURNED_OBJECT_1");
    System.out.println(returnedObject1); // returned has dataInputStream!

    // The returned object must identify the bucket and key we stored under.
    assertEquals(bucket.getName(), returnedObject1.getBucketName());
    assertEquals(objectKey, returnedObject1.getKey());

    // Its payload must match the bytes written into HDFS above.
    testUtil.compareS3ObjectWithHdfsFile(returnedObject1.getDataInputStream(), objectPath);

    // Listing the bucket should reveal exactly the one object created.
    S3Object[] listing = s3Service.listObjects(bucket.getName());
    assertEquals("Should be one object", 1, listing.length);
    System.out.println("LISTED_OBJECTS_1");
    System.out.println(listing[0]);
}