Example usage for org.apache.hadoop.fs FileUtil fullyDelete

List of usage examples for org.apache.hadoop.fs FileUtil fullyDelete

Introduction

In this page you can find the example usage for org.apache.hadoop.fs FileUtil fullyDelete.

Prototype

public static boolean fullyDelete(final File dir) 

Source Link

Document

Delete a directory and all its contents.

Usage

From source file: org.apache.avro.mapred.TestGenericJob.java

License: Apache License

@After
public void tearDown() throws IOException {
    // Remove the working directory created for this test run.
    final File workDir = new File(dir);
    FileUtil.fullyDelete(workDir);
}

From source file: org.apache.coheigea.bigdata.hdfs.HDFSAccessControlEnforcerTest.java

License: Apache License

@org.junit.AfterClass
public static void cleanup() throws Exception {
    // Shut the cluster down even if deleting the data directory fails,
    // so no NameNode/DataNode threads are left running between test classes.
    try {
        FileUtil.fullyDelete(baseDir);
    } finally {
        hdfsCluster.shutdown();
    }
}

From source file: org.apache.coheigea.bigdata.hdfs.HDFSKerberosTest.java

License: Apache License

@org.junit.AfterClass
public static void cleanup() throws Exception {
    // Chain the teardown steps in finally blocks: a failure in one step must
    // not leak the cluster, the Kerby KDC, or the JVM-wide krb5 property
    // (a stale java.security.krb5.conf would poison later test classes).
    try {
        FileUtil.fullyDelete(baseDir);
        hdfsCluster.shutdown();
    } finally {
        try {
            if (kerbyServer != null) {
                kerbyServer.stop();
            }
        } finally {
            System.clearProperty("java.security.krb5.conf");
        }
    }
}

From source file: org.apache.coheigea.bigdata.hive.HIVEAuthorizerTest.java

License: Apache License

@org.junit.AfterClass
public static void cleanup() throws Exception {
    // Stop the server first, then wipe the HDFS base and metastore directories.
    hiveServer.stop();

    final File metastore = new File("./target/authzmetastore/").getAbsoluteFile();
    FileUtil.fullyDelete(hdfsBaseDir);
    FileUtil.fullyDelete(metastore);
}

From source file: org.apache.coheigea.bigdata.hive.HIVETest.java

License: Apache License

@org.junit.AfterClass
public static void cleanup() throws Exception {
    // Delete the on-disk state even if the Hive server fails to stop cleanly;
    // otherwise a failed stop() leaves stale dirs for the next test class.
    try {
        hiveServer.stop();
    } finally {
        FileUtil.fullyDelete(hdfsBaseDir);
        File metastoreDir = new File("./target/metastore/").getAbsoluteFile();
        FileUtil.fullyDelete(metastoreDir);
    }
}

From source file: org.apache.coheigea.bigdata.hive.ranger.HIVERangerAuthorizerTest.java

License: Apache License

@org.junit.AfterClass
public static void cleanup() throws Exception {
    hiveServer.stop();

    // Clean up the on-disk state left behind by the mini HDFS cluster
    // and the Ranger-authz metastore.
    final File rangerMetastore = new File("./target/rangerauthzmetastore/").getAbsoluteFile();
    FileUtil.fullyDelete(hdfsBaseDir);
    FileUtil.fullyDelete(rangerMetastore);
}

From source file: org.apache.drill.cv.exec.coord.zk.MiniZooKeeperCluster.java

License: Apache License

/**
 * Recreates {@code dir} as an empty directory: deletes any existing contents,
 * then makes the directory (and any missing parents).
 *
 * @param dir the directory to recreate
 * @throws IOException if the directory cannot be created
 */
private void recreateDir(File dir) throws IOException {
    if (dir.exists()) {
        FileUtil.fullyDelete(dir);
    }
    try {
        // mkdirs() reports most failures by returning false rather than
        // throwing, so the return value must be checked; the isDirectory()
        // guard tolerates a concurrent create of the same path.
        if (!dir.mkdirs() && !dir.isDirectory()) {
            throw new IOException("creating dir: " + dir);
        }
    } catch (SecurityException e) {
        throw new IOException("creating dir: " + dir, e);
    }
}

From source file: org.apache.falcon.cluster.util.MiniHdfsClusterUtil.java

License: Apache License

/**
 * Shuts down the mini DFS cluster and removes its base directory.
 * The directory is deleted even if the cluster fails to shut down cleanly.
 *
 * @param miniDFSCluster the running mini cluster to stop
 * @param baseDir        the cluster's on-disk base directory to remove
 * @throws Exception if shutdown fails
 */
public static void cleanupDfs(MiniDFSCluster miniDFSCluster, File baseDir) throws Exception {
    try {
        miniDFSCluster.shutdown();
    } finally {
        FileUtil.fullyDelete(baseDir);
    }
}

From source file: org.apache.flink.hdfstests.ContinuousFileMonitoringFunctionITCase.java

License: Apache License

@Before
public void createHDFS() {
    // Starts a fresh mini HDFS cluster in ./target and wires up the
    // hdfsURI / hdfs fields the tests use.
    try {
        baseDir = new File("./target/hdfs/hdfsTesting").getAbsoluteFile();
        FileUtil.fullyDelete(baseDir);

        org.apache.hadoop.conf.Configuration hdConf = new org.apache.hadoop.conf.Configuration();
        hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
        hdConf.set("dfs.block.size", String.valueOf(1048576)); // this is the minimum we can set.

        MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
        hdfsCluster = builder.build();

        hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/";
        hdfs = new org.apache.hadoop.fs.Path(hdfsURI).getFileSystem(hdConf);

    } catch (Throwable e) {
        // Fail with the full cause attached: e.getMessage() alone can be null,
        // and printStackTrace() hides the root cause from the test report.
        throw new AssertionError("Could not start the HDFS mini-cluster", e);
    }
}

From source file: org.apache.flink.hdfstests.ContinuousFileMonitoringFunctionITCase.java

License: Apache License

@After
public void destroyHDFS() {
    try {
        try {
            FileUtil.fullyDelete(baseDir);
        } finally {
            // Always stop the cluster, even if deleting the base dir fails;
            // a leaked cluster keeps non-daemon threads (and ports) alive.
            hdfsCluster.shutdown();
        }
    } catch (Throwable t) {
        throw new RuntimeException(t);
    }
}