Example usage for org.apache.hadoop.fs Trash Trash

List of usage examples for org.apache.hadoop.fs Trash Trash

Introduction

On this page you can find example usages of the org.apache.hadoop.fs Trash(FileSystem, Configuration) constructor.

Prototype

public Trash(FileSystem fs, Configuration conf) throws IOException 

Source Link

Document

Construct a trash can accessor for the FileSystem provided.

Usage

From source file: org.springframework.data.hadoop.impala.hdfs.FsShellCommands.java

License:Apache License

/**
 * Removes files or directories in HDFS, mirroring {@code hadoop fs -rm}.
 * Expands the given glob, refuses to delete a directory unless
 * {@code --recursive} is set, and (unless {@code --skipTrash}) first tries
 * to move each matched path into the user's trash.
 */
@CliCommand(value = PREFIX + "rm", help = "Remove files in the HDFS")
public void rm(@CliOption(key = {
        "" }, mandatory = false, specifiedDefaultValue = ".", unspecifiedDefaultValue = ".", help = "path to be deleted") final String path,
        @CliOption(key = {
                "skipTrash" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "whether to skip trash") final boolean skipTrash,
        @CliOption(key = {
                "recursive" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "whether to recurse") final boolean recursive) {
    try {
        Path file = new Path(path);
        FileSystem fs = file.getFileSystem(getHadoopConfiguration());
        for (Path p : FileUtil.stat2Paths(fs.globStatus(file), file)) {
            FileStatus status = fs.getFileStatus(p);
            // isDir() is deprecated in newer Hadoop (isDirectory()), kept for
            // compatibility with the Hadoop version this module targets.
            if (status.isDir() && !recursive) {
                LOG.severe("To remove directory, please use fs rm --recursive instead");
                return;
            }
            if (!skipTrash) {
                Trash trash = new Trash(fs, getHadoopConfiguration());
                // If the move to trash succeeded the path no longer exists;
                // skip the hard delete below (the original ignored the return
                // value and unconditionally attempted both operations).
                if (trash.moveToTrash(p)) {
                    continue;
                }
            }
            fs.delete(p, recursive);
        }
    } catch (Throwable t) {
        // NOTE(review): catching Throwable is overly broad and getMessage()
        // may be null; kept as-is because java.util.logging's severe(String)
        // cannot carry the throwable without a Level import we cannot add here.
        LOG.severe("run HDFS shell failed. Message is: " + t.getMessage());
    }

}

From source file: org.springframework.xd.shell.hadoop.FsShellCommands.java

License:Apache License

/**
 * Removes files or directories in HDFS, mirroring {@code hadoop fs -rm}.
 * Expands the given glob, refuses to delete a directory unless
 * {@code --recursive} is set, and (unless {@code --skipTrash}) first tries
 * to move each matched path into the user's trash.
 */
@CliCommand(value = PREFIX + "rm", help = "Remove files in the HDFS")
public void rm(@CliOption(key = { "",
        PATH }, mandatory = false, unspecifiedDefaultValue = ".", help = "path to be deleted") final String path,
        @CliOption(key = {
                "skipTrash" }, mandatory = false, specifiedDefaultValue = TRUE, unspecifiedDefaultValue = FALSE, help = "whether to skip trash") final boolean skipTrash,
        @CliOption(key = {
                RECURSIVE }, mandatory = false, specifiedDefaultValue = TRUE, unspecifiedDefaultValue = FALSE, help = "whether to recurse") final boolean recursive) {
    try {
        Path file = new Path(path);
        FileSystem fs = file.getFileSystem(getHadoopConfiguration());
        for (Path p : FileUtil.stat2Paths(fs.globStatus(file), file)) {
            FileStatus status = fs.getFileStatus(p);
            if (status.isDirectory() && !recursive) {
                LOG.error("To remove directory, please use 'fs rm </path/to/dir> --recursive' instead");
                return;
            }
            if (!skipTrash) {
                Trash trash = new Trash(fs, getHadoopConfiguration());
                // If the move to trash succeeded the path no longer exists;
                // skip the hard delete below (the original ignored the return
                // value and unconditionally attempted both operations).
                if (trash.moveToTrash(p)) {
                    continue;
                }
            }
            fs.delete(p, recursive);
        }
    } catch (Exception | Error t) {
        // Multi-catch replaces the duplicated Exception/Error blocks; pass the
        // throwable so the full stack trace is logged instead of only the
        // (possibly null) message.
        LOG.error("run HDFS shell failed. Message is: " + t.getMessage(), t);
    }
}