Example usage for org.apache.hadoop.fs FileStatus getOwner

List of usage examples for org.apache.hadoop.fs FileStatus getOwner

Introduction

In this page you can find the example usage for org.apache.hadoop.fs FileStatus getOwner.

Prototype

public String getOwner() 

Source Link

Document

Get the owner of the file.

Usage

From source file:org.godhuli.rhipe.FileUtils.java

License:Apache License

/**
 * Appends one ls-style line per file matched by {@code path} to {@code lsco},
 * recursing into matched directories when {@code dorecurse} is set.
 * Each line is: type flag, permissions, owner, group, length, formatted
 * modification time and path, joined by the {@code fsep} separator.
 *
 * @param path      path or glob pattern to list
 * @param lsco      output collector; formatted lines are appended to it
 * @param dorecurse whether to descend into matched directories
 * @throws FileNotFoundException if the glob matches nothing
 * @throws IOException           on filesystem errors
 * @throws URISyntaxException    kept in the signature for caller compatibility
 */
private void ls__(String path, ArrayList<String> lsco, boolean dorecurse)
        throws IOException, FileNotFoundException, URISyntaxException {

    Path spath = new Path(path);
    FileSystem srcFS = spath.getFileSystem(getConf());
    FileStatus[] srcs = srcFS.globStatus(spath);
    if (srcs == null || srcs.length == 0) {
        throw new FileNotFoundException("Cannot access " + path + ": No such file or directory.");
    }
    // A single directory match means "list its children", mirroring shell ls.
    // isDirectory() replaces the deprecated isDir().
    if (srcs.length == 1 && srcs[0].isDirectory()) {
        srcs = srcFS.listStatus(srcs[0].getPath());
    }
    // Removed unused local: Calendar c = Calendar.getInstance();
    for (FileStatus status : srcs) {
        boolean isDirectory = status.isDirectory();
        if (dorecurse && isDirectory) {
            ls__(status.getPath().toUri().getPath(), lsco, dorecurse);
        } else {
            StringBuilder sb = new StringBuilder();
            sb.append(isDirectory ? "d" : "-");
            sb.append(status.getPermission().toString());
            sb.append(fsep);

            sb.append(status.getOwner());
            sb.append(fsep);

            sb.append(status.getGroup());
            sb.append(fsep);

            sb.append(status.getLen());
            sb.append(fsep);

            sb.append(formatter.format(new Date(status.getModificationTime())));
            sb.append(fsep);

            sb.append(status.getPath().toUri().getPath());
            lsco.add(sb.toString());
        }
    }
}

From source file:org.mrgeo.cmd.mrsimageinfo.MrsImageInfo.java

License:Apache License

/**
 * Prints basic metadata for the given path: modification date and size, plus
 * (in debug mode) entry type, owner/group with their permission actions,
 * block size and replication factor.
 *
 * @param pfile path to stat
 * @param out   stream the report is written to
 * @throws IOException on filesystem errors
 */
private void printFileInfo(final Path pfile, PrintStream out) throws IOException {
    // TODO: The following is HDFS-specific; needs to be re-factored
    final FileSystem fs = pfile.getFileSystem(config);
    final FileStatus stat = fs.getFileStatus(pfile);

    out.print("    date: " + DateTimeFormat.shortDateTime().print(stat.getModificationTime()));
    out.println("  size: " + human(stat.getLen()));

    final FsPermission p = stat.getPermission();

    if (debug) {
        out.print("    ");
        // isDirectory() replaces the deprecated isDir().
        out.print(stat.isDirectory() ? "d" : "f");
        out.print(" u: " + stat.getOwner() + " (" + p.getUserAction().toString().toLowerCase() + ")");
        out.print(" g: " + stat.getGroup() + " (" + p.getGroupAction().toString().toLowerCase() + ")");
        out.print(" o: " + "(" + p.getOtherAction().toString().toLowerCase() + ")");

        out.print(" blk: " + human(stat.getBlockSize()));
        out.println(" repl: " + stat.getReplication());
    }
}

From source file:org.mrgeo.cmd.showconfiguration.ShowConfiguration.java

License:Apache License

/**
 * Builds a human-readable report about the HDFS path stored under property
 * {@code p}: whether it is configured, whether it exists, and if so its
 * owner.group plus user/group/other permission actions.
 *
 * @param p name of the property holding the HDFS directory path
 * @return multi-line report text; never null
 */
@SuppressWarnings("squid:S1166") // Exception caught and handled
public String reportHDFSPath(String p) {
    // Method-local buffer: StringBuilder, no synchronization needed
    // (was an unnecessary StringBuffer).
    StringBuilder sb = new StringBuilder();

    Object value = props.get(p);
    if (value == null) {
        sb.append("HDFS directory for '").append(p).append("' is not set.\n");
    } else {
        sb.append("HDFS image directory: ").append(value);
        // Construct the Path once instead of twice.
        Path hdfsPath = new Path((String) value);
        try {
            if (fs.exists(hdfsPath)) {
                sb.append("\tExists: ");
                FileStatus fstat = fs.getFileStatus(hdfsPath);
                sb.append("\tuser.group = ").append(fstat.getOwner()).append(".").append(fstat.getGroup());
                FsPermission fsperm = fstat.getPermission();
                sb.append("\tu: ").append(fsperm.getUserAction());
                sb.append("\tg: ").append(fsperm.getGroupAction());
                sb.append("\to: ").append(fsperm.getOtherAction()).append("\n");

            } else {
                sb.append("\tDoes not exist.\n");
            }
        } catch (IOException ioe) {
            // Deliberate best-effort: an inaccessible path is reported the
            // same as a missing one (see the squid:S1166 suppression above).
            sb.append("\tDoes not exist.\n");
        }
    }
    return sb.toString();
}

From source file:org.opencloudengine.garuda.model.HdfsFileInfo.java

License:Open Source License

public HdfsFileInfo(FileStatus fileStatus, ContentSummary contentSummary) {
    this.fullyQualifiedPath = fileStatus.getPath().toUri().getPath();
    this.filename = isEmpty(getFilename(fullyQualifiedPath)) ? getDirectoryName(fullyQualifiedPath)
            : getFilename(fullyQualifiedPath);
    this.length = fileStatus.getLen();
    this.path = getPath(fullyQualifiedPath);
    this.directory = fileStatus.isDirectory();
    this.file = !fileStatus.isDirectory();
    this.owner = fileStatus.getOwner();
    this.group = fileStatus.getGroup();
    this.blockSize = fileStatus.getBlockSize();
    this.replication = fileStatus.getReplication();
    this.modificationTime = fileStatus.getModificationTime();
    if (contentSummary != null) {
        this.spaceConsumed = contentSummary.getSpaceConsumed();
        this.quota = contentSummary.getQuota();
        this.spaceQuota = contentSummary.getSpaceQuota();
        this.directoryCount = contentSummary.getDirectoryCount();
        this.fileCount = contentSummary.getFileCount();
    }//from ww  w . j a va  2  s  .  co  m
    this.accessTime = fileStatus.getAccessTime();
    this.permission = fileStatus.getPermission().toString();
}

From source file:org.openflamingo.fs.hdfs.HdfsFileInfo.java

License:Apache License

/**
 * Builds an HdfsFileInfo view model from an HDFS {@link FileStatus}, also
 * initializing the tree-node presentation fields (text, leaf flag, css
 * class and id) used by the UI.
 *
 * @param fileStatus HDFS file status to copy metadata from
 */
public HdfsFileInfo(FileStatus fileStatus) {
    this.fullyQualifiedPath = fileStatus.getPath().toUri().getPath();
    // Directories have no file-name component; fall back to the directory name.
    this.filename = StringUtils.isEmpty(FileUtils.getFilename(fullyQualifiedPath))
            ? FileUtils.getDirectoryName(fullyQualifiedPath)
            : FileUtils.getFilename(fullyQualifiedPath);
    this.length = fileStatus.getLen();
    this.path = FileUtils.getPath(fullyQualifiedPath);
    // isDirectory() replaces the deprecated isDir().
    this.directory = fileStatus.isDirectory();
    this.file = !this.directory;
    this.owner = fileStatus.getOwner();
    this.group = fileStatus.getGroup();
    this.blockSize = fileStatus.getBlockSize();
    this.replication = fileStatus.getReplication();
    this.modificationTime = fileStatus.getModificationTime();
    this.accessTime = fileStatus.getAccessTime();
    this.setText(this.filename);
    this.setLeaf(file); // was: file ? true : false
    this.setCls(directory ? "folder" : "file");
    this.setId(fullyQualifiedPath);
    this.permission = fileStatus.getPermission().toString();
}

From source file:org.smartfrog.services.hadoop.operations.dfs.DfsListDirImpl.java

License:Open Source License

/**
 * Lists the contents of the configured DFS path, logs the listing, and
 * verifies that the file count and the total byte size fall within the
 * configured min/max liveness bounds.
 *
 * @param fileSystem the filesystem; this is closed afterwards
 * @param conf       the configuration driving this operation
 * @throws Exception on any failure
 */
@Override
protected void performDfsOperation(FileSystem fileSystem, ManagedConfiguration conf) throws Exception {
    Path path = getPath();
    if (path == null) {
        throw new SmartFrogLivenessException("No path for the DfsListDir operation", this);
    }
    int minFileCount = sfResolve(ATTR_MIN_FILE_COUNT, 0, true);
    int maxFileCount = sfResolve(ATTR_MAX_FILE_COUNT, 0, true);
    long minTotalFileSize = sfResolve(ATTR_MIN_TOTAL_FILE_SIZE, 0L, true);
    long maxTotalFileSize = sfResolve(ATTR_MAX_TOTAL_FILE_SIZE, 0L, true);
    try {
        long size = 0;
        FileStatus[] stats = fileSystem.listStatus(path);
        if (stats == null) {
            throw new SmartFrogLivenessException("Path not found in the remote filesystem: " + path, this);
        }
        StringBuilder builder = new StringBuilder();
        builder.append("Listing of ").append(path).append("/\n");
        for (FileStatus file : stats) {
            size += file.getLen();
            builder.append(file.getPath().getName());
            builder.append("\n  size=").append(file.getLen());
            builder.append("\n  replication=").append(file.getReplication());
            builder.append("\n  last modified=").append(new Date(file.getModificationTime()).toString());
            builder.append("\n  owner=").append(file.getOwner());
            builder.append("\n  group=").append(file.getGroup());
            builder.append("\n  permissions=").append(file.getPermission()).append('\n');
        }
        String listing = builder.toString();
        sfLog().info(listing);
        int count = stats.length;
        sfLog().info("Files: " + count + "  total size=" + size);
        if (count < minFileCount) {
            throw new SmartFrogLivenessException("File count " + count + " is below the minFileCount value of "
                    + minFileCount + "\n" + listing, this);
        }
        // BUG FIX: the message previously reported minFileCount instead of maxFileCount.
        if (maxFileCount > -1 && count > maxFileCount) {
            throw new SmartFrogLivenessException("File count " + count + " is above the maxFileCount value of "
                    + maxFileCount + "\n" + listing, this);
        }
        if (size < minTotalFileSize) {
            throw new SmartFrogLivenessException("File size " + size
                    + " is below the minTotalFileSize value of " + minTotalFileSize + "\n" + listing, this);
        }
        // BUG FIX: this previously compared size (bytes) against maxFileCount (a
        // file count) and gated on maxFileCount instead of maxTotalFileSize.
        if (maxTotalFileSize > -1 && size > maxTotalFileSize) {
            throw new SmartFrogLivenessException("File size " + size
                    + " is above the maxTotalFileSize value of " + maxTotalFileSize + "\n" + listing, this);
        }

    } catch (IOException e) {
        if (isIdempotent()) {
            sfLog().info("Failed to stat " + path, e);
        } else {
            throw e;
        }
    }
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

/**
 * Lists the files matching the given glob patterns, with each entry rendered
 * in an {@code hadoop fs -ls}-like column format (permissions, replication,
 * owner, group, length, modification time, path).
 *
 * @param recursive whether to descend into matched directories
 * @param match     one or more glob patterns to list
 * @return unmodifiable, pretty-printing collection of the matched statuses
 */
public Collection<FileStatus> ls(boolean recursive, String... match) {

    ListPrinter<FileStatus> printer = new ListPrinter<FileStatus>() {
        @Override
        public String toString(FileStatus stat) throws Exception {
            final SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm");
            int maxReplication = 3, maxLen = 10, maxOwner = 10, maxGroup = 10;

            boolean dir = stat.isDir();
            StringBuilder line = new StringBuilder();
            line.append(dir ? "d" : "-").append(stat.getPermission()).append(" ");
            line.append(String.format("%" + maxReplication + "s ", (dir ? "-" : stat.getReplication())));
            line.append(String.format("%-" + maxOwner + "s ", stat.getOwner()));
            line.append(String.format("%-" + maxGroup + "s ", stat.getGroup()));
            line.append(String.format("%" + maxLen + "d ", stat.getLen()));
            line.append(df.format(new Date(stat.getModificationTime()))).append(" ");
            line.append(stat.getPath().toUri().getPath());
            return line.toString();
        }
    };
    Collection<FileStatus> results = new PrettyPrintList<FileStatus>(printer);

    try {
        for (String src : match) {
            Path srcPath = new Path(src);
            FileSystem srcFs = getFS(srcPath);

            FileStatus[] srcs = srcFs.globStatus(srcPath);
            if (ObjectUtils.isEmpty(srcs)) {
                throw new IllegalArgumentException("Cannot access " + srcPath + ": No such file or directory.");
            }
            for (FileStatus status : srcs) {
                ls(status, srcFs, recursive, results);
            }
        }

        return Collections.unmodifiableCollection(results);

    } catch (IOException ex) {
        throw new HadoopException("Cannot list resources " + ex.getMessage(), ex);
    }
}

From source file:org.trafodion.sql.HBaseAccess.HBaseClient.java

License:Apache License

/**
 * Grants the HBase root-directory owner an rwx ACL entry on the archive path
 * of the given table, then updates permissions on its entries, so archived
 * snapshot files remain accessible.
 *
 * @param tabName name of the table whose archive path is updated
 * @return true, including when the table has no archive path yet
 * @throws IOException      if the HBase root directory cannot be stat'ed or
 *                          its owner cannot be determined
 * @throws ServiceException kept in the signature for caller compatibility
 */
public boolean setArchivePermissions(String tabName) throws IOException, ServiceException {
    if (logger.isTraceEnabled())
        logger.trace("[Snapshot Scan] SnapshotScanHelper.setArchivePermissions() called. ");
    Path rootDir = FSUtils.getRootDir(config);
    FileSystem myfs = FileSystem.get(rootDir.toUri(), config);
    FileStatus fstatus = myfs.getFileStatus(rootDir);
    String hbaseUser = fstatus.getOwner();
    // BUG FIX: was a bare `assert`, which is a no-op unless the JVM runs with
    // -ea; an unknown owner would then silently produce a malformed ACL spec.
    if (hbaseUser == null || hbaseUser.length() == 0) {
        throw new IOException("Cannot determine owner of HBase root directory " + rootDir);
    }
    Path tabArcPath = HFileArchiveUtil.getTableArchivePath(config, TableName.valueOf(tabName));
    if (tabArcPath == null)
        return true;
    List<AclEntry> lacl = AclEntry.parseAclSpec("user:" + hbaseUser + ":rwx", true);
    try {
        myfs.modifyAclEntries(tabArcPath, lacl);
    } catch (IOException e) {
        // Deliberately best-effort: if the ACL update fails just log and continue.
        if (logger.isTraceEnabled())
            logger.trace("[Snapshot Scan] SnapshotScanHelper.setArchivePermissions() exception. " + e);
    }
    FileStatus[] files = FSUtils.listStatus(myfs, tabArcPath);
    updatePermissionForEntries(files, hbaseUser, myfs);
    return true;
}

From source file:org.trustedanalytics.auth.gateway.hdfs.HdfsClient.java

License:Apache License

/**
 * Widens the permissions of the file or directory at {@code path} by OR-ing
 * the supplied permission bits into its existing ones, then reassigns the
 * entry to the given group (the owner is preserved).
 *
 * @param path       path to the file or directory
 * @param group      new group to assign
 * @param permission permission bits to add for user, group and other
 * @throws IOException on filesystem errors
 */
public void modifyPermissions(Path path, String group, FsPermission permission) throws IOException {
    FileStatus status = fileSystem.getFileStatus(path);
    FsPermission current = status.getPermission();

    // Union of the current and requested actions for each permission class.
    FsPermission widened = new FsPermission(
            current.getUserAction().or(permission.getUserAction()),
            current.getGroupAction().or(permission.getGroupAction()),
            current.getOtherAction().or(permission.getOtherAction()));

    fileSystem.setPermission(path, widened);
    fileSystem.setOwner(path, status.getOwner(), group);
}

From source file:org.trustedanalytics.auth.gateway.hdfs.HdfsClientTest.java

License:Apache License

/**
 * When the target directory already exists, create() must skip mkdirs and
 * instead re-apply the requested permission and ownership.
 */
@Test
public void createDirectory_directoryAlreadyExists_updatePrivileges() throws IOException {
    final String owner = "test_admin";
    final String group = "test";

    when(fileSystem.exists(TEST_PATH)).thenReturn(true);

    // Stub the existing directory's status so create() sees it as present.
    FileStatus status = mock(FileStatus.class);
    FsPermission permission = mock(FsPermission.class);
    when(fileSystem.getFileStatus(TEST_PATH)).thenReturn(status);
    when(status.getPermission()).thenReturn(userPermission);
    when(status.getOwner()).thenReturn(owner);

    hdfsClient.create(TEST_PATH, userPermission, owner, group);

    verify(fileSystem, times(0)).mkdirs(TEST_PATH);
    verify(fileSystem).setPermission(TEST_PATH, userPermission);
    verify(fileSystem).setOwner(TEST_PATH, owner, group);
}