Example usage for org.apache.hadoop.fs.permission FsPermission toString

List of usage examples for org.apache.hadoop.fs.permission FsPermission toString

Introduction

In this page you can find the example usage for org.apache.hadoop.fs.permission FsPermission toString.

Prototype

@Override
    public String toString() 

Source Link

Usage

From source file:alluxio.hadoop.AbstractFileSystem.java

License: Apache License

/**
 * Changes permission of a path./*from   w w  w. ja  v  a 2 s.c  o m*/
 *
 * @param path path to set permission
 * @param permission permission set to path
 * @throws IOException if the path failed to be changed permission
 */
public void setPermission(Path path, FsPermission permission) throws IOException {
    LOG.info("setMode({},{})", path, permission.toString());
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    SetAttributeOptions options = SetAttributeOptions.defaults().setMode(permission.toShort())
            .setRecursive(false);
    try {
        sFileSystem.setAttribute(uri, options);
    } catch (AlluxioException e) {
        throw new IOException(e);
    }
}

From source file:com.streamsets.pipeline.stage.origin.hdfs.spooler.HdfsFile.java

License: Apache License

/**
 * Builds a map of header metadata for the spooled file, keyed by the
 * {@code HeaderAttributeConstants} names.
 *
 * <p>Permissions are included only when the file status reports a non-null
 * {@code FsPermission}.
 *
 * @return a mutable map of header attribute name to value for this file
 * @throws IOException if the file status cannot be fetched from the filesystem
 */
@SuppressWarnings("unchecked")
public Map<String, Object> getFileMetadata() throws IOException {
    final FileStatus status = fs.getFileStatus(filePath);
    final Map<String, Object> headers = new HashMap<>();

    headers.put(HeaderAttributeConstants.FILE_NAME, status.getPath().getName());
    headers.put(HeaderAttributeConstants.FILE, status.getPath().toUri().getPath());
    headers.put(HeaderAttributeConstants.LAST_MODIFIED_TIME, status.getModificationTime());
    headers.put(HeaderAttributeConstants.LAST_ACCESS_TIME, status.getAccessTime());
    headers.put(HeaderAttributeConstants.IS_DIRECTORY, status.isDirectory());
    headers.put(HeaderAttributeConstants.IS_SYMBOLIC_LINK, status.isSymlink());
    headers.put(HeaderAttributeConstants.SIZE, status.getLen());
    headers.put(HeaderAttributeConstants.OWNER, status.getOwner());
    headers.put(HeaderAttributeConstants.GROUP, status.getGroup());
    headers.put(HeaderAttributeConstants.BLOCK_SIZE, status.getBlockSize());
    headers.put(HeaderAttributeConstants.REPLICATION, status.getReplication());
    headers.put(HeaderAttributeConstants.IS_ENCRYPTED, status.isEncrypted());

    final FsPermission perms = status.getPermission();
    if (perms != null) {
        headers.put(PERMISSIONS, perms.toString());
    }

    return headers;
}

From source file:org.apache.falcon.entity.parser.ClusterEntityParser.java

License: Apache License

/**
 * Ensures the cluster's working directory exists under the staging location with the
 * expected READ_EXECUTE permission.
 *
 * <p>If the working dir is missing it is created; if it exists it must be a directory
 * whose permissions match {@code HadoopClientFactory.READ_EXECUTE_PERMISSION}, otherwise
 * a {@link ValidationException} is raised.
 *
 * @param fs filesystem the staging location lives on
 * @param cluster cluster entity being validated (used only for error reporting)
 * @param stagingLocation staging location under which the working dir must exist
 * @throws ValidationException if the working dir is a file, has wrong permissions,
 *         or cannot be created
 */
private void createWorkingDirUnderStaging(FileSystem fs, Cluster cluster, Location stagingLocation)
        throws ValidationException {
    Path workingDirPath = new Path(stagingLocation.getPath(), ClusterHelper.WORKINGDIR);
    try {
        if (!fs.exists(workingDirPath)) { //Checking if the staging dir has the working dir to be created
            HadoopClientFactory.mkdirs(fs, workingDirPath, HadoopClientFactory.READ_EXECUTE_PERMISSION);
        } else {
            if (fs.isDirectory(workingDirPath)) {
                FsPermission workingPerms = fs.getFileStatus(workingDirPath).getPermission();
                if (!workingPerms.equals(HadoopClientFactory.READ_EXECUTE_PERMISSION)) { //perms check
                    throw new ValidationException(
                            "Falcon needs subdir " + ClusterHelper.WORKINGDIR + " inside staging dir:"
                                    + stagingLocation.getPath() + " when staging location not specified with "
                                    + HadoopClientFactory.READ_EXECUTE_PERMISSION.toString() + " got "
                                    + workingPerms.toString());
                }
            } else {
                throw new ValidationException("Falcon needs subdir " + ClusterHelper.WORKINGDIR
                        + " inside staging dir:" + stagingLocation.getPath()
                        + " when staging location not specified. Got a file at " + workingDirPath.toString());
            }
        }
    } catch (IOException e) {
        // BUGFIX: the original message concatenated workingDirPath twice
        // ("Unable to create path for X with path: X"); report it once.
        throw new ValidationException("Unable to create path " + workingDirPath.toString()
                + " for cluster " + cluster.getName(), e);
    }
}

From source file:org.apache.falcon.oozie.feed.OozieFeedWorkflowBuilderTest.java

License: Apache License

/**
 * Asserts that the workflow files created for a coordinator honor the expected umask.
 *
 * <p>Checks that the filesystem's configured umask matches {@code defaultUMask}, that every
 * file under the workflow app path is owned by the proxy user, and that each regular file
 * (directories are skipped) carries the default file permission derived from the conf.
 *
 * @param fs filesystem holding the workflow files
 * @param coord coordinator app whose workflow app path is inspected
 * @param defaultUMask umask value expected in {@code fs.permissions.umask-mode}
 * @throws IOException if listing the workflow path fails
 */
private void verifyWorkflowUMask(FileSystem fs, COORDINATORAPP coord, String defaultUMask) throws IOException {
    Assert.assertEquals(fs.getConf().get("fs.permissions.umask-mode"), defaultUMask);

    final String appPath = coord.getAction().getWorkflow().getAppPath().replace("${nameNode}", "");
    final Path workflowPath = new Path(appPath);
    for (FileStatus status : fs.listStatus(workflowPath)) {
        Assert.assertEquals(status.getOwner(), CurrentUser.getProxyUGI().getShortUserName());

        // Only regular files are expected to carry the default file permission.
        final FsPermission filePerms = status.getPermission();
        if (!status.isDirectory()) {
            Assert.assertEquals(filePerms.toString(),
                    HadoopClientFactory.getFileDefaultPermission(fs.getConf()).toString());
        }
    }
}

From source file:org.apache.hcatalog.cli.HCatCli.java

License: Apache License

/**
 * Validates a permission specification given either in symbolic form ("rwxr-x---")
 * or octal form ("750"), enforcing HCatalog's ordering rules (user >= group >= other,
 * and execute must accompany read/write). On any violation the error is printed to
 * {@code ss.err} and the process exits with status 1; on success the permission is
 * stored in the conf under {@code HCatConstants.HCAT_PERMS} with a leading "d".
 *
 * @param ss session whose error stream receives validation messages
 * @param conf configuration that receives the validated permission string
 * @param perms user-supplied permission specification (trimmed before parsing)
 */
private static void validatePermissions(CliSessionState ss, HiveConf conf, String perms) {
    perms = perms.trim();
    FsPermission fp = null;

    // BUGFIX: the original character class was [r,w,x,-], whose literal commas let
    // garbage like ",,,,,,,,," match and then blow up in FsPermission.valueOf with
    // an uncaught IllegalArgumentException instead of the clean error path below.
    if (perms.matches("^\\s*([rwx-]{9})\\s*$")) {
        fp = FsPermission.valueOf("d" + perms);
    } else if (perms.matches("^\\s*([0-7]{3})\\s*$")) {
        // Leading "0" forces octal interpretation by Short.decode.
        fp = new FsPermission(Short.decode("0" + perms));
    } else {
        ss.err.println("Invalid permission specification: " + perms);
        System.exit(1);
    }

    if (!HCatUtil.validateMorePermissive(fp.getUserAction(), fp.getGroupAction())) {
        ss.err.println("Invalid permission specification: " + perms
                + " : user permissions must be more permissive than group permission ");
        System.exit(1);
    }
    if (!HCatUtil.validateMorePermissive(fp.getGroupAction(), fp.getOtherAction())) {
        ss.err.println("Invalid permission specification: " + perms
                + " : group permissions must be more permissive than other permission ");
        System.exit(1);
    }
    if ((!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getUserAction()))
            || (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getGroupAction()))
            || (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getOtherAction()))) {
        ss.err.println("Invalid permission specification: " + perms
                + " : permissions must have execute permissions if read or write permissions are specified ");
        System.exit(1);
    }

    conf.set(HCatConstants.HCAT_PERMS, "d" + fp.toString());

}

From source file:org.apache.hive.hcatalog.cli.HCatCli.java

License: Apache License

/**
 * Validates a permission specification given either in symbolic form ("rwxr-x---")
 * or octal form ("750"), enforcing HCatalog's ordering rules (user >= group >= other,
 * and execute must accompany read/write). On any violation the error is printed to
 * {@code ss.err} and the session exits via {@code sysExit(ss, 1)}; on success the
 * permission is stored in the conf under {@code HCatConstants.HCAT_PERMS} with a
 * leading "d".
 *
 * @param ss session whose error stream receives validation messages
 * @param conf configuration that receives the validated permission string
 * @param perms user-supplied permission specification (trimmed before parsing)
 */
private static void validatePermissions(CliSessionState ss, HiveConf conf, String perms) {
    perms = perms.trim();
    FsPermission fp = null;

    // BUGFIX: the original character class was [r,w,x,-], whose literal commas let
    // garbage like ",,,,,,,,," match and then blow up in FsPermission.valueOf with
    // an uncaught IllegalArgumentException instead of the clean error path below.
    if (perms.matches("^\\s*([rwx-]{9})\\s*$")) {
        fp = FsPermission.valueOf("d" + perms);
    } else if (perms.matches("^\\s*([0-7]{3})\\s*$")) {
        // Leading "0" forces octal interpretation by Short.decode.
        fp = new FsPermission(Short.decode("0" + perms));
    } else {
        ss.err.println("Invalid permission specification: " + perms);
        sysExit(ss, 1);
    }

    if (!HCatUtil.validateMorePermissive(fp.getUserAction(), fp.getGroupAction())) {
        ss.err.println("Invalid permission specification: " + perms
                + " : user permissions must be more permissive than group permission ");
        sysExit(ss, 1);
    }
    if (!HCatUtil.validateMorePermissive(fp.getGroupAction(), fp.getOtherAction())) {
        ss.err.println("Invalid permission specification: " + perms
                + " : group permissions must be more permissive than other permission ");
        sysExit(ss, 1);
    }
    if ((!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getUserAction()))
            || (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getGroupAction()))
            || (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getOtherAction()))) {
        ss.err.println("Invalid permission specification: " + perms
                + " : permissions must have execute permissions if read or write permissions are specified ");
        sysExit(ss, 1);
    }

    conf.set(HCatConstants.HCAT_PERMS, "d" + fp.toString());

}