Example usage for org.apache.hadoop.fs.permission FsPermission FsPermission

List of usage examples for org.apache.hadoop.fs.permission FsPermission FsPermission

Introduction

On this page you can find example usages for org.apache.hadoop.fs.permission FsPermission FsPermission.

Prototype

public FsPermission(FsAction u, FsAction g, FsAction o, boolean sb) 

Source Link

Usage

From source file: com.uber.hoodie.common.table.timeline.dto.FSPermissionDTO.java

License: Apache License

/**
 * Converts a serialized {@link FSPermissionDTO} back into an HDFS
 * {@link FsPermission}. Returns {@code null} when the DTO is {@code null}.
 */
public static FsPermission fromFsPermissionDTO(FSPermissionDTO dto) {
    return (dto == null) ? null
            : new FsPermission(dto.useraction, dto.groupaction, dto.otheraction, dto.stickyBit);
}

From source file: io.hops.hopsworks.apiV2.projects.DatasetsResource.java

License: Open Source License

@ApiOperation(value = "Make dataset editable", notes = "Allow data scientists to create and modify own "
        + "files in dataset.")
@PUT
@Path("/{dsName}/permissions")
public Response setPermissions(@PathParam("dsName") String name) throws AppException, AccessControlException {
    //TODO(Theofilos): Change according to same method in API v1
    Dataset dataSet = getDataset(name);
    // owner=rwx, group=rwx, other=---, sticky bit set: group members may
    // create files, but the sticky bit restricts deleting/renaming them.
    FsPermission groupWritableSticky = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE, true);
    changeDatasetPermissions(dataSet, groupWritableSticky);
    datasetController.changePermissions(dataSet);
    return Response.noContent().build();
}

From source file: io.hops.hopsworks.apiV2.projects.DatasetsResource.java

License: Open Source License

@ApiOperation(value = "Make dataset non-editable", notes = "Disallow data scientists creating files in dataset.")
@DELETE
@Path("/{dsName}/editable")
public Response makeNonEditable(@PathParam("dsName") String name) throws AppException, AccessControlException {
    Dataset dataset = getDataset(name);
    // owner=rwx, group=r-x, other=---, no sticky bit: group loses write access.
    FsPermission groupReadOnly = new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE, false);
    changeDatasetPermissions(dataset, groupReadOnly);
    datasetController.changePermissions(dataset);
    return Response.noContent().build();
}

From source file: io.hops.hopsworks.common.hive.HiveController.java

License: Open Source License

/**
 * Creates the Hive database backing {@code project} and registers it as a
 * Hopsworks dataset: runs {@code CREATE DATABASE}, persists the dataset
 * metadata, then assigns ownership, group-writable permissions and the
 * default quota to the database directory. If the HDFS setup fails, the
 * database directory is removed again (best effort) and the failure is
 * rethrown wrapped in an {@link IOException}.
 *
 * @param project project that owns the new Hive database
 * @param user    user triggering the creation; becomes directory owner
 * @param dfso    HDFS operations handle used for the filesystem-side setup
 * @throws SQLException if the CREATE DATABASE statement fails
 * @throws IOException  if assigning the directory to the user/group fails
 */
@TransactionAttribute(TransactionAttributeType.NEVER)
public void createDatabase(Project project, Users user, DistributedFileSystemOps dfso)
        throws SQLException, IOException {
    if (conn == null || conn.isClosed()) {
        initConnection();
    }

    // Create database. The project name is concatenated into the DDL, so it
    // must not contain any special character or space — assumed validated
    // upstream (NOTE(review): confirm; otherwise this is an injection vector).
    // try-with-resources replaces the manual finally/close of the Statement.
    try (Statement stmt = conn.createStatement()) {
        stmt.executeUpdate("create database " + project.getName());
    }

    // Hive database names are case insensitive and lower case
    Path dbPath = getDbPath(project.getName());
    Inode dbInode = inodeFacade.getInodeAtPath(dbPath.toString());

    // Persist Hive db as dataset in the Hopsworks database
    Dataset dbDataset = new Dataset(dbInode, project);
    dbDataset.setType(DatasetType.HIVEDB);
    // As we are running Zeppelin as projectGenericUser, we have to make
    // the directory editable by default
    dbDataset.setEditable(DatasetPermissions.GROUP_WRITABLE_SB);
    dbDataset.setDescription(buildDescription(project.getName()));
    dbDataset.setSearchable(true);
    datasetFacade.persistDataset(dbDataset);

    dfso.setMetaEnabled(dbPath);
    datasetController.logDataset(dbDataset, OperationType.Add);

    try {
        // Assign database directory to the user and project group
        hdfsUsersBean.addDatasetUsersGroups(user, project, dbDataset, dfso);

        // Make the dataset editable by default (owner/group rwx, sticky bit)
        FsPermission fsPermission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE, true);
        dfso.setPermission(dbPath, fsPermission);

        // Set the default quota
        dfso.setHdfsSpaceQuotaInMBs(dbPath, settings.getHiveDbDefaultQuota());
        projectFacade.setTimestampQuotaUpdate(project, new Date());
    } catch (IOException e) {
        // Pass the throwable to the logger so the full stack trace is kept
        // (string-concatenating the exception dropped it).
        logger.log(Level.SEVERE,
                "Cannot assign Hive database directory " + dbPath.toString() + " to correct user/group.", e);

        // Remove the database directory and cleanup the metadata
        try {
            dfso.rm(dbPath, true);
        } catch (IOException rmEx) {
            // Nothing we can really do here
            logger.log(Level.SEVERE, "Cannot delete Hive database directory: " + dbPath.toString(), rmEx);
        }

        throw new IOException(e);
    }
}

From source file: org.apache.hive.service.TestDFSErrorHandling.java

License: Apache License

/**
 * Verifies that HS2 surfaces a DFS permission error (error code 20009) when
 * LOAD DATA tries to move a file out of a sticky-bit directory as an
 * unprivileged user. JDBC resources are now managed with try-with-resources
 * so they are released even when an assertion fails; the unused
 * {@code listStatus} result was removed; the test now fails explicitly if
 * the LOAD DATA statement unexpectedly succeeds.
 */
@Test
public void testAccessDenied() throws Exception {
    assertTrue("Test setup failed. MiniHS2 is not initialized", miniHS2 != null && miniHS2.isStarted());

    Class.forName(MiniHS2.getJdbcDriverName());
    Path scratchDir = new Path(HiveConf.getVar(hiveConf, HiveConf.ConfVars.SCRATCHDIR));

    MiniDFSShim dfs = miniHS2.getDfs();
    FileSystem fs = dfs.getFileSystem();

    Path stickyBitDir = new Path(scratchDir, "stickyBitDir");
    fs.mkdirs(stickyBitDir);

    String dataFileDir = hiveConf.get("test.data.files").replace('\\', '/').replace("c:", "").replace("C:", "")
            .replace("D:", "").replace("d:", "");
    Path dataFilePath = new Path(dataFileDir, "kv1.txt");
    fs.copyFromLocalFile(dataFilePath, stickyBitDir);

    // rwxrwxrwx with the sticky bit: world-writable, but only the file owner
    // may delete/rename entries inside the directory.
    FsPermission fsPermission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL, true);

    // Sets the sticky bit on stickyBitDir - now removing file kv1.txt from stickyBitDir by
    // unprivileged user will result in a DFS error.
    fs.setPermission(stickyBitDir, fsPermission);

    // Connecting to HS2 as foo.
    try (Connection hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(), "foo", "bar");
            Statement stmt = hs2Conn.createStatement()) {
        String tableName = "stickyBitTable";

        stmt.execute("drop table if exists " + tableName);
        stmt.execute("create table " + tableName + " (foo int, bar string)");

        try {
            // This statement will attempt to move kv1.txt out of stickyBitDir as user foo.  HS2 is
            // expected to return 20009.
            stmt.execute("LOAD DATA INPATH '" + stickyBitDir.toUri().getPath() + "/kv1.txt' "
                    + "OVERWRITE INTO TABLE " + tableName);
            Assert.fail("Expected LOAD DATA to fail with error code 20009");
        } catch (SQLException se) {
            // Narrow catch: any non-SQLException still propagates and fails the test.
            Assert.assertEquals("Unexpected error code", 20009, se.getErrorCode());
            System.out.println(String.format("Error Message: %s", se.getMessage()));
        }

        stmt.execute("drop table if exists " + tableName);
    }
}