Example usage for org.apache.hadoop.security AccessControlException AccessControlException

List of usage examples for org.apache.hadoop.security AccessControlException AccessControlException

Introduction

In this page you can find the example usage for org.apache.hadoop.security AccessControlException AccessControlException.

Prototype

public AccessControlException(Throwable cause) 

Source Link

Document

Constructs a new exception with the specified cause and a detail message of (cause==null ? null : cause.toString()), which typically contains the class and detail message of the cause.

Usage

From source file:com.bigstep.datalake.DLFileSystem.java

License:Apache License

/**
 * Validates an executed HTTP response against the status code the operation
 * expects, translating error payloads into HDFS-style exceptions.
 *
 * @param op              operation whose expected HTTP status code is checked
 * @param conn            the already-executed HTTP connection
 * @param unwrapException whether to convert a decoded RemoteException into
 *                        its underlying IOException type before throwing
 * @return the parsed JSON body when the status is unexpected but the payload
 *         is not a RemoteException; {@code null} when the status matches
 * @throws AccessControlException on HTTP 401
 * @throws IOException on any other unexpected status or decoded remote error
 */
private static Map<?, ?> validateResponse(final HttpOpParam.Op op, final HttpURLConnection conn,
        boolean unwrapException) throws IOException {
    final int code = conn.getResponseCode();
    // server is demanding an authentication we don't support
    if (code == HttpURLConnection.HTTP_UNAUTHORIZED) {
        // match hdfs/rpc exception
        throw new AccessControlException(conn.getResponseMessage());
    }
    if (code != op.getExpectedHttpResponseCode()) {
        final Map<?, ?> m;
        try {
            // Error responses are expected to carry a JSON body describing the failure.
            m = jsonParse(conn, true);
        } catch (Exception e) {
            throw new IOException(
                    "Unexpected HTTP response: code=" + code + " != " + op.getExpectedHttpResponseCode() + ", "
                            + op.toQueryString() + ", message=" + conn.getResponseMessage(),
                    e);
        }

        if (m == null) {
            // No JSON body at all: report the raw HTTP mismatch.
            throw new IOException(
                    "Unexpected HTTP response: code=" + code + " != " + op.getExpectedHttpResponseCode() + ", "
                            + op.toQueryString() + ", message=" + conn.getResponseMessage());
        } else if (m.get(RemoteException.class.getSimpleName()) == null) {
            // JSON body present but not a RemoteException: hand it back to the caller.
            return m;
        }

        IOException re = JsonUtil.toRemoteException(m);
        // extract UGI-related exceptions and unwrap InvalidToken
        // the NN mangles these exceptions but the DN does not and may need
        // to re-fetch a token if either report the token is expired
        if (re.getMessage() != null && re.getMessage().startsWith(SecurityUtil.FAILED_TO_GET_UGI_MSG_HEADER)) {
            // Message layout here is "<header>: <class>: <message>"; split into 3 parts.
            String[] parts = re.getMessage().split(":\\s+", 3);
            re = new RemoteException(parts[1], parts[2]);
            re = ((RemoteException) re).unwrapRemoteException(SecretManager.InvalidToken.class);
        }
        throw unwrapException ? toIOException(re) : re;
    }
    return null;
}

From source file:com.bigstep.datalake.DLFileSystem.java

License:Apache License

/**
 * Fetches a delegation token from the remote endpoint for the given renewer.
 *
 * @param renewer the user allowed to renew the token
 * @return the token with its service field set, or {@code null} when the
 *         cluster returned none and insecure fallback is allowed
 * @throws AccessControlException when no token is returned and falling back
 *         to an insecure cluster is disallowed
 * @throws IOException on communication or decoding failures
 */
@Override
public Token<DelegationTokenIdentifier> getDelegationToken(final String renewer) throws IOException {
    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
    final Token<DelegationTokenIdentifier> token = new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(op,
            null, new RenewerParam(renewer)) {
        @Override
        Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json) throws IOException {
            return JsonUtil.toDelegationToken(json);
        }
    }.run();
    if (token == null) {
        // No token came back; only acceptable when insecure fallback is permitted.
        if (disallowFallbackToInsecureCluster) {
            throw new AccessControlException(CANT_FALLBACK_TO_INSECURE_MSG);
        }
        return null;
    }
    token.setService(tokenServiceName);
    return token;
}

From source file:com.quixey.hadoop.fs.oss.CloudOSSFileSystemStore.java

License:Apache License

/**
 * Translates Aliyun Exceptions into Hadoop-compatible exceptions.
 *
 * @param thrown the service exception reported by the OSS SDK
 * @param key    associated OSS key
 * @return the translated exception (returned, not thrown; callers decide)
 */
private IOException handleException(ServiceException thrown, String key) {
    LOG.debug("ServiceException encountered.", thrown);
    final String errorCode = thrown.getErrorCode();
    if (errorCode.equals(OSSErrorCode.NO_SUCH_KEY)) {
        // Missing object: report the key that was not found.
        return new FileNotFoundException(key);
    }
    if (errorCode.equals(OSSErrorCode.NO_SUCH_BUCKET)) {
        // Missing bucket: report the bucket name instead of the key.
        return new FileNotFoundException(bucket);
    }
    if (errorCode.equals(OSSErrorCode.ACCESS_DENIED) || errorCode.equals(OSSErrorCode.INVALID_ACCESS_KEY_ID)) {
        // Authorization failures map to the HDFS-style ACL exception.
        return new AccessControlException("bucket: " + bucket + ", key: " + key);
    }
    // Anything else surfaces as a generic IOException with the cause preserved.
    return new IOException(thrown);
}

From source file:es.tid.cosmos.platform.injection.server.HadoopSshFileTest.java

License:Open Source License

/**
 * /*from   w ww.  j  ava2s  .  c  o  m*/
 * @throws Exception
 */
@Test
public void testListFilesWhenDirectoryNotReadable() throws Exception {
    when(this.mockedFileSystem.listStatus(Matchers.<Path>any()))
            .thenThrow(new AccessControlException("not authorized"));
    HadoopSshFile mockedDir = spy(this.neverExists);
    doReturn(true).when(mockedDir).isDirectory();
    assertEquals(1, mockedDir.listSshFiles().size());
}

From source file:io.hops.hopsworks.apiV2.projects.BlobsResource.java

License:Open Source License

/**
 * Streams a file from HDFS as an attachment, acting as the given project user.
 * <p>
 * Fixes over previous revision: the {@code DistributedFileSystemOps} handle is
 * now closed when opening the file fails (it used to leak on the exception
 * paths), and the access-denied message includes the offending path, matching
 * the sibling resource methods.
 *
 * @param projectUsername HDFS user to impersonate; must not be {@code null}
 * @param fullPath        absolute HDFS path of the file to download
 * @return a 200 response whose entity streams the file content
 * @throws AccessControlException if HDFS denies read access to the path
 * @throws AppException if no HDFS user matches, or the file does not exist
 */
private Response downloadFromHdfs(String projectUsername, org.apache.hadoop.fs.Path fullPath)
        throws AppException, AccessControlException {

    if (projectUsername == null) {
        throw new AppException(Response.Status.INTERNAL_SERVER_ERROR, "No matching HDFS-user found.");
    }

    DistributedFileSystemOps udfso = null;
    try {
        udfso = dfs.getDfsOps(projectUsername);
        FSDataInputStream stream = udfso.open(fullPath);
        // On success the handle stays open: buildOutputStream takes ownership
        // of both the stream and udfso for the duration of the response.
        return Response.ok(buildOutputStream(stream, udfso)).header("Content-disposition", "attachment;")
                .build();
    } catch (AccessControlException ex) {
        // open() failed, so the handle is not handed off; release it here.
        if (udfso != null) {
            dfs.closeDfsClient(udfso);
        }
        throw new AccessControlException(
                "Permission denied: You can not download the file " + fullPath.toString());
    } catch (IOException ex) {
        if (udfso != null) {
            dfs.closeDfsClient(udfso);
        }
        logger.log(Level.SEVERE, null, ex);
        throw new AppException(Response.Status.NOT_FOUND, "File does not exist: " + fullPath);
    }
}

From source file:io.hops.hopsworks.apiV2.projects.DatasetsResource.java

License:Open Source License

/**
 * Deletes a whole dataset and all its files. This endpoint is used only for
 * deletion of top-level dataset directories, as it does not accept a path
 * inside the dataset.
 * <p>
 * For a dataset shared into this project, only the share record is dropped;
 * the underlying files remain untouched in hopsfs.
 *
 * @param name dataset name (path parameter)
 * @param sc   security context identifying the caller
 * @param req  the HTTP request
 * @return 204 No Content on success
 * @throws io.hops.hopsworks.common.exception.AppException
 * @throws org.apache.hadoop.security.AccessControlException
 */
@ApiOperation(value = "Delete dataset", notes = "Delete a dataset and all its files. Only allowed for data-owners.")
@DELETE
@Path("/{dsName}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER })
public Response deleteDataSet(@PathParam("dsName") String name, @Context SecurityContext sc,
        @Context HttpServletRequest req) throws AppException, AccessControlException {

    Dataset dataset = getDataset(name);

    if (dataset.isShared()) {
        // The user is trying to delete a dataset. Drop it from the table
        // But leave it in hopsfs because the user doesn't have the right to delete it
        hdfsUsersBean.unShareDataset(project, dataset);
        datasetFacade.removeDataset(dataset);
        return Response.noContent().build();
    }

    // Dataset is owned by this project: remove the directory itself.
    org.apache.hadoop.fs.Path fullPath = pathValidator.getFullPath(new DatasetPath(dataset, "/"));
    Users user = userFacade.findByEmail(sc.getUserPrincipal().getName());
    DistributedFileSystemOps dfso = getDfsOpsForUserHelper(dataset, user);
    boolean success;
    try {
        success = datasetController.deleteDatasetDir(dataset, fullPath, dfso);
    } catch (AccessControlException ex) {
        logger.log(Level.FINE, null, ex);
        throw new AccessControlException(
                "Permission denied: You can not delete the file " + fullPath.toString());
    } catch (IOException ex) {
        throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
                "Could not delete the file at " + fullPath.toString());
    } finally {
        // Always release the per-user filesystem handle.
        if (dfso != null) {
            dfs.closeDfsClient(dfso);
        }
    }

    if (!success) {
        throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
                "Could not delete the file at " + fullPath.toString());
    }

    //remove the group associated with this dataset as it is a toplevel ds
    try {
        hdfsUsersBean.deleteDatasetGroup(dataset);
    } catch (IOException ex) {
        //FIXME: take an action?
        logger.log(Level.WARNING, "Error while trying to delete a dataset group", ex);
    }
    return Response.noContent().build();
}

From source file:io.hops.hopsworks.apiV2.projects.DatasetsResource.java

License:Open Source License

/**
 * Recursively applies the given permission to the dataset's directory tree,
 * performing the change with superuser filesystem privileges.
 *
 * @param dataset      dataset whose directory tree is updated
 * @param fsPermission permission to apply
 * @throws AccessControlException if HDFS denies the permission change
 * @throws AppException on any other I/O failure
 */
private void changeDatasetPermissions(Dataset dataset, FsPermission fsPermission)
        throws AccessControlException, AppException {
    DistributedFileSystemOps superuserFs = null;
    try {
        // change the permissions as superuser
        superuserFs = dfs.getDfsOps();
        datasetController.recChangeOwnershipAndPermission(datasetController.getDatasetPath(dataset),
                fsPermission, null, null, null, superuserFs);
    } catch (AccessControlException ex) {
        logger.log(Level.FINE, null, ex);
        throw new AccessControlException("Permission denied: Can not change the permission of this file.");
    } catch (IOException e) {
        throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
                "Error while creating directory: " + e.getLocalizedMessage());
    } finally {
        // Release the superuser handle whether or not the change succeeded.
        if (superuserFs != null) {
            superuserFs.close();
        }
    }
}

From source file:io.hops.hopsworks.apiV2.projects.DatasetsResource.java

License:Open Source License

/**
 * Deletes a single file or directory (recursively) inside a dataset, acting
 * as the calling user's filesystem identity.
 *
 * @param datasetName dataset name (path parameter)
 * @param path        path of the file or directory within the dataset
 * @param sc          security context identifying the caller
 * @param req         the HTTP request
 * @return 204 No Content on success
 * @throws AccessControlException if HDFS denies the deletion
 * @throws AppException if the deletion fails for any other reason
 */
@ApiOperation(value = "Delete a file or directory", notes = "Delete a file or directory from the dataset.")
@DELETE
@Path("/{dsName}/files/{path: .+}")
public Response deleteFileOrDir(@PathParam("dsName") String datasetName, @PathParam("path") String path,
        @Context SecurityContext sc, @Context HttpServletRequest req)
        throws AccessControlException, AppException {
    Users caller = userFacade.findByEmail(sc.getUserPrincipal().getName());
    Dataset dataset = getDataset(datasetName);
    DistributedFileSystemOps userFs = getDfsOpsForUserHelper(dataset, caller);
    org.apache.hadoop.fs.Path fullPath = pathValidator.getFullPath(new DatasetPath(dataset, path));

    boolean removed;
    try {
        // Recursive delete of the target path.
        removed = userFs.rm(fullPath, true);
    } catch (AccessControlException ex) {
        logger.log(Level.FINE, null, ex);
        throw new AccessControlException("Permission denied: You can not delete the file " + fullPath);
    } catch (IOException ex) {
        throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
                "Could not delete the file at " + fullPath);
    } finally {
        // Release the per-user filesystem handle in every case.
        if (userFs != null) {
            dfs.closeDfsClient(userFs);
        }
    }

    if (!removed) {
        throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
                "Could not delete the file at " + fullPath);
    }

    return Response.noContent().build();
}

From source file:org.apache.hcatalog.security.HdfsAuthorizationProvider.java

License:Apache License

/**
 * Checks the permissions for the given path and current user on Hadoop FS.
 * If the given path does not exist, it returns silently — there is nothing
 * to validate.
 * <p>
 * Note: the owner/group/other checks here are not exclusive. If the owner's
 * user bits deny an action, the group and "other" bits are still consulted
 * and may grant it — this differs from strict POSIX evaluation.
 *
 * @param fs      filesystem to query
 * @param path    path whose permissions are validated
 * @param actions set of actions that must all be permitted
 * @param user    user name to validate against the file owner
 * @param groups  groups the user belongs to
 * @throws AccessControlException if any requested action is not permitted
 * @throws IOException            on other filesystem errors
 */
@SuppressWarnings("deprecation")
protected static void checkPermissions(final FileSystem fs, final Path path, final EnumSet<FsAction> actions,
        String user, String[] groups) throws IOException, AccessControlException {

    final FileStatus stat;

    try {
        stat = fs.getFileStatus(path);
    } catch (FileNotFoundException fnfe) {
        // File named by path doesn't exist; nothing to validate.
        return;
    } catch (org.apache.hadoop.fs.permission.AccessControlException ace) {
        // Older hadoop version will throw this @deprecated Exception.
        throw new AccessControlException(ace.getMessage());
    }

    final FsPermission dirPerms = stat.getPermission();
    final String grp = stat.getGroup();

    for (FsAction action : actions) {
        // Owner bits: sufficient but not necessary (fall through on deny).
        if (user.equals(stat.getOwner())) {
            if (dirPerms.getUserAction().implies(action)) {
                continue;
            }
        }
        // Group bits: checked when the user belongs to the file's group.
        if (ArrayUtils.contains(groups, grp)) {
            if (dirPerms.getGroupAction().implies(action)) {
                continue;
            }
        }
        // "Other" bits as the last resort.
        if (dirPerms.getOtherAction().implies(action)) {
            continue;
        }
        throw new AccessControlException(
                "action " + action + " not permitted on path " + path + " for user " + user);
    }
}

From source file:org.apache.tajo.master.rule.FileSystemRule.java

License:Apache License

/**
 * Verifies that the current user may perform {@code action} on the file
 * described by {@code fsStatus}, using POSIX-style exclusive evaluation:
 * exactly one of the owner, group, or "other" permission classes applies.
 *
 * @param fsStatus status (owner, group, permission bits) of the target path
 * @param action   the filesystem action to validate
 * @throws Exception AccessControlException when the applicable permission
 *         class does not imply the requested action
 */
private void canAccessToPath(FileStatus fsStatus, FsAction action) throws Exception {
    FsPermission perms = fsStatus.getPermission();
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    String currentUser = ugi.getShortUserName();
    List<String> memberGroups = Arrays.asList(ugi.getGroupNames());

    // Pick exactly one permission class, POSIX-style.
    boolean allowed;
    if (currentUser.equals(fsStatus.getOwner())) {
        allowed = perms.getUserAction().implies(action);
    } else if (memberGroups.contains(fsStatus.getGroup())) {
        allowed = perms.getGroupAction().implies(action);
    } else {
        allowed = perms.getOtherAction().implies(action);
    }
    if (allowed) {
        return;
    }

    throw new AccessControlException(
            String.format("Permission denied: user=%s, path=\"%s\":%s:%s:%s%s", currentUser, fsStatus.getPath(),
                    fsStatus.getOwner(), fsStatus.getGroup(), fsStatus.isDirectory() ? "d" : "-", perms));
}