Example usage for org.apache.hadoop.security UserGroupInformation getGroupNames

Introduction

On this page you can find example usages of org.apache.hadoop.security UserGroupInformation getGroupNames.

Prototype

public String[] getGroupNames() 

Document

Get the group names for this user.
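
As a quick orientation before the usage examples below, here is a minimal sketch of calling getGroupNames() on a standard Hadoop client classpath. The user name "alice" is only a placeholder, and the groups returned depend on the group mapping configured for the client (shell-based, LDAP, and so on).

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.security.UserGroupInformation;

public class GroupNamesExample {
    public static void main(String[] args) throws IOException {
        // Groups of the user running this JVM, resolved through the
        // group mapping configured for the Hadoop client.
        UserGroupInformation current = UserGroupInformation.getCurrentUser();
        System.out.println(current.getShortUserName() + " -> "
                + Arrays.toString(current.getGroupNames()));

        // Groups of an arbitrary user name ("alice" is a placeholder).
        // createRemoteUser() does not authenticate; it only builds a
        // UGI object for the given name.
        UserGroupInformation remote = UserGroupInformation.createRemoteUser("alice");
        System.out.println("alice -> " + Arrays.toString(remote.getGroupNames()));
    }
}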

Usage

From source file:org.apache.hawq.ranger.authorization.RangerHawqAuthorizer.java

License:Apache License

/**
 * Returns a set of groups the user belongs to
 * @param user user name
 * @return set of groups for the user
 */
private Set<String> getUserGroups(String user) {
    String[] userGroups = null;
    try {
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
        userGroups = ugi.getGroupNames();
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Determined user=%s belongs to groups=%s", user,
                    Arrays.toString(userGroups)));
        }
    } catch (Throwable e) {
        LOG.warn("Failed to determine groups for user=" + user, e);
    }
    return userGroups == null ? Collections.<String>emptySet() : new HashSet<String>(Arrays.asList(userGroups));
}

From source file:org.apache.hawq.ranger.authorization.RangerHawqAuthorizerTest.java

License:Apache License

@Test
public void testAuthorize_allAllowed_group() throws Exception {
    UserGroupInformation mockUgi = mock(UserGroupInformation.class);
    when(mockUgi.getGroupNames()).thenReturn(new String[] { "foo", "bar" });
    PowerMockito.mockStatic(UserGroupInformation.class);
    when(UserGroupInformation.createRemoteUser(TEST_USER)).thenReturn(mockUgi);
    when(mockRangerPlugin.isAccessAllowed(argThat(new UGIMatcher(TEST_USER, "foo", "bar"))))
            .thenReturn(mockRangerAccessResult);
    when(mockRangerAccessResult.getIsAllowed()).thenReturn(true);
    testRequest(TEST_RESOURCE_REQUEST, TEST_RESOURCE_RESPONSE_ALL_TRUE);
}

From source file:org.apache.hcatalog.security.HdfsAuthorizationProvider.java

License:Apache License

/**
 * Checks the permissions for the given path and the current user on the Hadoop FS. If the given
 * path does not exist, it checks the closest existing ancestor instead.
 */
protected static void checkPermissions(final Configuration conf, final Path path,
        final EnumSet<FsAction> actions) throws IOException, LoginException {

    if (path == null) {
        throw new IllegalArgumentException("path is null");
    }

    HadoopShims shims = ShimLoader.getHadoopShims();
    final UserGroupInformation ugi;
    if (conf.get(PROXY_USER_NAME) != null) {
        ugi = UserGroupInformation.createRemoteUser(conf.get(PROXY_USER_NAME));
    } else {
        ugi = shims.getUGIForConf(conf);
    }
    final String user = shims.getShortUserName(ugi);

    final FileSystem fs = path.getFileSystem(conf);

    if (fs.exists(path)) {
        checkPermissions(fs, path, actions, user, ugi.getGroupNames());
    } else if (path.getParent() != null) {
        // find the closest ancestor that exists and check its permissions
        Path par = path.getParent();
        while (par != null) {
            if (fs.exists(par)) {
                break;
            }
            par = par.getParent();
        }

        checkPermissions(fs, par, actions, user, ugi.getGroupNames());
    }
}

From source file:org.apache.impala.util.FsPermissionChecker.java

License:Apache License

private FsPermissionChecker() throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    groups_.addAll(Arrays.asList(ugi.getGroupNames()));
    supergroup_ = CONF.get(DFS_PERMISSIONS_SUPERUSERGROUP_KEY, DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT);
    user_ = ugi.getShortUserName();
}

From source file:org.apache.ranger.audit.provider.MiscUtil.java

License:Apache License

/**
 * @param userName user to resolve groups for
 * @return set of groups for the user, or null if the user is null or group lookup fails
 */
static public Set<String> getGroupsForRequestUser(String userName) {
    if (userName == null) {
        return null;
    }
    try {
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userName);
        String[] groups = ugi.getGroupNames();
        if (groups != null && groups.length > 0) {
            Set<String> groupsSet = new java.util.HashSet<String>();
            for (int i = 0; i < groups.length; i++) {
                groupsSet.add(groups[i]);
            }
            return groupsSet;
        }
    } catch (Throwable e) {
        logErrorMessageByInterval(logger, "Error getting groups for users. userName=" + userName, e);
    }
    return null;
}

From source file:org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizer.java

License:Apache License

/**
 * Check if user has privileges to do this action on these objects
 * @param hiveOpType
 * @param inputHObjs
 * @param outputHObjs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
@Override
public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
        List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
        throws HiveAuthzPluginException, HiveAccessControlException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();

    if (ugi == null) {
        throw new HiveAccessControlException("Permission denied: user information not available");
    }

    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();

    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());

        if (LOG.isDebugEnabled()) {
            LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext));
        }

        if (hiveOpType == HiveOperationType.DFS) {
            handleDfsCommand(hiveOpType, inputHObjs, user, auditHandler);

            return;
        }

        List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>();

        if (!CollectionUtils.isEmpty(inputHObjs)) {
            for (HivePrivilegeObject hiveObj : inputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);

                if (resource == null) { // possible if the input object is of a kind that we don't currently authorize
                    continue;
                }

                if (resource.getObjectType() == HiveObjectType.URI) {
                    String path = hiveObj.getObjectName();
                    FsAction permission = FsAction.READ;

                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format(
                                "Permission denied: user [%s] does not have [%s] privilege on [%s]", user,
                                permission.name(), path));
                    }

                    continue;
                }

                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, true);

                if (accessType == HiveAccessType.NONE) {
                    continue;
                }

                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups,
                            hiveOpType, accessType, context, sessionContext);

                    requests.add(request);
                }
            }
        } else {
            // this should happen only for SHOWDATABASES
            if (hiveOpType == HiveOperationType.SHOWDATABASES) {
                RangerHiveResource resource = new RangerHiveResource(HiveObjectType.DATABASE, null);
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups,
                        hiveOpType.name(), HiveAccessType.USE, context, sessionContext);
                requests.add(request);
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("RangerHiveAuthorizer.checkPrivileges: Unexpected operation type[" + hiveOpType
                            + "] received with empty input objects list!");
                }
            }
        }

        if (!CollectionUtils.isEmpty(outputHObjs)) {
            for (HivePrivilegeObject hiveObj : outputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);

                if (resource == null) { // possible if the output object is of a kind that we don't currently authorize
                    continue;
                }

                if (resource.getObjectType() == HiveObjectType.URI) {
                    String path = hiveObj.getObjectName();
                    FsAction permission = FsAction.WRITE;

                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format(
                                "Permission denied: user [%s] does not have [%s] privilege on [%s]", user,
                                permission.name(), path));
                    }

                    continue;
                }

                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, false);

                if (accessType == HiveAccessType.NONE) {
                    continue;
                }

                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups,
                            hiveOpType, accessType, context, sessionContext);

                    requests.add(request);
                }
            }
        }

        buildRequestContextWithAllAccessedResources(requests);

        for (RangerHiveAccessRequest request : requests) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("request: " + request);
            }
            RangerHiveResource resource = (RangerHiveResource) request.getResource();
            RangerAccessResult result = null;

            if (resource.getObjectType() == HiveObjectType.COLUMN
                    && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) {
                List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>();

                String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP);

                // in case of multiple columns, original request is not sent to the plugin; hence service-def will not be set
                resource.setServiceDef(hivePlugin.getServiceDef());

                for (String column : columns) {
                    if (column != null) {
                        column = column.trim();
                    }
                    if (StringUtils.isBlank(column)) {
                        continue;
                    }

                    RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN,
                            resource.getDatabase(), resource.getTable(), column);

                    RangerHiveAccessRequest colRequest = request.copy();
                    colRequest.setResource(colResource);

                    colRequests.add(colRequest);
                }

                Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests,
                        auditHandler);

                if (colResults != null) {
                    for (RangerAccessResult colResult : colResults) {
                        result = colResult;

                        if (result != null && !result.getIsAllowed()) {
                            break;
                        }
                    }
                }
            } else {
                result = hivePlugin.isAccessAllowed(request, auditHandler);
            }

            if ((result == null || result.getIsAllowed())
                    && isBlockAccessIfRowfilterColumnMaskSpecified(hiveOpType, request)) {
                // check if row-filtering is applicable for the table/view being accessed
                HiveAccessType savedAccessType = request.getHiveAccessType();
                RangerHiveResource tblResource = new RangerHiveResource(HiveObjectType.TABLE,
                        resource.getDatabase(), resource.getTable());

                request.setHiveAccessType(HiveAccessType.SELECT); // filtering/masking policies are defined only for SELECT
                request.setResource(tblResource);

                RangerRowFilterResult rowFilterResult = getRowFilterResult(request);

                if (isRowFilterEnabled(rowFilterResult)) {
                    if (result == null) {
                        result = new RangerAccessResult(rowFilterResult.getServiceName(),
                                rowFilterResult.getServiceDef(), request);
                    }

                    result.setIsAllowed(false);
                    result.setPolicyId(rowFilterResult.getPolicyId());
                    result.setReason("User does not have access to all rows of the table");
                } else {
                    // check if masking is enabled for any column in the table/view
                    request.setResourceMatchingScope(
                            RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS);

                    RangerDataMaskResult dataMaskResult = getDataMaskResult(request);

                    if (isDataMaskEnabled(dataMaskResult)) {
                        if (result == null) {
                            result = new RangerAccessResult(dataMaskResult.getServiceName(),
                                    dataMaskResult.getServiceDef(), request);
                        }

                        result.setIsAllowed(false);
                        result.setPolicyId(dataMaskResult.getPolicyId());
                        result.setReason("User does not have access to unmasked column values");
                    }
                }

                request.setHiveAccessType(savedAccessType);
                request.setResource(resource);

                if (result != null && !result.getIsAllowed()) {
                    auditHandler.processResult(result);
                }
            }

            if (result != null && !result.getIsAllowed()) {
                String path = resource.getAsString();

                throw new HiveAccessControlException(
                        String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user,
                                request.getHiveAccessType().name(), path));
            }
        }
    } finally {
        auditHandler.flushAudit();
    }
}

From source file:org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizer.java

License:Apache License

/**
 * Filters the given list of objects, returning only those the user is authorized to access
 * @param objs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
// Commented out to avoid build errors until this interface is stable in Hive Branch
// @Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> objs, HiveAuthzContext context)
        throws HiveAuthzPluginException, HiveAccessControlException {

    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("==> filterListCmdObjects(%s, %s)", objs, context));
    }

    List<HivePrivilegeObject> ret = null;

    // bail out early if nothing is there to validate!
    if (objs == null) {
        LOG.debug("filterListCmdObjects: meta objects list was null!");
    } else if (objs.isEmpty()) {
        LOG.debug("filterListCmdObjects: meta objects list was empty!");
        ret = objs;
    } else if (getCurrentUserGroupInfo() == null) {
        /*
         * This is null for metastore and there doesn't seem to be a way to tell if one is running as metastore or hiveserver2! 
         */
        LOG.warn("filterListCmdObjects: user information not available");
        ret = objs;
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("filterListCmdObjects: number of input objects[" + objs.size() + "]");
        }
        // get user/group info
        UserGroupInformation ugi = getCurrentUserGroupInfo(); // we know this can't be null since we checked it above!
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("filterListCmdObjects: user[%s], groups%s", user, groups));
        }

        if (ret == null) { // if we got any items to filter, we can't return null; we must return a list even if it's empty
            ret = new ArrayList<HivePrivilegeObject>(objs.size());
        }
        for (HivePrivilegeObject privilegeObject : objs) {
            if (LOG.isDebugEnabled()) {
                HivePrivObjectActionType actionType = privilegeObject.getActionType();
                HivePrivilegeObjectType objectType = privilegeObject.getType();
                String objectName = privilegeObject.getObjectName();
                String dbName = privilegeObject.getDbname();
                List<String> columns = privilegeObject.getColumns();
                List<String> partitionKeys = privilegeObject.getPartKeys();
                String commandString = context == null ? null : context.getCommandString();
                String ipAddress = context == null ? null : context.getIpAddress();

                final String format = "filterListCmdObjects: actionType[%s], objectType[%s], objectName[%s], dbName[%s], columns[%s], partitionKeys[%s]; context: commandString[%s], ipAddress[%s]";
                LOG.debug(String.format(format, actionType, objectType, objectName, dbName, columns,
                        partitionKeys, commandString, ipAddress));
            }

            RangerHiveResource resource = createHiveResource(privilegeObject);
            if (resource == null) {
                LOG.error("filterListCmdObjects: RangerHiveResource returned by createHiveResource is null");
            } else {
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, context,
                        sessionContext);
                RangerAccessResult result = hivePlugin.isAccessAllowed(request);
                if (result == null) {
                    LOG.error(
                            "filterListCmdObjects: Internal error: null RangerAccessResult object received back from isAccessAllowed()!");
                } else if (!result.getIsAllowed()) {
                    if (LOG.isDebugEnabled()) {
                        String path = resource.getAsString();
                        LOG.debug(String.format(
                                "filterListCmdObjects: Permission denied: user [%s] does not have [%s] privilege on [%s]. resource[%s], request[%s], result[%s]",
                                user, request.getHiveAccessType().name(), path, resource, request, result));
                    }
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format(
                                "filterListCmdObjects: access allowed. resource[%s], request[%s], result[%s]",
                                resource, request, result));
                    }
                    ret.add(privilegeObject);
                }
            }
        }
    }

    if (LOG.isDebugEnabled()) {
        int count = ret == null ? 0 : ret.size();
        LOG.debug(String.format("<== filterListCmdObjects: count[%d], ret[%s]", count, ret));
    }
    return ret;
}

From source file:org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizer.java

License:Apache License

private String getRowFilterExpression(HiveAuthzContext context, String databaseName, String tableOrViewName)
        throws SemanticException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();

    if (ugi == null) {
        throw new SemanticException("user information not available");
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("==> getRowFilterExpression(" + databaseName + ", " + tableOrViewName + ")");
    }

    String ret = null;

    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();

    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        HiveObjectType objectType = HiveObjectType.TABLE;
        RangerHiveResource resource = new RangerHiveResource(objectType, databaseName, tableOrViewName);
        RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, objectType.name(),
                HiveAccessType.SELECT, context, sessionContext);

        RangerRowFilterResult result = hivePlugin.evalRowFilterPolicies(request, auditHandler);

        if (isRowFilterEnabled(result)) {
            ret = result.getFilterExpr();
        }
    } finally {
        auditHandler.flushAudit();
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== getRowFilterExpression(" + databaseName + ", " + tableOrViewName + "): " + ret);
    }

    return ret;
}

From source file:org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizer.java

License:Apache License

private String getCellValueTransformer(HiveAuthzContext context, String databaseName, String tableOrViewName,
        String columnName) throws SemanticException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();

    if (ugi == null) {
        throw new SemanticException("user information not available");
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("==> getCellValueTransformer(" + databaseName + ", " + tableOrViewName + ", " + columnName
                + ")");
    }

    String ret = columnName;

    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();

    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        HiveObjectType objectType = HiveObjectType.COLUMN;
        RangerHiveResource resource = new RangerHiveResource(objectType, databaseName, tableOrViewName,
                columnName);
        RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, objectType.name(),
                HiveAccessType.SELECT, context, sessionContext);

        RangerDataMaskResult result = hivePlugin.evalDataMaskPolicies(request, auditHandler);

        if (isDataMaskEnabled(result)) {
            String maskType = result.getMaskType();
            RangerDataMaskTypeDef maskTypeDef = result.getMaskTypeDef();
            String transformer = null;
            if (maskTypeDef != null) {
                transformer = maskTypeDef.getTransformer();
            }

            if (StringUtils.equalsIgnoreCase(maskType, MASK_TYPE_NULL)) {
                ret = "NULL";
            } else if (StringUtils.equalsIgnoreCase(maskType, MASK_TYPE_CUSTOM)) {
                String maskedValue = result.getMaskedValue();

                if (maskedValue == null) {
                    ret = "NULL";
                } else {
                    ret = maskedValue.replace("{col}", columnName);
                }

            } else if (StringUtils.isNotEmpty(transformer)) {
                ret = transformer.replace("{col}", columnName);
            }

            /*
            String maskCondition = result.getMaskCondition();
                    
            if(StringUtils.isNotEmpty(maskCondition)) {
               ret = "if(" + maskCondition + ", " + ret + ", " + columnName + ")";
            }
            */
        }
    } finally {
        auditHandler.flushAudit();
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== getCellValueTransformer(" + databaseName + ", " + tableOrViewName + ", " + columnName
                + "): " + ret);
    }

    return ret;
}

From source file:org.apache.ranger.authorization.storm.authorizer.RangerStormAuthorizer.java

License:Apache License

/**
  * permit() is invoked for each incoming Thrift request.
  * @param aRequestContext request context (remote address, principal, request id)
  * @param aOperationName operation name
  * @param aTopologyConfigMap configuration of the targeted topology
  * @return true if the request is authorized, false if rejected
  */

@Override
public boolean permit(ReqContext aRequestContext, String aOperationName, Map aTopologyConfigMap) {

    boolean accessAllowed = false;
    boolean isAuditEnabled = false;

    String topologyName = null;

    try {
        topologyName = (aTopologyConfigMap == null ? ""
                : (String) aTopologyConfigMap.get(Config.TOPOLOGY_NAME));

        if (LOG.isDebugEnabled()) {
            LOG.debug("[req " + aRequestContext.requestID() + "] Access " + " from: ["
                    + aRequestContext.remoteAddress() + "]" + " user: [" + aRequestContext.principal() + "],"
                    + " op:   [" + aOperationName + "]," + "topology: [" + topologyName + "]");

            if (aTopologyConfigMap != null) {
                for (Object keyObj : aTopologyConfigMap.keySet()) {
                    Object valObj = aTopologyConfigMap.get(keyObj);
                    LOG.debug("TOPOLOGY CONFIG MAP [" + keyObj + "] => [" + valObj + "]");
                }
            } else {
                LOG.debug("TOPOLOGY CONFIG MAP is passed as null.");
            }
        }

        if (noAuthzOperations.contains(aOperationName)) {
            accessAllowed = true;
        } else if (plugin == null) {
            LOG.info("Ranger plugin not initialized yet! Skipping authorization;  allowedFlag => ["
                    + accessAllowed + "], Audit Enabled:" + isAuditEnabled);
        } else {
            String userName = null;
            String[] groups = null;

            Principal user = aRequestContext.principal();

            if (user != null) {
                userName = user.getName();
                if (userName != null) {
                    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userName);
                    userName = ugi.getShortUserName();
                    groups = ugi.getGroupNames();
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("User found from principal [" + user.getName() + "] => user:[" + userName
                                + "], groups:[" + StringUtil.toString(groups) + "]");
                    }
                }
            }

            if (userName != null) {
                String clientIp = (aRequestContext.remoteAddress() == null ? null
                        : aRequestContext.remoteAddress().getHostAddress());
                RangerAccessRequest accessRequest = plugin.buildAccessRequest(userName, groups, clientIp,
                        topologyName, aOperationName);
                RangerAccessResult result = plugin.isAccessAllowed(accessRequest);
                accessAllowed = result != null && result.getIsAllowed();
                isAuditEnabled = result != null && result.getIsAudited();

                if (LOG.isDebugEnabled()) {
                    LOG.debug("User found from principal [" + userName + "], groups ["
                            + StringUtil.toString(groups) + "]: verifying using [" + plugin.getClass().getName()
                            + "], allowedFlag => [" + accessAllowed + "], Audit Enabled:" + isAuditEnabled);
                }
            } else {
                LOG.info("NULL User found from principal [" + user
                        + "]: Skipping authorization;  allowedFlag => [" + accessAllowed + "], Audit Enabled:"
                        + isAuditEnabled);
            }
        }
    } catch (Throwable t) {
        LOG.error("RangerStormAuthorizer found this exception", t);
    } finally {
        if (LOG.isDebugEnabled()) {
            LOG.debug("[req " + aRequestContext.requestID() + "] Access " + " from: ["
                    + aRequestContext.remoteAddress() + "]" + " user: [" + aRequestContext.principal() + "],"
                    + " op:   [" + aOperationName + "]," + "topology: [" + topologyName + "] => returns ["
                    + accessAllowed + "], Audit Enabled:" + isAuditEnabled);
        }
    }

    return accessAllowed;
}