List of usage examples for org.apache.hadoop.security.UserGroupInformation.getShortUserName()
public String getShortUserName()
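Before the project-specific examples, here is a minimal, self-contained sketch of the typical call pattern (not drawn from any of the source files below). It assumes a default Hadoop setup in which UserGroupInformation.getCurrentUser() resolves the current login or doAs user; the class name ShortUserNameExample and the printed principal are illustrative only.

import java.io.IOException;

import org.apache.hadoop.security.UserGroupInformation;

public class ShortUserNameExample {
    public static void main(String[] args) throws IOException {
        // UGI for the current JVM user (the login user, or the user of an
        // enclosing doAs() block).
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

        // getUserName() may return a full Kerberos principal such as
        // "alice/host@EXAMPLE.COM"; getShortUserName() maps it to the short
        // local name, e.g. "alice".
        System.out.println("Full name:  " + ugi.getUserName());
        System.out.println("Short name: " + ugi.getShortUserName());
    }
}

The short name is what the examples below pass to permission grants, Ranger access requests, and YARN timeline filters, since those systems key on the local user name rather than the full Kerberos principal.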
From source file:org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext.java
License:Apache License
/** {@inheritDoc} */
@Override
public <T> T runAsJobOwner(final Callable<T> c) throws IgniteCheckedException {
    String user = job.info().user();

    user = IgfsUtils.fixUserName(user);

    assert user != null;

    String ugiUser;

    try {
        UserGroupInformation currUser = UserGroupInformation.getCurrentUser();

        assert currUser != null;

        ugiUser = currUser.getShortUserName();
    } catch (IOException ioe) {
        throw new IgniteCheckedException(ioe);
    }

    try {
        if (F.eq(user, ugiUser))
            // if current UGI context user is the same, do direct call:
            return c.call();
        else {
            UserGroupInformation ugi = UserGroupInformation.getBestUGI(null, user);

            return ugi.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    return c.call();
                }
            });
        }
    } catch (Exception e) {
        throw new IgniteCheckedException(e);
    }
}
From source file:org.apache.impala.util.FsPermissionChecker.java
License:Apache License
private FsPermissionChecker() throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    groups_.addAll(Arrays.asList(ugi.getGroupNames()));
    supergroup_ = CONF.get(DFS_PERMISSIONS_SUPERUSERGROUP_KEY, DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT);
    user_ = ugi.getShortUserName();
}
From source file:org.apache.metron.maas.service.yarn.YarnUtils.java
License:Apache License
public void publishContainerEndEvent(final TimelineClient timelineClient, ContainerStatus container,
        String domainId, UserGroupInformation ugi) {
    final TimelineEntity entity = new TimelineEntity();
    entity.setEntityId(container.getContainerId().toString());
    entity.setEntityType(ApplicationMaster.DSEntity.DS_CONTAINER.toString());
    entity.setDomainId(domainId);
    entity.addPrimaryFilter("user", ugi.getShortUserName());
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType(ContainerEvents.CONTAINER_END.toString());
    event.addEventInfo("State", container.getState().name());
    event.addEventInfo("Exit Status", container.getExitStatus());
    entity.addEvent(event);
    try {
        timelineClient.putEntities(entity);
    } catch (YarnException | IOException e) {
        LOG.error("Container end event could not be published for " + container.getContainerId().toString(), e);
    }
}
From source file:org.apache.metron.maas.service.yarn.YarnUtils.java
License:Apache License
public void publishApplicationAttemptEvent(final TimelineClient timelineClient, String appAttemptId,
        ContainerEvents appEvent, String domainId, UserGroupInformation ugi) {
    final TimelineEntity entity = new TimelineEntity();
    entity.setEntityId(appAttemptId);
    entity.setEntityType(ApplicationMaster.DSEntity.DS_APP_ATTEMPT.toString());
    entity.setDomainId(domainId);
    entity.addPrimaryFilter("user", ugi.getShortUserName());
    TimelineEvent event = new TimelineEvent();
    event.setEventType(appEvent.toString());
    event.setTimestamp(System.currentTimeMillis());
    entity.addEvent(event);
    try {
        timelineClient.putEntities(entity);
    } catch (YarnException | IOException e) {
        LOG.error("App Attempt " + (appEvent.equals(ContainerEvents.APP_ATTEMPT_START) ? "start" : "end")
                + " event could not be published for " + appAttemptId, e);
    }
}
From source file:org.apache.metron.maas.service.yarn.YarnUtils.java
License:Apache License
public void publishContainerStartEvent(final TimelineClient timelineClient, Container container,
        String domainId, UserGroupInformation ugi) {
    final TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("" + container.getId());
    entity.setEntityType(ApplicationMaster.DSEntity.DS_CONTAINER.toString());
    entity.setDomainId(domainId);
    entity.addPrimaryFilter("user", ugi.getShortUserName());
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType(ContainerEvents.CONTAINER_START.toString());
    event.addEventInfo("Node", container.getNodeId().toString());
    event.addEventInfo("Resources", container.getResource().toString());
    entity.addEvent(event);
    try {
        ugi.doAs(new PrivilegedExceptionAction<TimelinePutResponse>() {
            @Override
            public TimelinePutResponse run() throws Exception {
                return timelineClient.putEntities(entity);
            }
        });
    } catch (Exception e) {
        LOG.error("Container start event could not be published for " + container.getId().toString(),
                e instanceof UndeclaredThrowableException ? e.getCause() : e);
    }
}
From source file:org.apache.phoenix.end2end.HttpParamImpersonationQueryServerIT.java
License:Apache License
@Test
public void testSuccessfulImpersonation() throws Exception {
    final Entry<String, File> user1 = getUser(1);
    final Entry<String, File> user2 = getUser(2);
    // Build the JDBC URL by hand with the doAs
    final String doAsUrlTemplate = Driver.CONNECT_STRING_PREFIX + "url=http://localhost:" + PQS_PORT + "?"
            + QueryServicesOptions.DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM
            + "=%s;authentication=SPNEGO;serialization=PROTOBUF";
    final String tableName = "POSITIVE_IMPERSONATION";
    final int numRows = 5;
    final UserGroupInformation serviceUgi = UserGroupInformation
            .loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, KEYTAB.getAbsolutePath());
    serviceUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            createTable(tableName, numRows);
            grantUsersToPhoenixSystemTables(Arrays.asList(user1.getKey(), user2.getKey()));
            return null;
        }
    });
    UserGroupInformation user1Ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(user1.getKey(),
            user1.getValue().getAbsolutePath());
    user1Ugi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            // This user should not be able to read the table
            readAndExpectPermissionError(PQS_URL, tableName, numRows);
            // Run the same query with the same credentials, but with a doAs. We should be permitted
            // since the user we're impersonating can run the query
            final String doAsUrl = String.format(doAsUrlTemplate, serviceUgi.getShortUserName());
            try (Connection conn = DriverManager.getConnection(doAsUrl);
                    Statement stmt = conn.createStatement()) {
                conn.setAutoCommit(true);
                readRows(stmt, tableName, numRows);
            }
            return null;
        }
    });
}
From source file:org.apache.phoenix.end2end.HttpParamImpersonationQueryServerIT.java
License:Apache License
@Test
public void testDisallowedImpersonation() throws Exception {
    final Entry<String, File> user2 = getUser(2);
    // Build the JDBC URL by hand with the doAs
    final String doAsUrlTemplate = Driver.CONNECT_STRING_PREFIX + "url=http://localhost:" + PQS_PORT + "?"
            + QueryServicesOptions.DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM
            + "=%s;authentication=SPNEGO;serialization=PROTOBUF";
    final String tableName = "DISALLOWED_IMPERSONATION";
    final int numRows = 5;
    final UserGroupInformation serviceUgi = UserGroupInformation
            .loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, KEYTAB.getAbsolutePath());
    serviceUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            createTable(tableName, numRows);
            grantUsersToPhoenixSystemTables(Arrays.asList(user2.getKey()));
            return null;
        }
    });
    UserGroupInformation user2Ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(user2.getKey(),
            user2.getValue().getAbsolutePath());
    user2Ugi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            // This user is disallowed to read this table
            readAndExpectPermissionError(PQS_URL, tableName, numRows);
            // This user is also not allowed to impersonate
            final String doAsUrl = String.format(doAsUrlTemplate, serviceUgi.getShortUserName());
            try (Connection conn = DriverManager.getConnection(doAsUrl);
                    Statement stmt = conn.createStatement()) {
                conn.setAutoCommit(true);
                readRows(stmt, tableName, numRows);
                fail("user2 should not be allowed to impersonate the service user");
            } catch (Exception e) {
                LOG.info("Caught expected exception", e);
            }
            return null;
        }
    });
}
From source file:org.apache.phoenix.end2end.SystemTablePermissionsIT.java
License:Apache License
@Test
public void testSystemTablePermissions() throws Exception {
    testUtil = new HBaseTestingUtility();
    clientProperties = new Properties();
    Configuration conf = testUtil.getConfiguration();
    setCommonConfigProperties(conf);
    conf.set(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, "false");
    clientProperties.setProperty(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, "false");
    testUtil.startMiniCluster(1);
    final UserGroupInformation superUser = UserGroupInformation.createUserForTesting(SUPERUSER, new String[0]);
    final UserGroupInformation regularUser = UserGroupInformation.createUserForTesting("user", new String[0]);

    superUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            createTable();
            readTable();
            return null;
        }
    });

    Set<String> tables = getHBaseTables();
    assertTrue("HBase tables do not include expected Phoenix tables: " + tables,
            tables.containsAll(PHOENIX_SYSTEM_TABLES));

    // Grant permission to the system tables for the unprivileged user
    superUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            try {
                grantPermissions(regularUser.getShortUserName(), PHOENIX_SYSTEM_TABLES, Action.EXEC, Action.READ);
                grantPermissions(regularUser.getShortUserName(), Collections.singleton(TABLE_NAME), Action.READ);
            } catch (Throwable e) {
                if (e instanceof Exception) {
                    throw (Exception) e;
                } else {
                    throw new Exception(e);
                }
            }
            return null;
        }
    });

    // Make sure that the unprivileged user can read the table
    regularUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            // We expect this to not throw an error
            readTable();
            return null;
        }
    });
}
From source file:org.apache.phoenix.end2end.SystemTablePermissionsIT.java
License:Apache License
@Test
public void testNamespaceMappedSystemTables() throws Exception {
    testUtil = new HBaseTestingUtility();
    clientProperties = new Properties();
    Configuration conf = testUtil.getConfiguration();
    setCommonConfigProperties(conf);
    testUtil.getConfiguration().set(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, "true");
    clientProperties.setProperty(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, "true");
    testUtil.startMiniCluster(1);
    final UserGroupInformation superUser = UserGroupInformation.createUserForTesting(SUPERUSER, new String[0]);
    final UserGroupInformation regularUser = UserGroupInformation.createUserForTesting("user", new String[0]);

    superUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            createTable();
            readTable();
            return null;
        }
    });

    Set<String> tables = getHBaseTables();
    assertTrue("HBase tables do not include expected Phoenix tables: " + tables,
            tables.containsAll(PHOENIX_NAMESPACE_MAPPED_SYSTEM_TABLES));

    // Grant permission to the system tables for the unprivileged user
    // An unprivileged user should only need to be able to Read and eXecute on them.
    superUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            try {
                grantPermissions(regularUser.getShortUserName(), PHOENIX_NAMESPACE_MAPPED_SYSTEM_TABLES,
                        Action.EXEC, Action.READ);
                grantPermissions(regularUser.getShortUserName(), Collections.singleton(TABLE_NAME), Action.READ);
            } catch (Throwable e) {
                if (e instanceof Exception) {
                    throw (Exception) e;
                } else {
                    throw new Exception(e);
                }
            }
            return null;
        }
    });

    regularUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            // We expect this to not throw an error
            readTable();
            return null;
        }
    });
}
From source file:org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizer.java
License:Apache License
/**
 * Check if user has privileges to do this action on these objects
 * @param hiveOpType
 * @param inputHObjs
 * @param outputHObjs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
@Override
public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
        List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
        throws HiveAuthzPluginException, HiveAccessControlException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();

    if (ugi == null) {
        throw new HiveAccessControlException("Permission denied: user information not available");
    }

    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();

    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());

        if (LOG.isDebugEnabled()) {
            LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext));
        }

        if (hiveOpType == HiveOperationType.DFS) {
            handleDfsCommand(hiveOpType, inputHObjs, user, auditHandler);
            return;
        }

        List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>();

        if (!CollectionUtils.isEmpty(inputHObjs)) {
            for (HivePrivilegeObject hiveObj : inputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);

                if (resource == null) { // possible if input object/object is of a kind that we don't currently authorize
                    continue;
                }

                if (resource.getObjectType() == HiveObjectType.URI) {
                    String path = hiveObj.getObjectName();
                    FsAction permission = FsAction.READ;

                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format(
                                "Permission denied: user [%s] does not have [%s] privilege on [%s]", user,
                                permission.name(), path));
                    }

                    continue;
                }

                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, true);

                if (accessType == HiveAccessType.NONE) {
                    continue;
                }

                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups,
                            hiveOpType, accessType, context, sessionContext);

                    requests.add(request);
                }
            }
        } else {
            // this should happen only for SHOWDATABASES
            if (hiveOpType == HiveOperationType.SHOWDATABASES) {
                RangerHiveResource resource = new RangerHiveResource(HiveObjectType.DATABASE, null);
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups,
                        hiveOpType.name(), HiveAccessType.USE, context, sessionContext);

                requests.add(request);
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("RangerHiveAuthorizer.checkPrivileges: Unexpected operation type[" + hiveOpType
                            + "] received with empty input objects list!");
                }
            }
        }

        if (!CollectionUtils.isEmpty(outputHObjs)) {
            for (HivePrivilegeObject hiveObj : outputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);

                if (resource == null) { // possible if input object/object is of a kind that we don't currently authorize
                    continue;
                }

                if (resource.getObjectType() == HiveObjectType.URI) {
                    String path = hiveObj.getObjectName();
                    FsAction permission = FsAction.WRITE;

                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format(
                                "Permission denied: user [%s] does not have [%s] privilege on [%s]", user,
                                permission.name(), path));
                    }

                    continue;
                }

                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, false);

                if (accessType == HiveAccessType.NONE) {
                    continue;
                }

                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups,
                            hiveOpType, accessType, context, sessionContext);

                    requests.add(request);
                }
            }
        }

        buildRequestContextWithAllAccessedResources(requests);

        for (RangerHiveAccessRequest request : requests) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("request: " + request);
            }

            RangerHiveResource resource = (RangerHiveResource) request.getResource();
            RangerAccessResult result = null;

            if (resource.getObjectType() == HiveObjectType.COLUMN
                    && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) {
                List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>();

                String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP);

                // in case of multiple columns, original request is not sent to the plugin; hence service-def will not be set
                resource.setServiceDef(hivePlugin.getServiceDef());

                for (String column : columns) {
                    if (column != null) {
                        column = column.trim();
                    }

                    if (StringUtils.isBlank(column)) {
                        continue;
                    }

                    RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN,
                            resource.getDatabase(), resource.getTable(), column);

                    RangerHiveAccessRequest colRequest = request.copy();
                    colRequest.setResource(colResource);

                    colRequests.add(colRequest);
                }

                Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests, auditHandler);

                if (colResults != null) {
                    for (RangerAccessResult colResult : colResults) {
                        result = colResult;

                        if (result != null && !result.getIsAllowed()) {
                            break;
                        }
                    }
                }
            } else {
                result = hivePlugin.isAccessAllowed(request, auditHandler);
            }

            if ((result == null || result.getIsAllowed())
                    && isBlockAccessIfRowfilterColumnMaskSpecified(hiveOpType, request)) {
                // check if row-filtering is applicable for the table/view being accessed
                HiveAccessType savedAccessType = request.getHiveAccessType();
                RangerHiveResource tblResource = new RangerHiveResource(HiveObjectType.TABLE,
                        resource.getDatabase(), resource.getTable());

                request.setHiveAccessType(HiveAccessType.SELECT); // filtering/masking policies are defined only for SELECT
                request.setResource(tblResource);

                RangerRowFilterResult rowFilterResult = getRowFilterResult(request);

                if (isRowFilterEnabled(rowFilterResult)) {
                    if (result == null) {
                        result = new RangerAccessResult(rowFilterResult.getServiceName(),
                                rowFilterResult.getServiceDef(), request);
                    }

                    result.setIsAllowed(false);
                    result.setPolicyId(rowFilterResult.getPolicyId());
                    result.setReason("User does not have access to all rows of the table");
                } else {
                    // check if masking is enabled for any column in the table/view
                    request.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS);

                    RangerDataMaskResult dataMaskResult = getDataMaskResult(request);

                    if (isDataMaskEnabled(dataMaskResult)) {
                        if (result == null) {
                            result = new RangerAccessResult(dataMaskResult.getServiceName(),
                                    dataMaskResult.getServiceDef(), request);
                        }

                        result.setIsAllowed(false);
                        result.setPolicyId(dataMaskResult.getPolicyId());
                        result.setReason("User does not have access to unmasked column values");
                    }
                }

                request.setHiveAccessType(savedAccessType);
                request.setResource(resource);

                if (result != null && !result.getIsAllowed()) {
                    auditHandler.processResult(result);
                }
            }

            if (result != null && !result.getIsAllowed()) {
                String path = resource.getAsString();

                throw new HiveAccessControlException(
                        String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user,
                                request.getHiveAccessType().name(), path));
            }
        }
    } finally {
        auditHandler.flushAudit();
    }
}