Example usage for org.apache.hadoop.security UserGroupInformation getUGIFromSubject

Introduction

This page collects example usages of org.apache.hadoop.security.UserGroupInformation.getUGIFromSubject from several open-source projects.

Prototype

public static UserGroupInformation getUGIFromSubject(Subject subject) throws IOException 

Document

Creates a UserGroupInformation from a Subject containing a Kerberos principal.
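
The method is typically used when an application performs its own JAAS/Kerberos login and wants Hadoop operations to run as that externally authenticated user, rather than letting UserGroupInformation perform the login itself. Below is a minimal sketch, not taken from the examples on this page; the JAAS configuration entry name "KerberosClient" and the filesystem check are assumptions for illustration, and the entry is expected to point at Krb5LoginModule with a ticket cache or keytab.

import java.security.PrivilegedExceptionAction;

import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class GetUgiFromSubjectSketch {

    public static void main(String[] args) throws Exception {
        // Tell UGI that Kerberos is in use before any login-related call.
        Configuration conf = new Configuration();
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Perform the JAAS login ourselves. "KerberosClient" is an assumed entry
        // name that must exist in the JAAS configuration (Krb5LoginModule).
        LoginContext loginContext = new LoginContext("KerberosClient");
        loginContext.login();
        Subject subject = loginContext.getSubject();

        // Wrap the externally managed Subject in a UserGroupInformation.
        UserGroupInformation ugi = UserGroupInformation.getUGIFromSubject(subject);

        // Run Hadoop work as that user.
        ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
            try (FileSystem fs = FileSystem.get(conf)) {
                System.out.println("/ exists: " + fs.exists(new Path("/")));
            }
            return null;
        });
    }
}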

Usage

From source file:com.streamsets.pipeline.stage.origin.hdfs.cluster.ClusterHdfsSource.java

License:Apache License
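In this StreamSets origin, getUGIFromSubject is used during validation: when Hadoop security is enabled, the login UGI is built from the Subject taken off the current AccessControlContext; otherwise the stage falls back to loginUserFromSubject and getLoginUser.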

private void validateHadoopFS(List<ConfigIssue> issues) {
    boolean validHapoopFsUri = true;
    hadoopConf = getHadoopConfiguration(issues);
    String hdfsUriInConf;
    if (hdfsUri != null && !hdfsUri.isEmpty()) {
        hadoopConf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, hdfsUri);
    } else {
        hdfsUriInConf = hadoopConf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY);
        if (hdfsUriInConf == null) {
            issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri", Errors.HADOOPFS_19));
            return;
        } else {
            hdfsUri = hdfsUriInConf;
        }
    }
    if (hdfsUri.contains("://")) {
        try {
            URI uri = new URI(hdfsUri);
            if (!"hdfs".equals(uri.getScheme())) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri",
                        Errors.HADOOPFS_12, hdfsUri, uri.getScheme()));
                validHapoopFsUri = false;
            } else if (uri.getAuthority() == null) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri",
                        Errors.HADOOPFS_13, hdfsUri));
                validHapoopFsUri = false;
            }
        } catch (Exception ex) {
            issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri", Errors.HADOOPFS_22,
                    hdfsUri, ex.getMessage(), ex));
            validHapoopFsUri = false;
        }
    } else {
        issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri", Errors.HADOOPFS_02,
                hdfsUri));
        validHapoopFsUri = false;
    }

    StringBuilder logMessage = new StringBuilder();
    try {
        // forcing UGI to initialize with the security settings from the stage
        UserGroupInformation.setConfiguration(hadoopConf);
        Subject subject = Subject.getSubject(AccessController.getContext());
        if (UserGroupInformation.isSecurityEnabled()) {
            loginUgi = UserGroupInformation.getUGIFromSubject(subject);
        } else {
            UserGroupInformation.loginUserFromSubject(subject);
            loginUgi = UserGroupInformation.getLoginUser();
        }
        LOG.info("Subject = {}, Principals = {}, Login UGI = {}", subject,
                subject == null ? "null" : subject.getPrincipals(), loginUgi);
        if (hdfsKerberos) {
            logMessage.append("Using Kerberos");
            if (loginUgi.getAuthenticationMethod() != UserGroupInformation.AuthenticationMethod.KERBEROS) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsKerberos",
                        Errors.HADOOPFS_00, loginUgi.getAuthenticationMethod(),
                        UserGroupInformation.AuthenticationMethod.KERBEROS));
            }
        } else {
            logMessage.append("Using Simple");
            hadoopConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.SIMPLE.name());
        }
        if (validHapoopFsUri) {
            getUGI().doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    try (FileSystem fs = getFileSystemForInitDestroy()) { //to trigger the close
                    }
                    return null;
                }
            });
        }
    } catch (Exception ex) {
        LOG.info("Error connecting to FileSystem: " + ex, ex);
        issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), null, Errors.HADOOPFS_11, hdfsUri,
                String.valueOf(ex), ex));
    }
    LOG.info("Authentication Config: " + logMessage);
}

From source file:org.apache.drill.exec.rpc.security.kerberos.KerberosFactory.java

License:Apache License
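Drill's Kerberos SASL factory calls getUGIFromSubject when the client requests authentication from the current JAAS Subject (DrillProperties.KERBEROS_FROM_SUBJECT); otherwise it logs in from a keytab or uses the current ticket.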

@Override
public UserGroupInformation createAndLoginUser(final Map<String, ?> properties) throws IOException {
    final Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
            UserGroupInformation.AuthenticationMethod.KERBEROS.toString());
    UserGroupInformation.setConfiguration(conf);

    final String keytab = (String) properties.get(DrillProperties.KEYTAB);
    final boolean assumeSubject = properties.containsKey(DrillProperties.KERBEROS_FROM_SUBJECT)
            && Boolean.parseBoolean((String) properties.get(DrillProperties.KERBEROS_FROM_SUBJECT));
    try {
        final UserGroupInformation ugi;
        if (assumeSubject) {
            ugi = UserGroupInformation.getUGIFromSubject(Subject.getSubject(AccessController.getContext()));
            logger.debug("Assuming subject for {}.", ugi.getShortUserName());
        } else {
            if (keytab != null) {
                ugi = UserGroupInformation
                        .loginUserFromKeytabAndReturnUGI((String) properties.get(DrillProperties.USER), keytab);
                logger.debug("Logged in {} using keytab.", ugi.getShortUserName());
            } else {
                // includes Kerberos ticket login
                ugi = UserGroupInformation.getCurrentUser();
                logger.debug("Logged in {} using ticket.", ugi.getShortUserName());
            }
        }
        return ugi;
    } catch (final IOException e) {
        logger.debug("Login failed.", e);
        final Throwable cause = e.getCause();
        if (cause instanceof LoginException) {
            throw new SaslException("Failed to login.", cause);
        }
        throw new SaslException("Unexpected failure trying to login.", cause);
    }
}

From source file:org.apache.ranger.audit.provider.MiscUtil.java

License:Apache License
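Ranger's audit helper converts a Subject into a UGI after logging its principals; the in-code comment notes that UserGroupInformation.getLoginUser() must run first because it performs initialization that getUGIFromSubject() relies on.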

public static UserGroupInformation createUGIFromSubject(Subject subject) throws IOException {
    logger.info("SUBJECT " + (subject == null ? "not found" : "found"));
    UserGroupInformation ugi = null;
    if (subject != null) {
        logger.info("SUBJECT.PRINCIPALS.size()=" + subject.getPrincipals().size());
        Set<Principal> principals = subject.getPrincipals();
        for (Principal principal : principals) {
            logger.info("SUBJECT.PRINCIPAL.NAME=" + principal.getName());
        }
        try {
            // Do not remove the below statement. The default
            // getLoginUser does some initialization which is needed
            // for getUGIFromSubject() to work.
            UserGroupInformation.getLoginUser();
            logger.info("Default UGI before using new Subject:" + UserGroupInformation.getLoginUser());
        } catch (Throwable t) {
            logger.error(t);
        }
        ugi = UserGroupInformation.getUGIFromSubject(subject);
        logger.info("SUBJECT.UGI.NAME=" + ugi.getUserName() + ", ugi=" + ugi);
    } else {
        logger.info("Server username is not available");
    }
    return ugi;
}

From source file:org.apache.sentry.service.thrift.HiveSimpleConnectionFactory.java

License:Apache License
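Sentry builds the client UGI from the Subject held by its Kerberos context (or from the current user in insecure mode) and opens the HiveMetaStoreClient inside a doAs block.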

/**
 * Connect to HMS in insecure mode or in Kerberos mode, according to config.
 *
 * @return HMS connection
 * @throws IOException          if the connection could not be established
 * @throws InterruptedException if the connection was interrupted
 * @throws MetaException        if another Metastore error occurred
 */
public HMSClient connect() throws IOException, InterruptedException, MetaException {
    UserGroupInformation clientUGI;

    if (insecure) {
        clientUGI = UserGroupInformation.getCurrentUser();
    } else {
        clientUGI = UserGroupInformation.getUGIFromSubject(kerberosContext.getSubject());
    }
    return new HMSClient(clientUGI.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {
        @Override
        public HiveMetaStoreClient run() throws MetaException {
            return new HiveMetaStoreClient(hiveConf);
        }
    }));
}

From source file:org.trustedanalytics.auth.gateway.hbase.config.HbaseConfiguration.java

License:Apache License
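Here a Subject produced by a keytab login is turned into a UGI, wrapped in an HBase User, and used to create a secured HBase Connection.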

private Connection getSecuredHBaseClient(Configuration hbaseConf)
        throws InterruptedException, URISyntaxException, LoginException, IOException, KrbException {
    LOGGER.info("Trying kerberos authentication");
    KrbLoginManager loginManager = KrbLoginManagerFactory.getInstance()
            .getKrbLoginManagerInstance(kerberosHbaseProperties.getKdc(), kerberosHbaseProperties.getRealm());

    SystemEnvironment systemEnvironment = new SystemEnvironment();
    Subject subject = loginManager.loginWithKeyTab(systemEnvironment.getVariable(SystemEnvironment.KRB_USER),
            systemEnvironment.getVariable(SystemEnvironment.KRB_KEYTAB));
    loginManager.loginInHadoop(subject, hbaseConf);
    Configuration conf = HBaseConfiguration.create(hbaseConf);
    User user = UserProvider.instantiate(conf).create(UserGroupInformation.getUGIFromSubject(subject));
    return ConnectionFactory.createConnection(conf, user);
}

From source file:org.trustedanalytics.ingestion.gearpump.hbase.HBaseConnManager.java

License:Apache License
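A one-line helper that converts a Subject into an HBase User via getUGIFromSubject.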

private User getUserFromSubject(Subject subject) throws IOException {
    return UserProvider.instantiate(hbaseConfiguration).create(UserGroupInformation.getUGIFromSubject(subject));
}

From source file:org.trustedanalytics.servicebroker.hbase.config.HbaseConfiguration.java

License:Apache License
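Similar to the previous HBase example, except the Subject comes from a username/password login; the resulting User is used to obtain an Admin client.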

private Admin getSecuredHBaseClient()
        throws InterruptedException, URISyntaxException, LoginException, IOException {
    LOGGER.info("Trying kerberos authentication");
    KrbLoginManager loginManager = KrbLoginManagerFactory.getInstance()
            .getKrbLoginManagerInstance(kerberosProperties.getKdc(), kerberosProperties.getRealm());

    Subject subject = loginManager.loginWithCredentials(configuration.getUser(),
            configuration.getPassword().toCharArray());
    loginManager.loginInHadoop(subject, hbaseConf);
    Configuration conf = HBaseConfiguration.create(hbaseConf);
    User user = UserProvider.instantiate(conf).create(UserGroupInformation.getUGIFromSubject(subject));
    Connection connection = ConnectionFactory.createConnection(conf, user);
    return connection.getAdmin();
}