Example usage for org.apache.hadoop.security UserGroupInformation getUGIFromSubject

List of usage examples for org.apache.hadoop.security UserGroupInformation getUGIFromSubject

Introduction

On this page you can find example usage for org.apache.hadoop.security UserGroupInformation getUGIFromSubject.

Prototype

public static UserGroupInformation getUGIFromSubject(Subject subject) throws IOException 

Document

Create a UserGroupInformation from a Subject with a Kerberos principal.

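Before the per-project examples, here is a minimal sketch of the common pattern they all follow (the class name UgiFromSubjectExample is made up for illustration): apply the Hadoop Configuration first so the authentication method and Kerberos rules are loaded, then convert the current JAAS Subject into a UGI via getUGIFromSubject when security is enabled, falling back to loginUserFromSubject and getLoginUser otherwise.

import java.io.IOException;
import java.security.AccessController;

import javax.security.auth.Subject;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiFromSubjectExample {

    // Minimal sketch: resolve a login UGI from the Subject attached to the
    // current access control context, as the usage examples below do.
    public static UserGroupInformation resolveLoginUgi(Configuration conf) throws IOException {
        UserGroupInformation.setConfiguration(conf); // load auth method and Kerberos rules first
        Subject subject = Subject.getSubject(AccessController.getContext());
        if (UserGroupInformation.isSecurityEnabled()) {
            // Kerberos: wrap the already-authenticated Subject in a UGI
            return UserGroupInformation.getUGIFromSubject(subject);
        } else {
            // Simple authentication: log in from the Subject and use the login user
            UserGroupInformation.loginUserFromSubject(subject);
            return UserGroupInformation.getLoginUser();
        }
    }
}
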
Usage

From source file:com.hortonworks.streamline.streams.cluster.service.metadata.HBaseMetadataService.java

License:Apache License

/**
 * Creates secure {@link HBaseMetadataService} which delegates to {@link Admin}
 * instantiated with the {@link Configuration} provided using the first parameter
 */
public static HBaseMetadataService newInstance(Configuration hbaseConfig, SecurityContext securityContext,
        Subject subject, Component hbaseMaster, Collection<ComponentProcess> hbaseMasterProcesses)
        throws IOException, EntityNotFoundException {

    if (SecurityUtil.isKerberosAuthenticated(securityContext)) {
        UserGroupInformation.setConfiguration(hbaseConfig); // Sets Kerberos rules
        final UserGroupInformation ugiFromSubject = UserGroupInformation.getUGIFromSubject(subject); // Adds User principal to the subject
        final UserGroupInformation proxyUserForImpersonation = UserGroupInformation
                .createProxyUser(securityContext.getUserPrincipal().getName(), ugiFromSubject);
        final User user = User.create(proxyUserForImpersonation);

        return new HBaseMetadataService(ConnectionFactory.createConnection(hbaseConfig, user).getAdmin(),
                securityContext, subject, user, hbaseMaster, hbaseMasterProcesses);
    } else {
        return newInstance(hbaseConfig);
    }
}

From source file:com.hortonworks.streamline.streams.cluster.service.metadata.HiveMetadataService.java

License:Apache License

/**
 * Creates secure {@link HiveMetadataService}, which delegates to {@link HiveMetaStoreClient}
 * instantiated with the {@link HiveConf} provided using the first parameter
 */
public static HiveMetadataService newInstance(HiveConf hiveConf, SecurityContext securityContext,
        Subject subject, Component hiveMetastore, Collection<ComponentProcess> hiveMetastoreProcesses)
        throws MetaException, IOException, EntityNotFoundException, PrivilegedActionException {

    if (SecurityUtil.isKerberosAuthenticated(securityContext)) {
        UserGroupInformation.setConfiguration(hiveConf); // Sets Kerberos rules
        UserGroupInformation.getUGIFromSubject(subject); // Adds User principal to this subject

        return new HiveMetadataService(
                SecurityUtil.execute(() -> new HiveMetaStoreClient(hiveConf), securityContext, subject),
                hiveConf, securityContext, subject, hiveMetastore, hiveMetastoreProcesses);
    } else {
        return new HiveMetadataService(new HiveMetaStoreClient(hiveConf), hiveConf, securityContext, subject,
                hiveMetastore, hiveMetastoreProcesses);
    }
}

From source file:com.intel.databackend.datasources.hbase.HbaseConnManger.java

License:Apache License

private User getUserFromSubject(Configuration configuration, Subject subject) throws IOException {
    return UserProvider.instantiate(configuration).create(UserGroupInformation.getUGIFromSubject(subject));
}

From source file:com.streamsets.datacollector.security.DefaultLoginUgiProvider.java

License:Apache License

@Override
public UserGroupInformation getLoginUgi(Configuration hdfsConfiguration) throws IOException {
    AccessControlContext accessContext = AccessController.getContext();
    Subject subject = Subject.getSubject(accessContext);
    UserGroupInformation loginUgi;
    //HADOOP-13805
    HadoopConfigurationUtils.configureHadoopTreatSubjectExternal(hdfsConfiguration);
    UserGroupInformation.setConfiguration(hdfsConfiguration);
    if (UserGroupInformation.isSecurityEnabled()) {
        loginUgi = UserGroupInformation.getUGIFromSubject(subject);
    } else {
        UserGroupInformation.loginUserFromSubject(subject);
        loginUgi = UserGroupInformation.getLoginUser();
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Subject = {}, Principals = {}, Login UGI = {}", subject,
                subject == null ? "null" : subject.getPrincipals(), loginUgi);
    }
    return loginUgi;
}

From source file:com.streamsets.datacollector.security.HadoopSecurityUtil.java

License:Apache License

public static UserGroupInformation getLoginUser(Configuration hdfsConfiguration) throws IOException {
    UserGroupInformation loginUgi;
    AccessControlContext accessContext = AccessController.getContext();
    Subject subject = Subject.getSubject(accessContext);
    // As per SDC-2917 doing this avoids deadlock
    synchronized (SecurityUtil.getSubjectDomainLock(accessContext)) {
        // call some method to force load static block in KerberosName
        KerberosName.hasRulesBeenSet();
    }
    // This should be always out of sync block
    UserGroupInformation.setConfiguration(hdfsConfiguration);
    synchronized (SecurityUtil.getSubjectDomainLock(accessContext)) {
        if (UserGroupInformation.isSecurityEnabled()) {
            loginUgi = UserGroupInformation.getUGIFromSubject(subject);
        } else {
            UserGroupInformation.loginUserFromSubject(subject);
            loginUgi = UserGroupInformation.getLoginUser();
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Subject = {}, Principals = {}, Login UGI = {}", subject,
                    subject == null ? "null" : subject.getPrincipals(), loginUgi);
        }
    }
    return loginUgi;
}

From source file:com.streamsets.datacollector.security.LoginUgiProviderFactory.java

License:Apache License

private static LoginUgiProviderFactory getDefaultLoginUgiProviderFactory() {
    return new LoginUgiProviderFactory() {
        @Override
        public LoginUgiProvider createLoginUgiProvider() {
            return new LoginUgiProvider() {
                @Override
                public UserGroupInformation getLoginUgi(Subject subject) throws IOException {
                    UserGroupInformation loginUgi;
                    if (UserGroupInformation.isSecurityEnabled()) {
                        loginUgi = UserGroupInformation.getUGIFromSubject(subject);
                    } else {
                        UserGroupInformation.loginUserFromSubject(subject);
                        loginUgi = UserGroupInformation.getLoginUser();
                    }
                    return loginUgi;
                }
            };
        }
    };
}

From source file:com.streamsets.datacollector.security.MapRLoginUgiProvider.java

License:Apache License

@Override
public UserGroupInformation getLoginUgi(Configuration hdfsConfiguration) throws IOException {
    // check system property to see if MapR U/P security is enabled
    String maprLoginEnabled = System.getProperty(MAPR_USERNAME_PASSWORD_SECURITY_ENABLED_KEY,
            MAPR_USERNAME_PASSWORD_SECURITY_ENABLED_DEFAULT);
    boolean isMapRLogin = Boolean.parseBoolean(maprLoginEnabled);
    AccessControlContext accessControlContext = AccessController.getContext();
    Subject subject = Subject.getSubject(accessControlContext);
    //HADOOP-13805
    HadoopConfigurationUtils.configureHadoopTreatSubjectExternal(hdfsConfiguration);
    // SDC-4015 As privateclassloader is false for MapR, UGI is shared and it also needs to be under jvm lock
    UserGroupInformation.setConfiguration(hdfsConfiguration);
    UserGroupInformation loginUgi;

    if (UserGroupInformation.isSecurityEnabled() && !isMapRLogin) {
        // The code in this block must only be executed in case Kerberos is enabled.
        // MapR implementation of UserGroupInformation.isSecurityEnabled() returns true even if Kerberos is not enabled.
        // System property helps to avoid this code path in such a case
        loginUgi = UserGroupInformation.getUGIFromSubject(subject);
    } else {
        UserGroupInformation.loginUserFromSubject(subject);
        loginUgi = UserGroupInformation.getLoginUser();
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Subject = {}, Principals = {}, Login UGI = {}", subject,
                subject == null ? "null" : subject.getPrincipals(), loginUgi);
    }
    return loginUgi;

}

From source file:com.streamsets.pipeline.stage.destination.hbase.HBaseTarget.java

License:Apache License

private void validateSecurityConfigs(List<ConfigIssue> issues) {
    try {
        if (kerberosAuth) {
            hbaseConf.set(User.HBASE_SECURITY_CONF_KEY,
                    UserGroupInformation.AuthenticationMethod.KERBEROS.name());
            hbaseConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.KERBEROS.name());
            if (hbaseConf.get(MASTER_KERBEROS_PRINCIPAL) == null) {
                try {
                    hbaseConf.set(MASTER_KERBEROS_PRINCIPAL, "hbase/_HOST@" + KerberosUtil.getDefaultRealm());
                } catch (Exception e) {
                    issues.add(getContext().createConfigIssue(Groups.HBASE.name(), "masterPrincipal",
                            Errors.HBASE_22));
                }
            }
            if (hbaseConf.get(REGIONSERVER_KERBEROS_PRINCIPAL) == null) {
                try {
                    hbaseConf.set(REGIONSERVER_KERBEROS_PRINCIPAL,
                            "hbase/_HOST@" + KerberosUtil.getDefaultRealm());
                } catch (Exception e) {
                    issues.add(getContext().createConfigIssue(Groups.HBASE.name(), "regionServerPrincipal",
                            Errors.HBASE_23));
                }
            }
        }

        UserGroupInformation.setConfiguration(hbaseConf);
        Subject subject = Subject.getSubject(AccessController.getContext());
        if (UserGroupInformation.isSecurityEnabled()) {
            loginUgi = UserGroupInformation.getUGIFromSubject(subject);
        } else {
            UserGroupInformation.loginUserFromSubject(subject);
            loginUgi = UserGroupInformation.getLoginUser();
        }
        LOG.info("Subject = {}, Principals = {}, Login UGI = {}", subject,
                subject == null ? "null" : subject.getPrincipals(), loginUgi);
        StringBuilder logMessage = new StringBuilder();
        if (kerberosAuth) {
            logMessage.append("Using Kerberos");
            if (loginUgi.getAuthenticationMethod() != UserGroupInformation.AuthenticationMethod.KERBEROS) {
                issues.add(getContext().createConfigIssue(Groups.HBASE.name(), "kerberosAuth", Errors.HBASE_16,
                        loginUgi.getAuthenticationMethod()));
            }
        } else {
            logMessage.append("Using Simple");
            hbaseConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.SIMPLE.name());
        }
        LOG.info("Authentication Config: " + logMessage);
    } catch (Exception ex) {
        LOG.info("Error validating security configuration: " + ex, ex);
        issues.add(
                getContext().createConfigIssue(Groups.HBASE.name(), null, Errors.HBASE_17, ex.toString(), ex));
    }
}

From source file:com.streamsets.pipeline.stage.destination.hdfs.HdfsTarget.java

License:Apache License

private boolean validateHadoopFS(List<ConfigIssue> issues) {
    boolean validHapoopFsUri = true;
    if (hdfsUri.contains("://")) {
        try {
            new URI(hdfsUri);
        } catch (Exception ex) {
            issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), null, Errors.HADOOPFS_22,
                    hdfsUri, ex.toString(), ex));
            validHapoopFsUri = false;
        }
    } else {
        issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri", Errors.HADOOPFS_18,
                hdfsUri));
        validHapoopFsUri = false;
    }

    StringBuilder logMessage = new StringBuilder();
    try {
        hdfsConfiguration = getHadoopConfiguration(issues);

        hdfsConfiguration.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, hdfsUri);

        // forcing UGI to initialize with the security settings from the stage
        UserGroupInformation.setConfiguration(hdfsConfiguration);
        Subject subject = Subject.getSubject(AccessController.getContext());
        if (UserGroupInformation.isSecurityEnabled()) {
            loginUgi = UserGroupInformation.getUGIFromSubject(subject);
        } else {
            UserGroupInformation.loginUserFromSubject(subject);
            loginUgi = UserGroupInformation.getLoginUser();
        }
        LOG.info("Subject = {}, Principals = {}, Login UGI = {}", subject,
                subject == null ? "null" : subject.getPrincipals(), loginUgi);
        if (hdfsKerberos) {
            logMessage.append("Using Kerberos");
            if (loginUgi.getAuthenticationMethod() != UserGroupInformation.AuthenticationMethod.KERBEROS) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsKerberos",
                        Errors.HADOOPFS_00, loginUgi.getAuthenticationMethod(),
                        UserGroupInformation.AuthenticationMethod.KERBEROS));
            }
        } else {
            logMessage.append("Using Simple");
            hdfsConfiguration.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.SIMPLE.name());
        }
        if (validHapoopFsUri) {
            getUGI().doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    try (FileSystem fs = getFileSystemForInitDestroy()) { //to trigger the close
                    }
                    return null;
                }
            });
        }
    } catch (Exception ex) {
        LOG.info("Validation Error: " + Errors.HADOOPFS_01.getMessage(), hdfsUri, ex.toString(), ex);
        issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), null, Errors.HADOOPFS_01, hdfsUri,
                String.valueOf(ex), ex));
    }
    LOG.info("Authentication Config: " + logMessage);
    return validHapoopFsUri;
}

From source file:com.streamsets.pipeline.stage.destination.hive.HiveTarget.java

License:Apache License

@Override
protected List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();

    partitionsToFields = new HashMap<>();
    columnsToFields = new HashMap<>();

    hiveConf = new HiveConf();
    if (null != hiveConfDir && !hiveConfDir.isEmpty()) {
        File hiveConfDir = new File(this.hiveConfDir);

        if (!hiveConfDir.isAbsolute()) {
            hiveConfDir = new File(getContext().getResourcesDirectory(), this.hiveConfDir).getAbsoluteFile();
        }

        if (hiveConfDir.exists()) {
            File coreSite = new File(hiveConfDir.getAbsolutePath(), "core-site.xml");
            File hiveSite = new File(hiveConfDir.getAbsolutePath(), "hive-site.xml");
            File hdfsSite = new File(hiveConfDir.getAbsolutePath(), "hdfs-site.xml");

            if (!coreSite.exists()) {
                issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "hiveConfDir", Errors.HIVE_06,
                        coreSite.getName(), this.hiveConfDir));
            } else {
                hiveConf.addResource(new Path(coreSite.getAbsolutePath()));
            }

            if (!hdfsSite.exists()) {
                issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "hiveConfDir", Errors.HIVE_06,
                        hdfsSite.getName(), this.hiveConfDir));
            } else {
                hiveConf.addResource(new Path(hdfsSite.getAbsolutePath()));
            }

            if (!hiveSite.exists()) {
                issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "hiveConfDir", Errors.HIVE_06,
                        hiveSite.getName(), this.hiveConfDir));
            } else {
                hiveConf.addResource(new Path(hiveSite.getAbsolutePath()));
            }
        } else {
            issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "hiveConfDir", Errors.HIVE_07,
                    this.hiveConfDir));
        }
    } else if (hiveThriftUrl == null || hiveThriftUrl.isEmpty()) {
        issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "hiveThriftUrl", Errors.HIVE_13));
    }

    // Specified URL overrides what's in the Hive Conf
    hiveConf.set(HIVE_METASTORE_URI, hiveThriftUrl);
    // Add any additional hive conf overrides
    for (Map.Entry<String, String> entry : additionalHiveProperties.entrySet()) {
        hiveConf.set(entry.getKey(), entry.getValue());
    }

    try {
        // forcing UGI to initialize with the security settings from the stage
        UserGroupInformation.setConfiguration(hiveConf);
        Subject subject = Subject.getSubject(AccessController.getContext());
        if (UserGroupInformation.isSecurityEnabled()) {
            loginUgi = UserGroupInformation.getUGIFromSubject(subject);
        } else {
            UserGroupInformation.loginUserFromSubject(subject);
            loginUgi = UserGroupInformation.getLoginUser();
        }
        LOG.info("Subject = {}, Principals = {}, Login UGI = {}", subject,
                subject == null ? "null" : subject.getPrincipals(), loginUgi);
        // Proxy users are not currently supported due to: https://issues.apache.org/jira/browse/HIVE-11089
    } catch (IOException e) {
        issues.add(getContext().createConfigIssue(Groups.HIVE.name(), null, Errors.HIVE_11, e.getMessage()));
    }

    try {
        issues.addAll(loginUgi.doAs(new PrivilegedExceptionAction<List<ConfigIssue>>() {
            @Override
            public List<ConfigIssue> run() {
                List<ConfigIssue> issues = new ArrayList<>();
                HiveMetaStoreClient client = null;
                try {
                    client = new HiveMetaStoreClient(hiveConf);

                    List<FieldSchema> columnNames = client.getFields(schema, tableName);
                    for (FieldSchema field : columnNames) {
                        columnsToFields.put(field.getName(), SDC_FIELD_SEP + field.getName());
                    }

                    Table table = client.getTable(schema, tableName);
                    List<FieldSchema> partitionKeys = table.getPartitionKeys();
                    for (FieldSchema field : partitionKeys) {
                        partitionsToFields.put(field.getName(), SDC_FIELD_SEP + field.getName());
                    }
                } catch (UnknownDBException e) {
                    issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "schema", Errors.HIVE_02,
                            schema));
                } catch (UnknownTableException e) {
                    issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "table", Errors.HIVE_03,
                            schema, tableName));
                } catch (MetaException e) {
                    issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "hiveUrl", Errors.HIVE_05,
                            e.getMessage()));
                } catch (TException e) {
                    issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "hiveUrl", Errors.HIVE_04,
                            e.getMessage()));
                } finally {
                    if (null != client) {
                        client.close();
                    }
                }
                return issues;
            }
        }));
    } catch (Error | IOException | InterruptedException e) {
        LOG.error("Received unknown error in validation: {}", e.toString(), e);
        issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "", Errors.HIVE_01, e.toString()));
    } catch (UndeclaredThrowableException e) {
        LOG.error("Received unknown error in validation: {}", e.toString(), e);
        issues.add(getContext().createConfigIssue(Groups.HIVE.name(), "", Errors.HIVE_01,
                e.getUndeclaredThrowable().toString()));
    }

    // Now apply any custom mappings
    if (validColumnMappings(issues)) {
        for (FieldMappingConfig mapping : columnMappings) {
            LOG.debug("Custom mapping field {} to column {}", mapping.field, mapping.columnName);
            if (columnsToFields.containsKey(mapping.columnName)) {
                LOG.debug("Mapping field {} to column {}", mapping.field, mapping.columnName);
                columnsToFields.put(mapping.columnName, mapping.field);
            } else if (partitionsToFields.containsKey(mapping.columnName)) {
                LOG.debug("Mapping field {} to partition {}", mapping.field, mapping.columnName);
                partitionsToFields.put(mapping.columnName, mapping.field);
            }
        }
    }

    dataGeneratorFactory = createDataGeneratorFactory();

    // Note that cleanup is done synchronously by default while servicing .get
    hiveConnectionPool = CacheBuilder.newBuilder().maximumSize(10).expireAfterAccess(10, TimeUnit.MINUTES)
            .removalListener(new HiveConnectionRemovalListener()).build(new HiveConnectionLoader());

    recordWriterPool = CacheBuilder.newBuilder().maximumSize(10).expireAfterAccess(10, TimeUnit.MINUTES)
            .build(new HiveRecordWriterLoader());

    LOG.debug("Total issues: {}", issues.size());
    return issues;
}