Example usage for org.apache.hadoop.security UserGroupInformation setConfiguration

List of usage examples for org.apache.hadoop.security UserGroupInformation setConfiguration

Introduction

On this page you can find example usages of org.apache.hadoop.security UserGroupInformation setConfiguration.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void setConfiguration(Configuration conf) 

Document

Set the static configuration for UGI.
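
Because the configuration is static, the examples below all follow the same basic pattern: build a Configuration, enable the desired authentication method, hand it to UserGroupInformation.setConfiguration, and only then perform the login. Here is a minimal sketch of that sequence; the class name, principal, and keytab path are placeholders for illustration only, not values taken from any example on this page.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiSetConfigurationSketch {
    public static void main(String[] args) throws IOException {
        // Build a configuration that enables Kerberos authentication.
        final Configuration conf = new Configuration();
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                UserGroupInformation.AuthenticationMethod.KERBEROS.toString());

        // Apply it to the static UGI state before any login or getLoginUser() call.
        UserGroupInformation.setConfiguration(conf);

        // Placeholder principal and keytab path, for illustration only.
        UserGroupInformation.loginUserFromKeytab("primary/host@EXAMPLE.COM",
                "/etc/security/keytabs/service.keytab");

        System.out.println("Logged in as: " + UserGroupInformation.getLoginUser().getUserName());
    }
}

Note that the configuration applies process-wide; some callers, such as the SpnegoConfig example below, temporarily swap in a Kerberos-enabled Configuration for the keytab login and then restore a default Configuration so that unrelated users of the static UGI state are not affected.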

Usage

From source file:org.apache.drill.exec.rpc.security.plain.PlainFactory.java

License:Apache License

@Override
public UserGroupInformation createAndLoginUser(Map<String, ?> properties) throws IOException {
    final Configuration conf = new Configuration();
    UserGroupInformation.setConfiguration(conf);
    try {
        return UserGroupInformation.getCurrentUser();
    } catch (final IOException e) {
        logger.debug("Login failed.", e);
        final Throwable cause = e.getCause();
        if (cause instanceof LoginException) {
            throw new SaslException("Failed to login.", cause);
        }
        throw new SaslException("Unexpected failure trying to login. ", cause);
    }
}

From source file:org.apache.drill.exec.server.BootStrapContext.java

License:Apache License

private void login(final DrillConfig config) throws DrillbitStartupException {
    try {
        if (config.hasPath(SERVICE_PRINCIPAL)) {
            // providing a service principal => Kerberos mechanism
            final Configuration loginConf = new Configuration();
            loginConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.KERBEROS.toString());

            // set optional user name mapping
            if (config.hasPath(KERBEROS_NAME_MAPPING)) {
                loginConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTH_TO_LOCAL,
                        config.getString(KERBEROS_NAME_MAPPING));
            }

            UserGroupInformation.setConfiguration(loginConf);

            // service principal canonicalization
            final String principal = config.getString(SERVICE_PRINCIPAL);
            final String[] parts = KerberosUtil.splitPrincipalIntoParts(principal);
            if (parts.length != 3) {
                throw new DrillbitStartupException(String.format(
                        "Invalid %s, Drill service principal must be of format: primary/instance@REALM",
                        SERVICE_PRINCIPAL));
            }
            parts[1] = KerberosUtil.canonicalizeInstanceName(parts[1], hostName);

            final String canonicalizedPrincipal = KerberosUtil.getPrincipalFromParts(parts[0], parts[1],
                    parts[2]);
            final String keytab = config.getString(SERVICE_KEYTAB_LOCATION);

            // login to KDC (AS)
            // Note that this call must happen before any call to UserGroupInformation#getLoginUser,
            // but there is no way to enforce the order (this static init. call and parameters from
            // DrillConfig are both required).
            UserGroupInformation.loginUserFromKeytab(canonicalizedPrincipal, keytab);

            logger.info("Process user name: '{}' and logged in successfully as '{}'", processUserName,
                    canonicalizedPrincipal);
        } else {
            UserGroupInformation.getLoginUser(); // init
        }

        // ugi does not support logout
    } catch (final IOException e) {
        throw new DrillbitStartupException("Failed to login.", e);
    }

}

From source file:org.apache.drill.exec.server.rest.auth.SpnegoConfig.java

License:Apache License

private UserGroupInformation loginAndReturnUgi() throws DrillException {

    validateSpnegoConfig();

    UserGroupInformation ugi;
    try {
        // Check if security is not enabled and try to set the security parameter to login the principal.
        // After the login is performed reset the static UGI state.
        if (!UserGroupInformation.isSecurityEnabled()) {
            final Configuration newConfig = new Configuration();
            newConfig.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.KERBEROS.toString());

            if (clientNameMapping != null) {
                newConfig.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTH_TO_LOCAL, clientNameMapping);
            }

            UserGroupInformation.setConfiguration(newConfig);
            ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);

            // Reset the original configuration for static UGI
            UserGroupInformation.setConfiguration(new Configuration());
        } else {
            // Let's not overwrite the rules here since it might be possible that CUSTOM security is configured for
            // JDBC/ODBC with default rules. If Kerberos was enabled then the correct rules must already be set
            ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
        }
    } catch (Exception e) {
        throw new DrillException(String.format("Login failed for %s with given keytab", principal), e);
    }
    return ugi;
}

From source file:org.apache.druid.indexer.JobHelper.java

License:Apache License

/**
 * Authenticates against a secured Hadoop cluster.
 * In case of any bug fix, make sure to fix the code at HdfsStorageAuthentication#authenticate as well.
 *
 * @param config configuration containing the principal name and keytab path.
 */
public static void authenticate(HadoopDruidIndexerConfig config) {
    String principal = config.HADOOP_KERBEROS_CONFIG.getPrincipal();
    String keytab = config.HADOOP_KERBEROS_CONFIG.getKeytab();
    if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(keytab)) {
        Configuration conf = new Configuration();
        UserGroupInformation.setConfiguration(conf);
        if (UserGroupInformation.isSecurityEnabled()) {
            try {
                if (UserGroupInformation.getCurrentUser().hasKerberosCredentials() == false
                        || !UserGroupInformation.getCurrentUser().getUserName().equals(principal)) {
                    log.info("trying to authenticate user [%s] with keytab [%s]", principal, keytab);
                    UserGroupInformation.loginUserFromKeytab(principal, keytab);
                }
            } catch (IOException e) {
                throw new ISE(e, "Failed to authenticate user principal [%s] with keytab [%s]", principal,
                        keytab);
            }
        }
    }
}

From source file:org.apache.druid.security.kerberos.DruidKerberosUtil.java

License:Apache License

public static void authenticateIfRequired(String internalClientPrincipal, String internalClientKeytab) {
    if (!Strings.isNullOrEmpty(internalClientPrincipal) && !Strings.isNullOrEmpty(internalClientKeytab)) {
        Configuration conf = new Configuration();
        conf.setClassLoader(DruidKerberosModule.class.getClassLoader());
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
        try {
            //login for the first time.
            if (UserGroupInformation.getCurrentUser().hasKerberosCredentials() == false
                    || !UserGroupInformation.getCurrentUser().getUserName().equals(internalClientPrincipal)) {
                log.info("trying to authenticate user [%s] with keytab [%s]", internalClientPrincipal,
                        internalClientKeytab);
                UserGroupInformation.loginUserFromKeytab(internalClientPrincipal, internalClientKeytab);
                return;
            }
            //try to relogin in case the TGT expired
            if (UserGroupInformation.isLoginKeytabBased()) {
                log.info("Re-Login from key tab [%s] with principal [%s]", internalClientKeytab,
                        internalClientPrincipal);
                UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                return;
            } else if (UserGroupInformation.isLoginTicketBased()) {
                log.info("Re-Login from Ticket cache");
                UserGroupInformation.getLoginUser().reloginFromTicketCache();
                return;
            }
        } catch (IOException e) {
            throw new ISE(e, "Failed to authenticate user principal [%s] with keytab [%s]",
                    internalClientPrincipal, internalClientKeytab);
        }
    }
}

From source file:org.apache.eagle.app.utils.HadoopSecurityUtil.java

License:Apache License

public static void login(Configuration kConfig) throws IOException {
    String keytab = kConfig.get(EAGLE_KEYTAB_FILE_KEY);
    String principal = kConfig.get(EAGLE_PRINCIPAL_KEY);
    if (keytab == null || principal == null || keytab.isEmpty() || principal.isEmpty()) {
        return;
    }

    kConfig.setBoolean("hadoop.security.authorization", true);
    kConfig.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(kConfig);
    UserGroupInformation.loginUserFromKeytab(kConfig.get(EAGLE_PRINCIPAL_KEY),
            kConfig.get(EAGLE_KEYTAB_FILE_KEY));
}

From source file:org.apache.eagle.jpm.util.HDFSUtil.java

License:Apache License

public static void login(Configuration kConfig) throws IOException {
    if (kConfig.get("hdfs.kerberos.principal") == null || kConfig.get("hdfs.kerberos.principal").isEmpty()) {
        if (kConfig.get("hadoop.job.ugi") != null) {
            System.setProperty("HADOOP_USER_NAME", kConfig.get("hadoop.job.ugi"));
        }
        return;
    }
    kConfig.setBoolean("hadoop.security.authorization", true);
    kConfig.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(kConfig);
    UserGroupInformation.loginUserFromKeytab(kConfig.get("hdfs.kerberos.principal"),
            kConfig.get("hdfs.keytab.file"));
}

From source file:org.apache.eagle.security.util.HadoopSecurityUtil.java

License:Apache License

public static void login(Configuration kConfig) throws IOException {
    if (kConfig.get(EAGLE_KEYTAB_FILE_KEY) == null || kConfig.get(EAGLE_USER_NAME_KEY) == null)
        return;

    kConfig.setBoolean("hadoop.security.authorization", true);
    kConfig.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(kConfig);
    UserGroupInformation.loginUserFromKeytab(kConfig.get(EAGLE_USER_NAME_KEY),
            kConfig.get(EAGLE_KEYTAB_FILE_KEY));
}

From source file:org.apache.falcon.hadoop.HadoopClientFactoryTest.java

License:Apache License

@Test
public void testCreateFileSystem() throws Exception {
    Configuration conf = embeddedCluster.getConf();

    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation realUser = UserGroupInformation.createUserForTesting(FalconTestUtil.TEST_USER_2,
            new String[] { "testgroup" });
    UserGroupInformation.createProxyUserForTesting("proxyuser", realUser, new String[] { "proxygroup" });

    URI uri = new URI(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
    Assert.assertNotNull(uri);
    FileSystem fs = HadoopClientFactory.get().createFileSystem(realUser, uri, conf);
    Assert.assertNotNull(fs);
}

From source file:org.apache.falcon.hadoop.HadoopClientFactoryTest.java

License:Apache License

@Test
public void testCreateFileSystemWithUser() throws Exception {
    Configuration conf = embeddedCluster.getConf();

    UserGroupInformation realUser = UserGroupInformation.createUserForTesting(FalconTestUtil.TEST_USER_2,
            new String[] { "testgroup" });
    UserGroupInformation.createProxyUserForTesting("proxyuser", realUser, new String[] { "proxygroup" });
    UserGroupInformation.setConfiguration(conf);

    URI uri = new URI(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
    Assert.assertNotNull(uri);

    CurrentUser.authenticate(System.getProperty("user.name"));
    FileSystem fs = HadoopClientFactory.get().createFileSystem(CurrentUser.getProxyUGI(), uri, conf);
    Assert.assertNotNull(fs);
}