Example usage for org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled

Introduction

On this page you can find example usages of org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled.

Prototype

public static boolean isSecurityEnabled() 

Document

Determines whether UserGroupInformation is using Kerberos to establish user identities, or is relying on simple authentication.
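
A minimal sketch of the typical call pattern, assuming a Hadoop configuration is available on the classpath; the principal and keytab path below are placeholders, not values taken from the examples on this page:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class SecurityCheckExample {
    public static void main(String[] args) throws IOException {
        // The value of hadoop.security.authentication ("simple" or "kerberos")
        // in the loaded configuration drives isSecurityEnabled().
        Configuration conf = new Configuration();
        UserGroupInformation.setConfiguration(conf);

        if (UserGroupInformation.isSecurityEnabled()) {
            // Kerberos is enabled: log in from a keytab (placeholder principal and path).
            UserGroupInformation.loginUserFromKeytab(
                    "service/host.example.com@EXAMPLE.COM",
                    "/etc/security/keytabs/service.keytab");
        }

        // In simple-auth mode this reflects the OS user; in secure mode, the Kerberos login.
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        System.out.println("Login user: " + loginUser.getUserName()
                + ", security enabled: " + UserGroupInformation.isSecurityEnabled());
    }
}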

Usage

From source file: org.apache.falcon.hadoop.HadoopClientFactory.java

License: Apache License

private void validateInputs(UserGroupInformation ugi, final URI uri, final Configuration conf)
        throws FalconException {
    Validate.notNull(ugi, "ugi cannot be null");
    Validate.notNull(conf, "configuration cannot be null");

    try {
        if (UserGroupInformation.isSecurityEnabled()) {
            LOG.debug("Revalidating Auth Token with auth method {}",
                    UserGroupInformation.getLoginUser().getAuthenticationMethod().name());
            UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
        }
    } catch (IOException ioe) {
        throw new FalconException(
                "Exception while getting FileSystem. Unable to check TGT for user " + ugi.getShortUserName(),
                ioe);
    }

    validateNameNode(uri, conf);
}

From source file: org.apache.falcon.hadoop.HadoopClientFactory.java

License: Apache License

/**
 * This method validates if the execute url is able to reach the MR endpoint.
 *
 * @param executeUrl jt url or RM url
 * @throws IOException
 */
public void validateJobClient(String executeUrl, String rmPrincipal) throws IOException {
    final JobConf jobConf = new JobConf();
    jobConf.set(MR_JT_ADDRESS_KEY, executeUrl);
    jobConf.set(YARN_RM_ADDRESS_KEY, executeUrl);
    /**
     * It is possible that the RM/JT principal can be different between clusters,
     * for example, the cluster is using a different KDC with cross-domain trust
     * with the Falcon KDC. In that case, we want to allow the user to provide
     * the RM principal similar to NN principal.
     */
    if (UserGroupInformation.isSecurityEnabled() && StringUtils.isNotEmpty(rmPrincipal)) {
        jobConf.set(SecurityUtil.RM_PRINCIPAL, rmPrincipal);
    }
    UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
    try {
        JobClient jobClient = loginUser.doAs(new PrivilegedExceptionAction<JobClient>() {
            public JobClient run() throws Exception {
                return new JobClient(jobConf);
            }
        });

        jobClient.getClusterStatus().getMapTasks();
    } catch (InterruptedException e) {
        throw new IOException("Exception creating job client:" + e.getMessage(), e);
    }
}

From source file: org.apache.falcon.recipe.RecipeTool.java

License: Apache License

private FileSystem getFileSystemForHdfs(final Properties recipeProperties, final Configuration conf)
        throws Exception {
    String storageEndpoint = RecipeToolOptions.CLUSTER_HDFS_WRITE_ENDPOINT.getName();
    String nameNode = recipeProperties.getProperty(storageEndpoint);
    conf.set(FS_DEFAULT_NAME_KEY, nameNode);
    if (UserGroupInformation.isSecurityEnabled()) {
        String nameNodePrincipal = recipeProperties
                .getProperty(RecipeToolOptions.RECIPE_NN_PRINCIPAL.getName());
        conf.set(NN_PRINCIPAL, nameNodePrincipal);
    }
    return createFileSystem(UserGroupInformation.getLoginUser(), new URI(nameNode), conf);
}

From source file: org.apache.falcon.security.BasicAuthFilter.java

License: Apache License

/**
 * Returns the configuration from Oozie configuration to be used by the authentication filter.
 * <p/>
 * All properties from Oozie configuration which name starts with {@link #FALCON_PREFIX} will
 * be returned. The keys of the returned properties are trimmed from the {@link #FALCON_PREFIX}
 * prefix, for example the Oozie configuration property name 'oozie.authentication.type' will
 * be just 'type'.
 *
 * @param configPrefix configuration prefix, this parameter is ignored by this implementation.
 * @param filterConfig filter configuration, this parameter is ignored by this implementation.
 * @return all Oozie configuration properties prefixed with {@link #FALCON_PREFIX}, without the
 * prefix.
 */
@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
    Properties authProperties = new Properties();
    Properties configProperties = StartupProperties.get();

    // setting the cookie path to root '/' so it is used for all resources.
    authProperties.setProperty(AuthenticationFilter.COOKIE_PATH, "/");

    for (Map.Entry entry : configProperties.entrySet()) {
        String name = (String) entry.getKey();
        if (name.startsWith(FALCON_PREFIX)) {
            String value = (String) entry.getValue();
            name = name.substring(FALCON_PREFIX.length());
            authProperties.setProperty(name, value);
        }
    }

    if (UserGroupInformation.isSecurityEnabled()) { // replace _HOST in principal
        String principal = getKerberosPrincipalWithSubstitutedHost(configProperties);
        // principal cannot be null in secure mode, is validated in submission
        authProperties.setProperty(KerberosAuthenticationHandler.PRINCIPAL, principal);
    }

    return authProperties;
}

From source file: org.apache.falcon.security.BasicAuthFilterTest.java

License: Apache License

@Test
public void testGetKerberosPrincipalWithSubstitutedHostSecure() throws Exception {
    String principal = StartupProperties.get().getProperty(BasicAuthFilter.KERBEROS_PRINCIPAL);

    String expectedPrincipal = "falcon/" + SecurityUtil.getLocalHostName() + "@Example.com";
    try {
        Configuration conf = new Configuration(false);
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        Assert.assertTrue(UserGroupInformation.isSecurityEnabled());

        StartupProperties.get().setProperty(BasicAuthFilter.KERBEROS_PRINCIPAL, "falcon/_HOST@Example.com");
        BasicAuthFilter filter = new BasicAuthFilter();
        Properties properties = filter.getConfiguration(BasicAuthFilter.FALCON_PREFIX, null);
        Assert.assertEquals(properties.get(KerberosAuthenticationHandler.PRINCIPAL), expectedPrincipal);
    } finally {
        StartupProperties.get().setProperty(BasicAuthFilter.KERBEROS_PRINCIPAL, principal);
    }
}

From source file: org.apache.falcon.security.BasicAuthFilterTest.java

License: Apache License

@Test
public void testGetKerberosPrincipalWithSubstitutedHostNonSecure() throws Exception {
    String principal = StartupProperties.get().getProperty(BasicAuthFilter.KERBEROS_PRINCIPAL);
    Configuration conf = new Configuration(false);
    conf.set("hadoop.security.authentication", "simple");
    UserGroupInformation.setConfiguration(conf);
    Assert.assertFalse(UserGroupInformation.isSecurityEnabled());

    BasicAuthFilter filter = new BasicAuthFilter();
    Properties properties = filter.getConfiguration(BasicAuthFilter.FALCON_PREFIX, null);
    Assert.assertEquals(properties.get(KerberosAuthenticationHandler.PRINCIPAL), principal);
}

From source file: org.apache.falcon.security.FalconAuthenticationFilter.java

License: Apache License

/**
 * Returns the configuration from Oozie configuration to be used by the authentication filter.
 * <p/>
 * All properties from Oozie configuration which name starts with {@link #FALCON_PREFIX} will
 * be returned. The keys of the returned properties are trimmed from the {@link #FALCON_PREFIX}
 * prefix, for example the Oozie configuration property name 'oozie.authentication.type' will
 * be just 'type'.
 *
 * @param configPrefix configuration prefix, this parameter is ignored by this implementation.
 * @param filterConfig filter configuration, this parameter is ignored by this implementation.
 * @return all Oozie configuration properties prefixed with {@link #FALCON_PREFIX}, without the
 * prefix.
 */
@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
    Properties authProperties = new Properties();
    Properties configProperties = StartupProperties.get();

    // setting the cookie path to root '/' so it is used for all resources.
    authProperties.setProperty(
            org.apache.hadoop.security.authentication.server.AuthenticationFilter.COOKIE_PATH, "/");

    for (Map.Entry entry : configProperties.entrySet()) {
        String name = (String) entry.getKey();
        if (name.startsWith(FALCON_PREFIX)) {
            String value = (String) entry.getValue();
            name = name.substring(FALCON_PREFIX.length());
            authProperties.setProperty(name, value);
        }
    }

    if (UserGroupInformation.isSecurityEnabled()) { // replace _HOST in principal
        String principal = getKerberosPrincipalWithSubstitutedHost(configProperties);
        // principal cannot be null in secure mode, is validated in submission
        authProperties.setProperty(KerberosAuthenticationHandler.PRINCIPAL, principal);
    }

    return authProperties;
}

From source file: org.apache.falcon.security.FalconAuthenticationFilterTest.java

License: Apache License

@Test
public void testGetKerberosPrincipalWithSubstitutedHostSecure() throws Exception {
    String principal = StartupProperties.get().getProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL);

    String expectedPrincipal = "falcon/" + SecurityUtil.getLocalHostName().toLowerCase() + "@Example.com";
    try {
        Configuration conf = new Configuration(false);
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        Assert.assertTrue(UserGroupInformation.isSecurityEnabled());

        StartupProperties.get().setProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL,
                "falcon/_HOST@Example.com");
        FalconAuthenticationFilter filter = new FalconAuthenticationFilter();
        Properties properties = filter.getConfiguration(FalconAuthenticationFilter.FALCON_PREFIX, null);
        Assert.assertEquals(properties.get(KerberosAuthenticationHandler.PRINCIPAL), expectedPrincipal);
    } finally {
        StartupProperties.get().setProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL, principal);
    }
}

From source file: org.apache.falcon.security.FalconAuthenticationFilterTest.java

License: Apache License

@Test
public void testGetKerberosPrincipalWithSubstitutedHostNonSecure() throws Exception {
    String principal = StartupProperties.get().getProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL);
    Configuration conf = new Configuration(false);
    conf.set("hadoop.security.authentication", "simple");
    UserGroupInformation.setConfiguration(conf);
    Assert.assertFalse(UserGroupInformation.isSecurityEnabled());

    FalconAuthenticationFilter filter = new FalconAuthenticationFilter();
    Properties properties = filter.getConfiguration(FalconAuthenticationFilter.FALCON_PREFIX, null);
    Assert.assertEquals(properties.get(KerberosAuthenticationHandler.PRINCIPAL), principal);
}

From source file: org.apache.flink.runtime.security.modules.HadoopModule.java

License: Apache License

@Override
public void install(SecurityUtils.SecurityConfiguration securityConfig) throws SecurityInstallException {

    UserGroupInformation.setConfiguration(securityConfig.getHadoopConfiguration());

    try {
        if (UserGroupInformation.isSecurityEnabled() && !StringUtils.isBlank(securityConfig.getKeytab())
                && !StringUtils.isBlank(securityConfig.getPrincipal())) {
            String keytabPath = (new File(securityConfig.getKeytab())).getAbsolutePath();

            UserGroupInformation.loginUserFromKeytab(securityConfig.getPrincipal(), keytabPath);

            loginUser = UserGroupInformation.getLoginUser();

            // supplement with any available tokens
            String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
            if (fileLocation != null) {
                /*
                 * Use reflection API since the API semantics are not available in Hadoop1 profile. Below APIs are
                 * used in the context of reading the stored tokens from UGI.
                 * Credentials cred = Credentials.readTokenStorageFile(new File(fileLocation), config.hadoopConf);
                 * loginUser.addCredentials(cred);
                */
                try {
                    Method readTokenStorageFileMethod = Credentials.class.getMethod("readTokenStorageFile",
                            File.class, org.apache.hadoop.conf.Configuration.class);
                    Credentials cred = (Credentials) readTokenStorageFileMethod.invoke(null,
                            new File(fileLocation), securityConfig.getHadoopConfiguration());
                    Method addCredentialsMethod = UserGroupInformation.class.getMethod("addCredentials",
                            Credentials.class);
                    addCredentialsMethod.invoke(loginUser, cred);
                } catch (NoSuchMethodException e) {
                    LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
                } catch (InvocationTargetException e) {
                    throw e.getTargetException();
                }
            }
        } else {
            // login with current user credentials (e.g. ticket cache, OS login)
            // note that the stored tokens are read automatically
            try {
                //Use reflection API to get the login user object
                //UserGroupInformation.loginUserFromSubject(null);
                Method loginUserFromSubjectMethod = UserGroupInformation.class.getMethod("loginUserFromSubject",
                        Subject.class);
                loginUserFromSubjectMethod.invoke(null, (Subject) null);
            } catch (NoSuchMethodException e) {
                LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
            } catch (InvocationTargetException e) {
                throw e.getTargetException();
            }

            loginUser = UserGroupInformation.getLoginUser();
        }

        if (UserGroupInformation.isSecurityEnabled()) {
            // note: UGI::hasKerberosCredentials inaccurately reports false
            // for logins based on a keytab (fixed in Hadoop 2.6.1, see HADOOP-10786),
            // so we check only in ticket cache scenario.
            if (securityConfig.useTicketCache() && !loginUser.hasKerberosCredentials()) {
                // a delegation token is an adequate substitute in most cases
                if (!HadoopUtils.hasHDFSDelegationToken()) {
                    LOG.warn(
                            "Hadoop security is enabled but current login user does not have Kerberos credentials");
                }
            }
        }

        LOG.info("Hadoop user set to {}", loginUser);

    } catch (Throwable ex) {
        throw new SecurityInstallException("Unable to set the Hadoop login user", ex);
    }
}