Example usage for org.apache.hadoop.security UserGroupInformation getLoginUser

List of usage examples for org.apache.hadoop.security UserGroupInformation getLoginUser

Introduction

On this page you can find usage examples for org.apache.hadoop.security.UserGroupInformation.getLoginUser().

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getLoginUser() throws IOException 

Document

Get the currently logged in user.
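Before the per-project examples below, here is a minimal, self-contained sketch of calling getLoginUser() under default (SIMPLE) authentication; the class name and printed labels are illustrative only and not taken from any of the projects listed on this page.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class GetLoginUserExample {

    public static void main(String[] args) throws IOException {
        // UserGroupInformation reads hadoop.security.authentication from the
        // configuration to decide between SIMPLE and KERBEROS login.
        Configuration conf = new Configuration();
        UserGroupInformation.setConfiguration(conf);

        // With SIMPLE authentication the login user is derived from the OS user;
        // with KERBEROS it reflects the principal logged in via keytab or ticket cache.
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();

        System.out.println("Login user:  " + loginUser.getUserName());
        System.out.println("Short name:  " + loginUser.getShortUserName());
        System.out.println("Security on: " + UserGroupInformation.isSecurityEnabled());
    }
}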

Usage

From source file:org.apache.atlas.web.listeners.LoginProcessor.java

License:Apache License

protected void doServiceLogin(Configuration hadoopConfig,
        org.apache.commons.configuration.Configuration configuration) {
    UserGroupInformation.setConfiguration(hadoopConfig);

    UserGroupInformation ugi = null;
    UserGroupInformation.AuthenticationMethod authenticationMethod = SecurityUtil
            .getAuthenticationMethod(hadoopConfig);
    try {
        if (authenticationMethod == UserGroupInformation.AuthenticationMethod.SIMPLE) {
            UserGroupInformation.loginUserFromSubject(null);
        } else if (authenticationMethod == UserGroupInformation.AuthenticationMethod.KERBEROS) {
            String bindAddress = getHostname(configuration);
            UserGroupInformation.loginUserFromKeytab(
                    getServerPrincipal(configuration.getString(AUTHENTICATION_PRINCIPAL), bindAddress),
                    configuration.getString(AUTHENTICATION_KEYTAB));
        }
        LOG.info("Logged in user {}", UserGroupInformation.getLoginUser());
    } catch (IOException e) {
        throw new IllegalStateException(String.format("Unable to perform %s login.", authenticationMethod), e);
    }
}

From source file:org.apache.atlas.web.listeners.LoginProcessorIT.java

License:Apache License

@Test
public void testKerberosLogin() throws Exception {
    final File keytab = setupKDCAndPrincipals();

    LoginProcessor processor = new LoginProcessor() {
        @Override
        protected org.apache.commons.configuration.Configuration getApplicationConfiguration() {
            PropertiesConfiguration config = new PropertiesConfiguration();
            config.setProperty("atlas.authentication.method", "kerberos");
            config.setProperty("atlas.authentication.principal", "dgi@EXAMPLE.COM");
            config.setProperty("atlas.authentication.keytab", keytab.getAbsolutePath());
            return config;
        }

        @Override
        protected Configuration getHadoopConfiguration() {
            Configuration config = new Configuration(false);
            config.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
            config.setBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
            config.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL, kerberosRule);

            return config;
        }

        @Override
        protected boolean isHadoopCluster() {
            return true;
        }
    };
    processor.login();

    Assert.assertTrue(UserGroupInformation.getLoginUser().getShortUserName().endsWith("dgi"));
    Assert.assertNotNull(UserGroupInformation.getCurrentUser());
    Assert.assertTrue(UserGroupInformation.isLoginKeytabBased());
    Assert.assertTrue(UserGroupInformation.isSecurityEnabled());

    kdc.stop();

}

From source file:org.apache.atlas.web.security.SSLAndKerberosTest.java

License:Apache License

@BeforeClass
public void setUp() throws Exception {
    jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
    providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + jksPath.toUri();

    String persistDir = TestUtils.getTempDirectory();

    setupKDCAndPrincipals();
    setupCredentials();

    // the client will actually only leverage a subset of these properties
    final PropertiesConfiguration configuration = getSSLConfiguration(providerUrl);

    TestUtils.writeConfiguration(configuration,
            persistDir + File.separator + ApplicationProperties.APPLICATION_PROPERTIES);

    String confLocation = System.getProperty("atlas.conf");
    URL url;
    if (confLocation == null) {
        url = SSLAndKerberosTest.class.getResource("/" + ApplicationProperties.APPLICATION_PROPERTIES);
    } else {
        url = new File(confLocation, ApplicationProperties.APPLICATION_PROPERTIES).toURI().toURL();
    }
    configuration.load(url);
    configuration.setProperty(TLS_ENABLED, true);
    configuration.setProperty("atlas.http.authentication.enabled", "true");
    configuration.setProperty("atlas.http.authentication.type", "kerberos");
    configuration.setProperty("atlas.http.authentication.kerberos.principal",
            "HTTP/localhost@" + kdc.getRealm());
    configuration.setProperty("atlas.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
    configuration.setProperty("atlas.http.authentication.kerberos.name.rules",
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");

    TestUtils.writeConfiguration(configuration, persistDir + File.separator + "atlas-application.properties");

    subject = loginTestUser();
    UserGroupInformation.loginUserFromSubject(subject);
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser("testUser",
            UserGroupInformation.getLoginUser());

    dgiCLient = proxyUser.doAs(new PrivilegedExceptionAction<AtlasClient>() {
        @Override
        public AtlasClient run() throws Exception {
            return new AtlasClient(DGI_URL) {
                @Override
                protected PropertiesConfiguration getClientProperties() {
                    return configuration;
                }
            };
        }
    });

    // save original setting
    originalConf = System.getProperty("atlas.conf");
    System.setProperty("atlas.conf", persistDir);
    secureEmbeddedServer = new TestSecureEmbeddedServer(21443, getWarPath()) {
        @Override
        public PropertiesConfiguration getConfiguration() {
            return configuration;
        }
    };
    secureEmbeddedServer.getServer().start();
}

From source file:org.apache.carbondata.core.util.CarbonUtil.java

License:Apache License

/**
 * This method will be used to delete the folder and files
 *
 * @param path file path array
 * @throws Exception exception
 */
public static void deleteFoldersAndFiles(final File... path) throws CarbonUtilException {
    try {
        UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                for (int i = 0; i < path.length; i++) {
                    deleteRecursive(path[i]);
                }
                return null;
            }
        });
    } catch (IOException e) {
        throw new CarbonUtilException("Error while deleting the folders and files");
    } catch (InterruptedException e) {
        throw new CarbonUtilException("Error while deleting the folders and files");
    }

}

From source file:org.apache.carbondata.core.util.CarbonUtil.java

License:Apache License

public static void deleteFoldersAndFiles(final CarbonFile... file) throws CarbonUtilException {
    try {
        UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                for (int i = 0; i < file.length; i++) {
                    deleteRecursive(file[i]);
                }
                return null;
            }
        });
    } catch (IOException e) {
        throw new CarbonUtilException("Error while deleting the folders and files");
    } catch (InterruptedException e) {
        throw new CarbonUtilException("Error while deleting the folders and files");
    }
}

From source file:org.apache.carbondata.core.util.CarbonUtil.java

License:Apache License

public static void deleteFoldersAndFilesSilent(final CarbonFile... file) throws CarbonUtilException {
    try {
        UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                for (int i = 0; i < file.length; i++) {
                    deleteRecursiveSilent(file[i]);
                }
                return null;
            }
        });
    } catch (IOException e) {
        throw new CarbonUtilException("Error while deleting the folders and files");
    } catch (InterruptedException e) {
        throw new CarbonUtilException("Error while deleting the folders and files");
    }
}

From source file:org.apache.carbondata.spark.dictionary.server.SecureDictionaryServer.java

License:Apache License

private SecureDictionaryServer(SparkConf conf, String host, int port) {
    this.conf = conf;
    this.host = host;
    this.port = port;
    try {
        UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                startServer();
                return null;
            }
        });
    } catch (IOException | InterruptedException io) {
        LOGGER.error("Failed to start Dictionary Server in secure mode", io);
    }
}

From source file:org.apache.carbondata.spark.dictionary.server.SecureDictionaryServer.java

License:Apache License

/**
 * shutdown dictionary server
 *
 * @throws Exception
 */
@Override
public void shutdown() throws Exception {
    LOGGER.info("Shutting down dictionary server");
    try {
        UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                worker.shutdownGracefully();
                boss.shutdownGracefully();
                return null;
            }
        });
    } catch (IOException | InterruptedException e) {
        LOGGER.error("Failed to stop Dictionary Server in secure mode", e);
    }
}

From source file:org.apache.drill.exec.rpc.AbstractServerConnection.java

License:Apache License

@Override
public void initSaslServer(String mechanismName) throws SaslException {
    checkState(saslServer == null);
    try {
        this.saslServer = config.getAuthProvider().getAuthenticatorFactory(mechanismName)
                .createSaslServer(UserGroupInformation.getLoginUser(), null
        /** properties; default QOP is auth */
        );
    } catch (final IOException e) {
        getLogger().debug("Login failed.", e);
        final Throwable cause = e.getCause();
        if (cause instanceof LoginException) {
            throw new SaslException("Failed to login.", cause);
        }
        throw new SaslException("Unexpected failure trying to login.", cause);
    }
    if (saslServer == null) {
        throw new SaslException("Server could not initiate authentication. Insufficient parameters?");
    }
}

From source file:org.apache.drill.exec.rpc.AbstractServerConnection.java

License:Apache License

@Override
public void finalizeSaslSession() throws IOException {
    final String authorizationID = getSaslServer().getAuthorizationID();
    final String remoteShortName = new HadoopKerberosName(authorizationID).getShortName();
    final String localShortName = UserGroupInformation.getLoginUser().getShortUserName();
    if (!localShortName.equals(remoteShortName)) {
        throw new SaslException(String.format(
                "'primary' part of remote drillbit's service principal "
                        + "does not match with this drillbit's. Expected: '%s' Actual: '%s'",
                localShortName, remoteShortName));
    }
    getLogger().debug("Authenticated connection for {}", authorizationID);
}