Example usage for org.apache.hadoop.security UserGroupInformation setConfiguration

Introduction

On this page you can find example usage for org.apache.hadoop.security.UserGroupInformation.setConfiguration.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void setConfiguration(Configuration conf) 

Document

Set the static configuration for UGI.
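Before logging in via Kerberos, callers typically install a Configuration whose hadoop.security.authentication key is set to kerberos. The following minimal sketch illustrates the pattern; it is not taken from any of the projects below, and the principal and keytab path are hypothetical placeholders:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiConfigurationExample {
    public static void main(String[] args) throws IOException {
        // Start from an empty Configuration so no on-disk *-site.xml defaults leak in
        Configuration conf = new Configuration(false);
        // Switch Hadoop from "simple" (OS user) to "kerberos" authentication
        conf.set("hadoop.security.authentication", "kerberos");
        // setConfiguration installs static, JVM-wide state shared by all UGI instances
        UserGroupInformation.setConfiguration(conf);
        // Hypothetical principal and keytab; real values come from the deployment
        UserGroupInformation.loginUserFromKeytab("alice@EXAMPLE.COM", "/etc/security/keytabs/alice.keytab");
        System.out.println("Logged in as " + UserGroupInformation.getLoginUser());
    }
}

Because setConfiguration mutates JVM-global state, tests usually reset it afterwards with UserGroupInformation.setConfiguration(new Configuration(false)), as the first example below does.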

Usage

From source file: org.apache.accumulo.test.replication.KerberosReplicationIT.java

License: Apache License

@After
public void teardown() throws Exception {
    if (null != peer) {
        peer.stop();
    }
    if (null != primary) {
        primary.stop();
    }
    UserGroupInformation.setConfiguration(new Configuration(false));
}

From source file: org.apache.accumulo.test.security.KerberosClientOptsTest.java

License: Apache License

@Before
public void resetUgiForKrb() {
    Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
}

From source file: org.apache.ambari.view.hive.client.Connection.java

License: Apache License

/**
 * Based on JDBC implementation of HiveConnection.createBinaryTransport
 *
 * @return transport
 * @throws HiveClientException
 */
protected TTransport getTransport() throws HiveClientException, TTransportException {
    TTransport transport;
    boolean assumeSubject = Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT
            .equals(authParams.get(Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE));
    try {
        if (!Utils.HiveAuthenticationParams.AUTH_SIMPLE
                .equalsIgnoreCase(authParams.get(Utils.HiveAuthenticationParams.AUTH_TYPE))) {
            // If Kerberos
            Map<String, String> saslProps = new HashMap<String, String>();
            SaslQOP saslQOP = SaslQOP.AUTH;
            if (authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL)) {
                if (authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_QOP)) {
                    try {
                        saslQOP = SaslQOP.fromString(authParams.get(Utils.HiveAuthenticationParams.AUTH_QOP));
                    } catch (IllegalArgumentException e) {
                        throw new HiveClientException("Invalid " + Utils.HiveAuthenticationParams.AUTH_QOP
                                + " parameter. " + e.getMessage(), e);
                    }
                }
                saslProps.put(Sasl.QOP, saslQOP.toString());
                saslProps.put(Sasl.SERVER_AUTH, "true");

                Configuration conf = new Configuration();
                conf.set("hadoop.security.authentication", "kerberos");
                UserGroupInformation.setConfiguration(conf);

                transport = KerberosSaslHelper.getKerberosTransport(
                        authParams.get(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL), host,
                        HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps, assumeSubject);
            } else {
                // If there's a delegation token available then use token based connection
                String tokenStr = getClientDelegationToken(authParams);
                if (tokenStr != null) {
                    transport = KerberosSaslHelper.getTokenTransport(tokenStr, host,
                            HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps);
                } else {
                    // we are using PLAIN Sasl connection with user/password
                    String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER,
                            getUsername());
                    String passwd = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD,
                            Utils.HiveAuthenticationParams.ANONYMOUS_USER);
                    // Note: Thrift returns an SSL socket that is already bound to the specified host:port
                    // Therefore an open called on this would be a no-op later
                    // Hence, any TTransportException related to connecting with the peer are thrown here.
                    // Bubbling them up the call hierarchy so that a retry can happen in openTransport,
                    // if dynamic service discovery is configured.
                    if (isSslConnection()) {
                        // get SSL socket
                        String sslTrustStore = authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE);
                        String sslTrustStorePassword = authParams
                                .get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE_PASSWORD);
                        if (sslTrustStore == null || sslTrustStore.isEmpty()) {
                            transport = HiveAuthFactory.getSSLSocket(host, port, 10000);
                        } else {
                            transport = HiveAuthFactory.getSSLSocket(host, port, 10000, sslTrustStore,
                                    sslTrustStorePassword);
                        }
                    } else {
                        // get non-SSL socket transport
                        transport = HiveAuthFactory.getSocketTransport(host, port, 10000);
                    }
                    // Overlay the SASL transport on top of the base socket transport (SSL or non-SSL)
                    transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
                }
            }
        } else {
            //NOSASL
            return HiveAuthFactory.getSocketTransport(host, port, 10000);
        }
    } catch (SaslException e) {
        throw new HiveClientException("Could not create secure connection to " + host + ": " + e.getMessage(),
                e);
    }
    return transport;
}

From source file: org.apache.ambari.view.slider.SliderAppsViewControllerImpl.java

License: Apache License

private <T> T invokeHDFSClientRunnable(final HDFSClientRunnable<T> runnable,
        final Map<String, String> hadoopConfigs) throws IOException, InterruptedException {
    ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
    try {
        boolean securityEnabled = Boolean.valueOf(hadoopConfigs.get("security_enabled"));
        final HdfsConfiguration hdfsConfiguration = new HdfsConfiguration();
        for (Entry<String, String> entry : hadoopConfigs.entrySet()) {
            hdfsConfiguration.set(entry.getKey(), entry.getValue());
        }
        UserGroupInformation.setConfiguration(hdfsConfiguration);
        UserGroupInformation sliderUser;
        String loggedInUser = getUserToRunAs(hadoopConfigs);
        if (securityEnabled) {
            String viewPrincipal = getViewParameterValue(PARAM_VIEW_PRINCIPAL);
            String viewPrincipalKeytab = getViewParameterValue(PARAM_VIEW_PRINCIPAL_KEYTAB);
            UserGroupInformation ambariUser = UserGroupInformation
                    .loginUserFromKeytabAndReturnUGI(viewPrincipal, viewPrincipalKeytab);
            if (loggedInUser.equals(ambariUser.getShortUserName())) {
                // HDFS throws an exception when the caller tries to impersonate themselves:
                // User: admin@EXAMPLE.COM is not allowed to impersonate admin
                sliderUser = ambariUser;
            } else {
                sliderUser = UserGroupInformation.createProxyUser(loggedInUser, ambariUser);
            }
        } else {
            sliderUser = UserGroupInformation.getBestUGI(null, loggedInUser);
        }
        try {
            T value = sliderUser.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    String fsPath = hadoopConfigs.get("fs.defaultFS");
                    FileSystem fs = FileSystem.get(URI.create(fsPath), hdfsConfiguration);
                    try {
                        return runnable.run(fs);
                    } finally {
                        fs.close();
                    }
                }
            });
            return value;
        } catch (UndeclaredThrowableException e) {
            throw e;
        }
    } finally {
        Thread.currentThread().setContextClassLoader(currentClassLoader);
    }
}

From source file: org.apache.ambari.view.slider.SliderAppsViewControllerImpl.java

License: Apache License

private <T> T invokeSliderClientRunnable(final SliderClientContextRunnable<T> runnable)
        throws IOException, InterruptedException, YarnException {
    ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
    try {
        boolean securityEnabled = Boolean.valueOf(getHadoopConfigs().get("security_enabled"));
        UserGroupInformation.setConfiguration(getSliderClientConfiguration());
        UserGroupInformation sliderUser;
        String loggedInUser = getUserToRunAs();
        if (securityEnabled) {
            String viewPrincipal = getViewParameterValue(PARAM_VIEW_PRINCIPAL);
            String viewPrincipalKeytab = getViewParameterValue(PARAM_VIEW_PRINCIPAL_KEYTAB);
            UserGroupInformation ambariUser = UserGroupInformation
                    .loginUserFromKeytabAndReturnUGI(viewPrincipal, viewPrincipalKeytab);
            if (loggedInUser.equals(ambariUser.getShortUserName())) {
                // HDFS throws an exception when the caller tries to impersonate themselves:
                // User: admin@EXAMPLE.COM is not allowed to impersonate admin
                sliderUser = ambariUser;
            } else {
                sliderUser = UserGroupInformation.createProxyUser(loggedInUser, ambariUser);
            }
        } else {
            sliderUser = UserGroupInformation.getBestUGI(null, loggedInUser);
        }
        try {
            T value = sliderUser.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    final SliderClient sliderClient = createSliderClient();
                    try {
                        return runnable.run(sliderClient);
                    } finally {
                        destroySliderClient(sliderClient);
                    }
                }
            });
            return value;
        } catch (UndeclaredThrowableException e) {
            Throwable cause = e.getCause();
            if (cause instanceof YarnException) {
                YarnException ye = (YarnException) cause;
                throw ye;
            }
            throw e;
        }
    } finally {
        Thread.currentThread().setContextClassLoader(currentClassLoader);
    }
}

From source file: org.apache.atlas.hook.AtlasTopicCreator.java

License: Apache License

@VisibleForTesting
protected boolean handleSecurity(Configuration atlasProperties) {
    if (AuthenticationUtil.isKerberosAuthenticationEnabled(atlasProperties)) {
        String kafkaPrincipal = atlasProperties.getString("atlas.notification.kafka.service.principal");
        String kafkaKeyTab = atlasProperties.getString("atlas.notification.kafka.keytab.location");
        org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
        SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, hadoopConf);
        try {
            String serverPrincipal = SecurityUtil.getServerPrincipal(kafkaPrincipal, (String) null);
            UserGroupInformation.setConfiguration(hadoopConf);
            UserGroupInformation.loginUserFromKeytab(serverPrincipal, kafkaKeyTab);
        } catch (IOException e) {
            LOG.warn("Could not login as {} from keytab file {}", kafkaPrincipal, kafkaKeyTab, e);
            return false;
        }
    }
    return true;
}

From source file: org.apache.atlas.security.SecureClientUtils.java

License: Apache License

public static URLConnectionClientHandler getClientConnectionHandler(DefaultClientConfig config,
        org.apache.commons.configuration.Configuration clientConfig, String doAsUser,
        final UserGroupInformation ugi) {
    config.getProperties().put(URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND,
            true);
    Configuration conf = new Configuration();
    conf.addResource(conf.get(SSLFactory.SSL_CLIENT_CONF_KEY, SecurityProperties.SSL_CLIENT_PROPERTIES));
    UserGroupInformation.setConfiguration(conf);
    final ConnectionConfigurator connConfigurator = newConnConfigurator(conf);
    String authType = "simple";
    if (clientConfig != null) {
        authType = clientConfig.getString("atlas.http.authentication.type", "simple");
    }
    Authenticator authenticator = new PseudoDelegationTokenAuthenticator();
    if (!authType.equals("simple")) {
        authenticator = new KerberosDelegationTokenAuthenticator();
    }
    authenticator.setConnectionConfigurator(connConfigurator);
    final DelegationTokenAuthenticator finalAuthenticator = (DelegationTokenAuthenticator) authenticator;
    final DelegationTokenAuthenticatedURL.Token token = new DelegationTokenAuthenticatedURL.Token();
    HttpURLConnectionFactory httpURLConnectionFactory = null;
    try {
        UserGroupInformation ugiToUse = ugi != null ? ugi : UserGroupInformation.getCurrentUser();
        final UserGroupInformation actualUgi = (ugiToUse
                .getAuthenticationMethod() == UserGroupInformation.AuthenticationMethod.PROXY)
                        ? ugiToUse.getRealUser()
                        : ugiToUse;
        LOG.info("Real User: {}, is from ticket cache? {}", actualUgi, actualUgi.isLoginTicketBased());
        if (StringUtils.isEmpty(doAsUser)) {
            doAsUser = actualUgi.getShortUserName();
        }
        LOG.info("doAsUser: {}", doAsUser);
        final String finalDoAsUser = doAsUser;
        httpURLConnectionFactory = new HttpURLConnectionFactory() {
            @Override
            public HttpURLConnection getHttpURLConnection(final URL url) throws IOException {
                try {
                    return actualUgi.doAs(new PrivilegedExceptionAction<HttpURLConnection>() {
                        @Override
                        public HttpURLConnection run() throws Exception {
                            try {
                                return new DelegationTokenAuthenticatedURL(finalAuthenticator, connConfigurator)
                                        .openConnection(url, token, finalDoAsUser);
                            } catch (Exception e) {
                                throw new IOException(e);
                            }
                        }
                    });
                } catch (Exception e) {
                    if (e instanceof IOException) {
                        throw (IOException) e;
                    } else {
                        throw new IOException(e);
                    }
                }
            }
        };
    } catch (IOException e) {
        LOG.warn("Error obtaining user", e);
    }

    return new URLConnectionClientHandler(httpURLConnectionFactory);
}

From source file: org.apache.atlas.web.listeners.LoginProcessor.java

License: Apache License

protected void doServiceLogin(Configuration hadoopConfig,
        org.apache.commons.configuration.Configuration configuration) {
    UserGroupInformation.setConfiguration(hadoopConfig);

    UserGroupInformation ugi = null;
    UserGroupInformation.AuthenticationMethod authenticationMethod = SecurityUtil
            .getAuthenticationMethod(hadoopConfig);
    try {
        if (authenticationMethod == UserGroupInformation.AuthenticationMethod.SIMPLE) {
            UserGroupInformation.loginUserFromSubject(null);
        } else if (authenticationMethod == UserGroupInformation.AuthenticationMethod.KERBEROS) {
            String bindAddress = getHostname(configuration);
            UserGroupInformation.loginUserFromKeytab(
                    getServerPrincipal(configuration.getString(AUTHENTICATION_PRINCIPAL), bindAddress),
                    configuration.getString(AUTHENTICATION_KEYTAB));
        }
        LOG.info("Logged in user {}", UserGroupInformation.getLoginUser());
    } catch (IOException e) {
        throw new IllegalStateException(String.format("Unable to perform %s login.", authenticationMethod), e);
    }
}

From source file: org.apache.coheigea.bigdata.hdfs.HDFSKerberosTest.java

License: Apache License

@org.junit.Test
public void readTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();

    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));

    // Now try to read the file as "bob" - this should be allowed (by the policy - user)
    final Configuration conf = new Configuration();
    conf.set("fs.defaultFS", defaultFs);
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);

    String basedir = System.getProperty("basedir");
    if (basedir == null) {
        basedir = new File(".").getCanonicalPath();
    }

    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            FileSystem fs = FileSystem.get(conf);

            // Read the file
            FSDataInputStream in = fs.open(file);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(in, output);
            String content = new String(output.toByteArray());
            Assert.assertTrue(content.startsWith("data0"));

            fs.close();
            return null;
        }
    });
}

From source file: org.apache.drill.exec.rpc.security.kerberos.KerberosFactory.java

License: Apache License

@Override
public UserGroupInformation createAndLoginUser(final Map<String, ?> properties) throws IOException {
    final Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
            UserGroupInformation.AuthenticationMethod.KERBEROS.toString());
    UserGroupInformation.setConfiguration(conf);

    final String keytab = (String) properties.get(DrillProperties.KEYTAB);
    final boolean assumeSubject = properties.containsKey(DrillProperties.KERBEROS_FROM_SUBJECT)
            && Boolean.parseBoolean((String) properties.get(DrillProperties.KERBEROS_FROM_SUBJECT));
    try {
        final UserGroupInformation ugi;
        if (assumeSubject) {
            ugi = UserGroupInformation.getUGIFromSubject(Subject.getSubject(AccessController.getContext()));
            logger.debug("Assuming subject for {}.", ugi.getShortUserName());
        } else {
            if (keytab != null) {
                ugi = UserGroupInformation
                        .loginUserFromKeytabAndReturnUGI((String) properties.get(DrillProperties.USER), keytab);
                logger.debug("Logged in {} using keytab.", ugi.getShortUserName());
            } else {
                // includes Kerberos ticket login
                ugi = UserGroupInformation.getCurrentUser();
                logger.debug("Logged in {} using ticket.", ugi.getShortUserName());
            }
        }
        return ugi;
    } catch (final IOException e) {
        logger.debug("Login failed.", e);
        final Throwable cause = e.getCause();
        if (cause instanceof LoginException) {
            throw new SaslException("Failed to login.", cause);
        }
        throw new SaslException("Unexpected failure trying to login.", cause);
    }
}