Example usage for org.apache.hadoop.security UserGroupInformation hasKerberosCredentials

Introduction

On this page you can find example usages of org.apache.hadoop.security.UserGroupInformation#hasKerberosCredentials from open-source projects.

Prototype

public boolean hasKerberosCredentials() 

Document

Checks if the user is logged in using Kerberos; returns true if the subject logged in via a keytab or holds a Kerberos TGT.
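
A minimal sketch of the guard pattern the examples below share (the configuration setup and the exception choice are assumptions for illustration, not part of any one project):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosCredentialCheck {
    public static void main(String[] args) throws Exception {
        // Hadoop decides whether security is enabled from
        // hadoop.security.authentication in the configuration.
        UserGroupInformation.setConfiguration(new Configuration());

        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        if (UserGroupInformation.isSecurityEnabled() && !ugi.hasKerberosCredentials()) {
            // No TGT and no keytab login: fail fast, or fall back to
            // UserGroupInformation.loginUserFromKeytab(principal, keytab).
            throw new IllegalStateException("Security is enabled but user " + ugi.getUserName()
                    + " has no Kerberos credentials; run kinit or log in from a keytab.");
        }
    }
}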

Usage

From source file: org.apache.flink.yarn.FlinkYarnClient.java

License: Apache License

public AbstractFlinkYarnCluster deploy() throws Exception {

    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    if (UserGroupInformation.isSecurityEnabled()) {
        if (!ugi.hasKerberosCredentials()) {
            throw new YarnDeploymentException(
                    "In secure mode. Please provide Kerberos credentials in order to authenticate. "
                            + "You may use kinit to authenticate and request a TGT from the Kerberos server.");
        }
        return ugi.doAs(new PrivilegedExceptionAction<AbstractFlinkYarnCluster>() {
            @Override
            public AbstractFlinkYarnCluster run() throws Exception {
                return deployInternal();
            }
        });
    } else {
        return deployInternal();
    }
}

From source file: org.apache.flink.yarn.FlinkYarnClientBase.java

License: Apache License

@Override
public AbstractFlinkYarnCluster deploy() throws Exception {

    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    if (UserGroupInformation.isSecurityEnabled()) {
        if (!ugi.hasKerberosCredentials()) {
            throw new YarnDeploymentException(
                    "In secure mode. Please provide Kerberos credentials in order to authenticate. "
                            + "You may use kinit to authenticate and request a TGT from the Kerberos server.");
        }
        return ugi.doAs(new PrivilegedExceptionAction<AbstractFlinkYarnCluster>() {
            @Override
            public AbstractFlinkYarnCluster run() throws Exception {
                return deployInternal();
            }
        });
    } else {
        return deployInternal();
    }
}

From source file: org.apache.flume.auth.KerberosAuthenticator.java

License: Apache License

/**
 * When valid principal and keytab are provided and if authentication has
 * not yet been done for this object, this method authenticates the
 * credentials and populates the ugi. In case of null or invalid credentials
 * IllegalArgumentException is thrown. In case of failure to authenticate,
 * SecurityException is thrown. If authentication has already happened on
 * this KerberosAuthenticator object, then this method checks to see if the current
 * credentials passed are the same as the validated credentials. If not, it throws
 * an exception as this authenticator can represent only one Principal.
 *
 * @param principal
 * @param keytab
 */
public synchronized void authenticate(String principal, String keytab) {
    // sanity checking

    Preconditions.checkArgument(principal != null && !principal.isEmpty(),
            "Invalid Kerberos principal: " + String.valueOf(principal));
    Preconditions.checkArgument(keytab != null && !keytab.isEmpty(),
            "Invalid Kerberos keytab: " + String.valueOf(keytab));
    File keytabFile = new File(keytab);
    Preconditions.checkArgument(keytabFile.isFile() && keytabFile.canRead(),
            "Keytab is not a readable file: " + String.valueOf(keytab));

    // resolve the requested principal
    String resolvedPrincipal;
    try {
        // resolves _HOST pattern using standard Hadoop search/replace
        // via DNS lookup when 2nd argument is empty
        resolvedPrincipal = SecurityUtil.getServerPrincipal(principal, "");
    } catch (IOException e) {
        throw new IllegalArgumentException(
                "Host lookup error resolving kerberos principal (" + principal + "). Exception follows.", e);
    }
    Preconditions.checkNotNull(resolvedPrincipal, "Resolved Principal must not be null");

    // be cruel and unusual when user tries to login as multiple principals
    // this isn't really valid with a reconfigure but this should be rare
    // enough to warrant a restart of the agent JVM
    // TODO: find a way to interrogate the entire current config state,
    // since we don't have to be unnecessarily protective if they switch all
    // HDFS sinks to use a different principal all at once.

    KerberosUser newUser = new KerberosUser(resolvedPrincipal, keytab);
    Preconditions.checkState(prevUser == null || prevUser.equals(newUser),
            "Cannot use multiple kerberos principals in the same agent. "
                    + " Must restart agent to use new principal or keytab. " + "Previous = %s, New = %s",
            prevUser, newUser);

    // enable the kerberos mode of UGI, before doing anything else
    if (!UserGroupInformation.isSecurityEnabled()) {
        Configuration conf = new Configuration(false);
        conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
    }

    // We are interested in currently logged in user with kerberos creds
    UserGroupInformation curUser = null;
    try {
        curUser = UserGroupInformation.getLoginUser();
        if (curUser != null && !curUser.hasKerberosCredentials()) {
            curUser = null;
        }
    } catch (IOException e) {
        LOG.warn("User unexpectedly had no active login. Continuing with " + "authentication", e);
    }

    /*
     *  if ugi is not null,
     *     if ugi matches currently logged in kerberos user, we are good
     *     else we are logged out, so relogin our ugi
     *  else if ugi is null, login and populate state
     */
    try {
        if (ugi != null) {
            if (curUser != null && curUser.getUserName().equals(ugi.getUserName())) {
                LOG.debug("Using existing principal login: {}", ugi);
            } else {
                LOG.info("Attempting kerberos Re-login as principal ({}) ", new Object[] { ugi.getUserName() });
                ugi.reloginFromKeytab();
            }
        } else {
            LOG.info("Attempting kerberos login as principal ({}) from keytab " + "file ({})",
                    new Object[] { resolvedPrincipal, keytab });
            UserGroupInformation.loginUserFromKeytab(resolvedPrincipal, keytab);
            this.ugi = UserGroupInformation.getLoginUser();
            this.prevUser = new KerberosUser(resolvedPrincipal, keytab);
            this.privilegedExecutor = new UGIExecutor(this.ugi);
        }
    } catch (IOException e) {
        throw new SecurityException(
                "Authentication error while attempting to " + "login as kerberos principal ("
                        + resolvedPrincipal + ") using " + "keytab (" + keytab + "). Exception follows.",
                e);
    }

    printUGI(this.ugi);
}
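
A hedged usage sketch for the method above; the principal and keytab values are placeholders, and the authenticator instance is assumed to exist already:

// _HOST is resolved to the local hostname via SecurityUtil.getServerPrincipal,
// as the method above shows.
String principal = "flume/_HOST@EXAMPLE.COM";
String keytab = "/etc/security/keytabs/flume.keytab";
authenticator.authenticate(principal, keytab);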

From source file: org.apache.flume.auth.UGIExecutor.java

License: Apache License

private void reloginUGI(UserGroupInformation ugi) {
    try {
        if (ugi.hasKerberosCredentials()) {
            long now = System.currentTimeMillis();
            if (now - lastReloginAttempt < MIN_TIME_BEFORE_RELOGIN) {
                return;
            }
            lastReloginAttempt = now;
            ugi.checkTGTAndReloginFromKeytab();
        }
    } catch (IOException e) {
        throw new SecurityException("Error trying to relogin from keytab for user " + ugi.getUserName(), e);
    }
}
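
A hedged sketch of how such a relogin guard is typically paired with doAs (the execute wrapper is an assumption; reloginUGI is the method above):

// Refresh the TGT if needed, then run the action with the UGI's credentials.
private <T> T execute(UserGroupInformation ugi, PrivilegedExceptionAction<T> action) throws Exception {
    reloginUGI(ugi); // throttled by MIN_TIME_BEFORE_RELOGIN, as shown above
    return ugi.doAs(action);
}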

From source file: org.apache.hive.service.auth.HiveAuthFactory.java

License: Apache License

public static boolean needUgiLogin(UserGroupInformation ugi, String principal, String keytab) {
    return null == ugi || !ugi.hasKerberosCredentials() || !ugi.getUserName().equals(principal)
            || !Objects.equals(keytab, getKeytabFromUgi());
}
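
A hedged sketch of how this predicate typically gates a fresh keytab login (principal and keytab are assumed to come from configuration):

UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
// Resolve any _HOST placeholder before comparing against the UGI's user name.
String resolvedPrincipal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
if (needUgiLogin(ugi, resolvedPrincipal, keytab)) {
    // Current process credentials don't match: log in from the keytab.
    UserGroupInformation.loginUserFromKeytab(resolvedPrincipal, keytab);
}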

From source file: org.apache.hive.streaming.HiveStreamingConnection.java

License: Apache License

private HiveStreamingConnection(Builder builder) throws StreamingException {
    this.database = builder.database.toLowerCase();
    this.table = builder.table.toLowerCase();
    this.staticPartitionValues = builder.staticPartitionValues;
    this.conf = builder.hiveConf;
    this.agentInfo = builder.agentInfo;
    this.streamingOptimizations = builder.streamingOptimizations;
    this.writeId = builder.writeId;
    this.statementId = builder.statementId;
    this.tableObject = builder.tableObject;
    this.setPartitionedTable(builder.isPartitioned);
    this.manageTransactions = builder.manageTransactions;

    UserGroupInformation loggedInUser = null;
    try {
        loggedInUser = UserGroupInformation.getLoginUser();
    } catch (IOException e) {
        LOG.warn("Unable to get logged in user via UGI. err: {}", e.getMessage());
    }
    if (loggedInUser == null) {
        this.username = System.getProperty("user.name");
        this.secureMode = false;
    } else {
        this.username = loggedInUser.getShortUserName();
        this.secureMode = loggedInUser.hasKerberosCredentials();
    }
    this.transactionBatchSize = builder.transactionBatchSize;
    this.recordWriter = builder.recordWriter;
    this.connectionStats = new ConnectionStats();
    if (agentInfo == null) {
        try {
            agentInfo = username + ":" + InetAddress.getLocalHost().getHostName() + ":"
                    + Thread.currentThread().getName();
        } catch (UnknownHostException e) {
            // ignore and use UUID instead
            this.agentInfo = UUID.randomUUID().toString();
        }
    }
    if (conf == null) {
        conf = createHiveConf(this.getClass(), DEFAULT_METASTORE_URI);
    }

    overrideConfSettings(conf);
    if (manageTransactions) {
        this.metastoreUri = conf.get(MetastoreConf.ConfVars.THRIFT_URIS.getHiveName());
        this.msClient = getMetaStoreClient(conf, metastoreUri, secureMode, "streaming-connection");
        // We use a separate metastore client for heartbeat calls to ensure heartbeat RPC calls are
        // isolated from the other transaction related RPC calls.
        this.heartbeatMSClient = getMetaStoreClient(conf, metastoreUri, secureMode,
                "streaming-connection-heartbeat");
        validateTable();
    }

    LOG.info("STREAMING CONNECTION INFO: {}", toConnectionInfoString());
}
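
A hedged sketch of obtaining such a connection through the builder (database, table, and agent names are placeholders, following the Hive 3 streaming API):

StrictDelimitedInputWriter writer = StrictDelimitedInputWriter.newBuilder()
        .withFieldDelimiter(',')
        .build();
HiveStreamingConnection connection = HiveStreamingConnection.newBuilder()
        .withDatabase("default")
        .withTable("alerts")
        .withAgentInfo("example-agent-1")
        .withRecordWriter(writer)
        .connect();
// secureMode (and hence Kerberos use) is derived inside the constructor
// from UserGroupInformation.getLoginUser().hasKerberosCredentials().
connection.close();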

From source file: org.apache.phoenix.queryserver.server.QueryServer.java

License: Apache License

@Override
public int run(String[] args) throws Exception {
    logProcessInfo(getConf());
    final boolean loadBalancerEnabled = getConf().getBoolean(
            QueryServices.PHOENIX_QUERY_SERVER_LOADBALANCER_ENABLED,
            QueryServicesOptions.DEFAULT_PHOENIX_QUERY_SERVER_LOADBALANCER_ENABLED);
    try {
        final boolean isKerberos = "kerberos"
                .equalsIgnoreCase(getConf().get(QueryServices.QUERY_SERVER_HBASE_SECURITY_CONF_ATTRIB));
        final boolean disableSpnego = getConf().getBoolean(
                QueryServices.QUERY_SERVER_SPNEGO_AUTH_DISABLED_ATTRIB,
                QueryServicesOptions.DEFAULT_QUERY_SERVER_SPNEGO_AUTH_DISABLED);
        String hostname;
        final boolean disableLogin = getConf().getBoolean(QueryServices.QUERY_SERVER_DISABLE_KERBEROS_LOGIN,
                QueryServicesOptions.DEFAULT_QUERY_SERVER_DISABLE_KERBEROS_LOGIN);

        // handle secure cluster credentials
        if (isKerberos && !disableSpnego && !disableLogin) {
            hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
                    getConf().get(QueryServices.QUERY_SERVER_DNS_INTERFACE_ATTRIB, "default"),
                    getConf().get(QueryServices.QUERY_SERVER_DNS_NAMESERVER_ATTRIB, "default")));
            if (LOG.isDebugEnabled()) {
                LOG.debug("Login to " + hostname + " using "
                        + getConf().get(QueryServices.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB) + " and principal "
                        + getConf().get(QueryServices.QUERY_SERVER_KERBEROS_PRINCIPAL_ATTRIB) + ".");
            }
            SecurityUtil.login(getConf(), QueryServices.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB,
                    QueryServices.QUERY_SERVER_KERBEROS_PRINCIPAL_ATTRIB, hostname);
            LOG.info("Login successful.");
        } else {
            hostname = InetAddress.getLocalHost().getHostName();
            LOG.info(" Kerberos is off and hostname is : " + hostname);
        }

        Class<? extends PhoenixMetaFactory> factoryClass = getConf().getClass(
                QueryServices.QUERY_SERVER_META_FACTORY_ATTRIB, PhoenixMetaFactoryImpl.class,
                PhoenixMetaFactory.class);
        int port = getConf().getInt(QueryServices.QUERY_SERVER_HTTP_PORT_ATTRIB,
                QueryServicesOptions.DEFAULT_QUERY_SERVER_HTTP_PORT);
        LOG.debug("Listening on port " + port);
        PhoenixMetaFactory factory = factoryClass.getDeclaredConstructor(Configuration.class)
                .newInstance(getConf());
        Meta meta = factory.create(Arrays.asList(args));
        Service service = new LocalService(meta);

        // Start building the Avatica HttpServer
        final HttpServer.Builder builder = new HttpServer.Builder().withPort(port).withHandler(service,
                getSerialization(getConf()));

        // Enable SPNEGO and Impersonation when using Kerberos
        if (isKerberos) {
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            LOG.debug("Current user is " + ugi);
            if (!ugi.hasKerberosCredentials()) {
                ugi = UserGroupInformation.getLoginUser();
                LOG.debug("Current user does not have Kerberos credentials, using instead " + ugi);
            }

            // Make sure the proxyuser configuration is up to date
            ProxyUsers.refreshSuperUserGroupsConfiguration(getConf());

            String keytabPath = getConf().get(QueryServices.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB);
            File keytab = new File(keytabPath);

            String realmsString = getConf().get(QueryServices.QUERY_SERVER_KERBEROS_ALLOWED_REALMS, null);
            String[] additionalAllowedRealms = null;
            if (null != realmsString) {
                additionalAllowedRealms = StringUtils.split(realmsString, ',');
            }

            // Enable SPNEGO and impersonation (through standard Hadoop configuration means)
            builder.withSpnego(ugi.getUserName(), additionalAllowedRealms).withAutomaticLogin(keytab)
                    .withImpersonation(new PhoenixDoAsCallback(ugi, getConf()));

        }
        setRemoteUserExtractorIfNecessary(builder, getConf());

        // Build and start the HttpServer
        server = builder.build();
        server.start();
        if (loadBalancerEnabled) {
            registerToServiceProvider(hostname);
        }
        runningLatch.countDown();
        server.join();
        return 0;
    } catch (Throwable t) {
        LOG.fatal("Unrecoverable service error. Shutting down.", t);
        this.t = t;
        return -1;
    } finally {
        if (loadBalancerEnabled) {
            unRegister();
        }
    }
}

From source file: org.apache.pig.backend.hadoop.HKerberos.java

License: Apache License

public static void tryKerberosKeytabLogin(Configuration conf) {
    // Before we can actually connect we may need to login using the provided credentials.
    if (UserGroupInformation.isSecurityEnabled()) {
        UserGroupInformation loginUser;
        try {
            loginUser = UserGroupInformation.getLoginUser();
        } catch (IOException e) {
            LOG.error("Unable to start attempt to login using Kerberos keytab: " + e.getMessage());
            return;
        }

        // If we are already logged in to Kerberos with a keytab we can skip this to avoid needless logins
        if (!loginUser.hasKerberosCredentials() && !loginUser.isFromKeytab()) {
            String krb5Conf = conf.get("java.security.krb5.conf");
            String krb5Principal = conf.get("hadoop.security.krb5.principal");
            String krb5Keytab = conf.get("hadoop.security.krb5.keytab");

            // Only attempt login if we have all the required settings.
            if (krb5Conf != null && krb5Principal != null && krb5Keytab != null) {
                LOG.info("Trying login using Kerberos Keytab");
                LOG.info("krb5: Conf      = " + krb5Conf);
                LOG.info("krb5: Principal = " + krb5Principal);
                LOG.info("krb5: Keytab    = " + krb5Keytab);
                System.setProperty("java.security.krb5.conf", krb5Conf);
                try {
                    UserGroupInformation.loginUserFromKeytab(krb5Principal, krb5Keytab);
                } catch (IOException e) {
                    LOG.error("Unable to perform keytab based kerberos authentication: " + e.getMessage());
                }
            }
        }
    }
}
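
A hedged usage sketch for the helper above (the paths and principal are placeholders):

// The three settings below are exactly the ones tryKerberosKeytabLogin reads.
Configuration conf = new Configuration();
conf.set("java.security.krb5.conf", "/etc/krb5.conf");
conf.set("hadoop.security.krb5.principal", "pig@EXAMPLE.COM");
conf.set("hadoop.security.krb5.keytab", "/etc/security/keytabs/pig.keytab");
HKerberos.tryKerberosKeytabLogin(conf);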