Example usage for org.apache.hadoop.security UserGroupInformation getLoginUser

Introduction

This page collects example usage of org.apache.hadoop.security.UserGroupInformation.getLoginUser() from Apache Falcon, Flink, and Flume sources.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getLoginUser() throws IOException 

Documentation

Get the currently logged in user.
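
Before the project examples below, here is a minimal, self-contained sketch of the call. The class name and print statements are illustrative only and do not come from any of the projects on this page:

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class GetLoginUserSketch {
    public static void main(String[] args) throws Exception {
        // initialize UGI from the default Hadoop configuration
        UserGroupInformation.setConfiguration(new Configuration());

        // fetch the currently logged in user (derived from the OS login,
        // a Kerberos ticket cache, or a keytab, depending on configuration)
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        System.out.println("Login user:  " + loginUser.getUserName());
        System.out.println("Auth method: " + loginUser.getAuthenticationMethod());

        // run an action with the login user's credentials
        FileSystem fs = loginUser.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                return FileSystem.get(new Configuration());
            }
        });
        System.out.println("Home directory: " + fs.getHomeDirectory());
    }
}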

Usage

From source file: org.apache.falcon.recipe.RecipeTool.java

License: Apache License

private FileSystem createFileSystem(UserGroupInformation ugi, final URI uri, final Configuration conf)
        throws Exception {
    try {
        final String proxyUserName = ugi.getShortUserName();
        if (proxyUserName.equals(UserGroupInformation.getLoginUser().getShortUserName())) {
            return FileSystem.get(uri, conf);
        }

        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                return FileSystem.get(uri, conf);
            }
        });
    } catch (InterruptedException ex) {
        throw new IOException("Exception creating FileSystem:" + ex.getMessage(), ex);
    }
}
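
Note the fast path: when the proxy user is already the login user, the method skips doAs entirely and lets FileSystem.get pick up the login user's credentials directly; an InterruptedException from doAs is rewrapped as an IOException.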

From source file: org.apache.falcon.security.AuthenticationInitializationServiceTest.java

License: Apache License

@Test
public void testInitForSimpleAuthenticationMethod() {
    try {
        StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE, PseudoAuthenticationHandler.TYPE);
        authenticationService.init();

        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        Assert.assertFalse(loginUser.isFromKeytab());
        Assert.assertEquals(loginUser.getAuthenticationMethod().name().toLowerCase(),
                PseudoAuthenticationHandler.TYPE);
        Assert.assertEquals(System.getProperty("user.name"), loginUser.getUserName());
    } catch (Exception e) {
        Assert.fail("AuthenticationInitializationService init failed.", e);
    }
}
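
Because the pseudo (simple) handler performs no real authentication, the test expects the login user to be derived from the JVM's user.name system property rather than from a keytab.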

From source file: org.apache.falcon.security.CurrentUser.java

License: Apache License

/**
 * Create a proxy UGI object for the proxy user.
 *
 * @param proxyUser name of the user to proxy (impersonated by the login user)
 * @return UGI object
 * @throws IOException
 */
public static UserGroupInformation createProxyUGI(String proxyUser) throws IOException {
    UserGroupInformation proxyUgi = userUgiMap.get(proxyUser);
    if (proxyUgi == null) {
        // handles a race condition: putIfAbsent keeps whichever UGI was stored first
        proxyUgi = UserGroupInformation.createProxyUser(proxyUser, UserGroupInformation.getLoginUser());
        userUgiMap.putIfAbsent(proxyUser, proxyUgi);
    }

    return proxyUgi;
}
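
A typical follow-up, sketched here rather than taken from the Falcon source (the user name "alice" is illustrative, and the proxy must be authorized via the hadoop.proxyuser.* settings), is to run file-system work as the proxied user:

UserGroupInformation proxyUgi = CurrentUser.createProxyUGI("alice");
FileSystem fs = proxyUgi.doAs(new PrivilegedExceptionAction<FileSystem>() {
    public FileSystem run() throws Exception {
        return FileSystem.get(new Configuration());
    }
});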

From source file: org.apache.falcon.service.FalconTopicSubscriber.java

License: Apache License

@Override
public void onMessage(Message message) {
    MapMessage mapMessage = (MapMessage) message;
    try {
        if (LOG.isDebugEnabled()) {
            debug(mapMessage);
        }
        String cluster = mapMessage.getString(ARG.cluster.getArgName());
        String entityName = mapMessage.getString(ARG.entityName.getArgName());
        String entityType = mapMessage.getString(ARG.entityType.getArgName());
        String workflowId = mapMessage.getString(ARG.workflowId.getArgName());
        String workflowUser = mapMessage.getString(ARG.workflowUser.getArgName());
        String runId = mapMessage.getString(ARG.runId.getArgName());
        String nominalTime = mapMessage.getString(ARG.nominalTime.getArgName());
        String status = mapMessage.getString(ARG.status.getArgName());
        String operation = mapMessage.getString(ARG.operation.getArgName());

        AbstractWorkflowEngine wfEngine = WorkflowEngineFactory.getWorkflowEngine();
        InstancesResult.Instance result;

        if (workflowUser == null) {
            //BACKWARD COMPATIBILITY: for the old workflows where user is not set, get the user from workflow
            CurrentUser.authenticate(UserGroupInformation.getLoginUser().getUserName());
            result = wfEngine.getJobDetails(cluster, workflowId);
            workflowUser = result.details;
            CurrentUser.authenticate(workflowUser);
        } else {
            CurrentUser.authenticate(workflowUser);
            result = wfEngine.getJobDetails(cluster, workflowId);
        }

        Date startTime = result.startTime;
        Date endTime = result.endTime;
        // elapsed time converted from milliseconds to nanoseconds
        Long duration = (endTime.getTime() - startTime.getTime()) * 1000000;

        if (status.equalsIgnoreCase("FAILED")) {
            retryHandler.handleRerun(cluster, entityType, entityName, nominalTime, runId, workflowId,
                    workflowUser, System.currentTimeMillis());

            GenericAlert.instrumentFailedInstance(cluster, entityType, entityName, nominalTime, workflowId,
                    workflowUser, runId, operation, SchemaHelper.formatDateUTC(startTime), "", "", duration);

        } else if (status.equalsIgnoreCase("SUCCEEDED")) {
            Entity entity = EntityUtil.getEntity(entityType, entityName);
            //late data handling not applicable for feed retention action
            if (!operation.equalsIgnoreCase("DELETE") && EntityUtil.getLateProcess(entity) != null) {
                latedataHandler.handleRerun(cluster, entityType, entityName, nominalTime, runId, workflowId,
                        workflowUser, System.currentTimeMillis());
            } else {
                LOG.info("Late data handling not applicable for entityType: {}, entityName: {} operation: {}",
                        entityType, entityName, operation);
            }
            GenericAlert.instrumentSucceededInstance(cluster, entityType, entityName, nominalTime, workflowId,
                    workflowUser, runId, operation, SchemaHelper.formatDateUTC(startTime), duration);

            registerHCatPartitions(mapMessage);

            notifyMetadataMappingService(entityName, operation, mapMessage.getString(ARG.logDir.getArgName()));
        }
    } catch (JMSException e) {
        LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
    } catch (FalconException e) {
        LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
    } catch (Exception e) {
        LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
    }
}
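
For messages that predate the workflowUser field, the subscriber falls back to the login user just long enough to query the workflow engine, then re-authenticates as the user recorded in the job details.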

From source file: org.apache.flink.runtime.security.modules.HadoopModule.java

License: Apache License

@Override
public void install(SecurityUtils.SecurityConfiguration securityConfig) throws SecurityInstallException {

    UserGroupInformation.setConfiguration(securityConfig.getHadoopConfiguration());

    try {
        if (UserGroupInformation.isSecurityEnabled() && !StringUtils.isBlank(securityConfig.getKeytab())
                && !StringUtils.isBlank(securityConfig.getPrincipal())) {
            String keytabPath = (new File(securityConfig.getKeytab())).getAbsolutePath();

            UserGroupInformation.loginUserFromKeytab(securityConfig.getPrincipal(), keytabPath);

            loginUser = UserGroupInformation.getLoginUser();

            // supplement with any available tokens
            String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
            if (fileLocation != null) {
                /*
                 * Use reflection API since the API semantics are not available in Hadoop1 profile. Below APIs are
                 * used in the context of reading the stored tokens from UGI.
                 * Credentials cred = Credentials.readTokenStorageFile(new File(fileLocation), config.hadoopConf);
                 * loginUser.addCredentials(cred);
                */
                try {
                    Method readTokenStorageFileMethod = Credentials.class.getMethod("readTokenStorageFile",
                            File.class, org.apache.hadoop.conf.Configuration.class);
                    Credentials cred = (Credentials) readTokenStorageFileMethod.invoke(null,
                            new File(fileLocation), securityConfig.getHadoopConfiguration());
                    Method addCredentialsMethod = UserGroupInformation.class.getMethod("addCredentials",
                            Credentials.class);
                    addCredentialsMethod.invoke(loginUser, cred);
                } catch (NoSuchMethodException e) {
                    LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
                } catch (InvocationTargetException e) {
                    throw e.getTargetException();
                }
            }
        } else {
            // login with current user credentials (e.g. ticket cache, OS login)
            // note that the stored tokens are read automatically
            try {
                //Use reflection API to get the login user object
                //UserGroupInformation.loginUserFromSubject(null);
                Method loginUserFromSubjectMethod = UserGroupInformation.class.getMethod("loginUserFromSubject",
                        Subject.class);
                loginUserFromSubjectMethod.invoke(null, (Subject) null);
            } catch (NoSuchMethodException e) {
                LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
            } catch (InvocationTargetException e) {
                throw e.getTargetException();
            }

            loginUser = UserGroupInformation.getLoginUser();
        }

        if (UserGroupInformation.isSecurityEnabled()) {
            // note: UGI::hasKerberosCredentials inaccurately reports false
            // for logins based on a keytab (fixed in Hadoop 2.6.1, see HADOOP-10786),
            // so we check only in ticket cache scenario.
            if (securityConfig.useTicketCache() && !loginUser.hasKerberosCredentials()) {
                // a delegation token is an adequate substitute in most cases
                if (!HadoopUtils.hasHDFSDelegationToken()) {
                    LOG.warn(
                            "Hadoop security is enabled but current login user does not have Kerberos credentials");
                }
            }
        }

        LOG.info("Hadoop user set to {}", loginUser);

    } catch (Throwable ex) {
        throw new SecurityInstallException("Unable to set the Hadoop login user", ex);
    }
}
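
Both branches end by caching UserGroupInformation.getLoginUser(): the keytab branch logs in explicitly and supplements the UGI with any delegation tokens found via HADOOP_TOKEN_FILE_LOCATION, while the fallback branch relies on loginUserFromSubject(null) to pick up ticket-cache or OS credentials. The reflection calls exist only to stay compatible with a Hadoop 1 classpath, where those methods are absent.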

From source file: org.apache.flink.runtime.security.SecurityContext.java

License: Apache License

public static void install(SecurityConfiguration config) throws Exception {

    // perform static initialization of UGI, JAAS
    if (installedContext != null) {
        LOG.warn("overriding previous security context");
    }

    // establish the JAAS config
    JaasConfiguration jaasConfig = new JaasConfiguration(config.keytab, config.principal);
    javax.security.auth.login.Configuration.setConfiguration(jaasConfig);

    populateSystemSecurityProperties(config.flinkConf);

    // establish the UGI login user
    UserGroupInformation.setConfiguration(config.hadoopConf);

    UserGroupInformation loginUser;

    if (UserGroupInformation.isSecurityEnabled() && config.keytab != null
            && !StringUtils.isBlank(config.principal)) {
        String keytabPath = (new File(config.keytab)).getAbsolutePath();

        UserGroupInformation.loginUserFromKeytab(config.principal, keytabPath);

        loginUser = UserGroupInformation.getLoginUser();

        // supplement with any available tokens
        String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if (fileLocation != null) {
            /*
             * Use reflection API since the API semantics are not available in Hadoop1 profile. Below APIs are
             * used in the context of reading the stored tokens from UGI.
             * Credentials cred = Credentials.readTokenStorageFile(new File(fileLocation), config.hadoopConf);
             * loginUser.addCredentials(cred);
            */
            try {
                Method readTokenStorageFileMethod = Credentials.class.getMethod("readTokenStorageFile",
                        File.class, org.apache.hadoop.conf.Configuration.class);
                Credentials cred = (Credentials) readTokenStorageFileMethod.invoke(null, new File(fileLocation),
                        config.hadoopConf);
                Method addCredentialsMethod = UserGroupInformation.class.getMethod("addCredentials",
                        Credentials.class);
                addCredentialsMethod.invoke(loginUser, cred);
            } catch (NoSuchMethodException e) {
                LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
            }
        }
    } else {
        // login with current user credentials (e.g. ticket cache)
        try {
            //Use reflection API to get the login user object
            //UserGroupInformation.loginUserFromSubject(null);
            Method loginUserFromSubjectMethod = UserGroupInformation.class.getMethod("loginUserFromSubject",
                    Subject.class);
            Subject subject = null;
            loginUserFromSubjectMethod.invoke(null, subject);
        } catch (NoSuchMethodException e) {
            LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
        }

        loginUser = UserGroupInformation.getLoginUser();
        // note that the stored tokens are read automatically
    }

    boolean delegationToken = false;
    final Text HDFS_DELEGATION_KIND = new Text("HDFS_DELEGATION_TOKEN");
    Collection<Token<? extends TokenIdentifier>> usrTok = loginUser.getTokens();
    for (Token<? extends TokenIdentifier> token : usrTok) {
        final Text id = new Text(token.getIdentifier());
        LOG.debug("Found user token " + id + " with " + token);
        if (token.getKind().equals(HDFS_DELEGATION_KIND)) {
            delegationToken = true;
        }
    }

    if (UserGroupInformation.isSecurityEnabled() && !loginUser.hasKerberosCredentials()) {
        //throw an error in non-yarn deployment if kerberos cache is not available
        if (!delegationToken) {
            LOG.error("Hadoop Security is enabled but current login user does not have Kerberos Credentials");
            throw new RuntimeException(
                    "Hadoop Security is enabled but current login user does not have Kerberos Credentials");
        }
    }

    installedContext = new SecurityContext(loginUser);
}
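
This older Flink variant follows the same keytab-or-current-credentials pattern as HadoopModule above, but it also scans the login user's tokens for an HDFS delegation token and fails hard when security is enabled and neither Kerberos credentials nor such a token are present.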

From source file: org.apache.flink.runtime.security.SecurityContextTest.java

License: Apache License

@Test
public void testCreateInsecureHadoopCtx() {
    SecurityContext.SecurityConfiguration sc = new SecurityContext.SecurityConfiguration();
    try {
        SecurityContext.install(sc);
        assertEquals(UserGroupInformation.getLoginUser().getUserName(), getOSUserName());
    } catch (Exception e) {
        fail(e.getMessage());
    }
}

From source file: org.apache.flume.auth.KerberosAuthenticator.java

License: Apache License

/**
 * When valid principal and keytab are provided and if authentication has
 * not yet been done for this object, this method authenticates the
 * credentials and populates the ugi. In case of null or invalid credentials
 * IllegalArgumentException is thrown. In case of failure to authenticate,
 * SecurityException is thrown. If authentication has already happened on
 * this KerberosAuthenticator object, then this method checks to see if the current
 * credentials passed are same as the validated credentials. If not, it throws
 * an exception as this authenticator can represent only one Principal.
 *
 * @param principal
 * @param keytab
 */
public synchronized void authenticate(String principal, String keytab) {
    // sanity checking

    Preconditions.checkArgument(principal != null && !principal.isEmpty(),
            "Invalid Kerberos principal: " + String.valueOf(principal));
    Preconditions.checkArgument(keytab != null && !keytab.isEmpty(),
            "Invalid Kerberos keytab: " + String.valueOf(keytab));
    File keytabFile = new File(keytab);
    Preconditions.checkArgument(keytabFile.isFile() && keytabFile.canRead(),
            "Keytab is not a readable file: " + String.valueOf(keytab));

    // resolve the requested principal
    String resolvedPrincipal;
    try {
        // resolves _HOST pattern using standard Hadoop search/replace
        // via DNS lookup when 2nd argument is empty
        resolvedPrincipal = SecurityUtil.getServerPrincipal(principal, "");
    } catch (IOException e) {
        throw new IllegalArgumentException(
                "Host lookup error resolving kerberos principal (" + principal + "). Exception follows.", e);
    }
    Preconditions.checkNotNull(resolvedPrincipal, "Resolved Principal must not be null");

    // be cruel and unusual when user tries to login as multiple principals
    // this isn't really valid with a reconfigure but this should be rare
    // enough to warrant a restart of the agent JVM
    // TODO: find a way to interrogate the entire current config state,
    // since we don't have to be unnecessarily protective if they switch all
    // HDFS sinks to use a different principal all at once.

    KerberosUser newUser = new KerberosUser(resolvedPrincipal, keytab);
    Preconditions.checkState(prevUser == null || prevUser.equals(newUser),
            "Cannot use multiple kerberos principals in the same agent. "
                    + " Must restart agent to use new principal or keytab. " + "Previous = %s, New = %s",
            prevUser, newUser);

    // enable the kerberos mode of UGI, before doing anything else
    if (!UserGroupInformation.isSecurityEnabled()) {
        Configuration conf = new Configuration(false);
        conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
    }

    // We are interested in currently logged in user with kerberos creds
    UserGroupInformation curUser = null;
    try {
        curUser = UserGroupInformation.getLoginUser();
        if (curUser != null && !curUser.hasKerberosCredentials()) {
            curUser = null;
        }
    } catch (IOException e) {
        LOG.warn("User unexpectedly had no active login. Continuing with " + "authentication", e);
    }

    /*
     *  if ugi is not null,
     *     if ugi matches currently logged in kerberos user, we are good
     *     else we are logged out, so relogin our ugi
     *  else if ugi is null, login and populate state
     */
    try {
        if (ugi != null) {
            if (curUser != null && curUser.getUserName().equals(ugi.getUserName())) {
                LOG.debug("Using existing principal login: {}", ugi);
            } else {
                LOG.info("Attempting kerberos Re-login as principal ({}) ", new Object[] { ugi.getUserName() });
                ugi.reloginFromKeytab();
            }
        } else {
            LOG.info("Attempting kerberos login as principal ({}) from keytab " + "file ({})",
                    new Object[] { resolvedPrincipal, keytab });
            UserGroupInformation.loginUserFromKeytab(resolvedPrincipal, keytab);
            this.ugi = UserGroupInformation.getLoginUser();
            this.prevUser = new KerberosUser(resolvedPrincipal, keytab);
            this.privilegedExecutor = new UGIExecutor(this.ugi);
        }
    } catch (IOException e) {
        throw new SecurityException(
                "Authentication error while attempting to " + "login as kerberos principal ("
                        + resolvedPrincipal + ") using " + "keytab (" + keytab + "). Exception follows.",
                e);
    }

    printUGI(this.ugi);
}
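
On repeat calls, the method re-logs in from the keytab only when the cached UGI no longer matches the current login user; the first successful call pins the principal/keytab pair for the lifetime of the agent.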

From source file: org.apache.flume.sink.hbase.HBaseSinkSecurityManager.java

License: Apache License

/**
 * Login the user using the configuration, and the hostname specified to use
 * for logging in.
 *
 * @param conf - Configuration to use for logging the user in.
 * @param hostname - The hostname to use for logging the user in. If no
 * hostname is specified (null or empty string), the canonical hostname for
 * the address returned by {@linkplain InetAddress#getLocalHost()} will be
 * used.
 * @return The logged in HBase {@linkplain User}.
 * @throws IOException if login failed, or hostname lookup failed.
 */
public static synchronized User login(Configuration conf, String hostname, String kerberosPrincipal,
        String kerberosKeytab) throws IOException {
    if (kerberosPrincipal.isEmpty()) {
        String msg = "Login failed, since kerberos principal was not specified.";
        LOG.error(msg);
        throw new IllegalArgumentException(msg);
    }
    if (kerberosKeytab.isEmpty()) {
        String msg = "Login failed, since kerberos keytab was not specified.";
        LOG.error(msg);
        throw new IllegalArgumentException(msg);
    } else {
        //If a keytab is specified, the user intends for it to take effect.
        //HDFSEventSink will halt when the keytab file is missing or unreadable
        File kfile = new File(kerberosKeytab);
        if (!(kfile.isFile() && kfile.canRead())) {
            throw new IllegalArgumentException(
                    "The keyTab file: " + kerberosKeytab + " is nonexistent or can't read. "
                            + "Please specify a readable keytab file for Kerberos auth.");
        }
    }
    String principal = kerberosPrincipal;
    try {
        // resolves _HOST pattern using standard Hadoop search/replace
        // via DNS lookup when 2nd argument is empty
        principal = SecurityUtil.getServerPrincipal(kerberosPrincipal, "");
    } catch (IOException e) {
        LOG.error("Host lookup error resolving kerberos principal (" + kerberosPrincipal
                + "). Exception follows.", e);
        throw e;
    }
    Preconditions.checkNotNull(principal, "Principal must not be null");
    KerberosUser newUser = new KerberosUser(principal, kerberosKeytab);
    //The HDFS Sink does not allow login credentials to change.
    //To be uniform, we will do the same thing here.
    User hbaseUser = null;
    boolean loggedIn = false;
    if (loggedInUser != null) {
        Preconditions.checkArgument(newUser.equals(loggedInUser),
                "Cannot switch kerberos credentials during a reconfiguration. "
                        + "Please restart the agent to set the new credentials.");
        try {
            hbaseUser = User.create(UserGroupInformation.getLoginUser());
            loggedIn = true;
        } catch (IOException ex) {
            LOG.warn("Previous login does not exist, " + "will authenticate against KDC");
        }
    }
    if (!loggedIn) {
        if (hostname == null || hostname.isEmpty()) {
            hostname = InetAddress.getLocalHost().getCanonicalHostName();
        }
        conf.set(FLUME_KEYTAB_KEY, kerberosKeytab);
        conf.set(FLUME_PRINCIPAL_KEY, principal);
        User.login(conf, FLUME_KEYTAB_KEY, FLUME_PRINCIPAL_KEY, hostname);
        hbaseUser = User.create(UserGroupInformation.getLoginUser());
        loggedInUser = newUser;
        //TODO: Set the loggedInUser to the current user.
        LOG.info("Logged into HBase as user: " + hbaseUser.getName());
    }
    return hbaseUser;
}
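
As in the Flume HDFS sink, credentials are pinned after the first login: a reconfiguration with a different principal or keytab fails the precondition check, and a still-valid login is reused via UserGroupInformation.getLoginUser() instead of re-authenticating against the KDC.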

From source file: org.apache.flume.sink.kite.KerberosUtil.java

License: Apache License

/**
 * Static synchronized method for static Kerberos login. <br/>
 * Static synchronized due to a thundering herd problem when multiple Sinks
 * attempt to log in using the same principal at the same time with the
 * intention of impersonating different users (or even the same user).
 * If this is not controlled, MIT Kerberos v5 believes it is seeing a replay
 * attack and it returns:
 * <blockquote>Request is a replay (34) - PROCESS_TGS</blockquote>
 * In addition, since the underlying Hadoop APIs we are using for
 * impersonation are static, we define this method as static as well.
 *
 * @param principal
 *         Fully-qualified principal to use for authentication.
 * @param keytab
 *         Location of keytab file containing credentials for principal.
 * @return Logged-in user
 * @throws org.apache.flume.sink.kite.KerberosUtil.SecurityException
 *         if login fails.
 * @throws IllegalArgumentException
 *         if the principal or the keytab is not usable
 */
public static synchronized UserGroupInformation login(String principal, String keytab) {
    // resolve the requested principal, if it is present
    String finalPrincipal = null;
    if (principal != null && !principal.isEmpty()) {
        try {
            // resolves _HOST pattern using standard Hadoop search/replace
            // via DNS lookup when 2nd argument is empty
            finalPrincipal = SecurityUtil.getServerPrincipal(principal, "");
        } catch (IOException e) {
            throw new SecurityException("Failed to resolve Kerberos principal", e);
        }
    }

    // check if there is a user already logged in
    UserGroupInformation currentUser = null;
    try {
        currentUser = UserGroupInformation.getLoginUser();
    } catch (IOException e) {
        // not a big deal but this shouldn't typically happen because it will
        // generally fall back to the UNIX user
        LOG.debug("Unable to get login user before Kerberos auth attempt", e);
    }

    // if the current user is valid (matches the given principal) then use it
    if (currentUser != null) {
        if (finalPrincipal == null || finalPrincipal.equals(currentUser.getUserName())) {
            LOG.debug("Using existing login for {}: {}", finalPrincipal, currentUser);
            return currentUser;
        } else {
            // be cruel and unusual when user tries to login as multiple principals
            // this isn't really valid with a reconfigure but this should be rare
            // enough to warrant a restart of the agent JVM
            // TODO: find a way to interrogate the entire current config state,
            // since we don't have to be unnecessarily protective if they switch all
            // HDFS sinks to use a different principal all at once.
            throw new SecurityException("Cannot use multiple Kerberos principals: " + finalPrincipal
                    + " would replace " + currentUser.getUserName());
        }
    }

    // prepare for a new login
    Preconditions.checkArgument(principal != null && !principal.isEmpty(),
            "Invalid Kerberos principal: " + String.valueOf(principal));
    Preconditions.checkNotNull(finalPrincipal, "Resolved principal must not be null");
    Preconditions.checkArgument(keytab != null && !keytab.isEmpty(),
            "Invalid Kerberos keytab: " + String.valueOf(keytab));
    File keytabFile = new File(keytab);
    Preconditions.checkArgument(keytabFile.isFile() && keytabFile.canRead(),
            "Keytab is not a readable file: " + String.valueOf(keytab));

    try {
        // attempt static kerberos login
        LOG.debug("Logging in as {} with {}", finalPrincipal, keytab);
        UserGroupInformation.loginUserFromKeytab(principal, keytab);
        return UserGroupInformation.getLoginUser();
    } catch (IOException e) {
        throw new SecurityException("Kerberos login failed", e);
    }
}
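
A matching existing login is returned as-is, a mismatched one raises SecurityException, and only a missing login triggers loginUserFromKeytab. A hedged usage sketch (the principal and keytab path are illustrative):

UserGroupInformation ugi = KerberosUtil.login("flume/_HOST@EXAMPLE.COM",
        "/etc/security/keytabs/flume.keytab");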