Example usage for org.apache.hadoop.security UserGroupInformation getLoginUser

Introduction

On this page you can find example usage for org.apache.hadoop.security UserGroupInformation getLoginUser.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getLoginUser() throws IOException 

Document

Get the currently logged in user.
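
A minimal, hedged sketch of calling getLoginUser (the class name GetLoginUserExample is illustrative, and it assumes a Hadoop Configuration is available on the classpath):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class GetLoginUserExample {
    public static void main(String[] args) throws IOException {
        // Tell UGI which configuration to use before asking who is logged in.
        UserGroupInformation.setConfiguration(new Configuration());

        // Returns the login user, performing a login from the current OS user
        // if no login has happened yet.
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        System.out.println("Login user: " + loginUser.getUserName());
        System.out.println("Security enabled: " + UserGroupInformation.isSecurityEnabled());
    }
}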

Usage

From source file:azkaban.jobtype.HadoopJavaJobRunnerMain.java

License:Apache License

public HadoopJavaJobRunnerMain() throws Exception {
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            cancelJob();
        }
    });

    try {
        _jobName = System.getenv(ProcessJob.JOB_NAME_ENV);
        String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);

        _logger = Logger.getRootLogger();
        _logger.removeAllAppenders();
        ConsoleAppender appender = new ConsoleAppender(DEFAULT_LAYOUT);
        appender.activateOptions();
        _logger.addAppender(appender);
        _logger.setLevel(Level.INFO); //Explicitly setting level to INFO

        Properties props = new Properties();
        props.load(new BufferedReader(new FileReader(propsFile)));

        HadoopConfigurationInjector.injectResources(new Props(null, props));

        final Configuration conf = new Configuration();

        UserGroupInformation.setConfiguration(conf);
        securityEnabled = UserGroupInformation.isSecurityEnabled();

        _logger.info("Running job " + _jobName);
        String className = props.getProperty(JOB_CLASS);
        if (className == null) {
            throw new Exception("Class name is not set.");
        }
        _logger.info("Class name " + className);

        UserGroupInformation loginUser = null;
        UserGroupInformation proxyUser = null;

        if (shouldProxy(props)) {
            String userToProxy = props.getProperty("user.to.proxy");
            if (securityEnabled) {
                String filelocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
                _logger.info("Found token file " + filelocation);
                _logger.info("Security enabled is " + UserGroupInformation.isSecurityEnabled());

                _logger.info("Setting mapreduce.job.credentials.binary to " + filelocation);
                System.setProperty("mapreduce.job.credentials.binary", filelocation);

                _logger.info("Proxying enabled.");

                loginUser = UserGroupInformation.getLoginUser();

                _logger.info("Current logged in user is " + loginUser.getUserName());

                proxyUser = UserGroupInformation.createProxyUser(userToProxy, loginUser);
                for (Token<?> token : loginUser.getTokens()) {
                    proxyUser.addToken(token);
                }
            } else {
                proxyUser = UserGroupInformation.createRemoteUser(userToProxy);
            }
            _logger.info("Proxied as user " + userToProxy);
        }

        // Create the job object, running as the proxy user when proxying is enabled
        if (shouldProxy(props)) {
            _javaObject = getObjectAsProxyUser(props, _logger, _jobName, className, proxyUser);
        } else {
            _javaObject = getObject(_jobName, className, props, _logger);
        }

        if (_javaObject == null) {
            _logger.info("Could not create java object to run job: " + className);
            throw new Exception("Could not create running object");
        }
        _logger.info("Got object " + _javaObject.toString());

        _cancelMethod = props.getProperty(CANCEL_METHOD_PARAM, DEFAULT_CANCEL_METHOD);

        final String runMethod = props.getProperty(RUN_METHOD_PARAM, DEFAULT_RUN_METHOD);
        _logger.info("Invoking method " + runMethod);

        if (shouldProxy(props)) {
            _logger.info("Proxying enabled.");
            runMethodAsUser(props, _javaObject, runMethod, proxyUser);
        } else {
            _logger.info("Proxy check failed, not proxying run.");
            runMethod(_javaObject, runMethod);
        }

        _isFinished = true;

        // Get the generated properties and store them to disk, to be read
        // by ProcessJob.
        try {
            final Method generatedPropertiesMethod = _javaObject.getClass()
                    .getMethod(GET_GENERATED_PROPERTIES_METHOD, new Class<?>[] {});
            Object outputGendProps = generatedPropertiesMethod.invoke(_javaObject, new Object[] {});

            if (outputGendProps != null) {
                final Method toPropertiesMethod = outputGendProps.getClass().getMethod("toProperties",
                        new Class<?>[] {});
                Properties properties = (Properties) toPropertiesMethod.invoke(outputGendProps,
                        new Object[] {});

                Props outputProps = new Props(null, properties);
                outputGeneratedProperties(outputProps);
            } else {
                _logger.info(GET_GENERATED_PROPERTIES_METHOD
                        + " method returned null. No properties to pass along.");
            }
        } catch (NoSuchMethodException e) {
            _logger.info(String.format(
                    "Apparently there isn't a method[%s] on object[%s], using " + "empty Props object instead.",
                    GET_GENERATED_PROPERTIES_METHOD, _javaObject));
            outputGeneratedProperties(new Props());
        }
    } catch (Exception e) {
        _isFinished = true;
        throw e;
    }
}

From source file:azkaban.jobtype.HadoopSecureWrapperUtils.java

License:Apache License

/**
 * Perform the setup required to obtain the proxyUser on a security-enabled grid.
 *
 * @param userToProxy the name of the user to proxy as
 * @param filelocation path to the hadoop token file
 * @param log logger for progress messages
 * @return a UserGroupInformation object for the specified userToProxy, which will also contain
 *         the logged in user's tokens
 * @throws IOException
 */
private static UserGroupInformation createSecurityEnabledProxyUser(String userToProxy, String filelocation,
        Logger log) throws IOException {

    if (!new File(filelocation).exists()) {
        throw new RuntimeException("hadoop token file doesn't exist.");
    }

    log.info("Found token file.  Setting " + HadoopSecurityManager.MAPREDUCE_JOB_CREDENTIALS_BINARY + " to "
            + filelocation);
    System.setProperty(HadoopSecurityManager.MAPREDUCE_JOB_CREDENTIALS_BINARY, filelocation);

    UserGroupInformation loginUser = null;

    loginUser = UserGroupInformation.getLoginUser();
    log.info("Current logged in user is " + loginUser.getUserName());

    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(userToProxy, loginUser);

    for (Token<?> token : loginUser.getTokens()) {
        proxyUser.addToken(token);
    }
    return proxyUser;
}
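
The proxy UGI built above is typically handed to doAs so that work runs as the proxied user. A hedged sketch (the argument values are illustrative; requires java.security.PrivilegedExceptionAction):

UserGroupInformation proxyUser =
        createSecurityEnabledProxyUser("someuser", tokenFileLocation, log);
proxyUser.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
        // Code here runs with the proxied identity plus the login user's tokens.
        return null;
    }
});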

From source file:azkaban.security.commons.SecurityUtils.java

License:Apache License

/**
 * Create a proxied user based on the explicit user name, taking other
 * necessary parameters from the properties file.
 */
public static synchronized UserGroupInformation getProxiedUser(String toProxy, Properties prop, Logger log,
        Configuration conf) throws IOException {

    if (conf == null) {
        throw new IllegalArgumentException("conf can't be null");
    }
    UserGroupInformation.setConfiguration(conf);

    if (toProxy == null) {
        throw new IllegalArgumentException("toProxy can't be null");
    }

    if (loginUser == null) {
        log.info("No login user. Creating login user");
        String keytab = verifySecureProperty(prop, PROXY_KEYTAB_LOCATION, log);
        String proxyUser = verifySecureProperty(prop, PROXY_USER, log);
        UserGroupInformation.loginUserFromKeytab(proxyUser, keytab);
        loginUser = UserGroupInformation.getLoginUser();
        log.info("Logged in with user " + loginUser);
    } else {
        log.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
        loginUser.checkTGTAndReloginFromKeytab();
    }

    return UserGroupInformation.createProxyUser(toProxy, loginUser);
}

From source file:azkaban.security.HadoopSecurityManager_H_1_0.java

License:Apache License

private HadoopSecurityManager_H_1_0(Props props) throws HadoopSecurityManagerException, IOException {

    // for now, assume the same/compatible native library, the same/compatible
    // hadoop-core jar
    String hadoopHome = props.getString("hadoop.home", null);
    String hadoopConfDir = props.getString("hadoop.conf.dir", null);

    if (hadoopHome == null) {
        hadoopHome = System.getenv("HADOOP_HOME");
    }
    if (hadoopConfDir == null) {
        hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
    }

    List<URL> resources = new ArrayList<URL>();
    if (hadoopConfDir != null) {
        logger.info("Using hadoop config found in " + new File(hadoopConfDir).toURI().toURL());
        resources.add(new File(hadoopConfDir).toURI().toURL());
    } else if (hadoopHome != null) {
        logger.info("Using hadoop config found in " + new File(hadoopHome, "conf").toURI().toURL());
        resources.add(new File(hadoopHome, "conf").toURI().toURL());
    } else {
        logger.info("HADOOP_HOME not set, using default hadoop config.");
    }

    ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));

    conf = new Configuration();
    conf.setClassLoader(ucl);

    if (props.containsKey("fs.hdfs.impl.disable.cache")) {
        logger.info("Setting fs.hdfs.impl.disable.cache to " + props.get("fs.hdfs.impl.disable.cache"));
        conf.setBoolean("fs.hdfs.impl.disable.cache", Boolean.valueOf(props.get("fs.hdfs.impl.disable.cache")));
    }

    logger.info("hadoop.security.authentication set to " + conf.get("hadoop.security.authentication"));
    logger.info("hadoop.security.authorization set to " + conf.get("hadoop.security.authorization"));
    logger.info("DFS name " + conf.get("fs.default.name"));

    UserGroupInformation.setConfiguration(conf);

    securityEnabled = UserGroupInformation.isSecurityEnabled();
    if (securityEnabled) {
        logger.info("The Hadoop cluster has enabled security");
        shouldProxy = true;
        try {
            keytabLocation = props.getString(PROXY_KEYTAB_LOCATION);
            keytabPrincipal = props.getString(PROXY_USER);
        } catch (UndefinedPropertyException e) {
            throw new HadoopSecurityManagerException(e.getMessage());
        }

        // try login
        try {
            if (loginUser == null) {
                logger.info("No login user. Creating login user");
                logger.info("Logging with " + keytabPrincipal + " and " + keytabLocation);
                UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabLocation);
                loginUser = UserGroupInformation.getLoginUser();
                logger.info("Logged in with user " + loginUser);
            } else {
                logger.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
                loginUser.checkTGTAndReloginFromKeytab();
            }
        } catch (IOException e) {
            throw new HadoopSecurityManagerException("Failed to login with kerberos ", e);
        }

    }

    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();

    logger.info("Hadoop Security Manager Initiated");
}

From source file:azkaban.security.HadoopSecurityManager_H_1_0.java

License:Apache License

/**
 * Create a proxied user based on the explicit user name, taking other
 * necessary parameters from the properties file.
 *
 * @throws IOException
 */
@Override
public synchronized UserGroupInformation getProxiedUser(String userToProxy)
        throws HadoopSecurityManagerException {

    if (userToProxy == null) {
        throw new HadoopSecurityManagerException("userToProxy can't be null");
    }

    UserGroupInformation ugi = userUgiMap.get(userToProxy);
    if (ugi == null) {
        logger.info("proxy user " + userToProxy + " not exist. Creating new proxy user");
        if (shouldProxy) {
            try {
                ugi = UserGroupInformation.createProxyUser(userToProxy, UserGroupInformation.getLoginUser());
            } catch (IOException e) {
                e.printStackTrace();
                throw new HadoopSecurityManagerException("Failed to create proxy user", e);
            }
        } else {
            ugi = UserGroupInformation.createRemoteUser(userToProxy);
        }
        userUgiMap.putIfAbsent(userToProxy, ugi);
    }
    return ugi;
}

From source file:azkaban.security.HadoopSecurityManager_H_1_0.java

License:Apache License

@Override
public synchronized void prefetchToken(final File tokenFile, final Props props, final Logger logger)
        throws HadoopSecurityManagerException {

    final String userToProxy = props.getString(USER_TO_PROXY);

    logger.info("Getting hadoop tokens for " + userToProxy);

    final Credentials cred = new Credentials();
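    // cred accumulates every token fetched below and is written out to tokenFile at the end.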

    if (props.getBoolean(OBTAIN_HCAT_TOKEN, false)) {
        try {
            logger.info("Pre-fetching Hive MetaStore token from hive");

            HiveConf hiveConf = new HiveConf();
            logger.info("HiveConf.ConfVars.METASTOREURIS.varname "
                    + hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
            logger.info("HIVE_METASTORE_SASL_ENABLED " + hiveConf.get(HIVE_METASTORE_SASL_ENABLED));
            logger.info("HIVE_METASTORE_KERBEROS_PRINCIPAL " + hiveConf.get(HIVE_METASTORE_KERBEROS_PRINCIPAL));
            logger.info("HIVE_METASTORE_LOCAL " + hiveConf.get(HIVE_METASTORE_LOCAL));

            HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveConf);
            String hcatTokenStr = hiveClient.getDelegationToken(userToProxy,
                    UserGroupInformation.getLoginUser().getShortUserName());
            Token<DelegationTokenIdentifier> hcatToken = new Token<DelegationTokenIdentifier>();
            hcatToken.decodeFromUrlString(hcatTokenStr);
            logger.info("Created hive metastore token: " + hcatTokenStr);
            logger.info("Token kind: " + hcatToken.getKind());
            logger.info("Token id: " + hcatToken.getIdentifier());
            logger.info("Token service: " + hcatToken.getService());
            cred.addToken(hcatToken.getService(), hcatToken);
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("Failed to get hive metastore token." + e.getMessage() + e.getCause());
        } catch (Throwable t) {
            t.printStackTrace();
            logger.error("Failed to get hive metastore token." + t.getMessage() + t.getCause());
        }
    }

    try {
        getProxiedUser(userToProxy).doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                getToken(userToProxy);
                return null;
            }

            private void getToken(String userToProxy)
                    throws InterruptedException, IOException, HadoopSecurityManagerException {
                logger.info("Here is the props for " + OBTAIN_NAMENODE_TOKEN + ": "
                        + props.getBoolean(OBTAIN_NAMENODE_TOKEN));
                if (props.getBoolean(OBTAIN_NAMENODE_TOKEN, false)) {
                    FileSystem fs = FileSystem.get(conf);
                    // check if we get the correct FS, and most importantly, the
                    // conf
                    logger.info("Getting DFS token from " + fs.getUri());
                    Token<?> fsToken = fs.getDelegationToken(userToProxy);
                    if (fsToken == null) {
                        logger.error("Failed to fetch DFS token for ");
                        throw new HadoopSecurityManagerException(
                                "Failed to fetch DFS token for " + userToProxy);
                    }
                    logger.info("Created DFS token: " + fsToken.toString());
                    logger.info("Token kind: " + fsToken.getKind());
                    logger.info("Token id: " + fsToken.getIdentifier());
                    logger.info("Token service: " + fsToken.getService());
                    cred.addToken(fsToken.getService(), fsToken);
                }

                if (props.getBoolean(OBTAIN_JOBTRACKER_TOKEN, false)) {
                    JobClient jobClient = new JobClient(new JobConf());
                    logger.info("Pre-fetching JT token from JobTracker");

                    Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("mr token"));
                    if (mrdt == null) {
                        logger.error("Failed to fetch JT token");
                        throw new HadoopSecurityManagerException("Failed to fetch JT token for " + userToProxy);
                    }
                    logger.info("Created JT token: " + mrdt.toString());
                    logger.info("Token kind: " + mrdt.getKind());
                    logger.info("Token id: " + mrdt.getIdentifier());
                    logger.info("Token service: " + mrdt.getService());
                    cred.addToken(mrdt.getService(), mrdt);
                }
            }
        });

        FileOutputStream fos = null;
        DataOutputStream dos = null;
        try {
            fos = new FileOutputStream(tokenFile);
            dos = new DataOutputStream(fos);
            cred.writeTokenStorageToStream(dos);
        } finally {
            if (dos != null) {
                dos.close();
            }
            if (fos != null) {
                fos.close();
            }
        }

        // stash them to cancel after use.
        logger.info("Tokens loaded in " + tokenFile.getAbsolutePath());

    } catch (Exception e) {
        e.printStackTrace();
        throw new HadoopSecurityManagerException(
                "Failed to get hadoop tokens! " + e.getMessage() + e.getCause());
    } catch (Throwable t) {
        t.printStackTrace();
        throw new HadoopSecurityManagerException(
                "Failed to get hadoop tokens! " + t.getMessage() + t.getCause());
    }
}
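
The token file written above is later handed to the job process, for example via HADOOP_TOKEN_FILE_LOCATION and mapreduce.job.credentials.binary as in the first example on this page. Reading it back might look like the following sketch, assuming the Credentials.readTokenStorageFile overload that accepts a java.io.File:

Credentials cred = Credentials.readTokenStorageFile(tokenFile, conf);
for (Token<?> token : cred.getAllTokens()) {
    logger.info("Loaded token kind: " + token.getKind());
}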

From source file:azkaban.security.HadoopSecurityManager_H_2_0.java

License:Apache License

private HadoopSecurityManager_H_2_0(Props props) throws HadoopSecurityManagerException, IOException {

    // for now, assume the same/compatible native library, the same/compatible
    // hadoop-core jar
    String hadoopHome = props.getString("hadoop.home", null);
    String hadoopConfDir = props.getString("hadoop.conf.dir", null);

    if (hadoopHome == null) {
        hadoopHome = System.getenv("HADOOP_HOME");
    }
    if (hadoopConfDir == null) {
        hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
    }

    List<URL> resources = new ArrayList<URL>();
    URL urlToHadoop = null;
    if (hadoopConfDir != null) {
        urlToHadoop = new File(hadoopConfDir).toURI().toURL();
        logger.info("Using hadoop config found in " + urlToHadoop);
        resources.add(urlToHadoop);
    } else if (hadoopHome != null) {
        urlToHadoop = new File(hadoopHome, "conf").toURI().toURL();
        logger.info("Using hadoop config found in " + urlToHadoop);
        resources.add(urlToHadoop);
    } else {
        logger.info("HADOOP_HOME not set, using default hadoop config.");
    }

    ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));

    conf = new Configuration();
    conf.setClassLoader(ucl);

    if (props.containsKey(FS_HDFS_IMPL_DISABLE_CACHE)) {
        logger.info("Setting " + FS_HDFS_IMPL_DISABLE_CACHE + " to " + props.get(FS_HDFS_IMPL_DISABLE_CACHE));
        conf.setBoolean(FS_HDFS_IMPL_DISABLE_CACHE, Boolean.valueOf(props.get(FS_HDFS_IMPL_DISABLE_CACHE)));
    }

    logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION + ": "
            + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION));
    logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION + ":  "
            + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION));
    logger.info(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY + ": "
            + conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY));

    UserGroupInformation.setConfiguration(conf);

    securityEnabled = UserGroupInformation.isSecurityEnabled();
    if (securityEnabled) {
        logger.info("The Hadoop cluster has enabled security");
        shouldProxy = true;
        try {

            keytabLocation = props.getString(AZKABAN_KEYTAB_LOCATION);
            keytabPrincipal = props.getString(AZKABAN_PRINCIPAL);
        } catch (UndefinedPropertyException e) {
            throw new HadoopSecurityManagerException(e.getMessage());
        }

        // try login
        try {
            if (loginUser == null) {
                logger.info("No login user. Creating login user");
                logger.info("Using principal from " + keytabPrincipal + " and " + keytabLocation);
                UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabLocation);
                loginUser = UserGroupInformation.getLoginUser();
                logger.info("Logged in with user " + loginUser);
            } else {
                logger.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
                loginUser.checkTGTAndReloginFromKeytab();
            }
        } catch (IOException e) {
            throw new HadoopSecurityManagerException("Failed to login with kerberos ", e);
        }

    }

    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();

    logger.info("Hadoop Security Manager initialized");
}

From source file:azkaban.security.HadoopSecurityManager_H_2_0.java

License:Apache License

/**
 * Create a proxied user based on the explicit user name, taking other
 * necessary parameters from the properties file.
 *
 * @throws IOException
 */
@Override
public synchronized UserGroupInformation getProxiedUser(String userToProxy)
        throws HadoopSecurityManagerException {

    if (userToProxy == null) {
        throw new HadoopSecurityManagerException("userToProxy can't be null");
    }

    UserGroupInformation ugi = userUgiMap.get(userToProxy);
    if (ugi == null) {
        logger.info("proxy user " + userToProxy + " not exist. Creating new proxy user");
        if (shouldProxy) {
            try {
                ugi = UserGroupInformation.createProxyUser(userToProxy, UserGroupInformation.getLoginUser());
            } catch (IOException e) {
                throw new HadoopSecurityManagerException("Failed to create proxy user", e);
            }
        } else {
            ugi = UserGroupInformation.createRemoteUser(userToProxy);
        }
        userUgiMap.putIfAbsent(userToProxy, ugi);
    }
    return ugi;
}

From source file:azkaban.security.HadoopSecurityManager_H_2_0.java

License:Apache License

/**
 * Fetches an HCat delegation token for the specified user, based on the
 * given hive configuration, so the caller can store it in a credential store.
 *
 * @param userToProxy String value indicating the name of the user the token
 *          will be fetched for.
 * @param hiveConf the configuration from which the hive client will be
 *          initialized.
 * @param tokenSignatureOverwrite if non-empty, overwrites the service field
 *          of the fetched token.
 * @param logger the logger instance which writes the logging content to the
 *          job logs.
 *
 * @throws IOException
 * @throws TException
 * @throws MetaException
 */
private Token<DelegationTokenIdentifier> fetchHcatToken(String userToProxy, HiveConf hiveConf,
        String tokenSignatureOverwrite, final Logger logger) throws IOException, MetaException, TException {

    logger.info(HiveConf.ConfVars.METASTOREURIS.varname + ": "
            + hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));

    logger.info(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname + ": "
            + hiveConf.get(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname));

    logger.info(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname + ": "
            + hiveConf.get(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname));

    HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveConf);
    String hcatTokenStr = hiveClient.getDelegationToken(userToProxy,
            UserGroupInformation.getLoginUser().getShortUserName());
    Token<DelegationTokenIdentifier> hcatToken = new Token<DelegationTokenIdentifier>();
    hcatToken.decodeFromUrlString(hcatTokenStr);

    // overwrite the value of the service property of the token if the signature
    // override is specified.
    if (tokenSignatureOverwrite != null && tokenSignatureOverwrite.trim().length() > 0) {
        hcatToken.setService(new Text(tokenSignatureOverwrite.trim().toLowerCase()));

        logger.info(HIVE_TOKEN_SIGNATURE_KEY + ": " + tokenSignatureOverwrite);
    }

    logger.info("Created hive metastore token: " + hcatTokenStr);
    logger.info("Token kind: " + hcatToken.getKind());
    logger.info("Token id: " + hcatToken.getIdentifier());
    logger.info("Token service: " + hcatToken.getService());
    return hcatToken;
}
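
As in the H_1_0 prefetchToken example above, the caller would typically add the returned token to a Credentials object; a brief sketch (passing null keeps the token's default service):

Token<DelegationTokenIdentifier> hcatToken =
        fetchHcatToken(userToProxy, hiveConf, null, logger);
cred.addToken(hcatToken.getService(), hcatToken);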

From source file:azkaban.storage.HdfsAuth.java

License:Apache License

private void login(final String keytabPrincipal, final String keytabPath) throws IOException {
    if (this.loggedInUser == null) {
        log.info(String.format("Logging in using Principal: %s Keytab: %s", keytabPrincipal, keytabPath));

        UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabPath);
        this.loggedInUser = UserGroupInformation.getLoginUser();
        log.info(String.format("User %s logged in.", this.loggedInUser));
    } else {
        log.info(String.format("User %s already logged in. Refreshing TGT", this.loggedInUser));
        this.loggedInUser.checkTGTAndReloginFromKeytab();
    }
}