Example usage for org.apache.hadoop.security UserGroupInformation loginUserFromKeytab

List of usage examples for org.apache.hadoop.security UserGroupInformation loginUserFromKeytab

Introduction

On this page you can find example usage for org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void loginUserFromKeytab(String user, String path) throws IOException 

Document

Log a user in from a keytab file.
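
To set the context for the examples below, here is a minimal, self-contained sketch of the basic call sequence on a Kerberos-enabled cluster. The principal name and keytab path are placeholders, not values taken from any of the source files listed on this page.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KeytabLoginExample {

    public static void main(String[] args) throws IOException {
        // Kerberos must be enabled in the configuration before logging in.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Placeholder principal and keytab path; substitute your own values.
        UserGroupInformation.loginUserFromKeytab("service/host@EXAMPLE.COM",
                "/etc/security/keytabs/service.keytab");

        // The static login user now holds credentials from the keytab.
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        System.out.println("Logged in as " + loginUser.getUserName());
    }
}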

Usage

From source file:azkaban.security.commons.SecurityUtils.java

License:Apache License

/**
 * Create a proxied user based on the explicit user name, taking other
 * parameters necessary from properties file.
 */
public static synchronized UserGroupInformation getProxiedUser(String toProxy, Properties prop, Logger log,
        Configuration conf) throws IOException {

    if (conf == null) {
        throw new IllegalArgumentException("conf can't be null");
    }
    UserGroupInformation.setConfiguration(conf);

    if (toProxy == null) {
        throw new IllegalArgumentException("toProxy can't be null");
    }

    if (loginUser == null) {
        log.info("No login user. Creating login user");
        String keytab = verifySecureProperty(prop, PROXY_KEYTAB_LOCATION, log);
        String proxyUser = verifySecureProperty(prop, PROXY_USER, log);
        UserGroupInformation.loginUserFromKeytab(proxyUser, keytab);
        loginUser = UserGroupInformation.getLoginUser();
        log.info("Logged in with user " + loginUser);
    } else {
        log.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
        loginUser.checkTGTAndReloginFromKeytab();
    }

    return UserGroupInformation.createProxyUser(toProxy, loginUser);
}
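
None of the snippets on this page show what a caller does with the proxy UGI returned by getProxiedUser. Below is a hedged sketch of the typical follow-up, calling createProxyUser and doAs directly; it assumes loginUserFromKeytab has already been run, the user name and HDFS path are illustrative, and impersonation only succeeds if the cluster's hadoop.proxyuser settings allow the login user to impersonate the target.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserExample {

    public static void main(String[] args) throws Exception {
        // Assumes loginUserFromKeytab(principal, keytab) has already been
        // called, e.g. via getProxiedUser() above.
        UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
                "alice", UserGroupInformation.getLoginUser());

        // Code inside doAs runs with "alice" as the effective user, provided
        // the cluster's proxyuser configuration permits the impersonation.
        FileStatus[] statuses = proxyUgi.doAs(
                (PrivilegedExceptionAction<FileStatus[]>) () ->
                        FileSystem.get(new Configuration()).listStatus(new Path("/user/alice")));

        for (FileStatus status : statuses) {
            System.out.println(status.getPath());
        }
    }
}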

From source file:azkaban.security.HadoopSecurityManager_H_1_0.java

License:Apache License

private HadoopSecurityManager_H_1_0(Props props) throws HadoopSecurityManagerException, IOException {

    // for now, assume the same/compatible native library, the same/compatible
    // hadoop-core jar
    String hadoopHome = props.getString("hadoop.home", null);
    String hadoopConfDir = props.getString("hadoop.conf.dir", null);

    if (hadoopHome == null) {
        hadoopHome = System.getenv("HADOOP_HOME");
    }
    if (hadoopConfDir == null) {
        hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
    }

    List<URL> resources = new ArrayList<URL>();
    if (hadoopConfDir != null) {
        logger.info("Using hadoop config found in " + new File(hadoopConfDir).toURI().toURL());
        resources.add(new File(hadoopConfDir).toURI().toURL());
    } else if (hadoopHome != null) {
        logger.info("Using hadoop config found in " + new File(hadoopHome, "conf").toURI().toURL());
        resources.add(new File(hadoopHome, "conf").toURI().toURL());
    } else {
        logger.info("HADOOP_HOME not set, using default hadoop config.");
    }

    ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));

    conf = new Configuration();
    conf.setClassLoader(ucl);

    if (props.containsKey("fs.hdfs.impl.disable.cache")) {
        logger.info("Setting fs.hdfs.impl.disable.cache to " + props.get("fs.hdfs.impl.disable.cache"));
        conf.setBoolean("fs.hdfs.impl.disable.cache", Boolean.valueOf(props.get("fs.hdfs.impl.disable.cache")));
    }

    logger.info("hadoop.security.authentication set to " + conf.get("hadoop.security.authentication"));
    logger.info("hadoop.security.authorization set to " + conf.get("hadoop.security.authorization"));
    logger.info("DFS name " + conf.get("fs.default.name"));

    UserGroupInformation.setConfiguration(conf);

    securityEnabled = UserGroupInformation.isSecurityEnabled();
    if (securityEnabled) {
        logger.info("The Hadoop cluster has enabled security");
        shouldProxy = true;
        try {
            keytabLocation = props.getString(PROXY_KEYTAB_LOCATION);
            keytabPrincipal = props.getString(PROXY_USER);
        } catch (UndefinedPropertyException e) {
            throw new HadoopSecurityManagerException(e.getMessage());
        }

        // try login
        try {
            if (loginUser == null) {
                logger.info("No login user. Creating login user");
                logger.info("Logging with " + keytabPrincipal + " and " + keytabLocation);
                UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabLocation);
                loginUser = UserGroupInformation.getLoginUser();
                logger.info("Logged in with user " + loginUser);
            } else {
                logger.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
                loginUser.checkTGTAndReloginFromKeytab();
            }
        } catch (IOException e) {
            throw new HadoopSecurityManagerException("Failed to login with kerberos ", e);
        }

    }

    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();

    logger.info("Hadoop Security Manager Initiated");
}

From source file:azkaban.security.HadoopSecurityManager_H_2_0.java

License:Apache License

private HadoopSecurityManager_H_2_0(Props props) throws HadoopSecurityManagerException, IOException {

    // for now, assume the same/compatible native library, the same/compatible
    // hadoop-core jar
    String hadoopHome = props.getString("hadoop.home", null);
    String hadoopConfDir = props.getString("hadoop.conf.dir", null);

    if (hadoopHome == null) {
        hadoopHome = System.getenv("HADOOP_HOME");
    }
    if (hadoopConfDir == null) {
        hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
    }

    List<URL> resources = new ArrayList<URL>();
    URL urlToHadoop = null;
    if (hadoopConfDir != null) {
        urlToHadoop = new File(hadoopConfDir).toURI().toURL();
        logger.info("Using hadoop config found in " + urlToHadoop);
        resources.add(urlToHadoop);
    } else if (hadoopHome != null) {
        urlToHadoop = new File(hadoopHome, "conf").toURI().toURL();
        logger.info("Using hadoop config found in " + urlToHadoop);
        resources.add(urlToHadoop);
    } else {
        logger.info("HADOOP_HOME not set, using default hadoop config.");
    }

    ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));

    conf = new Configuration();
    conf.setClassLoader(ucl);

    if (props.containsKey(FS_HDFS_IMPL_DISABLE_CACHE)) {
        logger.info("Setting " + FS_HDFS_IMPL_DISABLE_CACHE + " to " + props.get(FS_HDFS_IMPL_DISABLE_CACHE));
        conf.setBoolean(FS_HDFS_IMPL_DISABLE_CACHE, Boolean.valueOf(props.get(FS_HDFS_IMPL_DISABLE_CACHE)));
    }

    logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION + ": "
            + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION));
    logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION + ":  "
            + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION));
    logger.info(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY + ": "
            + conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY));

    UserGroupInformation.setConfiguration(conf);

    securityEnabled = UserGroupInformation.isSecurityEnabled();
    if (securityEnabled) {
        logger.info("The Hadoop cluster has enabled security");
        shouldProxy = true;
        try {

            keytabLocation = props.getString(AZKABAN_KEYTAB_LOCATION);
            keytabPrincipal = props.getString(AZKABAN_PRINCIPAL);
        } catch (UndefinedPropertyException e) {
            throw new HadoopSecurityManagerException(e.getMessage());
        }

        // try login
        try {
            if (loginUser == null) {
                logger.info("No login user. Creating login user");
                logger.info("Using principal from " + keytabPrincipal + " and " + keytabLocation);
                UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabLocation);
                loginUser = UserGroupInformation.getLoginUser();
                logger.info("Logged in with user " + loginUser);
            } else {
                logger.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
                loginUser.checkTGTAndReloginFromKeytab();
            }
        } catch (IOException e) {
            throw new HadoopSecurityManagerException("Failed to login with kerberos ", e);
        }

    }

    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();

    logger.info("Hadoop Security Manager initialized");
}

From source file:azkaban.storage.HdfsAuth.java

License:Apache License

private void login(final String keytabPrincipal, final String keytabPath) throws IOException {
    if (this.loggedInUser == null) {
        log.info(String.format("Logging in using Principal: %s Keytab: %s", keytabPrincipal, keytabPath));

        UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabPath);
        this.loggedInUser = UserGroupInformation.getLoginUser();
        log.info(String.format("User %s logged in.", this.loggedInUser));
    } else {
        log.info(String.format("User %s already logged in. Refreshing TGT", this.loggedInUser));
        this.loggedInUser.checkTGTAndReloginFromKeytab();
    }
}

From source file:azkaban.utils.AuthenticationUtils.java

License:Apache License

public static HttpURLConnection loginAuthenticatedURL(final URL url, final String keytabPrincipal,
        final String keytabPath) throws Exception {

    logger.info("Logging in URL: " + url.toString() + " using Principal: " + keytabPrincipal + ", Keytab: "
            + keytabPath);

    UserGroupInformation loginUser = UserGroupInformation.getLoginUser();

    if (loginUser == null) {
        UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabPath);
        loginUser = UserGroupInformation.getLoginUser();
        logger.info("Logged in with user " + loginUser);
    } else {
        logger.info("Login user (" + loginUser + ") already created, refreshing tgt.");
        loginUser.checkTGTAndReloginFromKeytab();
    }

    final HttpURLConnection connection = loginUser.doAs((PrivilegedExceptionAction<HttpURLConnection>) () -> {
        final Token token = new Token();
        return new AuthenticatedURL().openConnection(url, token);
    });

    return connection;
}
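
A hedged sketch of how this helper might be called; the URL, principal, and keytab path are illustrative placeholders, and it assumes azkaban.utils.AuthenticationUtils is on the classpath.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import azkaban.utils.AuthenticationUtils;

public class AuthenticatedUrlExample {

    public static void main(String[] args) throws Exception {
        // Placeholder endpoint and credentials; substitute real values.
        URL statusUrl = new URL("http://azkaban-web-host:8081/status");
        HttpURLConnection conn = AuthenticationUtils.loginAuthenticatedURL(
                statusUrl, "azkaban/host@EXAMPLE.COM",
                "/etc/security/keytabs/azkaban.keytab");

        // Read the SPNEGO-authenticated response.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            conn.disconnect();
        }
    }
}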

From source file:co.cask.cdap.common.kerberos.SecurityUtil.java

License:Apache License

public static void loginForMasterService(CConfiguration cConf) throws IOException, LoginException {
    String principal = SecurityUtil
            .expandPrincipal(cConf.get(Constants.Security.CFG_CDAP_MASTER_KRB_PRINCIPAL));
    String keytabPath = cConf.get(Constants.Security.CFG_CDAP_MASTER_KRB_KEYTAB_PATH);

    if (UserGroupInformation.isSecurityEnabled()) {
        LOG.info("Logging in as: principal={}, keytab={}", principal, keytabPath);
        UserGroupInformation.loginUserFromKeytab(principal, keytabPath);

        long delaySec = cConf.getLong(Constants.Security.KERBEROS_KEYTAB_RELOGIN_INTERVAL);
        Executors.newSingleThreadScheduledExecutor(Threads.createDaemonThreadFactory("Kerberos keytab renewal"))
                .scheduleWithFixedDelay(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                        } catch (IOException e) {
                            LOG.error("Failed to relogin from keytab", e);
                        }
                    }
                }, delaySec, delaySec, TimeUnit.SECONDS);
    }
}

From source file:com.amintor.hdfs.client.kerberizedhdfsclient.KerberizedHDFSClient.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {

    try {
        Configuration conf = new Configuration();
        conf.addResource(new FileInputStream(HDFS_SITE_LOCATION));
        conf.addResource(new FileInputStream(CORE_SITE_LOCATION));
        String authType = conf.get("hadoop.security.authentication");
        System.out.println("Authentication Type:" + authType);
        if (authType.trim().equalsIgnoreCase("kerberos")) {
            // Login through UGI keytab
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab("vijay", "/Users/vsingh/Software/vijay.keytab");
            FileSystem hdFS = FileSystem.get(conf);
            FileStatus[] listStatus = hdFS.listStatus(new Path(args[0]));
            for (FileStatus statusFile : listStatus) {
                System.out.print("Replication:" + statusFile.getReplication() + "\t");
                System.out.print("Owner:" + statusFile.getOwner() + "\t");
                System.out.print("Group:" + statusFile.getGroup() + "\t");
                System.out.println("Path:" + statusFile.getPath() + "\t");
            }

        }
    } catch (IOException ex) {
        Logger.getLogger(KerberizedHDFSClient.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:com.blackberry.bdp.kaboom.Authenticator.java

License:Apache License

/**
 * Static synchronized method for static Kerberos login. <br/>
 * Static synchronized due to a thundering herd problem when multiple Sinks
 * attempt to log in using the same principal at the same time with the
 * intention of impersonating different users (or even the same user). If this
 * is not controlled, MIT Kerberos v5 believes it is seeing a replay attack
 * and it returns: <blockquote>Request is a replay (34) -
 * PROCESS_TGS</blockquote> In addition, since the underlying Hadoop APIs we
 * are using for impersonation are static, we define this method as static as
 * well.
 *
 * @param principal
 *          Fully-qualified principal to use for authentication.
 * @param keytab
 *          Location of keytab file containing credentials for principal.
 * @return Logged-in user
 * @throws IOException
 *           if login fails.
 */
private synchronized UserGroupInformation kerberosLogin(Authenticator authenticator, String principal,
        String keytab) throws IOException {

    // if we are the 2nd user thru the lock, the login should already be
    // available statically if login was successful
    UserGroupInformation curUser = null;
    try {
        curUser = UserGroupInformation.getLoginUser();
    } catch (IOException e) {
        // not a big deal but this shouldn't typically happen because it will
        // generally fall back to the UNIX user
        LOG.debug("Unable to get login user before Kerberos auth attempt.", e);
    }

    // we already have logged in successfully
    if (curUser != null && curUser.getUserName().equals(principal)) {
        LOG.debug("{}: Using existing principal ({}): {}", new Object[] { authenticator, principal, curUser });

        // no principal found
    } else {

        LOG.info("{}: Attempting kerberos login as principal ({}) from keytab " + "file ({})",
                new Object[] { authenticator, principal, keytab });

        // attempt static kerberos login
        UserGroupInformation.loginUserFromKeytab(principal, keytab);
        curUser = UserGroupInformation.getLoginUser();
    }

    return curUser;
}

From source file:com.cloudera.beeswax.Server.java

License:Apache License

/**
 * Authenticate using kerberos if configured
 */
private static void doKerberosAuth() throws IllegalArgumentException {
    if (keytabFile == null || keytabFile.isEmpty()) {
        throw new IllegalArgumentException("No keytab specified");
    }
    if (principalConf == null || principalConf.isEmpty()) {
        throw new IllegalArgumentException("No principal specified");
    }

    // Login from the keytab
    try {
        kerberosName = SecurityUtil.getServerPrincipal(principalConf, "0.0.0.0");
        UserGroupInformation.loginUserFromKeytab(kerberosName, keytabFile);
        LOG.info("Logged in using Kerberos ticket for '" + kerberosName + "' from " + keytabFile);
        bwUgi = UserGroupInformation.getCurrentUser();
        // Start a thread to periodically refresh kerberos ticket
        Thread t = new Thread(new Runnable() {
            @Override
            public void run() {
                while (true) {
                    try {
                        Thread.sleep(refreshInterval);
                    } catch (InterruptedException e) {
                        return;
                    }
                    try {
                        LOG.info("Refreshed Kerberos ticket for '" + kerberosName + "' from " + keytabFile);
                        UserGroupInformation.getLoginUser().reloginFromKeytab();
                    } catch (IOException eIO) {
                        LOG.error("Error refreshing Kerberos ticket", eIO);
                    }
                }
            }
        }, "KerberosRefresher");
        t.start();
    } catch (IOException e) {
        throw new IllegalArgumentException("Couldn't setup Kerberos authentication", e);
    }
}

From source file:com.cloudera.lib.service.hadoop.HadoopService.java

License:Open Source License

@Override
protected void init() throws ServiceException {
    LOG.info("Using Hadoop JARs version [{}]", VersionInfo.getVersion());
    String security = getServiceConfig().get(AUTHENTICATION_TYPE, "simple").trim();
    if (security.equals("kerberos")) {
        String defaultName = getServer().getName();
        String keytab = System.getProperty("user.home") + "/" + defaultName + ".keytab";
        keytab = getServiceConfig().get(KERBEROS_KEYTAB, keytab).trim();
        if (keytab.length() == 0) {
            throw new ServiceException(HadoopException.ERROR.H01, KERBEROS_KEYTAB);
        }
        String principal = defaultName + "/localhost@LOCALHOST";
        principal = getServiceConfig().get(KERBEROS_PRINCIPAL, principal).trim();
        if (principal.length() == 0) {
            throw new ServiceException(HadoopException.ERROR.H01, KERBEROS_PRINCIPAL);
        }
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        try {
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException ex) {
            throw new ServiceException(HadoopException.ERROR.H02, ex.getMessage(), ex);
        }
        LOG.info("Using Hadoop Kerberos authentication, principal [{}] keytab [{}]", principal, keytab);
    } else if (security.equals("simple")) {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "simple");
        UserGroupInformation.setConfiguration(conf);
        LOG.info("Using Hadoop simple/pseudo authentication, principal [{}]", System.getProperty("user.name"));
    } else {
        throw new ServiceException(HadoopException.ERROR.H09, security);
    }

    serviceHadoopConf = new XConfiguration();
    for (Map.Entry entry : getServiceConfig()) {
        String name = (String) entry.getKey();
        if (name.startsWith(HADOOP_CONF_PREFIX)) {
            name = name.substring(HADOOP_CONF_PREFIX.length());
            String value = (String) entry.getValue();
            serviceHadoopConf.set(name, value);

        }
    }
    setRequiredServiceHadoopConf(serviceHadoopConf);

    LOG.debug("Hadoop default configuration:");
    for (Map.Entry entry : serviceHadoopConf) {
        LOG.debug("  {} = {}", entry.getKey(), entry.getValue());
    }

    jobTrackerWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(JOB_TRACKER_WHITELIST));
    nameNodeWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(NAME_NODE_WHITELIST));
}