Example usage for org.apache.hadoop.security SecurityUtil login

Introduction

On this page you can find example usage of org.apache.hadoop.security SecurityUtil login.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void login(final Configuration conf, final String keytabFileKey, final String userNameKey)
        throws IOException 

Document

Login as a principal specified in config.
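
Below is a minimal, self-contained sketch of calling the method directly. The configuration key names ("myapp.keytab.file", "myapp.kerberos.principal"), the keytab path, and the principal are placeholder assumptions for illustration only; substitute whatever keys your application defines.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;

public class SecurityUtilLoginExample {
    // Hypothetical configuration keys used only for this example.
    private static final String MY_KEYTAB_FILE_KEY = "myapp.keytab.file";
    private static final String MY_USER_NAME_KEY = "myapp.kerberos.principal";

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        conf.set(MY_KEYTAB_FILE_KEY, "/etc/security/keytabs/myapp.keytab");
        conf.set(MY_USER_NAME_KEY, "myapp/_HOST@EXAMPLE.COM");

        // SecurityUtil.login reads the keytab path and principal name from the two
        // given keys (substituting _HOST with the local hostname) and performs the
        // keytab login through UserGroupInformation.
        UserGroupInformation.setConfiguration(conf);
        if (UserGroupInformation.isSecurityEnabled()) {
            SecurityUtil.login(conf, MY_KEYTAB_FILE_KEY, MY_USER_NAME_KEY);
            System.out.println("Logged in as " + UserGroupInformation.getLoginUser());
        }
    }
}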

Usage

From source file:com.alibaba.jstorm.hdfs.common.security.AutoHDFS.java

License:Apache License

private void login(Configuration configuration) throws IOException {
    configuration.set(STORM_KEYTAB_FILE_KEY, this.hdfsKeyTab);
    configuration.set(STORM_USER_NAME_KEY, this.hdfsPrincipal);
    SecurityUtil.login(configuration, STORM_KEYTAB_FILE_KEY, STORM_USER_NAME_KEY);

    LOG.info("Logged into hdfs with principal {}", this.hdfsPrincipal);
}

From source file:com.alibaba.jstorm.hdfs.common.security.HdfsSecurityUtil.java

License:Apache License

public static void login(Map conf, Configuration hdfsConfig) throws IOException {
    //If AutoHDFS is specified, do not attempt to login using keytabs, only kept for backward compatibility.
    if (conf.get(TOPOLOGY_AUTO_CREDENTIALS) == null
            || (!(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoHDFS.class.getName()))
                    && !(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoTGT.class.getName())))) {
        if (UserGroupInformation.isSecurityEnabled()) {
            // compareAndSet added because of https://issues.apache.org/jira/browse/STORM-1535
            if (isLoggedIn.compareAndSet(false, true)) {
                LOG.info("Logging in using keytab as AutoHDFS is not specified for "
                        + TOPOLOGY_AUTO_CREDENTIALS);
                String keytab = (String) conf.get(STORM_KEYTAB_FILE_KEY);
                if (keytab != null) {
                    hdfsConfig.set(STORM_KEYTAB_FILE_KEY, keytab);
                }
                String userName = (String) conf.get(STORM_USER_NAME_KEY);
                if (userName != null) {
                    hdfsConfig.set(STORM_USER_NAME_KEY, userName);
                }
                SecurityUtil.login(hdfsConfig, STORM_KEYTAB_FILE_KEY, STORM_USER_NAME_KEY);
            }
        }
    }
}

From source file:com.cip.crane.agent.utils.TaskHelper.java

License:Open Source License

public TaskHelper() {
    conf.set("hadoop.security.authentication", "kerberos");
    conf.set("hadoop.security.authorization", "true");
    conf.set("dfs.namenode.kerberos.principal", AgentEnvValue.getHdfsValue(AgentEnvValue.NAMENODE_PRINCIPAL));
    conf.set("dp.hdfsclinet.kerberos.principal", AgentEnvValue.getHdfsValue(AgentEnvValue.KERBEROS_PRINCIPAL));
    conf.set("dp.hdfsclinet.keytab.file", AgentEnvValue.getValue(AgentEnvValue.AGENT_ROOT_PATH)
            + AgentEnvValue.getHdfsValue(AgentEnvValue.KEYTAB_FILE));
    conf.set("fs.hdfs.impl.disable.cache", "true");
    UserGroupInformation.setConfiguration(conf);
    try {
        SecurityUtil.login(conf, "dp.hdfsclinet.keytab.file", "dp.hdfsclinet.kerberos.principal");
    } catch (IOException e) {
        //throw new RuntimeException(e.getMessage(), e);
    }
}

From source file:com.deloitte.storm.common.security.HdfsSecurityUtil.java

License:Apache License

public static void login(Map conf, Configuration hdfsConfig) throws IOException {
    if (UserGroupInformation.isSecurityEnabled()) {
        String keytab = (String) conf.get(STORM_KEYTAB_FILE_KEY);
        if (keytab != null) {
            hdfsConfig.set(STORM_KEYTAB_FILE_KEY, keytab);
        }
        String userName = (String) conf.get(STORM_USER_NAME_KEY);
        if (userName != null) {
            hdfsConfig.set(STORM_USER_NAME_KEY, userName);
        }
        SecurityUtil.login(hdfsConfig, STORM_KEYTAB_FILE_KEY, STORM_USER_NAME_KEY);
    }
}

From source file:com.dp.bigdata.taurus.agent.utils.TaskHelper.java

License:Open Source License

public TaskHelper() {
    conf.set("hadoop.security.authentication", "kerberos");
    conf.set("hadoop.security.authorization", "true");
    conf.set("dfs.namenode.kerberos.principal", AgentEnvValue.getHdfsValue(AgentEnvValue.NAMENODE_PRINCIPAL));
    conf.set("dp.hdfsclinet.kerberos.principal", AgentEnvValue.getHdfsValue(AgentEnvValue.KERBEROS_PRINCIPAL));
    conf.set("dp.hdfsclinet.keytab.file", AgentEnvValue.getValue(AgentEnvValue.AGENT_ROOT_PATH)
            + AgentEnvValue.getHdfsValue(AgentEnvValue.KEYTAB_FILE));
    conf.set("fs.hdfs.impl.disable.cache", "true");
    UserGroupInformation.setConfiguration(conf);
    try {
        SecurityUtil.login(conf, "dp.hdfsclinet.keytab.file", "dp.hdfsclinet.kerberos.principal");
    } catch (IOException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}

From source file:com.emc.greenplum.gpdb.hdfsconnector.ConnectorUtil.java

License:Open Source License

/**
 * Helper routine to log in to secure Hadoop. If security is not configured
 * (in the core-site.xml) then return immediately.
 *
 * Create a LoginContext using config in $GPHOME/lib/hadoop/jaas.conf and search for a valid TGT
 * which matches HADOOP_SECURITY_USERNAME.
 * Check if the TGT needs to be renewed or recreated and use installed kinit command to handle the
 * credential cache
 *
 * @param conf the configuration
 */
protected static void loginSecureHadoop(Configuration conf) throws IOException, InterruptedException {
    // if security config does not exist then assume no security
    if (conf.get(HADOOP_SECURITY_USERNAME) == null || conf.get(HADOOP_SECURITY_USER_KEYTAB_FILE) == null) {
        return;
    }

    String principal = SecurityUtil.getServerPrincipal(conf.get(HADOOP_SECURITY_USERNAME),
            InetAddress.getLocalHost().getCanonicalHostName());
    String jaasConf = System.getenv("GPHOME") + "/lib/hadoop/jaas.conf";
    System.setProperty("java.security.auth.login.config", jaasConf);
    Boolean kinitDisabled = conf.getBoolean(HADOOP_DISABLE_KINIT, false);

    /*
       Attempt to find the TGT from the user's ticket cache and check whether it is a valid TGT.
       If the TGT needs to be renewed or recreated then we use the kinit binary so the cache can be persisted,
       allowing future queries to reuse cached TGTs.

       If the user disables kinit then we fall back to SecurityUtil.login, which will always perform an AS_REQ
       followed by a TGS_REQ to the KDC and set the global login context. The problem with this method is that
       if you have 300 or more GPDB segments then every gphdfs query will issue 300 AS_REQs to the KDC, which
       may result in intermittent failures or longer-running queries if the KDC cannot keep up with the demand.
    */
    try {
        LoginContext login = new LoginContext("gphdfs");
        login.login();
        Subject subject = login.getSubject();
        Set<KerberosTicket> tickets = subject.getPrivateCredentials(KerberosTicket.class);

        // find the TGT that matches the configured principal
        for (KerberosTicket ticket : tickets) {
            if (ticket.getClient().toString().equals(principal)) {
                long start = ticket.getStartTime().getTime();
                long end = ticket.getEndTime().getTime();
                long current = System.currentTimeMillis();
                Long rtime = start + (long) ((end - start) * .8); // compute start time of ticket plus 80% to find the refresh window

                if (current <= rtime && ticket.isCurrent()) { // Ticket is current so no further action required
                    return;
                } else if (current >= rtime && ticket.isRenewable() && !kinitDisabled) { // Ticket needs to be renewed and is renewable
                    String[] kinitRefresh = { "kinit", "-R" };
                    Process kinitRenew = Runtime.getRuntime().exec(kinitRefresh);
                    int rt = kinitRenew.waitFor();
                    if (rt == 0) {
                        return;
                    }

                }
                break;
            }
        }
    } catch (LoginException | InterruptedException e) {
        if (kinitDisabled) {
            SecurityUtil.login(conf, HADOOP_SECURITY_USER_KEYTAB_FILE, HADOOP_SECURITY_USERNAME);
            return;
        }
        /* if kinit is not disabled then do nothing, because we will request a new TGT and update the
        * ticket cache regardless of whether the login or kinit refresh failed initially
        */
    }

    // fall back to SecurityUtil.login if kinit is disabled
    if (kinitDisabled) { // login from keytab
        SecurityUtil.login(conf, HADOOP_SECURITY_USER_KEYTAB_FILE, HADOOP_SECURITY_USERNAME);
        return;
    }

    // if we made it here then no current TGT matching the principal was found in the cache, so we need to request a new TGT
    String[] kinitCmd = { "kinit", "-kt", conf.get(HADOOP_SECURITY_USER_KEYTAB_FILE), principal };
    try {
        Process kinit = Runtime.getRuntime().exec(kinitCmd);
        int rt = kinit.waitFor();
        if (rt != 0) {
            BufferedReader errOut = new BufferedReader(new InputStreamReader(kinit.getErrorStream()));
            String line;
            String errOutput = "";
            while ((line = errOut.readLine()) != null) {
                errOutput += line;
            }
            throw new IOException(String.format(
                    "Failed to Acquire TGT using command \"kinit -kt\" with configured keytab and principal settings:\n%s",
                    errOutput));
        }
    } catch (InterruptedException e) {
        throw new InterruptedException(String.format(
                "command \"kinit -kt\" with configured keytab and principal settings:\n%s", e.getMessage()));
    }
}

From source file:com.inmobi.conduit.utils.SecureLoginUtil.java

License:Apache License

public static void login(Configuration conf, String principalKey, String keyTabKey) throws IOException {
    SecurityUtil.login(conf, keyTabKey, principalKey);
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
    LOG.info("User logged in :" + ugi);
}

From source file:com.inmobi.messaging.consumer.databus.AbstractMessagingDatabusConsumer.java

License:Apache License

protected void initializeConfig(ClientConfig config) throws IOException {
    String hadoopConfFileName = config.getString(hadoopConfigFileKey);
    if (hadoopConfFileName != null) {
        Configuration.addDefaultResource(hadoopConfFileName);
    }
    conf = new Configuration();
    super.init(config);
    // verify authentication
    if (UserGroupInformation.isSecurityEnabled()) {
        String principal = config.getString(consumerPrincipal);
        String keytab = config.getString(consumerKeytab);
        if (principal != null && keytab != null) {
            Configuration conf = new Configuration();
            conf.set(consumerPrincipal, principal);
            conf.set(consumerKeytab, keytab);
            SecurityUtil.login(conf, consumerKeytab, consumerPrincipal);
            UserGroupInformation ugi = UserGroupInformation.getLoginUser();
            LOG.info("User logged in :" + ugi);
        } else {
            LOG.info(
                    "There is no principal or key tab file passed. Using the" + " commandline authentication.");
        }
    }
    // Read consumer id
    String consumerIdStr = config.getString(consumerIdInGroupConfig, DEFAULT_CONSUMER_ID);
    String[] id = consumerIdStr.split("/");
    try {
        consumerNumber = Integer.parseInt(id[0]);
        totalConsumers = Integer.parseInt(id[1]);
        partitionMinList = new HashSet<Integer>();
        if (isValidConfiguration()) {
            for (int i = 0; i < 60; i++) {
                if ((i % totalConsumers) == (consumerNumber - 1)) {
                    partitionMinList.add(i);
                }
            }
        } else {
            throw new IllegalArgumentException("Invalid consumer group membership");
        }
    } catch (NumberFormatException nfe) {
        throw new IllegalArgumentException("Invalid consumer group membership", nfe);
    }
    // Create checkpoint provider and initialize checkpoint
    String chkpointProviderClassName = config.getString(chkProviderConfig, DEFAULT_CHK_PROVIDER);
    String databusCheckpointDir = config.getString(checkpointDirConfig, DEFAULT_CHECKPOINT_DIR);
    this.checkpointProvider = createCheckpointProvider(chkpointProviderClassName, databusCheckpointDir);

    createCheckpoint();
    currentCheckpoint.read(checkpointProvider, getChkpointKey());

    //create buffer
    bufferSize = config.getInteger(queueSizeConfig, DEFAULT_QUEUE_SIZE);
    buffer = new LinkedBlockingQueue<QueueEntry>(bufferSize);

    // initialize other common configuration
    waitTimeForFileCreate = config.getLong(waitTimeForFileCreateConfig, DEFAULT_WAIT_TIME_FOR_FILE_CREATE);

    // get the retention period of the topic
    retentionInHours = config.getString(retentionConfig);

    relativeStartTimeStr = config.getString(relativeStartTimeConfig);

    if (relativeStartTimeStr == null && retentionInHours != null) {
        LOG.warn(retentionConfig + " is deprecated." + " Use " + relativeStartTimeConfig + " instead");
        int minutes = (Integer.parseInt(retentionInHours)) * 60;
        relativeStartTimeStr = String.valueOf(minutes);
    }

    String stopTimeStr = config.getString(stopDateConfig);
    stopTime = getDateFromString(stopTimeStr);

    startOfStream = config.getBoolean(startOfStreamConfig, DEFAULT_START_OF_STREAM);
    closedReadercount = 0;
}

From source file:org.apache.hawq.pxf.service.utilities.SecureLogin.java

License:Apache License

public static void login() {
    try {
        Configuration config = new Configuration();
        config.addResource("pxf-site.xml");

        SecurityUtil.login(config, CONFIG_KEY_SERVICE_KEYTAB, CONFIG_KEY_SERVICE_PRINCIPAL);
    } catch (Exception e) {
        LOG.error("PXF service login failed");
        throw new RuntimeException(e);
    }
}

From source file:org.apache.storm.hdfs.common.security.HdfsSecurityUtil.java

License:Apache License

public static void login(Map conf, Configuration hdfsConfig) throws IOException {
    //If AutoHDFS is specified, do not attempt to login using keytabs, only kept for backward compatibility.
    if (conf.get(TOPOLOGY_AUTO_CREDENTIALS) == null
            || (!(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoHDFS.class.getName()))
                    && !(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoTGT.class.getName())))) {
        if (UserGroupInformation.isSecurityEnabled()) {
            LOG.info("Logging in using keytab as AutoHDFS is not specified for " + TOPOLOGY_AUTO_CREDENTIALS);
            String keytab = (String) conf.get(STORM_KEYTAB_FILE_KEY);
            if (keytab != null) {
                hdfsConfig.set(STORM_KEYTAB_FILE_KEY, keytab);
            }
            String userName = (String) conf.get(STORM_USER_NAME_KEY);
            if (userName != null) {
                hdfsConfig.set(STORM_USER_NAME_KEY, userName);
            }
            SecurityUtil.login(hdfsConfig, STORM_KEYTAB_FILE_KEY, STORM_USER_NAME_KEY);
        }
    }
}