Example usage for org.apache.hadoop.security UserGroupInformation isSecurityEnabled

Introduction

On this page you can find example usage for org.apache.hadoop.security UserGroupInformation isSecurityEnabled.

Prototype

public static boolean isSecurityEnabled() 

Document

Determines whether UserGroupInformation is using Kerberos to establish user identities, or is relying on simple authentication.
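
Before the usage examples below, here is a minimal, self-contained sketch of the typical check-then-login pattern built around isSecurityEnabled(). The class name, principal, and keytab path are hypothetical placeholders and are not taken from any of the examples on this page.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class SecurityCheckExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // UGI reads hadoop.security.authentication from the configuration.
        UserGroupInformation.setConfiguration(conf);

        if (UserGroupInformation.isSecurityEnabled()) {
            // Kerberos is in use: log in from a keytab before touching HDFS or YARN.
            // The principal and keytab path below are placeholders.
            UserGroupInformation.loginUserFromKeytab("service/host@EXAMPLE.COM",
                    "/etc/security/keytabs/service.keytab");
        }
        // Under simple authentication this falls through and the OS user is used.
        System.out.println("Current user: " + UserGroupInformation.getCurrentUser());
    }
}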

Usage

From source file: org.apache.storm.hdfs.security.HdfsSecurityUtil.java

License: Apache License

public static void login(Map<String, Object> conf, Configuration hdfsConfig) throws IOException {
    // If AutoHDFS is specified, do not attempt to log in using keytabs; this path is kept only for backward compatibility.
    if (conf.get(TOPOLOGY_AUTO_CREDENTIALS) == null
            || (!(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoHDFS.class.getName()))
                    && !(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoTGT.class.getName())))) {
        if (UserGroupInformation.isSecurityEnabled()) {
            // compareAndSet added because of https://issues.apache.org/jira/browse/STORM-1535
            if (isLoggedIn.compareAndSet(false, true)) {
                LOG.info("Logging in using keytab as AutoHDFS is not specified for "
                        + TOPOLOGY_AUTO_CREDENTIALS);
                String keytab = (String) conf.get(STORM_KEYTAB_FILE_KEY);
                if (keytab != null) {
                    hdfsConfig.set(STORM_KEYTAB_FILE_KEY, keytab);
                }
                String userName = (String) conf.get(STORM_USER_NAME_KEY);
                if (userName != null) {
                    hdfsConfig.set(STORM_USER_NAME_KEY, userName);
                }
                SecurityUtil.login(hdfsConfig, STORM_KEYTAB_FILE_KEY, STORM_USER_NAME_KEY);
            }
        }
    }
}

From source file: org.apache.storm.hive.security.AutoHive.java

License: Apache License

@SuppressWarnings("unchecked")
protected byte[] getHadoopCredentials(Map<String, Object> conf, final Configuration configuration) {
    try {
        if (UserGroupInformation.isSecurityEnabled()) {
            String topologySubmitterUser = (String) conf.get(Config.TOPOLOGY_SUBMITTER_PRINCIPAL);
            String hiveMetaStoreURI = getMetaStoreURI(configuration);
            String hiveMetaStorePrincipal = getMetaStorePrincipal(configuration);
            HiveConf hcatConf = createHiveConf(hiveMetaStoreURI, hiveMetaStorePrincipal);
            login(configuration);

            UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
            UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(topologySubmitterUser,
                    currentUser);
            try {
                Token<DelegationTokenIdentifier> delegationTokenId = getDelegationToken(hcatConf,
                        hiveMetaStorePrincipal, topologySubmitterUser);
                proxyUser.addToken(delegationTokenId);
                LOG.info("Obtained Hive tokens, adding to user credentials.");

                Credentials credential = proxyUser.getCredentials();
                ByteArrayOutputStream bao = new ByteArrayOutputStream();
                ObjectOutputStream out = new ObjectOutputStream(bao);
                credential.write(out);
                out.flush();
                out.close();
                return bao.toByteArray();
            } catch (Exception ex) {
                LOG.debug(" Exception" + ex.getMessage());
                throw ex;
            }
        } else {
            throw new RuntimeException("Security is not enabled for Hadoop");
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to get delegation tokens.", ex);
    }
}

From source file: org.apache.storm.hive.security.AutoHive.java

License: Apache License

private long renewToken(Token token, String metaStoreURI, String hiveMetaStorePrincipal) {
    HCatClient hcatClient = null;
    if (UserGroupInformation.isSecurityEnabled()) {
        try {
            String tokenStr = token.encodeToUrlString();
            HiveConf hcatConf = createHiveConf(metaStoreURI, hiveMetaStorePrincipal);
            LOG.debug("renewing delegation tokens for principal={}", hiveMetaStorePrincipal);
            hcatClient = HCatClient.create(hcatConf);
            Long expiryTime = hcatClient.renewDelegationToken(tokenStr);
            LOG.info("Renewed delegation token. new expiryTime={}", expiryTime);
            return expiryTime;
        } catch (Exception ex) {
            throw new RuntimeException("Failed to renew delegation tokens.", ex);
        } finally {
            if (hcatClient != null) {
                try {
                    hcatClient.close();
                } catch (HCatException e) {
                    LOG.error("Exception while closing HCatClient", e);
                }
            }
        }
    } else {
        throw new RuntimeException("Security is not enabled for Hadoop");
    }
}

From source file: org.apache.storm.hive.security.AutoHiveNimbus.java

License: Apache License

@SuppressWarnings("unchecked")
protected byte[] getHadoopCredentials(Map<String, Object> conf, final Configuration configuration,
        final String topologySubmitterUser) {
    try {
        if (UserGroupInformation.isSecurityEnabled()) {
            String hiveMetaStoreURI = getMetaStoreURI(configuration);
            String hiveMetaStorePrincipal = getMetaStorePrincipal(configuration);
            HiveConf hcatConf = createHiveConf(hiveMetaStoreURI, hiveMetaStorePrincipal);
            login(configuration);

            UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
            UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(topologySubmitterUser,
                    currentUser);
            try {
                Token<DelegationTokenIdentifier> delegationTokenId = getDelegationToken(hcatConf,
                        hiveMetaStorePrincipal, topologySubmitterUser);
                proxyUser.addToken(delegationTokenId);
                LOG.info("Obtained Hive tokens, adding to user credentials.");

                Credentials credential = proxyUser.getCredentials();
                ByteArrayOutputStream bao = new ByteArrayOutputStream();
                ObjectOutputStream out = new ObjectOutputStream(bao);
                credential.write(out);
                out.flush();
                out.close();
                return bao.toByteArray();
            } catch (Exception ex) {
                LOG.debug(" Exception" + ex.getMessage());
                throw ex;
            }
        } else {
            throw new RuntimeException("Security is not enabled for Hadoop");
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to get delegation tokens.", ex);
    }
}

From source file: org.apache.tajo.master.TaskRunnerLauncherImpl.java

License: Apache License

protected ContainerManager getCMProxy(ContainerId containerID, final String containerManagerBindAddr,
        ContainerToken containerToken) throws IOException {
    String[] hosts = containerManagerBindAddr.split(":");
    final InetSocketAddress cmAddr = new InetSocketAddress(hosts[0], Integer.parseInt(hosts[1]));
    UserGroupInformation user = UserGroupInformation.getCurrentUser();

    if (UserGroupInformation.isSecurityEnabled()) {
        Token<ContainerTokenIdentifier> token = ProtoUtils.convertFromProtoFormat(containerToken, cmAddr);
        // the user in createRemoteUser in this context has to be ContainerID
        user = UserGroupInformation.createRemoteUser(containerID.toString());
        user.addToken(token);
    }

    ContainerManager proxy = user.doAs(new PrivilegedAction<ContainerManager>() {
        @Override
        public ContainerManager run() {
            return (ContainerManager) yarnRPC.getProxy(ContainerManager.class, cmAddr, getConfig());
        }
    });
    return proxy;
}

From source file: org.apache.tajo.master.YarnContainerProxy.java

License: Apache License

protected ContainerManagementProtocol getCMProxy(ContainerId containerID, final String containerManagerBindAddr,
        Token containerToken) throws IOException {
    String[] hosts = containerManagerBindAddr.split(":");
    final InetSocketAddress cmAddr = new InetSocketAddress(hosts[0], Integer.parseInt(hosts[1]));
    UserGroupInformation user = UserGroupInformation.getCurrentUser();

    if (UserGroupInformation.isSecurityEnabled()) {
        org.apache.hadoop.security.token.Token<ContainerTokenIdentifier> token = ConverterUtils
                .convertFromYarn(containerToken, cmAddr);
        // the user in createRemoteUser in this context has to be ContainerID
        user = UserGroupInformation.createRemoteUser(containerID.toString());
        user.addToken(token);
    }

    ContainerManagementProtocol proxy = user.doAs(new PrivilegedAction<ContainerManagementProtocol>() {
        @Override
        public ContainerManagementProtocol run() {
            return (ContainerManagementProtocol) yarnRPC.getProxy(ContainerManagementProtocol.class, cmAddr,
                    conf);
        }
    });

    return proxy;
}

From source file: org.apache.tajo.webapp.HttpServer.java

License: Apache License

/**
 * Add an internal servlet in the server, specifying whether or not to
 * protect with Kerberos authentication. 
 * Note: This method is to be used for adding servlets that facilitate
 * internal communication and not for user facing functionality. For
 * servlets added using this method, filters (except internal Kerberized
 * filters) are not enabled.
 * 
 * @param name The name of the servlet (can be passed as null)
 * @param pathSpec The path spec for the servlet
 * @param clazz The servlet class
 * @param requireAuth Whether to protect the servlet with Kerberos authentication
 */
public void addInternalServlet(String name, String pathSpec, Class<? extends HttpServlet> clazz,
        boolean requireAuth) {
    ServletHolder holder = new ServletHolder(clazz);
    if (name != null) {
        holder.setName(name);
    }
    webAppContext.addServlet(holder, pathSpec);

    if (requireAuth && UserGroupInformation.isSecurityEnabled()) {
        LOG.info("Adding Kerberos filter to " + name);
        ServletHandler handler = webAppContext.getServletHandler();
        FilterMapping fmap = new FilterMapping();
        fmap.setPathSpec(pathSpec);
        fmap.setFilterName("krb5Filter");
        fmap.setDispatches(Handler.ALL);
        handler.addFilterMapping(fmap);
    }
}

From source file: org.apache.tajo.yarn.command.LaunchCommand.java

License: Apache License

private void setupSecurityTokens(ContainerLaunchContext amContainer, FileSystem fs) throws IOException {
    if (UserGroupInformation.isSecurityEnabled()) {
        Credentials credentials = new Credentials();
        String tokenRenewer = conf.get(YarnConfiguration.RM_PRINCIPAL);
        if (tokenRenewer == null || tokenRenewer.length() == 0) {
            throw new IOException("Can't get Master Kerberos principal for the RM to use as renewer");
        }

        // For now, only getting tokens for the default file-system.
        final Token<?> tokens[] = fs.addDelegationTokens(tokenRenewer, credentials);
        if (tokens != null) {
            for (Token<?> token : tokens) {
                LOG.info("Got dt for " + fs.getUri() + "; " + token);
            }
        }
        DataOutputBuffer dob = new DataOutputBuffer();
        credentials.writeTokenStorageToStream(dob);
        ByteBuffer fsTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        amContainer.setTokens(fsTokens);
    }
}

From source file: org.apache.tez.common.security.TokenCache.java

License: Apache License

/**
 * Convenience method to obtain delegation tokens from namenodes 
 * corresponding to the paths passed.
 * @param credentials
 * @param ps array of paths
 * @param conf configuration
 * @throws IOException
 */
public static void obtainTokensForFileSystems(Credentials credentials, Path[] ps, Configuration conf)
        throws IOException {
    if (!UserGroupInformation.isSecurityEnabled()) {
        return;
    }
    obtainTokensForFileSystemsInternal(credentials, ps, conf);
}

From source file: org.apache.tez.dag.api.client.DAGClientImpl.java

License: Apache License

public DAGClientImpl(ApplicationId appId, String dagId, TezConfiguration conf,
        @Nullable FrameworkClient frameworkClient) {
    this.appId = appId;
    this.dagId = dagId;
    this.conf = conf;
    if (frameworkClient != null
            && conf.getBoolean(TezConfiguration.TEZ_LOCAL_MODE, TezConfiguration.TEZ_LOCAL_MODE_DEFAULT)) {
        this.frameworkClient = frameworkClient;
    } else {
        this.frameworkClient = FrameworkClient.createFrameworkClient(conf);
        this.frameworkClient.init(conf, new YarnConfiguration(conf));
        this.frameworkClient.start();
    }
    isATSEnabled = conf.get(TezConfiguration.TEZ_HISTORY_LOGGING_SERVICE_CLASS, "")
            .equals("org.apache.tez.dag.history.logging.ats.ATSHistoryLoggingService");

    if (UserGroupInformation.isSecurityEnabled()) {
        //TODO: enable ATS integration in kerberos secured cluster - see TEZ-1529
        isATSEnabled = false;
    }

    realClient = new DAGClientRPCImpl(appId, dagId, conf, this.frameworkClient);
}