Example usage for org.apache.hadoop.security.UserGroupInformation.setConfiguration

Introduction

On this page you can find example usage for org.apache.hadoop.security.UserGroupInformation.setConfiguration, collected from open-source Apache projects.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void setConfiguration(Configuration conf) 

Document

Set the static configuration for UGI.
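
Before the project examples, here is a minimal sketch of the typical call pattern, assuming a Kerberos principal and keytab are available. The class name UgiSetup and the helper kerberosLogin are illustrative, not taken from any of the examples below; the key point is that setConfiguration installs the configuration as UGI's static state, so it must run before any login or FileSystem access that would otherwise initialize UGI with defaults.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiSetup {
    /** Illustrative helper: enable Kerberos authentication, then log in from a keytab. */
    public static void kerberosLogin(String principal, String keytabPath) throws IOException {
        Configuration conf = new Configuration();
        // Switch Hadoop from the default "simple" authentication to Kerberos.
        conf.set("hadoop.security.authentication", "kerberos");
        // Install the configuration as UGI's static state; this must precede the login.
        UserGroupInformation.setConfiguration(conf);
        // Authenticate the process-wide login user from the given keytab.
        UserGroupInformation.loginUserFromKeytab(principal, keytabPath);
    }
}

Several of the usages below follow exactly this shape (Solr's HdfsDirectoryFactory, Sqoop's KerberosAuthenticationHandler); the others call setConfiguration and then work with the current or login user rather than performing an explicit keytab login.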

Usage

From source file: org.apache.slider.common.tools.SliderUtils.java

License: Apache License

/**
 * Turn on security. This is setup to only run once.
 * @param conf configuration to build up security
 * @return true if security was initialized in this call
 * @throws IOException IO/Net problems
 * @throws BadConfigException the configuration and system state are inconsistent
 */
public static boolean initProcessSecurity(Configuration conf) throws IOException, BadConfigException {

    if (!processSecurityAlreadyInitialized.compareAndSet(false, true)) {
        //security is already inited
        return false;
    }

    log.info("JVM initialized into secure mode with kerberos realm {}", SliderUtils.getKerberosRealm());
    //this gets UGI to reset its previous world view (i.e. simple auth)
    //security
    log.debug("java.security.krb5.realm={}", System.getProperty(JAVA_SECURITY_KRB5_REALM, ""));
    log.debug("java.security.krb5.kdc={}", System.getProperty(JAVA_SECURITY_KRB5_KDC, ""));
    log.debug("hadoop.security.authentication={}",
            conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION));
    log.debug("hadoop.security.authorization={}",
            conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION));
    /*    SecurityUtil.setAuthenticationMethod(
            UserGroupInformation.AuthenticationMethod.KERBEROS, conf);*/
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation authUser = UserGroupInformation.getCurrentUser();
    log.debug("Authenticating as " + authUser.toString());
    log.debug("Login user is {}", UserGroupInformation.getLoginUser());
    if (!UserGroupInformation.isSecurityEnabled()) {
        throw new BadConfigException("Although secure mode is enabled,"
                + "the application has already set up its user as an insecure entity %s", authUser);
    }
    if (authUser.getAuthenticationMethod() == UserGroupInformation.AuthenticationMethod.SIMPLE) {
        throw new BadConfigException("Auth User is not Kerberized %s"
                + " -security has already been set up with the wrong authentication method. "
                + "This can occur if a file system has already been created prior to the loading of "
                + "the security configuration.", authUser);

    }

    SliderUtils.verifyPrincipalSet(conf, YarnConfiguration.RM_PRINCIPAL);
    SliderUtils.verifyPrincipalSet(conf, DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY);
    return true;
}

From source file: org.apache.slider.server.appmaster.SliderAppMaster.java

License: Apache License

@Override //AbstractService
public synchronized void serviceInit(Configuration conf) throws Exception {
    // slider client if found

    Configuration customConf = SliderUtils.loadClientConfigurationResource();
    // Load in the server configuration - if it is actually on the Classpath
    Configuration serverConf = ConfigHelper.loadFromResource(SERVER_RESOURCE);
    ConfigHelper.mergeConfigurations(customConf, serverConf, SERVER_RESOURCE, true);
    serviceArgs.applyDefinitions(customConf);
    serviceArgs.applyFileSystemBinding(customConf);
    // conf now contains all customizations

    AbstractActionArgs action = serviceArgs.getCoreAction();
    SliderAMCreateAction createAction = (SliderAMCreateAction) action;

    // sort out the location of the AM
    String rmAddress = createAction.getRmAddress();
    if (rmAddress != null) {
        log.debug("Setting rm address from the command line: {}", rmAddress);
        SliderUtils.setRmSchedulerAddress(customConf, rmAddress);
    }

    log.info("AM configuration:\n{}", ConfigHelper.dumpConfigToString(customConf));

    ConfigHelper.mergeConfigurations(conf, customConf, CLIENT_RESOURCE, true);
    //init security with our conf
    if (SliderUtils.isHadoopClusterSecure(conf)) {
        log.info("Secure mode with kerberos realm {}", SliderUtils.getKerberosRealm());
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        log.debug("Authenticating as {}", ugi);
        SliderUtils.verifyPrincipalSet(conf, DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY);
    } else {
        log.info("Cluster is insecure");
    }
    log.info("Login user is {}", UserGroupInformation.getLoginUser());

    //look at settings of Hadoop Auth, to pick up a problem seen once
    checkAndWarnForAuthTokenProblems();

    // validate server env
    boolean dependencyChecks = !conf.getBoolean(KEY_SLIDER_AM_DEPENDENCY_CHECKS_DISABLED, false);
    SliderUtils.validateSliderServerEnvironment(log, dependencyChecks);

    executorService = new WorkflowExecutorService<ExecutorService>("AmExecutor",
            Executors.newFixedThreadPool(2, new ServiceThreadFactory("AmExecutor", true)));
    addService(executorService);

    addService(actionQueues);
    //init all child services
    super.serviceInit(conf);
}

From source file: org.apache.solr.core.HdfsDirectoryFactory.java

License: Apache License

private void initKerberos() {
    String keytabFile = params.get(KERBEROS_KEYTAB, "").trim();
    if (keytabFile.length() == 0) {
        throw new IllegalArgumentException(
                KERBEROS_KEYTAB + " required because " + KERBEROS_ENABLED + " set to true");
    }
    String principal = params.get(KERBEROS_PRINCIPAL, "");
    if (principal.length() == 0) {
        throw new IllegalArgumentException(
                KERBEROS_PRINCIPAL + " required because " + KERBEROS_ENABLED + " set to true");
    }
    synchronized (HdfsDirectoryFactory.class) {
        if (kerberosInit == null) {
            kerberosInit = Boolean.TRUE;
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "kerberos");
            UserGroupInformation.setConfiguration(conf);
            LOG.info("Attempting to acquire kerberos ticket with keytab: {}, principal: {} ", keytabFile,
                    principal);
            try {
                UserGroupInformation.loginUserFromKeytab(principal, keytabFile);
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            }
            LOG.info("Got Kerberos ticket");
        }
    }
}

From source file: org.apache.sqoop.security.Authentication.KerberosAuthenticationHandler.java

License: Apache License

public void secureLogin() {
    MapContext mapContext = SqoopConfiguration.getInstance().getContext();
    String keytab = mapContext.getString(SecurityConstants.AUTHENTICATION_KERBEROS_KEYTAB).trim();
    if (keytab.length() == 0) {
        throw new SqoopException(SecurityError.AUTH_0001, SecurityConstants.AUTHENTICATION_KERBEROS_KEYTAB);
    }
    keytabFile = keytab;

    String principal = mapContext.getString(SecurityConstants.AUTHENTICATION_KERBEROS_PRINCIPAL).trim();
    if (principal.length() == 0) {
        throw new SqoopException(SecurityError.AUTH_0002, SecurityConstants.AUTHENTICATION_KERBEROS_PRINCIPAL);
    }
    keytabPrincipal = principal;

    Configuration conf = new Configuration();
    conf.set(get_hadoop_security_authentication(), SecurityConstants.TYPE.KERBEROS.name());
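    // make the Kerberos authentication setting visible to UGI before the keytab login below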
    UserGroupInformation.setConfiguration(conf);
    try {
        String hostPrincipal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
        UserGroupInformation.loginUserFromKeytab(hostPrincipal, keytab);
    } catch (IOException ex) {
        throw new SqoopException(SecurityError.AUTH_0003, ex);
    }
    LOG.info("Using Kerberos authentication, principal [" + principal + "] keytab [" + keytab + "]");
}

From source file: org.apache.sqoop.security.Authentication.SimpleAuthenticationHandler.java

License: Apache License

public void secureLogin() {
    //no secureLogin, just set configurations
    Configuration conf = new Configuration();
    conf.set(get_hadoop_security_authentication(), SecurityConstants.TYPE.SIMPLE.name());
    UserGroupInformation.setConfiguration(conf);
    LOG.info("Using simple/pseudo authentication, principal [" + System.getProperty("user.name") + "]");
}

From source file: org.apache.storm.common.AbstractAutoCreds.java

License: Apache License

protected void fillHadoopConfiguration(Map<String, Object> topoConf, String configKey,
        Configuration configuration) {
    Map<String, Object> config = (Map<String, Object>) topoConf.get(configKey);
    LOG.info("TopoConf {}, got config {}, for configKey {}", topoConf, config, configKey);
    if (config != null) {
        List<String> resourcesToLoad = new ArrayList<>();
        for (Map.Entry<String, Object> entry : config.entrySet()) {
            if (entry.getKey().equals(CONFIG_KEY_RESOURCES)) {
                resourcesToLoad.addAll((List<String>) entry.getValue());
            } else {
                configuration.set(entry.getKey(), String.valueOf(entry.getValue()));
            }
        }
        LOG.info("Resources to load {}", resourcesToLoad);
        // add configs from resources like hdfs-site.xml
        for (String pathStr : resourcesToLoad) {
            configuration.addResource(new Path(Paths.get(pathStr).toUri()));
        }
    }
    LOG.info("Initializing UGI with config {}", configuration);
    UserGroupInformation.setConfiguration(configuration);
}

From source file: org.apache.storm.common.AbstractHadoopNimbusPluginAutoCreds.java

License: Apache License

protected void fillHadoopConfiguration(Map topologyConf, String configKey, Configuration configuration) {
    Map<String, Object> config = (Map<String, Object>) topologyConf.get(configKey);
    LOG.info("TopoConf {}, got config {}, for configKey {}", ConfigUtils.maskPasswords(topologyConf),
            ConfigUtils.maskPasswords(config), configKey);
    if (config != null) {
        List<String> resourcesToLoad = new ArrayList<>();
        for (Map.Entry<String, Object> entry : config.entrySet()) {
            if (entry.getKey().equals(CONFIG_KEY_RESOURCES)) {
                resourcesToLoad.addAll((List<String>) entry.getValue());
            } else {
                configuration.set(entry.getKey(), String.valueOf(entry.getValue()));
            }
        }
        LOG.info("Resources to load {}", resourcesToLoad);
        // add configs from resources like hdfs-site.xml
        for (String pathStr : resourcesToLoad) {
            configuration.addResource(new Path(Paths.get(pathStr).toUri()));
        }
    }
    LOG.info("Initializing UGI with config {}", configuration);
    UserGroupInformation.setConfiguration(configuration);
}

From source file: org.apache.storm.hbase.security.AutoHBase.java

License: Apache License

@SuppressWarnings("unchecked")
protected byte[] getHadoopCredentials(Map conf) {
    try {
        final Configuration hbaseConf = HBaseConfiguration.create();
        if (UserGroupInformation.isSecurityEnabled()) {
            final String topologySubmitterUser = (String) conf.get(Config.TOPOLOGY_SUBMITTER_PRINCIPAL);

            UserProvider provider = UserProvider.instantiate(hbaseConf);

            hbaseConf.set(HBASE_KEYTAB_FILE_KEY, hbaseKeytab);
            hbaseConf.set(HBASE_PRINCIPAL_KEY, hbasePrincipal);
            provider.login(HBASE_KEYTAB_FILE_KEY, HBASE_PRINCIPAL_KEY,
                    InetAddress.getLocalHost().getCanonicalHostName());

            LOG.info("Logged into Hbase as principal = " + conf.get(HBASE_PRINCIPAL_KEY));
            UserGroupInformation.setConfiguration(hbaseConf);

            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

            final UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(topologySubmitterUser,
                    ugi);

            User user = User.create(ugi);

            if (user.isHBaseSecurityEnabled(hbaseConf)) {
                TokenUtil.obtainAndCacheToken(hbaseConf, proxyUser);

                LOG.info("Obtained HBase tokens, adding to user credentials.");

                Credentials credential = proxyUser.getCredentials();
                ByteArrayOutputStream bao = new ByteArrayOutputStream();
                ObjectOutputStream out = new ObjectOutputStream(bao);
                credential.write(out);
                out.flush();
                out.close();
                return bao.toByteArray();
            } else {
                throw new RuntimeException("Security is not enabled for HBase.");
            }
        } else {
            throw new RuntimeException("Security is not enabled for Hadoop");
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to get delegation tokens.", ex);
    }
}

From source file: org.apache.tajo.master.querymaster.QueryMasterRunner.java

License: Apache License

public static void main(String[] args) throws Exception {
    LOG.info("QueryMasterRunner started");

    final TajoConf conf = new TajoConf();
    conf.addResource(new Path(TajoConstants.SYSTEM_CONF_FILENAME));

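    // apply the loaded Tajo system configuration to UGI's static state before starting the query master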
    UserGroupInformation.setConfiguration(conf);

    final QueryId queryId = TajoIdUtils.parseQueryId(args[0]);
    final String queryMasterManagerAddr = args[1];

    LOG.info("Received QueryId:" + queryId);

    QueryMasterRunner queryMasterRunner = new QueryMasterRunner(queryId, queryMasterManagerAddr);
    queryMasterRunner.init(conf);
    queryMasterRunner.start();

    synchronized (queryId) {
        queryId.wait();
    }

    System.exit(0);
}

From source file: org.apache.tajo.worker.ExecutionBlockContext.java

License: Apache License

public void init() throws Throwable {

    LOG.info("Tajo Root Dir: " + systemConf.getVar(TajoConf.ConfVars.ROOT_DIR));
    LOG.info("Worker Local Dir: " + systemConf.getVar(TajoConf.ConfVars.WORKER_TEMPORAL_DIR));

    this.qmMasterAddr = NetUtils.createSocketAddr(queryMaster.getHost(), queryMaster.getQueryMasterPort());
    LOG.info("QueryMaster Address:" + qmMasterAddr);

    UserGroupInformation.setConfiguration(systemConf);
    // TODO - 'load credential' should be implemented
    // Getting taskOwner
    UserGroupInformation taskOwner = UserGroupInformation
            .createRemoteUser(systemConf.getVar(TajoConf.ConfVars.USERNAME));

    // initialize DFS and LocalFileSystems
    this.taskOwner = taskOwner;
    this.reporter.startReporter();

    // resource initialization
    try {
        this.resource.initialize(queryContext, plan);
    } catch (Throwable e) {
        try {
            NettyClientBase client = getQueryMasterConnection();
            try {
                QueryMasterProtocol.QueryMasterProtocolService.Interface stub = client.getStub();
                stub.killQuery(null, executionBlockId.getQueryId().getProto(), NullCallback.get());
            } finally {
                connPool.releaseConnection(client);
            }
        } catch (Throwable t) {
            //ignore
        }
        throw e;
    }
}