Example usage for org.apache.hadoop.security.ssl KeyStoreTestUtil getClasspathDir

List of usage examples for org.apache.hadoop.security.ssl KeyStoreTestUtil getClasspathDir

Introduction

On this page you can find an example usage for org.apache.hadoop.security.ssl KeyStoreTestUtil getClasspathDir.

Prototype

public static String getClasspathDir(Class klass) throws Exception 

Source Link

Usage

From source file:com.github.sakserv.minicluster.impl.KdcLocalCluster.java

License:Apache License

/**
 * Builds the base Hadoop {@code Configuration} used to secure the local KDC-backed
 * mini cluster: switches authentication to Kerberos, enables Hadoop authorization,
 * generates SSL configuration files, and wires per-service principals and keytabs
 * (HDFS, HBase, YARN, MapReduce history, Oozie) obtained via
 * {@code getKrbPrincipalWithRealm}/{@code getKeytabForPrincipal}.
 *
 * @param username user granted wildcard proxy-user rights (hosts and groups)
 * @throws Exception if SSL keystores/config files cannot be created
 */
protected void prepareSecureConfiguration(String username) throws Exception {
    // Start from an empty Configuration so no default (insecure) settings leak in.
    baseConf = new Configuration(false);
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, baseConf);
    baseConf.setBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
    //baseConf.set(CommonConfigurationKeys.HADOOP_RPC_PROTECTION, "authentication");

    // Generate ssl-client.xml / ssl-server.xml next to this class on the classpath.
    String sslConfigDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
    KeyStoreTestUtil.setupSSLConfig(baseDir, sslConfigDir, baseConf, false);

    // Wildcard proxy-user rights for the supplied user.
    baseConf.set("hadoop.proxyuser." + username + ".hosts", "*");
    baseConf.set("hadoop.proxyuser." + username + ".groups", "*");

    // HTTP (SPNEGO) principal, shared by the HDFS and HBase web endpoints below.
    String spnegoPrincipal = getKrbPrincipalWithRealm(SPNEGO_USER_NAME);
    baseConf.set("hadoop.proxyuser." + SPNEGO_USER_NAME + ".groups", "*");
    baseConf.set("hadoop.proxyuser." + SPNEGO_USER_NAME + ".hosts", "*");

    // Oozie
    String ooziePrincipal = getKrbPrincipalWithRealm(OOZIE_USER_NAME);
    baseConf.set("hadoop.proxyuser." + OOZIE_USER_NAME + ".hosts", "*");
    baseConf.set("hadoop.proxyuser." + OOZIE_USER_NAME + ".groups", "*");
    baseConf.set("hadoop.user.group.static.mapping.overrides", OOZIE_PROXIED_USER_NAME + "=oozie");
    baseConf.set("oozie.service.HadoopAccessorService.keytab.file", getKeytabForPrincipal(OOZIE_USER_NAME));
    baseConf.set("oozie.service.HadoopAccessorService.kerberos.principal", ooziePrincipal);
    baseConf.setBoolean("oozie.service.HadoopAccessorService.kerberos.enabled", true);

    // HDFS: one principal/keytab shared by NameNode and DataNode; web auth via SPNEGO.
    String hdfsPrincipal = getKrbPrincipalWithRealm(HDFS_USER_NAME);
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, getKeytabForPrincipal(HDFS_USER_NAME));
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, getKeytabForPrincipal(HDFS_USER_NAME));
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY, getKeytabForPrincipal(SPNEGO_USER_NAME));
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    // Port 0 lets each HTTPS endpoint bind an ephemeral free port.
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

    // HBase: master and regionserver share one principal/keytab; REST uses SPNEGO.
    String hbasePrincipal = getKrbPrincipalWithRealm(HBASE_USER_NAME);
    baseConf.set("hbase.security.authentication", "kerberos");
    baseConf.setBoolean("hbase.security.authorization", true);
    baseConf.set("hbase.regionserver.kerberos.principal", hbasePrincipal);
    baseConf.set("hbase.regionserver.keytab.file", getKeytabForPrincipal(HBASE_USER_NAME));
    baseConf.set("hbase.master.kerberos.principal", hbasePrincipal);
    baseConf.set("hbase.master.keytab.file", getKeytabForPrincipal(HBASE_USER_NAME));
    baseConf.set("hbase.coprocessor.region.classes", "org.apache.hadoop.hbase.security.token.TokenProvider");
    baseConf.set("hbase.rest.authentication.kerberos.keytab", getKeytabForPrincipal(SPNEGO_USER_NAME));
    baseConf.set("hbase.rest.authentication.kerberos.principal", spnegoPrincipal);
    baseConf.set("hbase.rest.kerberos.principal", hbasePrincipal);
    baseConf.set("hadoop.proxyuser." + HBASE_USER_NAME + ".groups", "*");
    baseConf.set("hadoop.proxyuser." + HBASE_USER_NAME + ".hosts", "*");

    // Alternative coprocessor settings (intentionally disabled):
    //hbase.coprocessor.master.classes -> org.apache.hadoop.hbase.security.access.AccessController
    //hbase.coprocessor.region.classes -> org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.hadoop.hbase.security.access.AccessController

    // Storm (not configured here)
    //String stormPrincipal = getKrbPrincipalWithRealm(STORM_USER_NAME);

    // Yarn
    String yarnPrincipal = getKrbPrincipalWithRealm(YARN_USER_NAME);
    baseConf.set("yarn.resourcemanager.keytab", getKeytabForPrincipal(YARN_USER_NAME));
    baseConf.set("yarn.resourcemanager.principal", yarnPrincipal);
    baseConf.set("yarn.nodemanager.keytab", getKeytabForPrincipal(YARN_USER_NAME));
    baseConf.set("yarn.nodemanager.principal", yarnPrincipal);

    // Mapreduce (job history server)
    String mrv2Principal = getKrbPrincipalWithRealm(MRV2_USER_NAME);
    baseConf.set("mapreduce.jobhistory.keytab", getKeytabForPrincipal(MRV2_USER_NAME));
    baseConf.set("mapreduce.jobhistory.principal", mrv2Principal);
}

From source file:io.confluent.connect.hdfs.TestWithSecureMiniDFSCluster.java

License:Apache License

/**
 * Builds an {@code HdfsConfiguration} for a Kerberos-secured mini DFS cluster:
 * principals/keytabs for NameNode and DataNode, HTTPS-only web endpoints on
 * ephemeral ports, block access tokens, encrypted data transfer, and generated
 * SSL configuration files.
 *
 * @param dataTransferProtection value for {@code dfs.data.transfer.protection}
 *                               (e.g. "authentication", "integrity", "privacy")
 * @throws Exception if the SSL keystores/config files cannot be created
 */
private Configuration createSecureConfig(String dataTransferProtection) throws Exception {
    HdfsConfiguration secureConf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, secureConf);

    // NameNode and DataNode share the HDFS principal/keytab; web auth uses SPNEGO.
    secureConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    secureConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    secureConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    secureConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    secureConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);

    secureConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    secureConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);

    // HTTPS-only, on ephemeral ports so parallel test runs don't collide.
    secureConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    secureConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    secureConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

    secureConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    // See https://issues.apache.org/jira/browse/HDFS-7431
    secureConf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "true");

    // Generate ssl-client.xml / ssl-server.xml next to this class on the classpath.
    String keyStoreDirectory = baseDir.getAbsolutePath();
    String sslConfigDirectory = KeyStoreTestUtil.getClasspathDir(this.getClass());
    KeyStoreTestUtil.setupSSLConfig(keyStoreDirectory, sslConfigDirectory, secureConf, false);

    return secureConf;
}

From source file:org.apache.tez.test.TestSecureShuffle.java

License:Apache License

/**
 * Creates the keystore directory and generates the SSL configuration
 * (keystores and ssl-*.xml files) required by the test cluster.
 *
 * @throws Exception if the keystores or SSL config files cannot be created
 */
private static void setupKeyStores() throws Exception {
    keysStoresDir.mkdirs();
    String sslConfigDirectory = KeyStoreTestUtil.getClasspathDir(TestSecureShuffle.class);
    KeyStoreTestUtil.setupSSLConfig(keysStoresDir.getAbsolutePath(), sslConfigDirectory, conf, true);
}