Example usage for org.apache.hadoop.minikdc MiniKdc DEBUG

List of usage examples for org.apache.hadoop.minikdc MiniKdc DEBUG

Introduction

On this page you can find example usages of the org.apache.hadoop.minikdc.MiniKdc.DEBUG constant.

Prototype

public static final String DEBUG


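Before the full examples below, here is a minimal, self-contained sketch of how the constant is typically used. The class name, scratch directory, and printed message are illustrative assumptions, not taken from any of the sources on this page.

import java.io.File;
import java.nio.file.Files;
import java.util.Properties;

import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcDebugExample {
    public static void main(String[] args) throws Exception {
        // Build the default MiniKdc configuration and enable debug output.
        // The value must be a String so MiniKdc can read it via Properties.getProperty().
        Properties conf = MiniKdc.createConf();
        conf.setProperty(MiniKdc.DEBUG, "true");

        // Start the KDC in a scratch directory and stop it when done.
        File workDir = Files.createTempDirectory("minikdc").toFile();
        MiniKdc kdc = new MiniKdc(conf, workDir);
        kdc.start();
        try {
            System.out.println("KDC realm: " + kdc.getRealm());
        } finally {
            kdc.stop();
        }
    }
}
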
Usage

From source file: org.apache.atlas.web.security.BaseSecurityTest.java

License: Apache License

protected File startKDC() throws Exception {
    File target = Files.createTempDirectory("sectest").toFile();
    File kdcWorkDir = new File(target, "kdc");
    // Create the default KDC configuration with MiniKdc debug output enabled.
    Properties kdcConf = MiniKdc.createConf();
    kdcConf.setProperty(MiniKdc.DEBUG, "true");
    kdc = new MiniKdc(kdcConf, kdcWorkDir);
    kdc.start();

    Assert.assertNotNull(kdc.getRealm());
    return kdcWorkDir;
}

From source file: org.apache.flink.test.util.SecureTestEnvironment.java

License: Apache License

public static void prepare(TemporaryFolder tempFolder) {

    try {
        File baseDirForSecureRun = tempFolder.newFolder();
        LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);

        String hostName = "localhost";
        Properties kdcConf = MiniKdc.createConf();
        if (LOG.isDebugEnabled()) {
            kdcConf.setProperty(MiniKdc.DEBUG, "true");
        }
        kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
        kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
        kdc.start();
        LOG.info("Started Mini KDC");

        File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
        testKeytab = keytabFile.getAbsolutePath();
        testZkServerPrincipal = "zookeeper/127.0.0.1";
        testZkClientPrincipal = "zk-client/127.0.0.1";
        testKafkaServerPrincipal = "kafka/" + hostName;
        hadoopServicePrincipal = "hadoop/" + hostName;
        testPrincipal = "client/" + hostName;

        kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal, hadoopServicePrincipal,
                testZkClientPrincipal, testKafkaServerPrincipal);

        testPrincipal = testPrincipal + "@" + kdc.getRealm();
        testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
        testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
        testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
        hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();

        LOG.info("-------------------------------------------------------------------");
        LOG.info("Test Principal: {}", testPrincipal);
        LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
        LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
        LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
        LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
        LOG.info("Test Keytab: {}", testKeytab);
        LOG.info("-------------------------------------------------------------------");

        //The security context is established to allow non-Hadoop applications that require
        //JAAS-based SASL/Kerberos authentication to work. For Hadoop-specific applications,
        //however, the context can be reinitialized with the Hadoop configuration by calling
        //ctx.setHadoopConfiguration() so that the UGI implementation works properly.
        //See the YARN test case module for reference.
        Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
        flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
        SecurityUtils.SecurityConfiguration ctx = new SecurityUtils.SecurityConfiguration(flinkConfig);
        TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());

        populateJavaPropertyVariables();

    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while preparing secure environment.", e);
    }

}

From source file: org.apache.phoenix.jdbc.SecureUserConnectionsIT.java

License: Apache License

@BeforeClass
public static void setupKdc() throws Exception {
    ensureIsEmptyDirectory(KDC_DIR);
    ensureIsEmptyDirectory(KEYTAB_DIR);
    // Create and start the KDC. MiniKDC appears to have a race condition in how it does
    // port allocation (with apache-ds). See PHOENIX-3287.
    boolean started = false;
    for (int i = 0; !started && i < KDC_START_ATTEMPTS; i++) {
        Properties kdcConf = MiniKdc.createConf();
        // Set the flag as a String: Properties.getProperty() ignores non-String
        // values, so a Boolean put here would not be seen by MiniKdc.
        kdcConf.setProperty(MiniKdc.DEBUG, "true");
        KDC = new MiniKdc(kdcConf, KDC_DIR);
        try {
            KDC.start();
            started = true;
        } catch (Exception e) {
            LOG.warn("PHOENIX-3287: Failed to start KDC, retrying...", e);
        }
    }
    assertTrue("The embedded KDC failed to start successfully after " + KDC_START_ATTEMPTS + " attempts.",
            started);

    createUsers(NUM_USERS);
    createServiceUsers(NUM_USERS);

    final Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
    conf.setBoolean(User.HBASE_SECURITY_AUTHORIZATION_CONF_KEY, true);
    UserGroupInformation.setConfiguration(conf);

    // Clear the cached singletons so we can inject our own.
    InstanceResolver.clearSingletons();
    // Make sure the ConnectionInfo doesn't try to pull a default Configuration
    InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {
        @Override
        public Configuration getConfiguration() {
            return conf;
        }

        @Override
        public Configuration getConfiguration(Configuration confToClone) {
            Configuration copy = new Configuration(conf);
            copy.addResource(confToClone);
            return copy;
        }
    });
    updateDefaultRealm();
}
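
These setup methods are normally paired with a teardown that stops the embedded KDC once the tests finish. Here is a minimal sketch for the Phoenix example above (the @AfterClass method name is an assumption, not from the source):

@AfterClass
public static void stopKdc() {
    // Shut down the embedded KDC so its port and worker threads are released.
    if (KDC != null) {
        KDC.stop();
    }
}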