Example usage for org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm()

Introduction

On this page you can find example usages of org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(), drawn from open source projects.

Prototype

public static String getDefaultRealm() throws ClassNotFoundException, NoSuchMethodException,
            IllegalArgumentException, IllegalAccessException, InvocationTargetException 
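
A minimal, self-contained sketch of calling the method (the class name DefaultRealmExample is illustrative, and hadoop-auth is assumed to be on the classpath). The realm is resolved reflectively from the JVM's Kerberos configuration, which is why the signature declares several reflection-related checked exceptions:

import org.apache.hadoop.security.authentication.util.KerberosUtil;

public class DefaultRealmExample {
    public static void main(String[] args) {
        try {
            // Resolves the realm from the JVM's Kerberos config (e.g. krb5.conf).
            String realm = KerberosUtil.getDefaultRealm();
            System.out.println("Default Kerberos realm: " + realm);
        } catch (Exception e) {
            // Reflection failures or a missing/invalid Kerberos configuration
            // surface here; callers typically fall back to an explicit realm.
            System.err.println("Could not determine default realm: " + e);
        }
    }
}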

Usage

From source file: com.github.sakserv.minicluster.impl.KdcLocalCluster.java

License: Apache License
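
This helper forces the JVM's Kerberos configuration to reload (handling both the Oracle/OpenJDK and IBM config classes), then uses reflection to overwrite the cached defaultRealm field in both Hadoop's and ZooKeeper's KerberosName classes with the value returned by KerberosUtil.getDefaultRealm().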

protected void refreshDefaultRealm() throws Exception {
    // Config is statically initialized at this point. But the above configuration results in a different
    // initialization which causes the tests to fail. So the following two changes are required.

    // (1) Refresh Kerberos config.
    // refresh the config
    Class<?> configClass;
    if (System.getProperty("java.vendor").contains("IBM")) {
        configClass = Class.forName("com.ibm.security.krb5.internal.Config");
    } else {
        configClass = Class.forName("sun.security.krb5.Config");
    }
    Method refreshMethod = configClass.getMethod("refresh", new Class[0]);
    refreshMethod.invoke(configClass, new Object[0]);
    // (2) Reset the default realm.
    try {
        Class<?> hadoopAuthClass = Class.forName("org.apache.hadoop.security.authentication.util.KerberosName");
        Field defaultRealm = hadoopAuthClass.getDeclaredField("defaultRealm");
        defaultRealm.setAccessible(true);
        defaultRealm.set(null, KerberosUtil.getDefaultRealm());
        LOG.info("HADOOP: Using default realm " + KerberosUtil.getDefaultRealm());
    } catch (ClassNotFoundException e) {
        // Don't care
        LOG.info(
                "Class org.apache.hadoop.security.authentication.util.KerberosName not found, Kerberos default realm not updated");
    }

    try {
        Class<?> zookeeperAuthClass = Class.forName("org.apache.zookeeper.server.auth.KerberosName");
        Field defaultRealm = zookeeperAuthClass.getDeclaredField("defaultRealm");
        defaultRealm.setAccessible(true);
        defaultRealm.set(null, KerberosUtil.getDefaultRealm());
        LOG.info("ZOOKEEPER: Using default realm " + KerberosUtil.getDefaultRealm());
    } catch (ClassNotFoundException e) {
        // Don't care
        LOG.info(
                "Class org.apache.zookeeper.server.auth.KerberosName not found, Kerberos default realm not updated");
    }
}

From source file: com.streamsets.pipeline.stage.destination.hbase.HBaseTarget.java

License: Apache License
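
When Kerberos authentication is enabled, this HBase destination fills in default hbase/_HOST principals for the HBase master and region servers using KerberosUtil.getDefaultRealm(), raising a config issue if the realm cannot be determined, and then validates the resulting login UGI.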

private void validateSecurityConfigs(List<ConfigIssue> issues) {
    try {
        if (kerberosAuth) {
            hbaseConf.set(User.HBASE_SECURITY_CONF_KEY,
                    UserGroupInformation.AuthenticationMethod.KERBEROS.name());
            hbaseConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.KERBEROS.name());
            if (hbaseConf.get(MASTER_KERBEROS_PRINCIPAL) == null) {
                try {
                    hbaseConf.set(MASTER_KERBEROS_PRINCIPAL, "hbase/_HOST@" + KerberosUtil.getDefaultRealm());
                } catch (Exception e) {
                    issues.add(getContext().createConfigIssue(Groups.HBASE.name(), "masterPrincipal",
                            Errors.HBASE_22));
                }
            }
            if (hbaseConf.get(REGIONSERVER_KERBEROS_PRINCIPAL) == null) {
                try {
                    hbaseConf.set(REGIONSERVER_KERBEROS_PRINCIPAL,
                            "hbase/_HOST@" + KerberosUtil.getDefaultRealm());
                } catch (Exception e) {
                    issues.add(getContext().createConfigIssue(Groups.HBASE.name(), "regionServerPrincipal",
                            Errors.HBASE_23));
                }
            }
        }

        UserGroupInformation.setConfiguration(hbaseConf);
        Subject subject = Subject.getSubject(AccessController.getContext());
        if (UserGroupInformation.isSecurityEnabled()) {
            loginUgi = UserGroupInformation.getUGIFromSubject(subject);
        } else {
            UserGroupInformation.loginUserFromSubject(subject);
            loginUgi = UserGroupInformation.getLoginUser();
        }
        LOG.info("Subject = {}, Principals = {}, Login UGI = {}", subject,
                subject == null ? "null" : subject.getPrincipals(), loginUgi);
        StringBuilder logMessage = new StringBuilder();
        if (kerberosAuth) {
            logMessage.append("Using Kerberos");
            if (loginUgi.getAuthenticationMethod() != UserGroupInformation.AuthenticationMethod.KERBEROS) {
                issues.add(getContext().createConfigIssue(Groups.HBASE.name(), "kerberosAuth", Errors.HBASE_16,
                        loginUgi.getAuthenticationMethod()));
            }
        } else {
            logMessage.append("Using Simple");
            hbaseConf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                    UserGroupInformation.AuthenticationMethod.SIMPLE.name());
        }
        LOG.info("Authentication Config: " + logMessage);
    } catch (Exception ex) {
        LOG.info("Error validating security configuration: " + ex, ex);
        issues.add(
                getContext().createConfigIssue(Groups.HBASE.name(), null, Errors.HBASE_17, ex.toString(), ex));
    }
}

From source file: com.streamsets.pipeline.stage.destination.hdfs.HdfsTarget.java

License: Apache License
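
The HDFS destination builds its Hadoop Configuration here; with Kerberos enabled it derives a default NameNode principal of the form hdfs/_HOST@<realm> from KerberosUtil.getDefaultRealm(), and only reports a config issue when the user has not supplied the principal explicitly.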

Configuration getHadoopConfiguration(List<ConfigIssue> issues) {
    Configuration conf = new Configuration();
    conf.setClass("fs.file.impl", RawLocalFileSystem.class, FileSystem.class);
    if (hdfsKerberos) {
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                UserGroupInformation.AuthenticationMethod.KERBEROS.name());
        try {
            conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "hdfs/_HOST@" + KerberosUtil.getDefaultRealm());
        } catch (Exception ex) {
            if (!hdfsConfigs.containsKey(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), null, Errors.HADOOPFS_28,
                        ex.toString()));
            }
        }
    }
    if (hadoopConfDir != null && !hadoopConfDir.isEmpty()) {
        File hadoopConfigDir = new File(hadoopConfDir);
        if (getContext().isClusterMode() && hadoopConfigDir.isAbsolute()) {
            //Do not allow absolute hadoop config directory in cluster mode
            issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hadoopConfDir",
                    Errors.HADOOPFS_45, hadoopConfDir));
        } else {
            if (!hadoopConfigDir.isAbsolute()) {
                hadoopConfigDir = new File(getContext().getResourcesDirectory(), hadoopConfDir)
                        .getAbsoluteFile();
            }
            if (!hadoopConfigDir.exists()) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hadoopConfDir",
                        Errors.HADOOPFS_25, hadoopConfigDir.getPath()));
            } else if (!hadoopConfigDir.isDirectory()) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hadoopConfDir",
                        Errors.HADOOPFS_26, hadoopConfigDir.getPath()));
            } else {
                File coreSite = new File(hadoopConfigDir, "core-site.xml");
                if (coreSite.exists()) {
                    if (!coreSite.isFile()) {
                        issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hadoopConfDir",
                                Errors.HADOOPFS_27, coreSite.getPath()));
                    }
                    conf.addResource(new Path(coreSite.getAbsolutePath()));
                }
                File hdfsSite = new File(hadoopConfigDir, "hdfs-site.xml");
                if (hdfsSite.exists()) {
                    if (!hdfsSite.isFile()) {
                        issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hadoopConfDir",
                                Errors.HADOOPFS_27, hdfsSite.getPath()));
                    }
                    conf.addResource(new Path(hdfsSite.getAbsolutePath()));
                }
            }
        }
    }
    for (Map.Entry<String, String> config : hdfsConfigs.entrySet()) {
        conf.set(config.getKey(), config.getValue());
    }
    return conf;
}

From source file: com.streamsets.pipeline.stage.origin.hdfs.cluster.ClusterHdfsSource.java

License: Apache License
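
The cluster-mode HDFS origin performs the same realm lookup when assembling its configuration, and additionally loads yarn-site.xml and mapred-site.xml from the Hadoop configuration directory.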

Configuration getHadoopConfiguration(List<ConfigIssue> issues) {
    Configuration conf = new Configuration();
    if (hdfsKerberos) {
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
                UserGroupInformation.AuthenticationMethod.KERBEROS.name());
        try {
            conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "hdfs/_HOST@" + KerberosUtil.getDefaultRealm());
        } catch (Exception ex) {
            if (!hdfsConfigs.containsKey(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)) {
                issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), null, Errors.HADOOPFS_28,
                        ex.getMessage()));
            }
        }
    }
    if (hadoopConfDir != null && !hadoopConfDir.isEmpty()) {
        File hadoopConfigDir = new File(hadoopConfDir);
        if (hadoopConfigDir.isAbsolute()) {
            // Do not allow absolute hadoop config directory in cluster mode
            issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hadoopConfDir",
                    Errors.HADOOPFS_29, hadoopConfDir));
        } else {
            hadoopConfigDir = new File(getContext().getResourcesDirectory(), hadoopConfDir).getAbsoluteFile();
        }
        if (!hadoopConfigDir.exists()) {
            issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsConfDir",
                    Errors.HADOOPFS_25, hadoopConfigDir.getPath()));
        } else if (!hadoopConfigDir.isDirectory()) {
            issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsConfDir",
                    Errors.HADOOPFS_26, hadoopConfigDir.getPath()));
        } else {
            File coreSite = new File(hadoopConfigDir, "core-site.xml");
            if (coreSite.exists()) {
                if (!coreSite.isFile()) {
                    issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsConfDir",
                            Errors.HADOOPFS_27, coreSite.getPath()));
                }
                conf.addResource(new Path(coreSite.getAbsolutePath()));
            }
            File hdfsSite = new File(hadoopConfigDir, "hdfs-site.xml");
            if (hdfsSite.exists()) {
                if (!hdfsSite.isFile()) {
                    issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsConfDir",
                            Errors.HADOOPFS_27, hdfsSite.getPath()));
                }
                conf.addResource(new Path(hdfsSite.getAbsolutePath()));
            }
            File yarnSite = new File(hadoopConfigDir, "yarn-site.xml");
            if (yarnSite.exists()) {
                if (!yarnSite.isFile()) {
                    issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsConfDir",
                            Errors.HADOOPFS_27, yarnSite.getPath()));
                }
                conf.addResource(new Path(yarnSite.getAbsolutePath()));
            }
            File mapredSite = new File(hadoopConfigDir, "mapred-site.xml");
            if (mapredSite.exists()) {
                if (!mapredSite.isFile()) {
                    issues.add(getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsConfDir",
                            Errors.HADOOPFS_27, mapredSite.getPath()));
                }
                conf.addResource(new Path(mapredSite.getAbsolutePath()));
            }
        }
    }
    for (Map.Entry<String, String> config : hdfsConfigs.entrySet()) {
        conf.set(config.getKey(), config.getValue());
    }
    return conf;
}

From source file: org.apache.ambari.server.view.ViewContextImpl.java

License: Apache License
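
Ambari's view context combines the server's default realm with auth-to-local rules to translate the logged-in user's principal into a short name, falling back to the unmapped username if the lookup fails.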

@Override
public String getUsername() {
    String shortName = getLoggedinUser();
    try {
        String authToLocalRules = getAuthToLocalRules();
        //Getting ambari server realm. Ideally this should come from user
        String defaultRealm = KerberosUtil.getDefaultRealm();
        if (Strings.isNotEmpty(authToLocalRules) && Strings.isNotEmpty(defaultRealm)) {
            synchronized (KerberosName.class) {
                KerberosName.setRules(authToLocalRules);
                shortName = new KerberosName(shortName + "@" + defaultRealm).getShortName();
            }
        }
    } catch (InvocationTargetException e) {
        LOG.debug("Failed to get default realm", e);
    } catch (Exception e) {
        LOG.warn("Failed to apply auth_to_local rules. " + e.getMessage());
        LOG.debug("Failed to apply auth_to_local rules", e);
    }
    return shortName;
}

From source file: org.apache.drill.exec.rpc.data.TestBitBitKerberos.java

License: Apache License
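
This Drill test starts an Apache Kerby KDC, and because sun.security.krb5.Config was already statically initialized, it refreshes the Kerberos configuration and resets the cached default realm in KerberosName via reflection before starting the test cluster.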

@BeforeClass
public static void setupKdc() throws Exception {
    kdc = new SimpleKdcServer();
    workspace = new File(getTempDir("kerberos_target"));

    kdcDir = new File(workspace, TestBitBitKerberos.class.getSimpleName());
    kdcDir.mkdirs();
    kdc.setWorkDir(kdcDir);

    kdc.setKdcHost(HOSTNAME);
    kdcPort = getFreePort();
    kdc.setAllowTcp(true);
    kdc.setAllowUdp(false);
    kdc.setKdcTcpPort(kdcPort);

    logger.debug("Starting KDC server at {}:{}", HOSTNAME, kdcPort);

    kdc.init();
    kdc.start();
    kdcStarted = true;

    final Config config = DrillConfig.create(cloneDefaultTestConfigProperties());
    keytabDir = new File(workspace, TestBitBitKerberos.class.getSimpleName() + "_keytabs");
    keytabDir.mkdirs();
    setupUsers(keytabDir);

    // Kerby sets "java.security.krb5.conf" for us!
    System.clearProperty("java.security.auth.login.config");
    System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
    // Uncomment the following lines for debugging.
    // System.setProperty("sun.security.spnego.debug", "true");
    // System.setProperty("sun.security.krb5.debug", "true");

    newConfig = new DrillConfig(config
            .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
                    ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
            .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED, ConfigValueFactory.fromAnyRef(true))
            .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM, ConfigValueFactory.fromAnyRef("kerberos"))
            .withValue(ExecConstants.USE_LOGIN_PRINCIPAL, ConfigValueFactory.fromAnyRef(true))
            .withValue(BootStrapContext.SERVICE_PRINCIPAL, ConfigValueFactory.fromAnyRef(SERVER_PRINCIPAL))
            .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
                    ConfigValueFactory.fromAnyRef(serverKeytab.toString())),
            false);

    // Ignore the compile time warning caused by the code below.

    // Config is statically initialized at this point. But the above configuration results in a different
    // initialization which causes the tests to fail. So the following two changes are required.

    // (1) Refresh Kerberos config.
    sun.security.krb5.Config.refresh();
    // (2) Reset the default realm.
    final Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
    defaultRealm.setAccessible(true);
    defaultRealm.set(null, KerberosUtil.getDefaultRealm());

    updateTestCluster(1, newConfig);
}

From source file: org.apache.drill.exec.rpc.user.security.TestUserBitKerberos.java

License: Apache License
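
The user-to-bit Kerberos test follows the same pattern: start a Kerby KDC, build a DrillConfig with Kerberos authentication enabled, then refresh the Kerberos configuration and reset the cached default realm.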

@BeforeClass
public static void setupKdc() throws Exception {
    kdc = new SimpleKdcServer();
    workspace = new File(getTempDir("kerberos_target"));

    kdcDir = new File(workspace, TestUserBitKerberos.class.getSimpleName());
    kdcDir.mkdirs();
    kdc.setWorkDir(kdcDir);

    kdc.setKdcHost(HOSTNAME);
    kdcPort = getFreePort();
    kdc.setAllowTcp(true);
    kdc.setAllowUdp(false);
    kdc.setKdcTcpPort(kdcPort);

    logger.debug("Starting KDC server at {}:{}", HOSTNAME, kdcPort);

    kdc.init();
    kdc.start();
    kdcStarted = true;

    keytabDir = new File(workspace, TestUserBitKerberos.class.getSimpleName() + "_keytabs");
    keytabDir.mkdirs();
    setupUsers(keytabDir);

    // Kerby sets "java.security.krb5.conf" for us!
    System.clearProperty("java.security.auth.login.config");
    System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
    // Uncomment the following lines for debugging.
    // System.setProperty("sun.security.spnego.debug", "true");
    // System.setProperty("sun.security.krb5.debug", "true");

    final DrillConfig newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
            .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED, ConfigValueFactory.fromAnyRef(true))
            .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
                    ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE))
            .withValue(BootStrapContext.SERVICE_PRINCIPAL, ConfigValueFactory.fromAnyRef(SERVER_PRINCIPAL))
            .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
                    ConfigValueFactory.fromAnyRef(serverKeytab.toString()))
            .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
                    ConfigValueFactory.fromIterable(Lists.newArrayList("plain", "kerberos"))),
            false);

    final Properties connectionProps = new Properties();
    connectionProps.setProperty(DrillProperties.USER, "anonymous");
    connectionProps.setProperty(DrillProperties.PASSWORD, "anything works!");

    // Ignore the compile time warning caused by the code below.

    // Config is statically initialized at this point. But the above configuration results in a different
    // initialization which causes the tests to fail. So the following two changes are required.

    // (1) Refresh Kerberos config.
    sun.security.krb5.Config.refresh();
    // (2) Reset the default realm.
    final Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
    defaultRealm.setAccessible(true);
    defaultRealm.set(null, KerberosUtil.getDefaultRealm());

    updateTestCluster(1, newConfig, connectionProps);
}

From source file: org.apache.drill.exec.rpc.user.security.TestUserBitKerberosEncryption.java

License: Apache License
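
Here the KDC setup is delegated to a KerberosHelper and SASL encryption is enabled, but the same configuration refresh and default-realm reset are still required before starting the secure cluster.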

@BeforeClass
public static void setupTest() throws Exception {
    krbHelper = new KerberosHelper(TestUserBitKerberosEncryption.class.getSimpleName(), null);
    krbHelper.setupKdc(dirTestWatcher.getTmpDir());

    // Create a new DrillConfig which has user authentication enabled and authenticator set to
    // UserAuthenticatorTestImpl.
    newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
            .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED, ConfigValueFactory.fromAnyRef(true))
            .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
                    ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE))
            .withValue(ExecConstants.SERVICE_PRINCIPAL,
                    ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
            .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
                    ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString()))
            .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
                    ConfigValueFactory.fromIterable(Lists.newArrayList("plain", "kerberos")))
            .withValue(ExecConstants.USER_ENCRYPTION_SASL_ENABLED, ConfigValueFactory.fromAnyRef(true)));

    final Properties connectionProps = new Properties();
    connectionProps.setProperty(DrillProperties.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL);
    connectionProps.setProperty(DrillProperties.USER, krbHelper.CLIENT_PRINCIPAL);
    connectionProps.setProperty(DrillProperties.KEYTAB, krbHelper.clientKeytab.getAbsolutePath());

    // Ignore the compile time warning caused by the code below.

    // Config is statically initialized at this point. But the above configuration results in a different
    // initialization which causes the tests to fail. So the following two changes are required.

    // (1) Refresh Kerberos config.
    sun.security.krb5.Config.refresh();
    // (2) Reset the default realm.
    final Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
    defaultRealm.setAccessible(true);
    defaultRealm.set(null, KerberosUtil.getDefaultRealm());

    // Start a secure cluster with client using Kerberos related parameters.
    updateTestCluster(1, newConfig, connectionProps);
}

From source file: org.apache.drill.exec.server.rest.spnego.TestDrillSpnegoAuthenticator.java

License: Apache License
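
The SPNEGO authenticator test resets the default realm in the same way before wiring a DrillSpnegoLoginService into a mocked Jetty authentication configuration.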

@BeforeClass
public static void setupTest() throws Exception {
    spnegoHelper = new KerberosHelper(TestSpnegoAuthentication.class.getSimpleName(), primaryName);
    spnegoHelper.setupKdc(dirTestWatcher.getTmpDir());

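    // (1) Refresh Kerberos config.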
    sun.security.krb5.Config.refresh();

    // (2) Reset the default realm.
    final Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
    defaultRealm.setAccessible(true);
    defaultRealm.set(null, KerberosUtil.getDefaultRealm());

    // Create a DrillbitContext with service principal and keytab for DrillSpnegoLoginService
    final DrillConfig newConfig = new DrillConfig(DrillConfig.create()
            .withValue(ExecConstants.HTTP_AUTHENTICATION_MECHANISMS,
                    ConfigValueFactory.fromIterable(Lists.newArrayList("spnego")))
            .withValue(ExecConstants.HTTP_SPNEGO_PRINCIPAL,
                    ConfigValueFactory.fromAnyRef(spnegoHelper.SERVER_PRINCIPAL))
            .withValue(ExecConstants.HTTP_SPNEGO_KEYTAB,
                    ConfigValueFactory.fromAnyRef(spnegoHelper.serverKeytab.toString())));

    // Create mock objects for optionManager and AuthConfiguration
    final SystemOptionManager optionManager = Mockito.mock(SystemOptionManager.class);
    Mockito.when(optionManager.getOption(ExecConstants.ADMIN_USERS_VALIDATOR))
            .thenReturn(ExecConstants.ADMIN_USERS_VALIDATOR.DEFAULT_ADMIN_USERS);
    Mockito.when(optionManager.getOption(ExecConstants.ADMIN_USER_GROUPS_VALIDATOR))
            .thenReturn(ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.DEFAULT_ADMIN_USER_GROUPS);

    final DrillbitContext drillbitContext = Mockito.mock(DrillbitContext.class);
    Mockito.when(drillbitContext.getConfig()).thenReturn(newConfig);
    Mockito.when(drillbitContext.getOptionManager()).thenReturn(optionManager);

    Authenticator.AuthConfiguration authConfiguration = Mockito.mock(Authenticator.AuthConfiguration.class);

    spnegoAuthenticator = new DrillSpnegoAuthenticator("SPNEGO");
    DrillSpnegoLoginService spnegoLoginService = new DrillSpnegoLoginService(drillbitContext);

    Mockito.when(authConfiguration.getLoginService()).thenReturn(spnegoLoginService);
    Mockito.when(authConfiguration.getIdentityService()).thenReturn(new DefaultIdentityService());
    Mockito.when(authConfiguration.isSessionRenewedOnAuthentication()).thenReturn(true);

    // Set the login service and identity service inside SpnegoAuthenticator
    spnegoAuthenticator.setConfiguration(authConfiguration);
}

From source file: org.apache.drill.exec.server.rest.spnego.TestSpnegoAuthentication.java

License: Apache License
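
This setup method performs only the KDC setup, Kerberos configuration refresh, and default-realm reset.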

@BeforeClass
public static void setupTest() throws Exception {
    spnegoHelper = new KerberosHelper(TestSpnegoAuthentication.class.getSimpleName(), primaryName);
    spnegoHelper.setupKdc(dirTestWatcher.getTmpDir());

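    // (1) Refresh Kerberos config.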
    sun.security.krb5.Config.refresh();

    // (2) Reset the default realm.
    final Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
    defaultRealm.setAccessible(true);
    defaultRealm.set(null, KerberosUtil.getDefaultRealm());
}