Example usage for org.apache.hadoop.security UserGroupInformation setConfiguration

List of usage examples for org.apache.hadoop.security UserGroupInformation setConfiguration

Introduction

On this page you can find example usage for org.apache.hadoop.security.UserGroupInformation.setConfiguration.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void setConfiguration(Configuration conf) 

Document

Set the static configuration for UGI.
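
Before any login, doAs, or security check, the static UGI configuration should be set from a Hadoop Configuration. The following minimal sketch shows the pattern shared by the usage examples below; the class name, Kerberos principal, and keytab path are hypothetical placeholders, and in a real deployment the security properties normally come from core-site.xml on the classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiSetConfigurationSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical: force Kerberos; normally picked up from core-site.xml.
        conf.set("hadoop.security.authentication", "kerberos");

        // Replace the static configuration used by all subsequent UGI calls in this JVM.
        UserGroupInformation.setConfiguration(conf);

        if (UserGroupInformation.isSecurityEnabled()) {
            // Principal and keytab path are placeholders for illustration only.
            UserGroupInformation.loginUserFromKeytab(
                    "service/host.example.com@EXAMPLE.COM",
                    "/etc/security/keytabs/service.keytab");
        }
        System.out.println("Current user: " + UserGroupInformation.getCurrentUser());
    }
}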

Usage

From source file:ConnectHBase124ClusterExample.java

@Test
public void test() throws Exception {
    final String tableName = "test_table";
    final String family = "family";
    final String columnQualifier = "qual";
    final String rowValue = "foo";
    final String cellValue = "bar";

    Configuration baseDefaults = new Configuration();
    baseDefaults.set("hbase.defaults.for.version", "1.2.4");
    Configuration configuration = HBaseConfiguration.create(baseDefaults);
    configuration.set("hbase.zookeeper.quorum", "new-hbase-server:2181");
    UserGroupInformation.setConfiguration(configuration);
    try (HBaseAdmin admin = new HBaseAdmin(configuration)) {
        HColumnDescriptor col = new HColumnDescriptor(family);
        if (!admin.isTableAvailable(tableName)) {
            System.out.println("Table " + tableName + " does not exist. Creating");
            HTableDescriptor desc = new HTableDescriptor(tableName);
            desc.addFamily(col);
            admin.createTable(desc);
            System.out.println("Table " + tableName + " created");
        }
        if (!admin.isTableEnabled(tableName)) {
            System.out.println("table " + tableName + " is not enabled. enabling");
            admin.enableTable(tableName);
        }
        try (HTablePool pool = new HTablePool(configuration, 1);
                HTableInterface table = pool.getTable(tableName)) {
            //                Put put = new Put(rowValue.getBytes());
            //                put.add(family.getBytes(), columnQualifier.getBytes(), cellValue.getBytes());
            //                table.put(put);

            Scan scan = new Scan();
            scan.setCacheBlocks(false);
            scan.setCaching(1000);
            scan.setBatch(1000);
            scan.setMaxVersions(1);
            try (ResultScanner scanner = table.getScanner(scan)) {
                Result result = scanner.next();
                while (result != null) {
                    KeyValue cell = result.getColumnLatest(family.getBytes(), columnQualifier.getBytes());
                    System.out.println("row:" + new String(cell.getRow()));
                    System.out.println("value:" + new String(cell.getValue()));
                    result = scanner.next();
                }
            }
        }

    }

}

From source file:alluxio.yarn.ApplicationMaster.java

License:Apache License

/**
 * @param args Command line arguments to launch application master
 */
public static void main(String[] args) {
    Options options = new Options();
    options.addOption("num_workers", true, "Number of Alluxio workers to launch. Default 1");
    options.addOption("master_address", true, "(Required) Address to run Alluxio master");
    options.addOption("resource_path", true, "(Required) HDFS path containing the Application Master");

    try {
        LOG.info("Starting Application Master with args {}", Arrays.toString(args));
        final CommandLine cliParser = new GnuParser().parse(options, args);

        YarnConfiguration conf = new YarnConfiguration();
        UserGroupInformation.setConfiguration(conf);
        if (UserGroupInformation.isSecurityEnabled()) {
            String user = System.getenv("ALLUXIO_USER");
            UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
            for (Token token : UserGroupInformation.getCurrentUser().getTokens()) {
                ugi.addToken(token);
            }
            LOG.info("UserGroupInformation: " + ugi);
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    runApplicationMaster(cliParser);
                    return null;
                }
            });
        } else {
            runApplicationMaster(cliParser);
        }
    } catch (Exception e) {
        LOG.error("Error running Application Master", e);
        System.exit(1);
    }
}

From source file:azkaban.jobtype.connectors.HdfsToTeradataJobRunnerMain.java

License:Apache License

public HdfsToTeradataJobRunnerMain() throws FileNotFoundException, IOException {
    _logger = JobUtils.initJobLogger();
    _jobProps = HadoopSecureWrapperUtils.loadAzkabanProps();

    Props props = new Props(null, _jobProps);
    HadoopConfigurationInjector.injectResources(props);
    UserGroupInformation.setConfiguration(new Configuration());

    _params = TdchParameters.builder().mrParams(_jobProps.getProperty(TdchConstants.HADOOP_CONFIG_KEY))
            .libJars(props.getString(TdchConstants.LIB_JARS_KEY))
            .tdJdbcClassName(TdchConstants.TERADATA_JDBCDRIVER_CLASSNAME)
            .teradataHostname(props.getString(TdchConstants.TD_HOSTNAME_KEY))
            .fileFormat(_jobProps.getProperty(TdchConstants.HDFS_FILE_FORMAT_KEY))
            .fieldSeparator(_jobProps.getProperty(TdchConstants.HDFS_FIELD_SEPARATOR_KEY))
            .jobType(TdchConstants.TDCH_JOB_TYPE).userName(props.getString(TdchConstants.TD_USERID_KEY))
            .credentialName(String.format(TdchConstants.TD_WALLET_FORMAT,
                    props.getString(TdchConstants.TD_CREDENTIAL_NAME)))
            .avroSchemaPath(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_PATH_KEY))
            .avroSchemaInline(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_INLINE_KEY))
            .sourceHdfsPath(props.getString(TdchConstants.SOURCE_HDFS_PATH_KEY))
            .targetTdTableName(props.getString(TdchConstants.TARGET_TD_TABLE_NAME_KEY))
            .tdInsertMethod(_jobProps.getProperty(TdchConstants.TD_INSERT_METHOD_KEY))
            .numMapper(DEFAULT_NO_MAPPERS).build();
}

From source file:azkaban.jobtype.connectors.teradata.HdfsToTeradataJobRunnerMain.java

License:Apache License

@VisibleForTesting
HdfsToTeradataJobRunnerMain(Properties jobProps, Whitelist whitelist, Decryptions decryptions)
        throws FileNotFoundException, IOException {
    _logger = JobUtils.initJobLogger();
    _jobProps = jobProps;

    Props props = new Props(null, _jobProps);

    HadoopConfigurationInjector.injectResources(props);
    Configuration conf = new Configuration();
    UserGroupInformation.setConfiguration(conf);

    if (props.containsKey(Whitelist.WHITE_LIST_FILE_PATH_KEY)) {
        whitelist.validateWhitelisted(props);
    }

    String encryptedCredential = _jobProps.getProperty(TdchConstants.TD_ENCRYPTED_CREDENTIAL_KEY);
    String cryptoKeyPath = _jobProps.getProperty(TdchConstants.TD_CRYPTO_KEY_PATH_KEY);
    String password = null;

    if (encryptedCredential != null && cryptoKeyPath != null) {
        password = decryptions.decrypt(encryptedCredential, cryptoKeyPath, FileSystem.get(new Configuration()));
    }

    _params = TdchParameters.builder().mrParams(_jobProps.getProperty(TdchConstants.HADOOP_CONFIG_KEY))
            .libJars(props.getString(TdchConstants.LIB_JARS_KEY))
            .tdJdbcClassName(TdchConstants.TERADATA_JDBCDRIVER_CLASSNAME)
            .teradataHostname(props.getString(TdchConstants.TD_HOSTNAME_KEY))
            .fileFormat(_jobProps.getProperty(TdchConstants.HDFS_FILE_FORMAT_KEY))
            .fieldSeparator(_jobProps.getProperty(TdchConstants.HDFS_FIELD_SEPARATOR_KEY))
            .jobType(TdchConstants.TDCH_JOB_TYPE).userName(props.getString(TdchConstants.TD_USERID_KEY))
            .credentialName(_jobProps.getProperty(TdchConstants.TD_CREDENTIAL_NAME_KEY)).password(password)
            .avroSchemaPath(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_PATH_KEY))
            .avroSchemaInline(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_INLINE_KEY))
            .sourceHdfsPath(props.getString(TdchConstants.SOURCE_HDFS_PATH_KEY))
            .targetTdTableName(props.getString(TdchConstants.TARGET_TD_TABLE_NAME_KEY))
            .errorTdDatabase(_jobProps.getProperty(TdchConstants.ERROR_DB_KEY))
            .errorTdTableName(_jobProps.getProperty(TdchConstants.ERROR_TABLE_KEY))
            .tdInsertMethod(_jobProps.getProperty(TdchConstants.TD_INSERT_METHOD_KEY))
            .numMapper(TdchConstants.DEFAULT_NO_MAPPERS)
            .otherProperties(_jobProps.getProperty(TdchConstants.TD_OTHER_PROPERTIES_HOCON_KEY)).build();
}

From source file:azkaban.jobtype.connectors.teradata.TeradataToHdfsJobRunnerMain.java

License:Apache License

public TeradataToHdfsJobRunnerMain() throws FileNotFoundException, IOException {
    _logger = JobUtils.initJobLogger();
    _jobProps = HadoopSecureWrapperUtils.loadAzkabanProps();

    Props props = new Props(null, _jobProps);
    HadoopConfigurationInjector.injectResources(props);
    Configuration conf = new Configuration();
    UserGroupInformation.setConfiguration(conf);

    if (props.containsKey(Whitelist.WHITE_LIST_FILE_PATH_KEY)) {
        new Whitelist(props, FileSystem.get(conf)).validateWhitelisted(props);
    }
    String encryptedCredential = _jobProps.getProperty(TdchConstants.TD_ENCRYPTED_CREDENTIAL_KEY);
    String cryptoKeyPath = _jobProps.getProperty(TdchConstants.TD_CRYPTO_KEY_PATH_KEY);
    String password = null;
    if (encryptedCredential != null && cryptoKeyPath != null) {
        password = new Decryptions().decrypt(encryptedCredential, cryptoKeyPath,
                FileSystem.get(new Configuration()));
    }

    _params = TdchParameters.builder().mrParams(_jobProps.getProperty(TdchConstants.HADOOP_CONFIG_KEY))
            .libJars(props.getString(TdchConstants.LIB_JARS_KEY))
            .tdJdbcClassName(TdchConstants.TERADATA_JDBCDRIVER_CLASSNAME)
            .teradataHostname(props.getString(TdchConstants.TD_HOSTNAME_KEY))
            .fileFormat(_jobProps.getProperty(TdchConstants.HDFS_FILE_FORMAT_KEY))
            .fieldSeparator(_jobProps.getProperty(TdchConstants.HDFS_FIELD_SEPARATOR_KEY))
            .jobType(TdchConstants.TDCH_JOB_TYPE).userName(props.getString(TdchConstants.TD_USERID_KEY))
            .credentialName(_jobProps.getProperty(TdchConstants.TD_CREDENTIAL_NAME_KEY)).password(password)
            .avroSchemaPath(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_PATH_KEY))
            .avroSchemaInline(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_INLINE_KEY))
            .sourceTdTableName(_jobProps.getProperty(TdchConstants.SOURCE_TD_TABLE_NAME_KEY))
            .sourceQuery(_jobProps.getProperty(TdchConstants.SOURCE_TD_QUERY_NAME_KEY))
            .targetHdfsPath(props.getString(TdchConstants.TARGET_HDFS_PATH_KEY))
            .tdRetrieveMethod(_jobProps.getProperty(TdchConstants.TD_RETRIEVE_METHOD_KEY))
            .numMapper(TdchConstants.DEFAULT_NO_MAPPERS).build();
}

From source file:azkaban.jobtype.connectors.TeradataToHdfsJobRunnerMain.java

License:Apache License

public TeradataToHdfsJobRunnerMain() throws FileNotFoundException, IOException {
    _logger = JobUtils.initJobLogger();
    _jobProps = HadoopSecureWrapperUtils.loadAzkabanProps();

    Props props = new Props(null, _jobProps);
    HadoopConfigurationInjector.injectResources(props);
    UserGroupInformation.setConfiguration(new Configuration());

    _params = TdchParameters.builder().mrParams(_jobProps.getProperty(TdchConstants.HADOOP_CONFIG_KEY))
            .libJars(props.getString(TdchConstants.LIB_JARS_KEY))
            .tdJdbcClassName(TdchConstants.TERADATA_JDBCDRIVER_CLASSNAME)
            .teradataHostname(props.getString(TdchConstants.TD_HOSTNAME_KEY))
            .fileFormat(_jobProps.getProperty(TdchConstants.HDFS_FILE_FORMAT_KEY))
            .fieldSeparator(_jobProps.getProperty(TdchConstants.HDFS_FIELD_SEPARATOR_KEY))
            .jobType(TdchConstants.TDCH_JOB_TYPE).userName(props.getString(TdchConstants.TD_USERID_KEY))
            .credentialName(String.format(TdchConstants.TD_WALLET_FORMAT,
                    props.getString(TdchConstants.TD_CREDENTIAL_NAME)))
            .avroSchemaPath(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_PATH_KEY))
            .avroSchemaInline(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_INLINE_KEY))
            .sourceTdTableName(_jobProps.getProperty(TdchConstants.SOURCE_TD_TABLE_NAME_KEY))
            .sourceQuery(_jobProps.getProperty(TdchConstants.SOURCE_TD_QUERY_NAME_KEY))
            .targetHdfsPath(props.getString(TdchConstants.TARGET_HDFS_PATH_KEY))
            .tdRetrieveMethod(_jobProps.getProperty(TdchConstants.TD_RETRIEVE_METHOD_KEY))
            .numMapper(DEFAULT_NO_MAPPERS).build();
}

From source file:azkaban.jobtype.HadoopJavaJobRunnerMain.java

License:Apache License

public HadoopJavaJobRunnerMain() throws Exception {
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            cancelJob();
        }
    });

    try {
        _jobName = System.getenv(ProcessJob.JOB_NAME_ENV);
        String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);

        _logger = Logger.getRootLogger();
        _logger.removeAllAppenders();
        ConsoleAppender appender = new ConsoleAppender(DEFAULT_LAYOUT);
        appender.activateOptions();
        _logger.addAppender(appender);
        _logger.setLevel(Level.INFO); //Explicitly setting level to INFO

        Properties props = new Properties();
        props.load(new BufferedReader(new FileReader(propsFile)));

        HadoopConfigurationInjector.injectResources(new Props(null, props));

        final Configuration conf = new Configuration();

        UserGroupInformation.setConfiguration(conf);
        securityEnabled = UserGroupInformation.isSecurityEnabled();

        _logger.info("Running job " + _jobName);
        String className = props.getProperty(JOB_CLASS);
        if (className == null) {
            throw new Exception("Class name is not set.");
        }
        _logger.info("Class name " + className);

        UserGroupInformation loginUser = null;
        UserGroupInformation proxyUser = null;

        if (shouldProxy(props)) {
            String userToProxy = props.getProperty("user.to.proxy");
            if (securityEnabled) {
                String filelocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
                _logger.info("Found token file " + filelocation);
                _logger.info("Security enabled is " + UserGroupInformation.isSecurityEnabled());

                _logger.info("Setting mapreduce.job.credentials.binary to " + filelocation);
                System.setProperty("mapreduce.job.credentials.binary", filelocation);

                _logger.info("Proxying enabled.");

                loginUser = UserGroupInformation.getLoginUser();

                _logger.info("Current logged in user is " + loginUser.getUserName());

                proxyUser = UserGroupInformation.createProxyUser(userToProxy, loginUser);
                for (Token<?> token : loginUser.getTokens()) {
                    proxyUser.addToken(token);
                }
            } else {
                proxyUser = UserGroupInformation.createRemoteUser(userToProxy);
            }
            _logger.info("Proxied as user " + userToProxy);
        }

        // Create the object using proxy
        if (shouldProxy(props)) {
            _javaObject = getObjectAsProxyUser(props, _logger, _jobName, className, proxyUser);
        } else {
            _javaObject = getObject(_jobName, className, props, _logger);
        }

        if (_javaObject == null) {
            _logger.info("Could not create java object to run job: " + className);
            throw new Exception("Could not create running object");
        }
        _logger.info("Got object " + _javaObject.toString());

        _cancelMethod = props.getProperty(CANCEL_METHOD_PARAM, DEFAULT_CANCEL_METHOD);

        final String runMethod = props.getProperty(RUN_METHOD_PARAM, DEFAULT_RUN_METHOD);
        _logger.info("Invoking method " + runMethod);

        if (shouldProxy(props)) {
            _logger.info("Proxying enabled.");
            runMethodAsUser(props, _javaObject, runMethod, proxyUser);
        } else {
            _logger.info("Proxy check failed, not proxying run.");
            runMethod(_javaObject, runMethod);
        }

        _isFinished = true;

        // Get the generated properties and store them to disk, to be read
        // by ProcessJob.
        try {
            final Method generatedPropertiesMethod = _javaObject.getClass()
                    .getMethod(GET_GENERATED_PROPERTIES_METHOD, new Class<?>[] {});
            Object outputGendProps = generatedPropertiesMethod.invoke(_javaObject, new Object[] {});

            if (outputGendProps != null) {
                final Method toPropertiesMethod = outputGendProps.getClass().getMethod("toProperties",
                        new Class<?>[] {});
                Properties properties = (Properties) toPropertiesMethod.invoke(outputGendProps,
                        new Object[] {});

                Props outputProps = new Props(null, properties);
                outputGeneratedProperties(outputProps);
            } else {
                _logger.info(GET_GENERATED_PROPERTIES_METHOD
                        + " method returned null.  No properties to pass along");
            }
        } catch (NoSuchMethodException e) {
            _logger.info(String.format(
                    "Apparently there isn't a method[%s] on object[%s], using " + "empty Props object instead.",
                    GET_GENERATED_PROPERTIES_METHOD, _javaObject));
            outputGeneratedProperties(new Props());
        }
    } catch (Exception e) {
        _isFinished = true;
        throw e;
    }
}

From source file:azkaban.jobtype.HadoopSecureWrapperUtils.java

License:Apache License

/**
 * Sets up the UserGroupInformation proxyUser object so that calling code can do doAs;
 * returns null if the jobProps does not call for a proxyUser.
 *
 * @param jobProps job properties
 * @param tokenFile pass the token file if known; pass null if the token file location is
 *          already set in the environment variable.
 * @param log logger
 * @return null if there is no need to run as a proxyUser, otherwise a valid proxyUser that
 *         can doAs
 */
public static UserGroupInformation setupProxyUser(Properties jobProps, String tokenFile, Logger log) {
    UserGroupInformation proxyUser = null;

    if (!HadoopSecureWrapperUtils.shouldProxy(jobProps)) {
        log.info("submitting job as original submitter, not proxying");
        return proxyUser;
    }

    // set up hadoop related configurations
    final Configuration conf = new Configuration();
    UserGroupInformation.setConfiguration(conf);
    boolean securityEnabled = UserGroupInformation.isSecurityEnabled();

    // setting up proxy user if required
    try {
        String userToProxy = null;
        userToProxy = jobProps.getProperty(HadoopSecurityManager.USER_TO_PROXY);
        if (securityEnabled) {
            proxyUser = HadoopSecureWrapperUtils.createSecurityEnabledProxyUser(userToProxy, tokenFile, log);
            log.info("security enabled, proxying as user " + userToProxy);
        } else {
            proxyUser = UserGroupInformation.createRemoteUser(userToProxy);
            log.info("security not enabled, proxying as user " + userToProxy);
        }
    } catch (IOException e) {
        log.error("HadoopSecureWrapperUtils.setupProxyUser threw an IOException", e);
    }

    return proxyUser;
}
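
A hypothetical caller sketch for setupProxyUser (runJob() and the surrounding method are placeholders, not part of the original source): if a proxyUser is returned, the Hadoop work is wrapped in doAs so it executes with that user's identity and tokens; otherwise it runs as the submitting user.

static void runPossiblyProxied(final Properties jobProps, Logger log) throws Exception {
    UserGroupInformation proxyUser = HadoopSecureWrapperUtils.setupProxyUser(jobProps, null, log);
    if (proxyUser == null) {
        // no proxying requested; run directly as the submitting user
        runJob(jobProps);
    } else {
        // execute the job with the proxy user's credentials and delegation tokens
        proxyUser.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                runJob(jobProps); // runJob is a hypothetical placeholder for the actual job logic
                return null;
            }
        });
    }
}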

From source file:azkaban.security.commons.SecurityUtils.java

License:Apache License

/**
 * Create a proxied user based on the explicit user name, taking other
 * parameters necessary from properties file.
 */
public static synchronized UserGroupInformation getProxiedUser(String toProxy, Properties prop, Logger log,
        Configuration conf) throws IOException {

    if (conf == null) {
        throw new IllegalArgumentException("conf can't be null");
    }
    UserGroupInformation.setConfiguration(conf);

    if (toProxy == null) {
        throw new IllegalArgumentException("toProxy can't be null");
    }

    if (loginUser == null) {
        log.info("No login user. Creating login user");
        String keytab = verifySecureProperty(prop, PROXY_KEYTAB_LOCATION, log);
        String proxyUser = verifySecureProperty(prop, PROXY_USER, log);
        UserGroupInformation.loginUserFromKeytab(proxyUser, keytab);
        loginUser = UserGroupInformation.getLoginUser();
        log.info("Logged in with user " + loginUser);
    } else {
        log.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
        loginUser.checkTGTAndReloginFromKeytab();
    }

    return UserGroupInformation.createProxyUser(toProxy, loginUser);
}

From source file:azkaban.security.HadoopSecurityManager_H_1_0.java

License:Apache License

private HadoopSecurityManager_H_1_0(Props props) throws HadoopSecurityManagerException, IOException {

    // for now, assume the same/compatible native library, the same/compatible
    // hadoop-core jar
    String hadoopHome = props.getString("hadoop.home", null);
    String hadoopConfDir = props.getString("hadoop.conf.dir", null);

    if (hadoopHome == null) {
        hadoopHome = System.getenv("HADOOP_HOME");
    }
    if (hadoopConfDir == null) {
        hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
    }

    List<URL> resources = new ArrayList<URL>();
    if (hadoopConfDir != null) {
        logger.info("Using hadoop config found in " + new File(hadoopConfDir).toURI().toURL());
        resources.add(new File(hadoopConfDir).toURI().toURL());
    } else if (hadoopHome != null) {
        logger.info("Using hadoop config found in " + new File(hadoopHome, "conf").toURI().toURL());
        resources.add(new File(hadoopHome, "conf").toURI().toURL());
    } else {
        logger.info("HADOOP_HOME not set, using default hadoop config.");
    }

    ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));

    conf = new Configuration();
    conf.setClassLoader(ucl);

    if (props.containsKey("fs.hdfs.impl.disable.cache")) {
        logger.info("Setting fs.hdfs.impl.disable.cache to " + props.get("fs.hdfs.impl.disable.cache"));
        conf.setBoolean("fs.hdfs.impl.disable.cache", Boolean.valueOf(props.get("fs.hdfs.impl.disable.cache")));
    }

    logger.info("hadoop.security.authentication set to " + conf.get("hadoop.security.authentication"));
    logger.info("hadoop.security.authorization set to " + conf.get("hadoop.security.authorization"));
    logger.info("DFS name " + conf.get("fs.default.name"));

    UserGroupInformation.setConfiguration(conf);

    securityEnabled = UserGroupInformation.isSecurityEnabled();
    if (securityEnabled) {
        logger.info("The Hadoop cluster has enabled security");
        shouldProxy = true;
        try {
            keytabLocation = props.getString(PROXY_KEYTAB_LOCATION);
            keytabPrincipal = props.getString(PROXY_USER);
        } catch (UndefinedPropertyException e) {
            throw new HadoopSecurityManagerException(e.getMessage());
        }

        // try login
        try {
            if (loginUser == null) {
                logger.info("No login user. Creating login user");
                logger.info("Logging with " + keytabPrincipal + " and " + keytabLocation);
                UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabLocation);
                loginUser = UserGroupInformation.getLoginUser();
                logger.info("Logged in with user " + loginUser);
            } else {
                logger.info("loginUser (" + loginUser + ") already created, refreshing tgt.");
                loginUser.checkTGTAndReloginFromKeytab();
            }
        } catch (IOException e) {
            throw new HadoopSecurityManagerException("Failed to login with kerberos ", e);
        }

    }

    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();

    logger.info("Hadoop Security Manager Initiated");
}