Example usage for org.apache.hadoop.security UserGroupInformation setConfiguration

List of usage examples for org.apache.hadoop.security UserGroupInformation setConfiguration

Introduction

On this page you can find example usages of org.apache.hadoop.security.UserGroupInformation.setConfiguration, taken from open-source projects.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void setConfiguration(Configuration conf) 

Document

Set the static configuration for UGI.
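
Before the project examples, here is a minimal sketch of the typical pattern (not taken from any single example below; the principal and keytab path are placeholders): enable Kerberos in a Configuration, pass it to setConfiguration before any other UGI call, then log in from a keytab.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiSetConfigurationSketch {
    public static void main(String[] args) throws Exception {
        // Switch UGI from the default "simple" authentication to Kerberos.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Placeholder credentials; substitute a real principal and keytab file.
        UserGroupInformation.loginUserFromKeytab("user/host@EXAMPLE.COM",
                "/etc/security/keytabs/user.keytab");

        System.out.println("Login user: " + UserGroupInformation.getLoginUser());
    }
}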

Usage

From source file:org.apache.flink.tez.client.TezExecutorTool.java

License:Apache License

@Override
public int run(String[] args) throws Exception {

    Configuration conf = getConf();

    TezConfiguration tezConf;
    if (conf != null) {
        tezConf = new TezConfiguration(conf);
    } else {
        tezConf = new TezConfiguration();
    }

    UserGroupInformation.setConfiguration(tezConf);

    executor.setConfiguration(tezConf);

    try {
        if (jarPath != null) {
            executor.setJobJar(jarPath);
        }
        JobExecutionResult result = executor.executePlan(plan);
    } catch (Exception e) {
        LOG.error("Job execution failed due to: " + e.getMessage());
        throw new RuntimeException(e.getMessage(), e);
    }
    return 0;
}

From source file:org.apache.flink.yarn.FlinkYarnClient.java

License:Apache License

public AbstractFlinkYarnCluster deploy() throws Exception {

    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    if (UserGroupInformation.isSecurityEnabled()) {
        if (!ugi.hasKerberosCredentials()) {
            throw new YarnDeploymentException(
                    "In secure mode. Please provide Kerberos credentials in order to authenticate. "
                            + "You may use kinit to authenticate and request a TGT from the Kerberos server.");
        }
        return ugi.doAs(new PrivilegedExceptionAction<AbstractFlinkYarnCluster>() {
            @Override
            public AbstractFlinkYarnCluster run() throws Exception {
                return deployInternal();
            }
        });
    } else {
        return deployInternal();
    }
}

From source file:org.apache.flink.yarn.FlinkYarnClientBase.java

License:Apache License

@Override
public AbstractFlinkYarnCluster deploy() throws Exception {

    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    if (UserGroupInformation.isSecurityEnabled()) {
        if (!ugi.hasKerberosCredentials()) {
            throw new YarnDeploymentException(
                    "In secure mode. Please provide Kerberos credentials in order to authenticate. "
                            + "You may use kinit to authenticate and request a TGT from the Kerberos server.");
        }
        return ugi.doAs(new PrivilegedExceptionAction<AbstractFlinkYarnCluster>() {
            @Override
            public AbstractFlinkYarnCluster run() throws Exception {
                return deployInternal();
            }
        });
    } else {
        return deployInternal();
    }
}

From source file:org.apache.flume.auth.KerberosAuthenticator.java

License:Apache License

/**
 * When a valid principal and keytab are provided and authentication has
 * not yet been done for this object, this method authenticates the
 * credentials and populates the ugi. In case of null or invalid credentials,
 * IllegalArgumentException is thrown. In case of failure to authenticate,
 * SecurityException is thrown. If authentication has already happened on
 * this KerberosAuthenticator object, then this method checks whether the
 * credentials passed in are the same as the validated credentials. If not,
 * it throws an exception, as this authenticator can represent only one principal.
 *
 * @param principal
 * @param keytab
 */
public synchronized void authenticate(String principal, String keytab) {
    // sanity checking

    Preconditions.checkArgument(principal != null && !principal.isEmpty(),
            "Invalid Kerberos principal: " + String.valueOf(principal));
    Preconditions.checkArgument(keytab != null && !keytab.isEmpty(),
            "Invalid Kerberos keytab: " + String.valueOf(keytab));
    File keytabFile = new File(keytab);
    Preconditions.checkArgument(keytabFile.isFile() && keytabFile.canRead(),
            "Keytab is not a readable file: " + String.valueOf(keytab));

    // resolve the requested principal
    String resolvedPrincipal;
    try {
        // resolves _HOST pattern using standard Hadoop search/replace
        // via DNS lookup when 2nd argument is empty
        resolvedPrincipal = SecurityUtil.getServerPrincipal(principal, "");
    } catch (IOException e) {
        throw new IllegalArgumentException(
                "Host lookup error resolving kerberos principal (" + principal + "). Exception follows.", e);
    }
    Preconditions.checkNotNull(resolvedPrincipal, "Resolved Principal must not be null");

    // be cruel and unusual when user tries to login as multiple principals
    // this isn't really valid with a reconfigure but this should be rare
    // enough to warrant a restart of the agent JVM
    // TODO: find a way to interrogate the entire current config state,
    // since we don't have to be unnecessarily protective if they switch all
    // HDFS sinks to use a different principal all at once.

    KerberosUser newUser = new KerberosUser(resolvedPrincipal, keytab);
    Preconditions.checkState(prevUser == null || prevUser.equals(newUser),
            "Cannot use multiple kerberos principals in the same agent. "
                    + " Must restart agent to use new principal or keytab. " + "Previous = %s, New = %s",
            prevUser, newUser);

    // enable the kerberos mode of UGI, before doing anything else
    if (!UserGroupInformation.isSecurityEnabled()) {
        Configuration conf = new Configuration(false);
        conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
    }

    // We are interested in currently logged in user with kerberos creds
    UserGroupInformation curUser = null;
    try {
        curUser = UserGroupInformation.getLoginUser();
        if (curUser != null && !curUser.hasKerberosCredentials()) {
            curUser = null;
        }
    } catch (IOException e) {
        LOG.warn("User unexpectedly had no active login. Continuing with " + "authentication", e);
    }

    /*
     *  if ugi is not null,
     *     if ugi matches currently logged in kerberos user, we are good
     *     else we are logged out, so relogin our ugi
     *  else if ugi is null, login and populate state
     */
    try {
        if (ugi != null) {
            if (curUser != null && curUser.getUserName().equals(ugi.getUserName())) {
                LOG.debug("Using existing principal login: {}", ugi);
            } else {
                LOG.info("Attempting kerberos Re-login as principal ({}) ", new Object[] { ugi.getUserName() });
                ugi.reloginFromKeytab();
            }
        } else {
            LOG.info("Attempting kerberos login as principal ({}) from keytab " + "file ({})",
                    new Object[] { resolvedPrincipal, keytab });
            UserGroupInformation.loginUserFromKeytab(resolvedPrincipal, keytab);
            this.ugi = UserGroupInformation.getLoginUser();
            this.prevUser = new KerberosUser(resolvedPrincipal, keytab);
            this.privilegedExecutor = new UGIExecutor(this.ugi);
        }
    } catch (IOException e) {
        throw new SecurityException(
                "Authentication error while attempting to " + "login as kerberos principal ("
                        + resolvedPrincipal + ") using " + "keytab (" + keytab + "). Exception follows.",
                e);
    }

    printUGI(this.ugi);
}

From source file:org.apache.flume.sink.customhdfs.TestHDFSEventSink.java

License:Apache License

@Test
public void testKerbFileAccess()
        throws InterruptedException, LifecycleException, EventDeliveryException, IOException {
    LOG.debug("Starting testKerbFileAccess() ...");
    final String fileName = "FlumeData";
    final long rollCount = 5;
    final long batchSize = 2;
    String newPath = testPath + "/singleBucket";
    String kerbConfPrincipal = "user1/localhost@EXAMPLE.COM";
    String kerbKeytab = "/usr/lib/flume/nonexistkeytabfile";

    //turn security on
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);

    Context context = new Context();
    context.put("hdfs.path", newPath);
    context.put("hdfs.filePrefix", fileName);
    context.put("hdfs.rollCount", String.valueOf(rollCount));
    context.put("hdfs.batchSize", String.valueOf(batchSize));
    context.put("hdfs.kerberosPrincipal", kerbConfPrincipal);
    context.put("hdfs.kerberosKeytab", kerbKeytab);

    try {
        Configurables.configure(sink, context);
        Assert.fail("no exception thrown");
    } catch (IllegalArgumentException expected) {
        Assert.assertTrue(expected.getMessage().contains("Keytab is not a readable file"));
    } finally {
        //turn security off
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "simple");
        UserGroupInformation.setConfiguration(conf);
    }
}

From source file:org.apache.flume.sink.hdfs.TestHDFSEventSink.java

License:Apache License

@Test
public void testKerbFileAccess()
        throws InterruptedException, LifecycleException, EventDeliveryException, IOException {
    LOG.debug("Starting testKerbFileAccess() ...");
    final String fileName = "FlumeData";
    final long rollCount = 5;
    final long batchSize = 2;
    String newPath = testPath + "/singleBucket";
    String kerbConfPrincipal = "user1/localhost@EXAMPLE.COM";
    String kerbKeytab = "/usr/lib/flume/nonexistkeytabfile";

    //turn security on
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);

    Context context = new Context();
    context.put("hdfs.path", newPath);
    context.put("hdfs.filePrefix", fileName);
    context.put("hdfs.rollCount", String.valueOf(rollCount));
    context.put("hdfs.batchSize", String.valueOf(batchSize));
    context.put("hdfs.kerberosPrincipal", kerbConfPrincipal);
    context.put("hdfs.kerberosKeytab", kerbKeytab);

    try {
        Configurables.configure(sink, context);
        Assert.fail("no exception thrown");
    } catch (IllegalArgumentException expected) {
        Assert.assertTrue(expected.getMessage().contains("is nonexistent or can't read."));
    } finally {
        //turn security off
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "simple");
        UserGroupInformation.setConfiguration(conf);
    }
}

From source file:org.apache.gobblin.util.hadoop.TokenUtils.java

License:Apache License

/**
 * Get Hadoop tokens (tokens for the job history server, job tracker and HDFS) using a Kerberos keytab.
 *
 * @param state A {@link State} object that should contain properties {@link #USER_TO_PROXY},
 * {@link #KEYTAB_USER} and {@link #KEYTAB_LOCATION}. To obtain tokens for
 * other namenodes, use property {@link #OTHER_NAMENODES} with comma-separated HDFS URIs.
 * @param tokenFile If present, the file will store materialized credentials.
 * @param cred An in-memory representation of credentials.
 */
public static void getHadoopTokens(final State state, Optional<File> tokenFile, Credentials cred)
        throws IOException, InterruptedException {

    Preconditions.checkArgument(state.contains(KEYTAB_USER), "Missing required property " + KEYTAB_USER);
    Preconditions.checkArgument(state.contains(KEYTAB_LOCATION),
            "Missing required property " + KEYTAB_LOCATION);

    Configuration configuration = new Configuration();
    configuration.set(HADOOP_SECURITY_AUTHENTICATION, KERBEROS);
    UserGroupInformation.setConfiguration(configuration);
    UserGroupInformation.loginUserFromKeytab(obtainKerberosPrincipal(state), state.getProp(KEYTAB_LOCATION));

    final Optional<String> userToProxy = Strings.isNullOrEmpty(state.getProp(USER_TO_PROXY))
            ? Optional.<String>absent()
            : Optional.fromNullable(state.getProp(USER_TO_PROXY));
    final Configuration conf = new Configuration();

    LOG.info("Getting tokens for " + userToProxy);

    getJhToken(conf, cred);
    getFsAndJtTokens(state, conf, userToProxy, cred);

    if (tokenFile.isPresent()) {
        persistTokens(cred, tokenFile.get());
    }
}
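
A hypothetical invocation of getHadoopTokens (the literal values, and the use of TokenUtils.KEYTAB_USER and TokenUtils.KEYTAB_LOCATION as the property keys, are assumptions based on the javadoc above):

State state = new State();
state.setProp(TokenUtils.KEYTAB_USER, "gobblin");
state.setProp(TokenUtils.KEYTAB_LOCATION, "/etc/security/keytabs/gobblin.keytab");

Credentials cred = new Credentials();
// no token file: keep the materialized credentials in memory only
TokenUtils.getHadoopTokens(state, Optional.<File>absent(), cred);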

From source file:org.apache.hawq.ranger.authorization.RangerHawqPluginResource.java

License:Apache License

/**
 * Constructor. Creates a new instance of the resource that uses <code>RangerHawqAuthorizer</code>.
 */
public RangerHawqPluginResource() {
    // set UserGroupInformation under kerberos authentication
    if (Utils.getAuth() == Utils.AuthMethod.KERBEROS) {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        String prin = Utils.getPrincipal();
        String keytab = Utils.getKeytab();

        if (!prin.equals("") && !keytab.equals("")) {
            try {
                UserGroupInformation.loginUserFromKeytab(prin, keytab);
            } catch (Exception e) {
                LOG.warn(String.format("loginUserFromKeytab failed, user[%s], keytab[%s]", prin, keytab));
            }
        }
    }

    if (LOG.isDebugEnabled()) {
        try {
            UserGroupInformation user = UserGroupInformation.getLoginUser();
            LOG.debug(String.format("login user: %s", user));
        } catch (Exception e) {
            LOG.warn("get login user failed exception: " + e);
        }
    }

    this.authorizer = RangerHawqAuthorizer.getInstance();

}

From source file:org.apache.hoya.tools.HoyaUtils.java

License:Apache License

/**
 * Turn on security. This is set up to run only once.
 * @param conf configuration to build up security
 * @return true if security was initialized in this call
 * @throws IOException IO/Net problems
 * @throws BadConfigException the configuration and system state are inconsistent
 */
public static boolean initProcessSecurity(Configuration conf) throws IOException, BadConfigException {

    // claim the one-shot initialization; if it was already claimed, do nothing
    if (!processSecurityAlreadyInitialized.compareAndSet(false, true)) {
        //security is already inited
        return false;
    }

    log.info("JVM initialized into secure mode with kerberos realm {}", HoyaUtils.getKerberosRealm());
    //this gets UGI to reset its previous world view (i.e. simple auth security)
    log.debug("java.security.krb5.realm={}", System.getProperty("java.security.krb5.realm", ""));
    log.debug("java.security.krb5.kdc={}", System.getProperty("java.security.krb5.kdc", ""));
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation authUser = UserGroupInformation.getCurrentUser();
    log.debug("Authenticating as " + authUser.toString());
    log.debug("Login user is {}", UserGroupInformation.getLoginUser());
    if (!UserGroupInformation.isSecurityEnabled()) {
        throw new BadConfigException("Although secure mode is enabled,"
                + "the application has already set up its user as an insecure entity %s", authUser);
    }
    if (authUser.getAuthenticationMethod() == UserGroupInformation.AuthenticationMethod.SIMPLE) {
        throw new BadConfigException("Auth User is not Kerberized %s"
                + " -security has already been set up with the wrong authentication method", authUser);

    }

    HoyaUtils.verifyPrincipalSet(conf, YarnConfiguration.RM_PRINCIPAL);
    HoyaUtils.verifyPrincipalSet(conf, DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY);
    return true;
}
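
A hypothetical caller for initProcessSecurity (the YarnConfiguration and the logger are assumptions, not part of the Hoya example): load the cluster's security settings, then initialize process security once at startup.

Configuration conf = new YarnConfiguration();
if (HoyaUtils.isClusterSecure(conf)) {
    // true only for the call that actually performed the initialization
    boolean initialized = HoyaUtils.initProcessSecurity(conf);
    log.info("Process security initialized in this call: {}", initialized);
}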

From source file:org.apache.hoya.yarn.appmaster.HoyaAppMaster.java

License:Apache License

@Override //AbstractService
public synchronized void serviceInit(Configuration conf) throws Exception {

    // Load in the server configuration - if it is actually on the Classpath
    Configuration serverConf = ConfigHelper.loadFromResource(SERVER_RESOURCE);
    ConfigHelper.mergeConfigurations(conf, serverConf, SERVER_RESOURCE);

    AbstractActionArgs action = serviceArgs.getCoreAction();
    HoyaAMCreateAction createAction = (HoyaAMCreateAction) action;
    //sort out the location of the AM
    serviceArgs.applyDefinitions(conf);
    serviceArgs.applyFileSystemURL(conf);

    String rmAddress = createAction.getRmAddress();
    if (rmAddress != null) {
        log.debug("Setting rm address from the command line: {}", rmAddress);
        HoyaUtils.setRmSchedulerAddress(conf, rmAddress);
    }
    serviceArgs.applyDefinitions(conf);
    serviceArgs.applyFileSystemURL(conf);
    //init security with our conf
    if (HoyaUtils.isClusterSecure(conf)) {
        log.info("Secure mode with kerberos realm {}", HoyaUtils.getKerberosRealm());
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        log.debug("Authenticating as " + ugi.toString());
        HoyaUtils.verifyPrincipalSet(conf, DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY);
        // always enforce protocol to be token-based.
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
                SaslRpcServer.AuthMethod.TOKEN.toString());
    }
    log.info("Login user is {}", UserGroupInformation.getLoginUser());

    //look at settings of Hadoop Auth, to pick up a problem seen once
    checkAndWarnForAuthTokenProblems();

    super.serviceInit(conf);
}