Example usage for org.apache.hadoop.security UserGroupInformation HADOOP_TOKEN_FILE_LOCATION

Introduction

This page lists example usage of org.apache.hadoop.security.UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION.

Prototype

String HADOOP_TOKEN_FILE_LOCATION

Document

Environment variable pointing to the token cache file
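
The examples below almost all follow the same basic pattern: read the environment variable named by this constant, and if it is set, load the serialized tokens from the file it points to. Here is a minimal, self-contained sketch of that pattern; the Hadoop API calls (Credentials.readTokenStorageFile, UserGroupInformation.addCredentials) are real, but the wrapper class and method names are invented for illustration.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public class TokenFileExample {
    /**
     * If HADOOP_TOKEN_FILE_LOCATION is set in the environment, read the token
     * cache it points to and add the tokens to the current user. The file is in
     * Hadoop's token storage format, so it is read with readTokenStorageFile
     * rather than as plain text.
     */
    public static void loadTokensFromEnv(Configuration conf) throws IOException {
        String location = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if (location == null) {
            return; // no token cache was supplied to this process
        }
        Credentials cred = Credentials.readTokenStorageFile(new File(location), conf);
        UserGroupInformation.getCurrentUser().addCredentials(cred);
    }
}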

Usage

From source file: co.cask.cdap.explore.service.hive.BaseHiveExploreService.java

License: Apache License

protected HiveConf getHiveConf() {
    HiveConf conf = new HiveConf();
    // Read delegation token if security is enabled.
    if (UserGroupInformation.isSecurityEnabled()) {
        conf.set(HIVE_METASTORE_TOKEN_KEY, HiveAuthFactory.HS2_CLIENT_TOKEN);

        // mapreduce.job.credentials.binary is added by Hive only if Kerberos credentials are present and impersonation
        // is enabled. However, in our case we don't have Kerberos credentials for Explore service.
        // Hence it will not be automatically added by Hive, instead we have to add it ourselves.
        // TODO: When Explore does secure impersonation this has to be the tokens of the user,
        // TODO: ... and not the tokens of the service itself.
        String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if (hadoopAuthToken != null) {
            conf.set("mapreduce.job.credentials.binary", hadoopAuthToken);
            if ("tez".equals(conf.get("hive.execution.engine"))) {
                // Add token file location property for tez if engine is tez
                conf.set("tez.credentials.path", hadoopAuthToken);
            }
        }
    }

    // Since we use delegation token in HIVE, unset the SPNEGO authentication if it is
    // enabled. Please see CDAP-3452 for details.
    conf.unset("hive.server2.authentication.spnego.keytab");
    conf.unset("hive.server2.authentication.spnego.principal");
    return conf;
}
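
A note on the two properties set above: both point at the same serialized token file. mapreduce.job.credentials.binary is the property MapReduce-based Hive jobs read to locate binary credentials, and tez.credentials.path appears to be its Tez counterpart, which is why it is only set when hive.execution.engine is tez.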

From source file: co.cask.cdap.explore.service.hive.BaseHiveExploreService.java

License: Apache License

/**
 * Updates the token store to be used for the hive job, based upon the Explore container's credentials.
 * This is because Twill doesn't update the container_tokens upon token refresh.
 * See: https://issues.apache.org/jira/browse/TWILL-170
 */
private void updateTokenStore() throws IOException, ExploreException {
    String hadoopTokenFileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (hadoopTokenFileLocation == null) {
        LOG.warn("Skipping update of token store due to failure to find environment variable '{}'.",
                UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        return;
    }

    Path credentialsFile = Paths.get(hadoopTokenFileLocation);

    FileAttribute<Set<PosixFilePermission>> originalPermissionAttributes = PosixFilePermissions
            .asFileAttribute(Files.getPosixFilePermissions(credentialsFile));

    Path tmpFile = Files.createTempFile(credentialsFile.getParent(), "credentials.store", null,
            originalPermissionAttributes);
    LOG.debug("Writing to temporary file: {}", tmpFile);

    try (DataOutputStream os = new DataOutputStream(Files.newOutputStream(tmpFile))) {
        Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
        credentials.writeTokenStorageToStream(os);
    }

    Files.move(tmpFile, credentialsFile, StandardCopyOption.ATOMIC_MOVE);
    LOG.debug("Secure store saved to {}", credentialsFile);
}
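
Note the write pattern: the refreshed credentials are first written to a temporary file created with the same POSIX permissions as the existing token cache, then moved over it with StandardCopyOption.ATOMIC_MOVE. A concurrent reader of the file named by HADOOP_TOKEN_FILE_LOCATION therefore never observes a partially written token store.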

From source file: org.apache.flink.runtime.security.modules.HadoopModule.java

License: Apache License

@Override
public void install(SecurityUtils.SecurityConfiguration securityConfig) throws SecurityInstallException {

    UserGroupInformation.setConfiguration(securityConfig.getHadoopConfiguration());

    try {
        if (UserGroupInformation.isSecurityEnabled() && !StringUtils.isBlank(securityConfig.getKeytab())
                && !StringUtils.isBlank(securityConfig.getPrincipal())) {
            String keytabPath = (new File(securityConfig.getKeytab())).getAbsolutePath();

            UserGroupInformation.loginUserFromKeytab(securityConfig.getPrincipal(), keytabPath);

            loginUser = UserGroupInformation.getLoginUser();

            // supplement with any available tokens
            String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
            if (fileLocation != null) {
                /*
                 * Use reflection API since the API semantics are not available in Hadoop1 profile. Below APIs are
                 * used in the context of reading the stored tokens from UGI.
                 * Credentials cred = Credentials.readTokenStorageFile(new File(fileLocation), config.hadoopConf);
                 * loginUser.addCredentials(cred);
                */
                try {
                    Method readTokenStorageFileMethod = Credentials.class.getMethod("readTokenStorageFile",
                            File.class, org.apache.hadoop.conf.Configuration.class);
                    Credentials cred = (Credentials) readTokenStorageFileMethod.invoke(null,
                            new File(fileLocation), securityConfig.getHadoopConfiguration());
                    Method addCredentialsMethod = UserGroupInformation.class.getMethod("addCredentials",
                            Credentials.class);
                    addCredentialsMethod.invoke(loginUser, cred);
                } catch (NoSuchMethodException e) {
                    LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
                } catch (InvocationTargetException e) {
                    throw e.getTargetException();
                }
            }
        } else {
            // login with current user credentials (e.g. ticket cache, OS login)
            // note that the stored tokens are read automatically
            try {
                //Use reflection API to get the login user object
                //UserGroupInformation.loginUserFromSubject(null);
                Method loginUserFromSubjectMethod = UserGroupInformation.class.getMethod("loginUserFromSubject",
                        Subject.class);
                loginUserFromSubjectMethod.invoke(null, (Subject) null);
            } catch (NoSuchMethodException e) {
                LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
            } catch (InvocationTargetException e) {
                throw e.getTargetException();
            }

            loginUser = UserGroupInformation.getLoginUser();
        }

        if (UserGroupInformation.isSecurityEnabled()) {
            // note: UGI::hasKerberosCredentials inaccurately reports false
            // for logins based on a keytab (fixed in Hadoop 2.6.1, see HADOOP-10786),
            // so we check only in the ticket-cache scenario.
            if (securityConfig.useTicketCache() && !loginUser.hasKerberosCredentials()) {
                // a delegation token is an adequate substitute in most cases
                if (!HadoopUtils.hasHDFSDelegationToken()) {
                    LOG.warn(
                            "Hadoop security is enabled but current login user does not have Kerberos credentials");
                }
            }
        }

        LOG.info("Hadoop user set to {}", loginUser);

    } catch (Throwable ex) {
        throw new SecurityInstallException("Unable to set the Hadoop login user", ex);
    }
}

From source file: org.apache.flink.runtime.security.SecurityContext.java

License: Apache License

public static void install(SecurityConfiguration config) throws Exception {

    // perform static initialization of UGI, JAAS
    if (installedContext != null) {
        LOG.warn("overriding previous security context");
    }

    // establish the JAAS config
    JaasConfiguration jaasConfig = new JaasConfiguration(config.keytab, config.principal);
    javax.security.auth.login.Configuration.setConfiguration(jaasConfig);

    populateSystemSecurityProperties(config.flinkConf);

    // establish the UGI login user
    UserGroupInformation.setConfiguration(config.hadoopConf);

    UserGroupInformation loginUser;

    if (UserGroupInformation.isSecurityEnabled() && config.keytab != null
            && !StringUtils.isBlank(config.principal)) {
        String keytabPath = (new File(config.keytab)).getAbsolutePath();

        UserGroupInformation.loginUserFromKeytab(config.principal, keytabPath);

        loginUser = UserGroupInformation.getLoginUser();

        // supplement with any available tokens
        String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if (fileLocation != null) {
            /*
             * Use reflection API since the API semantics are not available in Hadoop1 profile. Below APIs are
             * used in the context of reading the stored tokens from UGI.
             * Credentials cred = Credentials.readTokenStorageFile(new File(fileLocation), config.hadoopConf);
             * loginUser.addCredentials(cred);
            */
            try {
                Method readTokenStorageFileMethod = Credentials.class.getMethod("readTokenStorageFile",
                        File.class, org.apache.hadoop.conf.Configuration.class);
                Credentials cred = (Credentials) readTokenStorageFileMethod.invoke(null, new File(fileLocation),
                        config.hadoopConf);
                Method addCredentialsMethod = UserGroupInformation.class.getMethod("addCredentials",
                        Credentials.class);
                addCredentialsMethod.invoke(loginUser, cred);
            } catch (NoSuchMethodException e) {
                LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
            }
        }
    } else {
        // login with current user credentials (e.g. ticket cache)
        try {
            //Use reflection API to get the login user object
            //UserGroupInformation.loginUserFromSubject(null);
            Method loginUserFromSubjectMethod = UserGroupInformation.class.getMethod("loginUserFromSubject",
                    Subject.class);
            Subject subject = null;
            loginUserFromSubjectMethod.invoke(null, subject);
        } catch (NoSuchMethodException e) {
            LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
        }

        loginUser = UserGroupInformation.getLoginUser();
        // note that the stored tokens are read automatically
    }

    boolean delegationToken = false;
    final Text HDFS_DELEGATION_KIND = new Text("HDFS_DELEGATION_TOKEN");
    Collection<Token<? extends TokenIdentifier>> usrTok = loginUser.getTokens();
    for (Token<? extends TokenIdentifier> token : usrTok) {
        final Text id = new Text(token.getIdentifier());
        LOG.debug("Found user token " + id + " with " + token);
        if (token.getKind().equals(HDFS_DELEGATION_KIND)) {
            delegationToken = true;
        }
    }

    if (UserGroupInformation.isSecurityEnabled() && !loginUser.hasKerberosCredentials()) {
        //throw an error in non-yarn deployment if kerberos cache is not available
        if (!delegationToken) {
            LOG.error("Hadoop Security is enabled but current login user does not have Kerberos Credentials");
            throw new RuntimeException(
                    "Hadoop Security is enabled but current login user does not have Kerberos Credentials");
        }
    }

    installedContext = new SecurityContext(loginUser);
}
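
The inline scan over loginUser.getTokens() above is the older form of the check that HadoopModule (shown earlier) delegates to HadoopUtils.hasHDFSDelegationToken(): when Kerberos credentials are absent, an HDFS delegation token is accepted as an adequate substitute, which is the normal situation for containers launched on YARN, where tokens rather than keytabs are distributed.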

From source file: org.apache.giraph.comm.netty.handler.SaslServerHandler.java

License: Apache License

/**
 * Load Hadoop Job Token into secret manager.
 *
 * @param conf Configuration
 * @throws IOException
 */
private void setupSecretManager(Configuration conf) throws IOException {
    secretManager = new JobTokenSecretManager();
    String localJobTokenFile = System.getenv().get(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (localJobTokenFile == null) {
        throw new IOException("Could not find job credentials: environment " + "variable: "
                + UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION + " was not defined.");
    }
    JobConf jobConf = new JobConf(conf);

    // Find the JobTokenIdentifiers among all the tokens available in the
    // jobTokenFile and store them in the secretManager.
    Credentials credentials = TokenCache.loadTokens(localJobTokenFile, jobConf);
    Collection<Token<? extends TokenIdentifier>> collection = credentials.getAllTokens();
    for (Token<? extends TokenIdentifier> token : collection) {
        TokenIdentifier tokenIdentifier = decodeIdentifier(token, JobTokenIdentifier.class);
        if (tokenIdentifier instanceof JobTokenIdentifier) {
            Token<JobTokenIdentifier> theToken = (Token<JobTokenIdentifier>) token;
            JobTokenIdentifier jobTokenIdentifier = (JobTokenIdentifier) tokenIdentifier;
            secretManager.addTokenForJob(jobTokenIdentifier.getJobId().toString(), theToken);
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("loaded JobToken credentials: " + credentials + " from " + "localJobTokenFile: "
                + localJobTokenFile);
    }
}
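
Unlike the previous examples, this handler does not attach the tokens to a UGI: it reads the same token cache, filters out the JobTokenIdentifier entries, and registers them with a JobTokenSecretManager so that incoming worker connections can be authenticated against the per-job secret.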

From source file: org.apache.giraph.comm.netty.SaslNettyClient.java

License: Apache License

/**
 * Obtain JobToken, which we'll use as a credential for SASL authentication
 * when connecting to other Giraph BSPWorkers.
 *
 * @param conf Configuration
 * @return a JobToken containing username and password so that client can
 * authenticate with a server.
 */
private Token<JobTokenIdentifier> createJobToken(Configuration conf) throws IOException {
    String localJobTokenFile = System.getenv().get(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (localJobTokenFile != null) {
        JobConf jobConf = new JobConf(conf);
        Credentials credentials = TokenCache.loadTokens(localJobTokenFile, jobConf);
        return TokenCache.getJobToken(credentials);
    } else {
        throw new IOException("createJobToken: Cannot obtain authentication " + "credentials for job: file: '"
                + UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION + "' not found");
    }
}

From source file: org.apache.giraph.comm.RPCCommunications.java

License: Apache License

/**
  * Create the job token.
  *
  * @return Job token.
  */
protected Token<JobTokenIdentifier> createJobToken() throws IOException {

    String localJobTokenFile = System.getenv().get(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (localJobTokenFile != null) {
        JobConf jobConf = new JobConf(conf);
        Credentials credentials = TokenCache.loadTokens(localJobTokenFile, jobConf);
        return TokenCache.getJobToken(credentials);
    }

    return null;
}

From source file: org.apache.hcatalog.templeton.SecureProxySupport.java

License: Apache License

/**
 * Add Hadoop env variables.
 */
public void addEnv(Map<String, String> env) {
    if (isEnabled) {
        env.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION, getTokenPath().toUri().getPath());
    }
}
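
This method only populates the map; the caller is expected to hand it to a launched process. A hedged sketch of what such a caller might look like (LaunchWithTokens and the launch wiring are invented for illustration; only SecureProxySupport.addEnv comes from the source above):

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hcatalog.templeton.SecureProxySupport;

public class LaunchWithTokens {
    // Hypothetical launcher: propagate HADOOP_TOKEN_FILE_LOCATION to a child
    // process so that its Hadoop clients pick up the delegation tokens.
    public static Process launch(SecureProxySupport proxy, String... command) throws IOException {
        Map<String, String> env = new HashMap<String, String>();
        proxy.addEnv(env); // sets HADOOP_TOKEN_FILE_LOCATION when security is enabled

        ProcessBuilder pb = new ProcessBuilder(command);
        pb.environment().putAll(env); // the child inherits the token file location
        return pb.start();
    }
}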

From source file: org.apache.hoya.yarn.appmaster.HoyaAppMaster.java

License: Apache License

/**
 * looks for a specific case where a token file is provided as an environment
 * variable, yet the file is not there.
 * 
 * This surfaced (once) in HBase, where its HDFS library was looking for this,
 * and somehow the token was missing. This is a check in the AM so that
 * if the problem re-occurs, the AM can fail with a more meaningful message.
 * 
 */
private void checkAndWarnForAuthTokenProblems() {
    String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (fileLocation != null) {
        File tokenFile = new File(fileLocation);
        if (!tokenFile.exists()) {
            log.warn("Token file {} specified in {} not found", tokenFile,
                    UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        }
    }
}

From source file: org.apache.slider.client.TokensOperation.java

License: Apache License

public int actionTokens(ActionTokensArgs args, FileSystem fs, Configuration conf, YarnClientImpl yarnClient)
        throws IOException, YarnException {
    Credentials credentials;
    String footnote = "";
    UserGroupInformation user = UserGroupInformation.getCurrentUser();
    boolean isSecure = UserGroupInformation.isSecurityEnabled();
    if (args.keytab != null) {
        File keytab = args.keytab;
        if (!keytab.isFile()) {
            throw new NotFoundException(E_NO_KEYTAB + keytab.getAbsolutePath());
        }
        String principal = args.principal;
        log.info("Logging in as {} from keytab {}", principal, keytab);
        user = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab.getCanonicalPath());
    }
    Credentials userCredentials = user.getCredentials();
    File output = args.output;
    if (output != null) {
        if (!isSecure) {
            throw new BadClusterStateException(E_INSECURE);
        }
        credentials = new Credentials(userCredentials);
        // filesystem
        addRMRenewableFSDelegationTokens(conf, fs, credentials);
        addRMDelegationToken(yarnClient, credentials);
        if (maybeAddTimelineToken(conf, credentials) != null) {
            log.debug("Added timeline token");
        }
        saveTokens(output, credentials);
        String filename = output.getCanonicalPath();
        footnote = String.format(
                "%d tokens saved to %s\n" + "To use these in the environment:\n" + "export %s=%s",
                credentials.numberOfTokens(), filename, UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION,
                filename);
    } else if (args.source != null) {
        File source = args.source;
        log.info("Reading credentials from file {}", source);
        if (!source.isFile()) {
            throw new NotFoundException(E_MISSING_SOURCE_FILE + source.getAbsolutePath());
        }
        credentials = Credentials.readTokenStorageFile(args.source, conf);
    } else {
        StringBuffer origin = new StringBuffer();
        File file = locateEnvCredentials(System.getenv(), conf, origin);
        if (file != null) {
            log.info("Credential Source {}", origin);
        } else {
            log.info("Credential source: logged in user");
        }
        credentials = userCredentials;
    }
    // list the tokens
    log.info("\n{}", dumpTokens(credentials, "\n"));
    if (!footnote.isEmpty()) {
        log.info(footnote);
    }
    return 0;
}
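
The footnote built above closes the loop with the rest of this page: tokens saved via saveTokens can be handed to any later Hadoop client process simply by exporting HADOOP_TOKEN_FILE_LOCATION to point at the saved file, which is exactly the environment variable the other examples here read.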