Example usage for org.apache.hadoop.security.token Token decodeIdentifier

List of usage examples for org.apache.hadoop.security.token Token decodeIdentifier

Introduction

On this page you can find example usages of org.apache.hadoop.security.token Token decodeIdentifier.

Prototype

@SuppressWarnings("unchecked")
public T decodeIdentifier() throws IOException 

Document

Get the token identifier object, or null if it could not be constructed (because the class could not be loaded, for example).

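As a minimal sketch of the call pattern (the encoded token string and the class name below are illustrative, not taken from the sources on this page), a delegation token can be decoded from its URL-safe string form and its identifier inspected:

import java.io.IOException;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;

public class DecodeIdentifierSketch {
    public static void main(String[] args) throws IOException {
        // An encoded delegation token string, e.g. received in an HTTP header.
        String encoded = args[0];

        Token<AbstractDelegationTokenIdentifier> token = new Token<>();
        token.decodeFromUrlString(encoded);

        // decodeIdentifier() returns null if the identifier class cannot be loaded.
        AbstractDelegationTokenIdentifier identifier = token.decodeIdentifier();
        if (identifier != null) {
            UserGroupInformation owner = identifier.getUser();
            System.out.println("Token kind: " + token.getKind() + ", owner: " + owner.getUserName());
        }
    }
}
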
Usage

From source file: org.apache.lens.server.auth.DelegationTokenAuthenticationFilter.java

License: Apache License

@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
    Principal userPrincipal = requestContext.getSecurityContext().getUserPrincipal();
    if (userPrincipal != null) {
        log.info("Authentication already done for principal {}, skipping this filter...",
                userPrincipal.getName());
        return;
    }
    // only authenticate when @Authenticate is present on resource
    if (resourceInfo.getResourceClass() == null || resourceInfo.getResourceMethod() == null) {
        return;
    }
    if (!(resourceInfo.getResourceClass().isAnnotationPresent(Authenticate.class)
            || resourceInfo.getResourceMethod().isAnnotationPresent(Authenticate.class))) {
        return;
    }

    String delegationToken = requestContext.getHeaderString(HDFS_DELEGATION_TKN_HEADER);
    if (StringUtils.isBlank(delegationToken)) {
        return;
    }

    Token<AbstractDelegationTokenIdentifier> dt = new Token<>();
    dt.decodeFromUrlString(delegationToken);
    UserGroupInformation user = dt.decodeIdentifier().getUser();
    user.addToken(dt);

    log.info("Received delegation token for user: {}", user.getUserName());

    try {
        user.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws IOException {
                try (FileSystem fs = FileSystem.get(new Configuration())) {
                    fs.exists(PATH_TO_CHECK); // dummy hdfs call
                    requestContext.setSecurityContext(createSecurityContext(user.getUserName(), AUTH_SCHEME));
                    return null;
                }
            }
        });
    } catch (InterruptedException | IOException e) {
        log.error("Error while doing HDFS op: ", e);
        throw new NotAuthorizedException(Response.status(401).entity("Invalid HDFS delegation token").build());
    }
}

From source file: org.apache.slider.core.launch.CredentialUtils.java

License: Apache License

/**
 * Create a string for people to look at
 * @param token token to convert to a string form
 * @return a printable view of the token
 */
public static String tokenToString(Token<? extends TokenIdentifier> token) {
    DateFormat df = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
    StringBuilder buffer = new StringBuilder(128);
    buffer.append(token.toString());
    try {
        TokenIdentifier ti = token.decodeIdentifier();
        buffer.append("; ").append(ti);
        if (ti instanceof AbstractDelegationTokenIdentifier) {
            // details in human readable form, and compensate for information HDFS DT omits
            AbstractDelegationTokenIdentifier dt = (AbstractDelegationTokenIdentifier) ti;
            buffer.append("; Renewer: ").append(dt.getRenewer());
            buffer.append("; Issued: ").append(df.format(new Date(dt.getIssueDate())));
            buffer.append("; Max Date: ").append(df.format(new Date(dt.getMaxDate())));
        }
    } catch (IOException e) {
        //marshall problem; not ours
        LOG.debug("Failed to decode {}: {}", token, e, e);
    }
    return buffer.toString();
}

From source file: org.apache.slider.core.launch.CredentialUtils.java

License: Apache License

/**
 * Get the expiry time of a token.
 * @param token token to examine
 * @return the time in milliseconds after which the token is invalid.
 * @throws IOException if the token's identifier cannot be decoded
 */
public static long getTokenExpiryTime(Token token) throws IOException {
    TokenIdentifier identifier = token.decodeIdentifier();
    Preconditions.checkState(identifier instanceof AbstractDelegationTokenIdentifier,
            "Token %s of type: %s has an identifier which cannot be examined: %s", token, token.getClass(),
            identifier);
    AbstractDelegationTokenIdentifier id = (AbstractDelegationTokenIdentifier) identifier;
    return id.getMaxDate();
}
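
As a hypothetical follow-up (not part of the Slider sources), the returned expiry can be compared against the current time to decide whether a token is still usable; the helper name and threshold parameter below are assumptions for illustration:

/**
 * Hypothetical helper: true if the token remains valid for at least
 * the given number of milliseconds from now.
 */
public static boolean isValidFor(Token<? extends TokenIdentifier> token, long minRemainingMillis)
        throws IOException {
    long expiryTime = getTokenExpiryTime(token);
    return expiryTime - System.currentTimeMillis() >= minRemainingMillis;
}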

From source file: org.apache.storm.common.AbstractHadoopAutoCreds.java

License: Apache License

private void addTokensToUGI(Subject subject) {
    if (subject != null) {
        Set<Credentials> privateCredentials = subject.getPrivateCredentials(Credentials.class);
        if (privateCredentials != null) {
            for (Credentials cred : privateCredentials) {
                Collection<Token<? extends TokenIdentifier>> allTokens = cred.getAllTokens();
                if (allTokens != null) {
                    for (Token<? extends TokenIdentifier> token : allTokens) {
                        try {
                            if (token == null) {
                                LOG.debug("Ignoring null token");
                                continue;
                            }

                            LOG.debug("Current user: {}", UserGroupInformation.getCurrentUser());
                            LOG.debug("Token from Credentials : {}", token);

                            TokenIdentifier tokenId = token.decodeIdentifier();
                            if (tokenId != null) {
                                LOG.debug("Token identifier : {}", tokenId);
                                LOG.debug("Username in token identifier : {}", tokenId.getUser());
                            }

                            UserGroupInformation.getCurrentUser().addToken(token);
                            LOG.info("Added delegation tokens to UGI.");
                        } catch (IOException e) {
                            LOG.error("Exception while trying to add tokens to ugi", e);
                        }
                    }
                }
            }
        }
    }
}

From source file: org.apache.storm.hbase.security.AutoHBaseNimbus.java

License: Apache License

@SuppressWarnings("unchecked")
protected byte[] getHadoopCredentials(Map<String, Object> conf, Configuration hbaseConf,
        final String topologySubmitterUser) {
    try {
        if (UserGroupInformation.isSecurityEnabled()) {
            UserProvider provider = UserProvider.instantiate(hbaseConf);
            provider.login(HBASE_KEYTAB_FILE_KEY, HBASE_PRINCIPAL_KEY,
                    InetAddress.getLocalHost().getCanonicalHostName());

            LOG.info("Logged into Hbase as principal = " + hbaseConf.get(HBASE_PRINCIPAL_KEY));

            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

            final UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(topologySubmitterUser,
                    ugi);

            User user = User.create(proxyUser);

            if (user.isHBaseSecurityEnabled(hbaseConf)) {
                final Connection connection = ConnectionFactory.createConnection(hbaseConf, user);
                TokenUtil.obtainAndCacheToken(connection, user);

                LOG.info("Obtained HBase tokens, adding to user credentials.");

                Credentials credential = proxyUser.getCredentials();

                for (Token<? extends TokenIdentifier> tokenForLog : credential.getAllTokens()) {
                    LOG.debug("Obtained token info in credential: {} / {}", tokenForLog.toString(),
                            tokenForLog.decodeIdentifier().getUser());
                }

                ByteArrayOutputStream bao = new ByteArrayOutputStream();
                ObjectOutputStream out = new ObjectOutputStream(bao);
                credential.write(out);
                out.flush();
                out.close();
                return bao.toByteArray();
            } else {
                throw new RuntimeException("Security is not enabled for HBase.");
            }
        } else {
            throw new RuntimeException("Security is not enabled for Hadoop");
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to get delegation tokens.", ex);
    }
}