Example usage for org.apache.hadoop.security Credentials writeTokenStorageFile

Introduction

On this page you can find example usages of org.apache.hadoop.security.Credentials.writeTokenStorageFile.

Prototype

public void writeTokenStorageFile(Path filename, Configuration conf) throws IOException 
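
A minimal, self-contained sketch of the round trip (this example is not taken from any of the projects below; the alias, file path, and class name are placeholders): store a token in a Credentials object, persist it with writeTokenStorageFile, and read it back with the matching readTokenStorageFile.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class TokenStorageExample {

    /** Writes the given token to a local token-storage file and reads it back. */
    public static void roundTrip(Configuration conf, Token<?> token) throws IOException {
        Path tokenFile = new Path("file:///tmp/example-tokens.bin"); // placeholder path

        // Store the token under an arbitrary alias and persist the Credentials
        // in Hadoop's token-storage file format.
        Credentials creds = new Credentials();
        creds.addToken(new Text("myToken"), token);
        creds.writeTokenStorageFile(tokenFile, conf);

        // Load the file again; the token is retrievable under the same alias.
        Credentials restored = Credentials.readTokenStorageFile(tokenFile, conf);
        Token<?> restoredToken = restored.getToken(new Text("myToken"));
    }
}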

Usage

From source file: com.cloudera.hue.CredentialsMerger.java

License: Apache License

/**
 * Merge several credentials files into one. Give the desired output file
 * first, followed by all of the input files.
 *
 * <p>File formats are tried in this order: TokenStorageFile, urlEncodedString.
 * </p>
 *
 * @param args &lt;out&gt; &lt;in1&gt; ...
 * @throws IOException  in the event of an error reading or writing files.
 */
public static void main(String[] args) throws IOException {
    if (args.length < 2) {
        printUsage();
        System.exit(1);
    }

    Path outputFile = new Path("file://" + new File(args[0]).getAbsolutePath());
    Configuration conf = new Configuration();
    Credentials credentials = new Credentials();

    for (int i = 1; i < args.length; i++) {
        try {
            Credentials singleFileCredentials = Credentials
                    .readTokenStorageFile(new Path("file://" + new File(args[i]).getAbsolutePath()), conf);
            credentials.addAll(singleFileCredentials);
        } catch (IOException e) {
            BufferedReader reader = new BufferedReader(new FileReader(args[i]));
            try {
                // Retry to read the token with an encodedUrl format
                Token<?> token = new Token();
                String encodedtoken = reader.readLine();
                token.decodeFromUrlString(encodedtoken);
                credentials.addToken(new Text(args[i]), token);
            } finally {
                reader.close();
            }
        }
    }

    credentials.writeTokenStorageFile(outputFile, conf);
}
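
For illustration only (the file paths below are placeholders), the merger above can also be exercised programmatically by handing its main method the output file followed by the input files to merge:

// Merge two token-storage files into one; all paths are placeholders.
CredentialsMerger.main(new String[] { "/tmp/merged-tokens.bin", "/tmp/tokens-a.bin", "/tmp/tokens-b.bin" });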

From source file: gobblin.yarn.YarnHelixUtils.java

License: Apache License

/**
 * Write a {@link Token} to a given file.
 *
 * @param token the token to write
 * @param tokenFilePath the token file path
 * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
 * @throws IOException
 */
public static void writeTokenToFile(Token<? extends TokenIdentifier> token, Path tokenFilePath,
        Configuration configuration) throws IOException {
    Credentials credentials = new Credentials();
    credentials.addToken(token.getService(), token);
    credentials.writeTokenStorageFile(tokenFilePath, configuration);
}
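
A possible caller (a sketch, not taken from the Gobblin source; the renewer name and token file path are assumptions) would fetch a delegation token from a FileSystem and hand it to this helper:

FileSystem fs = FileSystem.get(configuration);
// getDelegationToken may return null on an insecure cluster; the renewer name is illustrative.
Token<?> token = fs.getDelegationToken("yarn");
if (token != null) {
    YarnHelixUtils.writeTokenToFile(token, new Path("/user/gobblin/.token"), configuration);
}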

From source file: org.apache.hcatalog.templeton.SecureProxySupport.java

License: Apache License

private void writeProxyDelegationTokens(final Token<?> fsToken, final Token<?> msToken,
        final Configuration conf, String user, final Path tokenPath) throws IOException, InterruptedException {

    LOG.info("user: " + user + " loginUser: " + UserGroupInformation.getLoginUser().getUserName());
    final UserGroupInformation ugi = UgiFactory.getUgi(user);

    ugi.doAs(new PrivilegedExceptionAction<Object>() {
        public Object run() throws IOException {
            Credentials cred = new Credentials();
            cred.addToken(fsToken.getService(), fsToken);
            cred.addToken(msToken.getService(), msToken);
            cred.writeTokenStorageFile(tokenPath, conf);
            return null;
        }
    });

}

From source file: org.apache.hive.hcatalog.templeton.SecureProxySupport.java

License: Apache License

/**
 * @param fsTokens not null
 */
private void writeProxyDelegationTokens(final Token<?> fsTokens[], final Token<?> msToken,
        final Configuration conf, String user, final Path tokenPath) throws IOException, InterruptedException {

    LOG.info("user: " + user + " loginUser: " + UserGroupInformation.getLoginUser().getUserName());
    final UserGroupInformation ugi = UgiFactory.getUgi(user);

    ugi.doAs(new PrivilegedExceptionAction<Object>() {
        public Object run() throws IOException {
            Credentials cred = new Credentials();
            for (Token<?> fsToken : fsTokens) {
                cred.addToken(fsToken.getService(), fsToken);
            }
            cred.addToken(msToken.getService(), msToken);
            cred.writeTokenStorageFile(tokenPath, conf);
            return null;
        }
    });

}
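
On the consuming side (a sketch, not part of the Templeton code above), the file written by writeTokenStorageFile can be loaded back with the matching reader and attached to a UserGroupInformation so that later requests carry the delegation tokens:

Credentials restored = Credentials.readTokenStorageFile(tokenPath, conf);
UserGroupInformation.getCurrentUser().addCredentials(restored);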

From source file: org.apache.tez.common.security.TestTokenCache.java

License: Apache License

@Test(timeout = 5000)
@SuppressWarnings("deprecation")
public void testBinaryCredentials() throws Exception {
    String binaryTokenFile = null;
    try {
        Path TEST_ROOT_DIR = new Path("target");
        binaryTokenFile = FileSystem.getLocal(conf).makeQualified(new Path(TEST_ROOT_DIR, "tokenFile")).toUri()
                .getPath();

        MockFileSystem fs1 = createFileSystemForServiceName("service1");
        MockFileSystem fs2 = createFileSystemForServiceName("service2");
        MockFileSystem fs3 = createFileSystemForServiceName("service3");

        // get the tokens for fs1 & fs2 and write out to binary creds file
        Credentials creds = new Credentials();
        Token<?> token1 = fs1.getDelegationToken(renewer);
        Token<?> token2 = fs2.getDelegationToken(renewer);
        creds.addToken(token1.getService(), token1);
        creds.addToken(token2.getService(), token2);
        creds.writeTokenStorageFile(new Path(binaryTokenFile), conf);

        Credentials newCreds = new Credentials();
        TokenCache.mergeBinaryTokens(newCreds, conf, binaryTokenFile);

        Assert.assertTrue(newCreds.getAllTokens().size() > 0);
        checkTokens(creds, newCreds);
    } finally {
        if (binaryTokenFile != null) {
            try {
                FileSystem.getLocal(conf).delete(new Path(binaryTokenFile));
            } catch (IOException e) {
                // Ignore
            }
        }
    }
}
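
The merge step verified above amounts to reading the token-storage file and adding its tokens to the target Credentials; a rough equivalent using only the Credentials API (addAll is used here for illustration, while the actual helper may merge without overwriting existing entries):

Credentials fromFile = Credentials.readTokenStorageFile(new Path(binaryTokenFile), conf);
newCreds.addAll(fromFile);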

From source file: org.apache.tez.mapreduce.YARNRunner.java

License: Apache License

@Override
public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts)
        throws IOException, InterruptedException {

    // TEZ-192 - stop using token file
    // Upload only in security mode: TODO
    Path applicationTokensFile = new Path(jobSubmitDir, MRJobConfig.APPLICATION_TOKENS_FILE);
    try {
        ts.writeTokenStorageFile(applicationTokensFile, conf);
    } catch (IOException e) {
        throw new TezUncheckedException(e);
    }

    ApplicationId appId = resMgrDelegate.getApplicationId();

    FileSystem fs = FileSystem.get(conf);
    // Loads the job.xml written by the user.
    JobConf jobConf = new JobConf(new TezConfiguration(conf));

    // Extract individual raw MR configs.
    Configuration[] stageConfs = MultiStageMRConfToTezTranslator.getStageConfs(jobConf);

    // Transform all confs to use Tez keys
    MultiStageMRConfToTezTranslator.translateVertexConfToTez(stageConfs[0], null);
    for (int i = 1; i < stageConfs.length; i++) {
        MultiStageMRConfToTezTranslator.translateVertexConfToTez(stageConfs[i], stageConfs[i - 1]);
    }

    // create inputs to tezClient.submit()

    // FIXME set up job resources
    Map<String, LocalResource> jobLocalResources = createJobLocalResources(stageConfs[0], jobSubmitDir);

    // FIXME createDAG should take the tezConf as a parameter, instead of using
    // MR keys.
    DAG dag = createDAG(fs, jobId, stageConfs, jobSubmitDir, ts, jobLocalResources);

    List<String> vargs = new LinkedList<String>();
    // admin command opts and user command opts
    String mrAppMasterAdminOptions = conf.get(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS,
            MRJobConfig.DEFAULT_MR_AM_ADMIN_COMMAND_OPTS);
    warnForJavaLibPath(mrAppMasterAdminOptions, "app master", MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS,
            MRJobConfig.MR_AM_ADMIN_USER_ENV);
    vargs.add(mrAppMasterAdminOptions);

    // Add AM user command opts
    String mrAppMasterUserOptions = conf.get(MRJobConfig.MR_AM_COMMAND_OPTS,
            MRJobConfig.DEFAULT_MR_AM_COMMAND_OPTS);
    warnForJavaLibPath(mrAppMasterUserOptions, "app master", MRJobConfig.MR_AM_COMMAND_OPTS,
            MRJobConfig.MR_AM_ENV);
    vargs.add(mrAppMasterUserOptions);

    // Setup the CLASSPATH in environment
    // i.e. add { Hadoop jars, job jar, CWD } to classpath.
    Map<String, String> environment = new HashMap<String, String>();

    // Setup the environment variables for AM
    MRHelpers.updateEnvironmentForMRAM(conf, environment);

    TezConfiguration dagAMConf = getDAGAMConfFromMRConf();

    // Submit to ResourceManager
    try {
        Path appStagingDir = fs.resolvePath(new Path(jobSubmitDir));
        dagClient = tezClient.submitDAGApplication(appId, dag, appStagingDir, ts,
                jobConf.get(JobContext.QUEUE_NAME, YarnConfiguration.DEFAULT_QUEUE_NAME), vargs, environment,
                jobLocalResources, dagAMConf);

    } catch (TezException e) {
        throw new IOException(e);
    }

    return getJobStatus(jobId);
}