Example usage for org.apache.hadoop.security.token Token Token

Introduction

On this page you can find example usage of the org.apache.hadoop.security.token.Token constructor Token(byte[] identifier, byte[] password, Text kind, Text service).

Prototype

public Token(byte[] identifier, byte[] password, Text kind, Text service) 

Document

Construct a token from the components.
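
Before the real-world examples, here is a minimal, self-contained sketch of this constructor. It is not taken from the usage examples below; the identifier, password, kind, and service values are placeholders and the class name TokenConstructionSketch is hypothetical. It also shows the common follow-up step of registering the token in a Credentials object keyed by its service, which is how most of the examples below use it.

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenConstructionSketch {
    public static void main(String[] args) {
        // Placeholder identifier and password bytes; real tokens carry a
        // serialized TokenIdentifier and a password issued by a secret manager.
        Token<TokenIdentifier> token = new Token<>(
                "my-identifier".getBytes(StandardCharsets.UTF_8),
                "my-password".getBytes(StandardCharsets.UTF_8),
                new Text("MY_TOKEN_KIND"),
                new Text("host.example.com:8020"));

        // Tokens are typically stored in a Credentials object keyed by their
        // service, as the UGI and job-credential examples below do.
        Credentials credentials = new Credentials();
        credentials.addToken(token.getService(), token);

        System.out.println("kind=" + token.getKind() + ", service=" + token.getService());
    }
}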

Usage

From source file: co.cask.cdap.app.runtime.spark.SparkCredentialsUpdaterTest.java

License: Apache License

@Test
public void testUpdater() throws Exception {
    Location credentialsDir = Locations.toLocation(TEMPORARY_FOLDER.newFolder());

    // Create an updater that doesn't auto-update within the test time and doesn't clean up
    SparkCredentialsUpdater updater = new SparkCredentialsUpdater(createCredentialsSupplier(), credentialsDir,
            "credentials", TimeUnit.DAYS.toMillis(1), TimeUnit.DAYS.toMillis(1), Integer.MAX_VALUE) {
        @Override
        long getNextUpdateDelay(Credentials credentials) throws IOException {
            return TimeUnit.DAYS.toMillis(1);
        }
    };

    // Before the updater starts, the directory is empty
    Assert.assertTrue(credentialsDir.list().isEmpty());

    UserGroupInformation.getCurrentUser().addToken(
            new Token<>(Bytes.toBytes("id"), Bytes.toBytes("pass"), new Text("kind"), new Text("service")));

    updater.startAndWait();
    try {
        List<Location> expectedFiles = new ArrayList<>();
        expectedFiles.add(credentialsDir.append("credentials-1"));

        for (int i = 1; i <= 10; i++) {
            Assert.assertEquals(expectedFiles, listAndSort(credentialsDir));

            // Read the credentials from the last file
            Credentials newCredentials = new Credentials();
            try (DataInputStream is = new DataInputStream(
                    expectedFiles.get(expectedFiles.size() - 1).getInputStream())) {
                newCredentials.readTokenStorageStream(is);
            }

            // Should contain all tokens of the current user
            Credentials userCredentials = UserGroupInformation.getCurrentUser().getCredentials();
            for (Token<? extends TokenIdentifier> token : userCredentials.getAllTokens()) {
                Assert.assertEquals(token, newCredentials.getToken(token.getService()));
            }

            UserGroupInformation.getCurrentUser().addToken(new Token<>(Bytes.toBytes("id" + i),
                    Bytes.toBytes("pass" + i), new Text("kind" + i), new Text("service" + i)));
            updater.run();
            expectedFiles.add(credentialsDir.append("credentials-" + (i + 1)));
        }
    } finally {
        updater.stopAndWait();
    }
}

From source file: com.continuuity.weave.internal.yarn.Hadoop20YarnAppClient.java

License: Apache License

private <T extends TokenIdentifier> Token<T> convertToken(DelegationToken protoToken,
        InetSocketAddress serviceAddr) {
    Token<T> token = new Token<T>(protoToken.getIdentifier().array(), protoToken.getPassword().array(),
            new Text(protoToken.getKind()), new Text(protoToken.getService()));
    if (serviceAddr != null) {
        SecurityUtil.setTokenService(token, serviceAddr);
    }
    return token;
}

From source file: com.datatorrent.stram.StramClient.java

License: Apache License

private Token<RMDelegationTokenIdentifier> getRMHAToken(
        org.apache.hadoop.yarn.api.records.Token rmDelegationToken) {
    // Build a list of service addresses to form the service name
    ArrayList<String> services = new ArrayList<String>();
    for (String rmId : conf.getStringCollection(RM_HA_IDS)) {
        LOG.info("Yarn Resource Manager id: {}", rmId);
        // Set RM_ID to get the corresponding RM_ADDRESS
        services.add(
                SecurityUtil.buildTokenService(NetUtils.createSocketAddr(conf.get(RM_HOSTNAME_PREFIX + rmId),
                        YarnConfiguration.DEFAULT_RM_PORT, RM_HOSTNAME_PREFIX + rmId)).toString());
    }
    Text rmTokenService = new Text(Joiner.on(',').join(services));

    return new Token<RMDelegationTokenIdentifier>(rmDelegationToken.getIdentifier().array(),
            rmDelegationToken.getPassword().array(), new Text(rmDelegationToken.getKind()), rmTokenService);
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License

/**
 * test PipesMapRunner: test the transfer of data from the reader
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {
    // clean old password files
    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(Submitter.IS_JAVA_RR, "true");
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        job.setInputFormatClass(DummyInputFormat.class);
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);

        InputSplit isplit = isplits.get(0);

        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordReader<FloatWritable, NullWritable> rReader = input_format.createRecordReader(isplit, tcontext);

        TestMapContext context = new TestMapContext(conf, taskAttemptid, rReader, null, null, null, isplit);
        // stub for client
        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        PipesMapper<FloatWritable, NullWritable, IntWritable, Text> mapper = new PipesMapper<FloatWritable, NullWritable, IntWritable, Text>(
                context);

        initStdOut(conf);
        mapper.run(context);
        String stdOut = readStdOut(conf);

        // test part of the translated data; the client's stdout is used as the
        // common file between the client and the test
        // check version
        assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
        // check key and value classes
        assertTrue(stdOut.contains("Key class:org.apache.hadoop.io.FloatWritable"));
        assertTrue(stdOut.contains("Value class:org.apache.hadoop.io.NullWritable"));
        // test that all data from the reader has been sent
        assertTrue(stdOut.contains("value:0.0"));
        assertTrue(stdOut.contains("value:9.0"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License

/**
 * test org.apache.hadoop.mapreduce.pipes.Application
 * test internal functions:
 *     MessageType.REGISTER_COUNTER,  INCREMENT_COUNTER, STATUS, PROGRESS...
 *
 * @throws Throwable
 */

@Test
public void testApplication() throws Throwable {

    System.err.println("testApplication");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationStub");
        //getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        DummyRecordReader reader = (DummyRecordReader) input_format.createRecordReader(isplit, tcontext);

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        MapContextImpl<IntWritable, Text, IntWritable, Text> context = new MapContextImpl<IntWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, null, writer, null, reporter, null);

        System.err.println("ready to launch application");
        Application<IntWritable, Text, IntWritable, Text> application = new Application<IntWritable, Text, IntWritable, Text>(
                context, reader);
        System.err.println("done");

        application.getDownlink().flush();
        application.getDownlink().mapItem(new IntWritable(3), new Text("txt"));
        application.getDownlink().flush();
        application.waitForFinish();

        // test getDownlink().mapItem();
        String stdOut = readStdOut(conf);
        assertTrue(stdOut.contains("key:3"));
        assertTrue(stdOut.contains("value:txt"));

        assertEquals(0.0, context.getProgress(), 0.01);
        assertNotNull(context.getCounter("group", "name"));

        // test status MessageType.STATUS
        assertEquals(context.getStatus(), "PROGRESS");
        // check MessageType.PROGRESS
        assertEquals(0.55f, reader.getProgress(), 0.001);
        application.getDownlink().close();
        // test MessageType.OUTPUT
        stdOut = readFile(new File(workSpace.getAbsolutePath() + File.separator + "outfile"));
        assertTrue(stdOut.contains("key:123"));
        assertTrue(stdOut.contains("value:value"));
        try {
            // try to abort
            application.abort(new Throwable());
            fail();
        } catch (IOException e) {
            // abort is expected to throw the pipe child exception
            assertEquals("pipe child exception", e.getMessage());
        }
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License

/**
 * test org.apache.hadoop.mapreduce.pipes.PipesReducer
 * test the transfer of data: key and value
 *
 * @throws Exception
 */
@Test
public void testPipesReducer() throws Exception {
    System.err.println("testPipesReducer");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeReducerStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        BooleanWritable bw = new BooleanWritable(true);
        List<Text> texts = new ArrayList<Text>();
        texts.add(new Text("first"));
        texts.add(new Text("second"));
        texts.add(new Text("third"));

        DummyRawKeyValueIterator kvit = new DummyRawKeyValueIterator();

        ReduceContextImpl<BooleanWritable, Text, IntWritable, Text> context = new ReduceContextImpl<BooleanWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, kvit, null, null, writer, null, null, null, BooleanWritable.class,
                Text.class);

        PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
        reducer.setup(context);

        initStdOut(conf);
        reducer.reduce(bw, texts, context);
        reducer.cleanup(context);
        String stdOut = readStdOut(conf);

        // test data: key
        assertTrue(stdOut.contains("reducer key :true"));
        // and values
        assertTrue(stdOut.contains("reduce value  :first"));
        assertTrue(stdOut.contains("reduce value  :second"));
        assertTrue(stdOut.contains("reduce value  :third"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }

}

From source file: org.apache.accumulo.core.client.mapred.AbstractInputFormat.java

License: Apache License

/**
 * Sets the connector information needed to communicate with Accumulo in this job.
 *
 * <p>
 * <b>WARNING:</b> Some tokens, when serialized, divulge sensitive information in the configuration as a means to pass the token to MapReduce tasks. This
 * information is BASE64 encoded to provide a charset safe conversion to a string, but this conversion is not intended to be secure. {@link PasswordToken} is
 * one example that is insecure in this way; however {@link DelegationToken}s, acquired using
 * {@link SecurityOperations#getDelegationToken(DelegationTokenConfig)}, are not subject to this concern.
 *
 * @param job
 *          the Hadoop job instance to be configured
 * @param principal
 *          a valid Accumulo user name (user must have Table.CREATE permission)
 * @param token
 *          the user's password
 * @since 1.5.0
 */
public static void setConnectorInfo(JobConf job, String principal, AuthenticationToken token)
        throws AccumuloSecurityException {
    if (token instanceof KerberosToken) {
        log.info("Received KerberosToken, attempting to fetch DelegationToken");
        try {
            Instance instance = getInstance(job);
            Connector conn = instance.getConnector(principal, token);
            token = conn.securityOperations().getDelegationToken(new DelegationTokenConfig());
        } catch (Exception e) {
            log.warn(
                    "Failed to automatically obtain DelegationToken, Mappers/Reducers will likely fail to communicate with Accumulo",
                    e);
        }
    }
    // DelegationTokens can be passed securely from user to task without serializing insecurely in the configuration
    if (token instanceof DelegationTokenImpl) {
        DelegationTokenImpl delegationToken = (DelegationTokenImpl) token;

        // Convert it into a Hadoop Token
        AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
        Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
                delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());

        // Add the Hadoop Token to the Job so it gets serialized and passed along.
        job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
    }

    InputConfigurator.setConnectorInfo(CLASS, job, principal, token);
}

From source file: org.apache.accumulo.core.client.mapred.AccumuloOutputFormat.java

License: Apache License

/**
 * Sets the connector information needed to communicate with Accumulo in this job.
 *
 * <p>
 * <b>WARNING:</b> Some tokens, when serialized, divulge sensitive information in the configuration as a means to pass the token to MapReduce tasks. This
 * information is BASE64 encoded to provide a charset safe conversion to a string, but this conversion is not intended to be secure. {@link PasswordToken} is
 * one example that is insecure in this way; however {@link DelegationToken}s, acquired using
 * {@link SecurityOperations#getDelegationToken(DelegationTokenConfig)}, are not subject to this concern.
 *
 * @param job
 *          the Hadoop job instance to be configured
 * @param principal
 *          a valid Accumulo user name (user must have Table.CREATE permission if {@link #setCreateTables(JobConf, boolean)} is set to true)
 * @param token
 *          the user's password
 * @since 1.5.0
 */
public static void setConnectorInfo(JobConf job, String principal, AuthenticationToken token)
        throws AccumuloSecurityException {
    if (token instanceof KerberosToken) {
        log.info("Received KerberosToken, attempting to fetch DelegationToken");
        try {
            Instance instance = getInstance(job);
            Connector conn = instance.getConnector(principal, token);
            token = conn.securityOperations().getDelegationToken(new DelegationTokenConfig());
        } catch (Exception e) {
            log.warn(
                    "Failed to automatically obtain DelegationToken, Mappers/Reducers will likely fail to communicate with Accumulo",
                    e);
        }
    }
    // DelegationTokens can be passed securely from user to task without serializing insecurely in the configuration
    if (token instanceof DelegationTokenImpl) {
        DelegationTokenImpl delegationToken = (DelegationTokenImpl) token;

        // Convert it into a Hadoop Token
        AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
        Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
                delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());

        // Add the Hadoop Token to the Job so it gets serialized and passed along.
        job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
    }

    OutputConfigurator.setConnectorInfo(CLASS, job, principal, token);
}

From source file: org.apache.accumulo.core.client.mapreduce.AbstractInputFormat.java

License: Apache License

/**
 * Sets the connector information needed to communicate with Accumulo in this job.
 *
 * <p>
 * <b>WARNING:</b> Some tokens, when serialized, divulge sensitive information in the configuration as a means to pass the token to MapReduce tasks. This
 * information is BASE64 encoded to provide a charset safe conversion to a string, but this conversion is not intended to be secure. {@link PasswordToken} is
 * one example that is insecure in this way; however {@link DelegationToken}s, acquired using
 * {@link SecurityOperations#getDelegationToken(DelegationTokenConfig)}, are not subject to this concern.
 *
 * @param job
 *          the Hadoop job instance to be configured
 * @param principal
 *          a valid Accumulo user name (user must have Table.CREATE permission)
 * @param token
 *          the user's password
 * @since 1.5.0
 */
public static void setConnectorInfo(Job job, String principal, AuthenticationToken token)
        throws AccumuloSecurityException {
    if (token instanceof KerberosToken) {
        log.info("Received KerberosToken, attempting to fetch DelegationToken");
        try {
            Instance instance = getInstance(job);
            Connector conn = instance.getConnector(principal, token);
            token = conn.securityOperations().getDelegationToken(new DelegationTokenConfig());
        } catch (Exception e) {
            log.warn(
                    "Failed to automatically obtain DelegationToken, Mappers/Reducers will likely fail to communicate with Accumulo",
                    e);
        }
    }
    // DelegationTokens can be passed securely from user to task without serializing insecurely in the configuration
    if (token instanceof DelegationTokenImpl) {
        DelegationTokenImpl delegationToken = (DelegationTokenImpl) token;

        // Convert it into a Hadoop Token
        AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
        Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
                delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());

        // Add the Hadoop Token to the Job so it gets serialized and passed along.
        job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
    }

    InputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), principal, token);
}

From source file: org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat.java

License: Apache License

/**
 * Sets the connector information needed to communicate with Accumulo in this job.
 *
 * <p>
 * <b>WARNING:</b> Some tokens, when serialized, divulge sensitive information in the configuration as a means to pass the token to MapReduce tasks. This
 * information is BASE64 encoded to provide a charset safe conversion to a string, but this conversion is not intended to be secure. {@link PasswordToken} is
 * one example that is insecure in this way; however {@link DelegationToken}s, acquired using
 * {@link SecurityOperations#getDelegationToken(DelegationTokenConfig)}, are not subject to this concern.
 *
 * @param job
 *          the Hadoop job instance to be configured
 * @param principal
 *          a valid Accumulo user name (user must have Table.CREATE permission if {@link #setCreateTables(Job, boolean)} is set to true)
 * @param token
 *          the user's password
 * @since 1.5.0
 */
public static void setConnectorInfo(Job job, String principal, AuthenticationToken token)
        throws AccumuloSecurityException {
    if (token instanceof KerberosToken) {
        log.info("Received KerberosToken, attempting to fetch DelegationToken");
        try {
            Instance instance = getInstance(job);
            Connector conn = instance.getConnector(principal, token);
            token = conn.securityOperations().getDelegationToken(new DelegationTokenConfig());
        } catch (Exception e) {
            log.warn(
                    "Failed to automatically obtain DelegationToken, Mappers/Reducers will likely fail to communicate with Accumulo",
                    e);
        }
    }
    // DelegationTokens can be passed securely from user to task without serializing insecurely in the configuration
    if (token instanceof DelegationTokenImpl) {
        DelegationTokenImpl delegationToken = (DelegationTokenImpl) token;

        // Convert it into a Hadoop Token
        AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
        Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
                delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());

        // Add the Hadoop Token to the Job so it gets serialized and passed along.
        job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
    }

    OutputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), principal, token);
}