Example usage for org.apache.hadoop.mapreduce.security TokenCache getJobToken

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce.security TokenCache getJobToken.

Prototype

@SuppressWarnings("unchecked")
@InterfaceAudience.Private
public static Token<JobTokenIdentifier> getJobToken(Credentials credentials) 
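
Before the full source-file examples, here is a minimal, self-contained sketch of a typical call: it takes a task's Credentials, looks up the job token, and reads its password, which the examples below use as a shared secret. The class name JobTokenSketch and method jobTokenPassword are illustrative and not taken from any of the source files listed.

import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class JobTokenSketch {
    // 'context' is assumed to be the currently running task's context.
    static byte[] jobTokenPassword(TaskInputOutputContext<?, ?, ?, ?> context) {
        // The framework populates the task's Credentials with the job token.
        Credentials credentials = context.getCredentials();
        Token<JobTokenIdentifier> jobToken = TokenCache.getJobToken(credentials);
        // The token's password is what the examples below use as the shared
        // secret when authenticating child processes and peer workers.
        return jobToken.getPassword();
    }
}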

Usage

From source file:it.crs4.pydoop.mapreduce.pipes.Application.java

License:Apache License

/**
 * Start the child process to handle the task for us.
 * @throws IOException
 * @throws InterruptedException
 */
Application(TaskInputOutputContext<K1, V1, K2, V2> context, DummyRecordReader input)
        throws IOException, InterruptedException {

    Configuration conf = context.getConfiguration();
    serverSocket = new ServerSocket(0);
    Map<String, String> env = new HashMap<String, String>();
    // add TMPDIR environment variable with the value of java.io.tmpdir
    env.put("TMPDIR", System.getProperty("java.io.tmpdir"));
    env.put(Submitter.PORT, Integer.toString(serverSocket.getLocalPort()));

    //Add token to the environment if security is enabled
    Token<JobTokenIdentifier> jobToken = TokenCache.getJobToken(context.getCredentials());
    // This password is used as shared secret key between this application and
    // child pipes process
    byte[] password = jobToken.getPassword();
    String localPasswordFile = new File(".") + Path.SEPARATOR + "jobTokenPassword";
    writePasswordToLocalFile(localPasswordFile, password, conf);
    // FIXME why is this not Submitter.SECRET_LOCATION ?
    env.put("hadoop.pipes.shared.secret.location", localPasswordFile);

    List<String> cmd = new ArrayList<String>();
    String interpretor = conf.get(Submitter.INTERPRETOR);
    if (interpretor != null) {
        cmd.add(interpretor);
    }
    String executable = context.getLocalCacheFiles()[0].toString();
    if (!(new File(executable).canExecute())) {
        // LinuxTaskController sets +x permissions on all distcache files already.
        // In case of DefaultTaskController, set permissions here.
        FileUtil.chmod(executable, "u+x");
    }
    cmd.add(executable);
    // wrap the command in a stdout/stderr capture
    // we are starting map/reduce task of the pipes job. this is not a cleanup
    // attempt. 
    TaskAttemptID taskid = context.getTaskAttemptID();

    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(conf);
    cmd = TaskLog.captureOutAndError(null, cmd, stdout, stderr, logLength, false);
    process = runClient(cmd, env);
    clientSocket = serverSocket.accept();

    String challenge = getSecurityChallenge();
    String digestToSend = createDigest(password, challenge);
    String digestExpected = createDigest(password, digestToSend);

    handler = new OutputHandler<K2, V2>(context, input, digestExpected);
    K2 outputKey = (K2) ReflectionUtils.newInstance(context.getOutputKeyClass(), conf);
    V2 outputValue = (V2) ReflectionUtils.newInstance(context.getOutputValueClass(), conf);
    downlink = new BinaryProtocol<K1, V1, K2, V2>(clientSocket, handler, outputKey, outputValue, conf);

    downlink.authenticate(digestToSend, challenge);
    waitForAuthentication();
    LOG.debug("Authentication succeeded");
    downlink.start();
    downlink.setJobConf(conf);
}

From source file:it.crs4.pydoop.pipes.Application.java

License:Apache License

/**
 * Start the child process to handle the task for us.
 * @param conf the task's configuration
 * @param recordReader the fake record reader to update progress with
 * @param output the collector to send output to
 * @param reporter the reporter for the task
 * @param outputKeyClass the class of the output keys
 * @param outputValueClass the class of the output values
 * @throws IOException
 * @throws InterruptedException
 */
Application(JobConf conf, RecordReader<FloatWritable, NullWritable> recordReader,
        OutputCollector<K2, V2> output, Reporter reporter, Class<? extends K2> outputKeyClass,
        Class<? extends V2> outputValueClass) throws IOException, InterruptedException {
    serverSocket = new ServerSocket(0);
    Map<String, String> env = new HashMap<String, String>();
    // add TMPDIR environment variable with the value of java.io.tmpdir
    env.put("TMPDIR", System.getProperty("java.io.tmpdir"));
    env.put(Submitter.PORT, Integer.toString(serverSocket.getLocalPort()));

    TaskAttemptID taskid = TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID));

    // get the task's working directory
    String workDir = LocalJobRunner.getLocalTaskDir(conf.getUser(), taskid.getJobID().toString(),
            taskid.getTaskID().toString(), false);

    //Add token to the environment if security is enabled
    Token<JobTokenIdentifier> jobToken = TokenCache.getJobToken(conf.getCredentials());
    // This password is used as shared secret key between this application and
    // child pipes process
    byte[] password = jobToken.getPassword();

    String localPasswordFile = new File(workDir, "jobTokenPassword").getAbsolutePath();
    writePasswordToLocalFile(localPasswordFile, password, conf);
    env.put("hadoop.pipes.shared.secret.location", localPasswordFile);

    List<String> cmd = new ArrayList<String>();
    String interpretor = conf.get(Submitter.INTERPRETOR);
    if (interpretor != null) {
        cmd.add(interpretor);
    }
    String executable = DistributedCache.getLocalCacheFiles(conf)[0].toString();
    if (!(new File(executable).canExecute())) {
        // LinuxTaskController sets +x permissions on all distcache files already.
        // In case of DefaultTaskController, set permissions here.
        FileUtil.chmod(executable, "u+x");
    }
    cmd.add(executable);
    // wrap the command in a stdout/stderr capture
    // we are starting map/reduce task of the pipes job. this is not a cleanup
    // attempt. 
    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(conf);
    cmd = TaskLog.captureOutAndError(null, cmd, stdout, stderr, logLength, false);

    process = runClient(cmd, env);
    clientSocket = serverSocket.accept();

    String challenge = getSecurityChallenge();
    String digestToSend = createDigest(password, challenge);
    String digestExpected = createDigest(password, digestToSend);

    handler = new OutputHandler<K2, V2>(output, reporter, recordReader, digestExpected);
    K2 outputKey = (K2) ReflectionUtils.newInstance(outputKeyClass, conf);
    V2 outputValue = (V2) ReflectionUtils.newInstance(outputValueClass, conf);
    downlink = new BinaryProtocol<K1, V1, K2, V2>(clientSocket, handler, outputKey, outputValue, conf);

    downlink.authenticate(digestToSend, challenge);
    waitForAuthentication();
    LOG.debug("Authentication succeeded");
    downlink.start();
    downlink.setJobConf(conf);
}

From source file:org.apache.giraph.comm.netty.SaslNettyClient.java

License:Apache License

/**
 * Obtain JobToken, which we'll use as a credential for SASL authentication
 * when connecting to other Giraph BSPWorkers.
 *
 * @param conf Configuration
 * @return a JobToken containing username and password so that client can
 * authenticate with a server.
 */
private Token<JobTokenIdentifier> createJobToken(Configuration conf) throws IOException {
    String localJobTokenFile = System.getenv().get(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (localJobTokenFile != null) {
        JobConf jobConf = new JobConf(conf);
        Credentials credentials = TokenCache.loadTokens(localJobTokenFile, jobConf);
        return TokenCache.getJobToken(credentials);
    } else {
        throw new IOException("createJobToken: Cannot obtain authentication " + "credentials for job: file: '"
                + UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION + "' not found");
    }
}

From source file:org.apache.giraph.comm.RPCCommunications.java

License:Apache License

/**
  * Create the job token.
  *
  * @return Job token.
  */
protected Token<JobTokenIdentifier> createJobToken() throws IOException {

    String localJobTokenFile = System.getenv().get(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (localJobTokenFile != null) {
        JobConf jobConf = new JobConf(conf);
        Credentials credentials = TokenCache.loadTokens(localJobTokenFile, jobConf);
        return TokenCache.getJobToken(credentials);
    }

    return null;
}