Example usage for org.apache.hadoop.mapred JobConf getCredentials

List of usage examples for org.apache.hadoop.mapred JobConf getCredentials

Introduction

On this page you can find example usages of org.apache.hadoop.mapred JobConf getCredentials.

Prototype

public Credentials getCredentials() 

Document

Get credentials for the job.
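
As a quick orientation before the examples, here is a minimal, self-contained sketch (not taken from any of the source files below) showing how a job-level secret can be stored in and read back from the Credentials object returned by getCredentials(). The alias my.secret.alias and the literal password are illustrative only.

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.Credentials;

public class CredentialsExample {
    // Illustrative alias; any Text key works as long as writer and reader agree on it.
    private static final Text SECRET_ALIAS = new Text("my.secret.alias");

    public static void main(String[] args) {
        JobConf conf = new JobConf();

        // At submission time: place a secret into the job's credentials store.
        Credentials credentials = conf.getCredentials();
        credentials.addSecretKey(SECRET_ALIAS, "s3cr3t".getBytes(StandardCharsets.UTF_8));

        // At task time: read the secret back from the same store.
        byte[] secret = conf.getCredentials().getSecretKey(SECRET_ALIAS);
        System.out.println(secret == null ? "no secret set" : new String(secret, StandardCharsets.UTF_8));
    }
}

Unlike values written into the Configuration itself, entries in the Credentials object are not exposed in the plain-text job configuration, which is why the examples below rely on it for passwords and delegation tokens.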

Usage

From source file:com.savy3.util.DBConfiguration.java

License:Apache License

public static String getPassword(JobConf configuration) {
    LOG.debug("Fetching password from job credentials store");
    byte[] secret = configuration.getCredentials().getSecretKey(PASSWORD_SECRET_KEY);
    return secret != null ? new String(secret) : null;
}
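
Note that new String(secret) above decodes the bytes with the platform default charset; if the password was stored as UTF-8, passing StandardCharsets.UTF_8 explicitly would make the round trip independent of the task JVM's default encoding.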

From source file:it.crs4.pydoop.pipes.Application.java

License:Apache License

/**
 * Start the child process to handle the task for us.
 * @param conf the task's configuration
 * @param recordReader the fake record reader to update progress with
 * @param output the collector to send output to
 * @param reporter the reporter for the task
 * @param outputKeyClass the class of the output keys
 * @param outputValueClass the class of the output values
 * @throws IOException
 * @throws InterruptedException
 */
Application(JobConf conf, RecordReader<FloatWritable, NullWritable> recordReader,
        OutputCollector<K2, V2> output, Reporter reporter, Class<? extends K2> outputKeyClass,
        Class<? extends V2> outputValueClass) throws IOException, InterruptedException {
    serverSocket = new ServerSocket(0);
    Map<String, String> env = new HashMap<String, String>();
    // add TMPDIR environment variable with the value of java.io.tmpdir
    env.put("TMPDIR", System.getProperty("java.io.tmpdir"));
    env.put(Submitter.PORT, Integer.toString(serverSocket.getLocalPort()));

    TaskAttemptID taskid = TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID));

    // get the task's working directory
    String workDir = LocalJobRunner.getLocalTaskDir(conf.getUser(), taskid.getJobID().toString(),
            taskid.getTaskID().toString(), false);

    // Add the job token to the environment if security is enabled
    Token<JobTokenIdentifier> jobToken = TokenCache.getJobToken(conf.getCredentials());
    // This password is used as the shared secret key between this application and
    // the child pipes process
    byte[] password = jobToken.getPassword();

    String localPasswordFile = new File(workDir, "jobTokenPassword").getAbsolutePath();
    writePasswordToLocalFile(localPasswordFile, password, conf);
    env.put("hadoop.pipes.shared.secret.location", localPasswordFile);

    List<String> cmd = new ArrayList<String>();
    String interpretor = conf.get(Submitter.INTERPRETOR);
    if (interpretor != null) {
        cmd.add(interpretor);
    }
    String executable = DistributedCache.getLocalCacheFiles(conf)[0].toString();
    if (!(new File(executable).canExecute())) {
        // LinuxTaskController sets +x permissions on all distcache files already.
        // In case of DefaultTaskController, set permissions here.
        FileUtil.chmod(executable, "u+x");
    }
    cmd.add(executable);
    // wrap the command in a stdout/stderr capture
    // We are starting a map/reduce task of the pipes job; this is not a cleanup
    // attempt.
    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(conf);
    cmd = TaskLog.captureOutAndError(null, cmd, stdout, stderr, logLength, false);

    process = runClient(cmd, env);
    clientSocket = serverSocket.accept();

    String challenge = getSecurityChallenge();
    String digestToSend = createDigest(password, challenge);
    String digestExpected = createDigest(password, digestToSend);

    handler = new OutputHandler<K2, V2>(output, reporter, recordReader, digestExpected);
    K2 outputKey = (K2) ReflectionUtils.newInstance(outputKeyClass, conf);
    V2 outputValue = (V2) ReflectionUtils.newInstance(outputValueClass, conf);
    downlink = new BinaryProtocol<K1, V1, K2, V2>(clientSocket, handler, outputKey, outputValue, conf);

    downlink.authenticate(digestToSend, challenge);
    waitForAuthentication();
    LOG.debug("Authentication succeeded");
    downlink.start();
    downlink.setJobConf(conf);
}

From source file:org.apache.accumulo.core.client.mapred.AbstractInputFormat.java

License:Apache License

/**
 * Sets the connector information needed to communicate with Accumulo in this job.
 *
 * <p>
 * <b>WARNING:</b> Some tokens, when serialized, divulge sensitive information in the configuration as a means to pass the token to MapReduce tasks. This
 * information is BASE64 encoded to provide a charset-safe conversion to a string, but this conversion is not intended to be secure. {@link PasswordToken} is
 * one example that is insecure in this way; however, {@link DelegationToken}s, acquired using
 * {@link SecurityOperations#getDelegationToken(DelegationTokenConfig)}, are not subject to this concern.
 *
 * @param job
 *          the Hadoop job instance to be configured
 * @param principal
 *          a valid Accumulo user name (user must have Table.CREATE permission)
 * @param token
 *          the user's password
 * @since 1.5.0
 */
public static void setConnectorInfo(JobConf job, String principal, AuthenticationToken token)
        throws AccumuloSecurityException {
    if (token instanceof KerberosToken) {
        log.info("Received KerberosToken, attempting to fetch DelegationToken");
        try {
            Instance instance = getInstance(job);
            Connector conn = instance.getConnector(principal, token);
            token = conn.securityOperations().getDelegationToken(new DelegationTokenConfig());
        } catch (Exception e) {
            log.warn(
                    "Failed to automatically obtain DelegationToken, Mappers/Reducers will likely fail to communicate with Accumulo",
                    e);
        }
    }
    // DelegationTokens can be passed securely from user to task without serializing insecurely in the configuration
    if (token instanceof DelegationTokenImpl) {
        DelegationTokenImpl delegationToken = (DelegationTokenImpl) token;

        // Convert it into a Hadoop Token
        AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
        Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
                delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());

        // Add the Hadoop Token to the Job so it gets serialized and passed along.
        job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
    }

    InputConfigurator.setConnectorInfo(CLASS, job, principal, token);
}

From source file:org.apache.accumulo.core.client.mapred.AccumuloOutputFormat.java

License:Apache License

/**
 * Sets the connector information needed to communicate with Accumulo in this job.
 *
 * <p>
 * <b>WARNING:</b> Some tokens, when serialized, divulge sensitive information in the configuration as a means to pass the token to MapReduce tasks. This
 * information is BASE64 encoded to provide a charset-safe conversion to a string, but this conversion is not intended to be secure. {@link PasswordToken} is
 * one example that is insecure in this way; however, {@link DelegationToken}s, acquired using
 * {@link SecurityOperations#getDelegationToken(DelegationTokenConfig)}, are not subject to this concern.
 *
 * @param job
 *          the Hadoop job instance to be configured
 * @param principal
 *          a valid Accumulo user name (user must have Table.CREATE permission if {@link #setCreateTables(JobConf, boolean)} is set to true)
 * @param token
 *          the user's password
 * @since 1.5.0
 */
public static void setConnectorInfo(JobConf job, String principal, AuthenticationToken token)
        throws AccumuloSecurityException {
    if (token instanceof KerberosToken) {
        log.info("Received KerberosToken, attempting to fetch DelegationToken");
        try {
            Instance instance = getInstance(job);
            Connector conn = instance.getConnector(principal, token);
            token = conn.securityOperations().getDelegationToken(new DelegationTokenConfig());
        } catch (Exception e) {
            log.warn(
                    "Failed to automatically obtain DelegationToken, Mappers/Reducers will likely fail to communicate with Accumulo",
                    e);
        }
    }
    // DelegationTokens can be passed securely from user to task without serializing insecurely in the configuration
    if (token instanceof DelegationTokenImpl) {
        DelegationTokenImpl delegationToken = (DelegationTokenImpl) token;

        // Convert it into a Hadoop Token
        AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
        Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
                delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());

        // Add the Hadoop Token to the Job so it gets serialized and passed along.
        job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
    }

    OutputConfigurator.setConnectorInfo(CLASS, job, principal, token);
}

From source file:org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.java

License:Apache License

/**
 * Unwraps the provided {@link AuthenticationToken} if it is an instance of {@link DelegationTokenStub}, reconstituting it from the provided {@link JobConf}.
 *
 * @param job
 *          The job
 * @param token
 *          The authentication token
 */
public static AuthenticationToken unwrapAuthenticationToken(JobConf job, AuthenticationToken token) {
    requireNonNull(job);
    requireNonNull(token);
    if (token instanceof DelegationTokenStub) {
        DelegationTokenStub delTokenStub = (DelegationTokenStub) token;
        Token<? extends TokenIdentifier> hadoopToken = job.getCredentials()
                .getToken(new Text(delTokenStub.getServiceName()));
        AuthenticationTokenIdentifier identifier = new AuthenticationTokenIdentifier();
        try {
            identifier.readFields(new DataInputStream(new ByteArrayInputStream(hadoopToken.getIdentifier())));
            return new DelegationTokenImpl(hadoopToken.getPassword(), identifier);
        } catch (IOException e) {
            throw new RuntimeException("Could not construct DelegationToken from JobConf Credentials", e);
        }
    }
    return token;
}

From source file:org.apache.accumulo.core.clientImpl.mapreduce.lib.ConfiguratorBase.java

License:Apache License

/**
 * Unwraps the provided {@link AuthenticationToken} if it is an instance of DelegationTokenStub,
 * reconstituting it from the provided {@link JobConf}.
 *
 * @param job
 *          The job
 * @param token
 *          The authentication token
 */
public static AuthenticationToken unwrapAuthenticationToken(JobConf job, AuthenticationToken token) {
    requireNonNull(job);
    requireNonNull(token);
    if (token instanceof org.apache.accumulo.core.clientImpl.mapreduce.DelegationTokenStub) {
        org.apache.accumulo.core.clientImpl.mapreduce.DelegationTokenStub delTokenStub = (org.apache.accumulo.core.clientImpl.mapreduce.DelegationTokenStub) token;
        Token<? extends TokenIdentifier> hadoopToken = job.getCredentials()
                .getToken(new Text(delTokenStub.getServiceName()));
        AuthenticationTokenIdentifier identifier = new AuthenticationTokenIdentifier();
        try {
            identifier.readFields(new DataInputStream(new ByteArrayInputStream(hadoopToken.getIdentifier())));
            return new DelegationTokenImpl(hadoopToken.getPassword(), identifier);
        } catch (IOException e) {
            throw new RuntimeException("Could not construct DelegationToken from JobConf Credentials", e);
        }
    }
    return token;
}

From source file:org.apache.accumulo.hadoopImpl.mapred.AbstractInputFormat.java

License:Apache License

/**
 * Sets connection information needed to communicate with Accumulo for this job
 *
 * @param job
 *          Hadoop job instance to be configured
 * @param info
 *          Connection information for Accumulo
 * @since 2.0.0
 */
public static void setClientInfo(JobConf job, ClientInfo info) {
    ClientInfo inputInfo = InputConfigurator.updateToken(job.getCredentials(), info);
    InputConfigurator.setClientInfo(CLASS, job, inputInfo);
}

From source file:org.apache.accumulo.hadoopImpl.mapred.AccumuloOutputFormatImpl.java

License:Apache License

/**
 * Set the connection information needed to communicate with Accumulo in this job.
 *
 * @param job
 *          Hadoop job to be configured
 * @param info
 *          Accumulo connection information
 * @since 2.0.0
 */
public static void setClientInfo(JobConf job, ClientInfo info) {
    ClientInfo outInfo = OutputConfigurator.updateToken(job.getCredentials(), info);
    OutputConfigurator.setClientInfo(CLASS, job, outInfo);
}

From source file:org.apache.flink.api.java.hadoop.mapred.HadoopInputFormatBase.java

License:Apache License

public HadoopInputFormatBase(org.apache.hadoop.mapred.InputFormat<K, V> mapredInputFormat, Class<K> key,
        Class<V> value, JobConf job) {
    super(job.getCredentials());
    this.mapredInputFormat = mapredInputFormat;
    this.keyClass = key;
    this.valueClass = value;
    HadoopUtils.mergeHadoopConf(job);
    this.jobConf = job;
    ReflectionUtils.setConf(mapredInputFormat, jobConf);
}

From source file:org.apache.flink.api.java.hadoop.mapred.HadoopOutputFormatBase.java

License:Apache License

public HadoopOutputFormatBase(org.apache.hadoop.mapred.OutputFormat<K, V> mapredOutputFormat, JobConf job) {
    super(job.getCredentials());
    this.mapredOutputFormat = mapredOutputFormat;
    HadoopUtils.mergeHadoopConf(job);
    this.jobConf = job;
}