Example usage for org.apache.hadoop.mapred JobConf getCredentials

List of usage examples for org.apache.hadoop.mapred JobConf getCredentials

Introduction

On this page you can find example usages of org.apache.hadoop.mapred.JobConf#getCredentials.

Prototype

public Credentials getCredentials() 

Document

Get credentials for the job.
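Before the usage examples below, here is a minimal sketch of the typical pattern: obtain the job's Credentials store and attach a secret to it. The alias and secret value are illustrative assumptions, not taken from any of the sources that follow.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.Credentials;

public class GetCredentialsExample {
    public static void main(String[] args) {
        JobConf conf = new JobConf();

        // getCredentials() returns the job's Credentials store, which holds
        // secret keys and delegation tokens outside the plain configuration.
        Credentials credentials = conf.getCredentials();

        // Store a secret under an alias (alias and value are made up for illustration).
        Text alias = new Text("example.secret.alias");
        credentials.addSecretKey(alias, "s3cr3t".getBytes());

        // Read it back, e.g. from task-side code that received the same conf.
        byte[] secret = conf.getCredentials().getSecretKey(alias);
        System.out.println(new String(secret));
    }
}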

Usage

From source file:org.apache.oozie.action.hadoop.JavaActionExecutor.java

License:Apache License

public void submitLauncher(FileSystem actionFs, Context context, WorkflowAction action)
        throws ActionExecutorException {
    JobClient jobClient = null;
    boolean exception = false;
    try {
        Path appPathRoot = new Path(context.getWorkflow().getAppPath());

        // app path could be a file
        if (actionFs.isFile(appPathRoot)) {
            appPathRoot = appPathRoot.getParent();
        }

        Element actionXml = XmlUtils.parseXml(action.getConf());

        // action job configuration
        Configuration actionConf = loadHadoopDefaultResources(context, actionXml);
        setupActionConf(actionConf, context, actionXml, appPathRoot);
        LOG.debug("Setting LibFilesArchives ");
        setLibFilesArchives(context, actionXml, appPathRoot, actionConf);

        String jobName = actionConf.get(HADOOP_JOB_NAME);
        if (jobName == null || jobName.isEmpty()) {
            jobName = XLog.format("oozie:action:T={0}:W={1}:A={2}:ID={3}", getType(),
                    context.getWorkflow().getAppName(), action.getName(), context.getWorkflow().getId());
            actionConf.set(HADOOP_JOB_NAME, jobName);
        }

        injectActionCallback(context, actionConf);

        if (actionConf.get(ACL_MODIFY_JOB) == null || actionConf.get(ACL_MODIFY_JOB).trim().equals("")) {
            // ONLY in the case where user has not given the
            // modify-job ACL specifically
            if (context.getWorkflow().getAcl() != null) {
                // setting the group owning the Oozie job to allow anybody in that
                // group to modify the jobs.
                actionConf.set(ACL_MODIFY_JOB, context.getWorkflow().getAcl());
            }
        }

        // Setting the credential properties in launcher conf
        JobConf credentialsConf = null;
        HashMap<String, CredentialsProperties> credentialsProperties = setCredentialPropertyToActionConf(
                context, action, actionConf);
        if (credentialsProperties != null) {

            // Adding if action need to set more credential tokens
            credentialsConf = new JobConf(false);
            XConfiguration.copy(actionConf, credentialsConf);
            setCredentialTokens(credentialsConf, context, action, credentialsProperties);

            // insert conf to action conf from credentialsConf
            for (Entry<String, String> entry : credentialsConf) {
                if (actionConf.get(entry.getKey()) == null) {
                    actionConf.set(entry.getKey(), entry.getValue());
                }
            }
        }

        JobConf launcherJobConf = createLauncherConf(actionFs, context, action, actionXml, actionConf);

        LOG.debug("Creating Job Client for action " + action.getId());
        jobClient = createJobClient(context, launcherJobConf);
        String launcherId = LauncherMapperHelper.getRecoveryId(launcherJobConf, context.getActionDir(),
                context.getRecoveryId());
        boolean alreadyRunning = launcherId != null;
        RunningJob runningJob;

        // if user-retry is on, always submit new launcher
        boolean isUserRetry = ((WorkflowActionBean) action).isUserRetry();

        if (alreadyRunning && !isUserRetry) {
            runningJob = jobClient.getJob(JobID.forName(launcherId));
            if (runningJob == null) {
                String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
                throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
                        "unknown job [{0}@{1}], cannot recover", launcherId, jobTracker);
            }
        } else {
            LOG.debug("Submitting the job through Job Client for action " + action.getId());

            // setting up propagation of the delegation token.
            HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
            Token<DelegationTokenIdentifier> mrdt = jobClient
                    .getDelegationToken(has.getMRDelegationTokenRenewer(launcherJobConf));
            launcherJobConf.getCredentials().addToken(HadoopAccessorService.MR_TOKEN_ALIAS, mrdt);

            // insert credentials tokens to launcher job conf if needed
            if (needInjectCredentials() && credentialsConf != null) {
                for (Token<? extends TokenIdentifier> tk : credentialsConf.getCredentials().getAllTokens()) {
                    Text fauxAlias = new Text(tk.getKind() + "_" + tk.getService());
                    LOG.debug("ADDING TOKEN: " + fauxAlias);
                    launcherJobConf.getCredentials().addToken(fauxAlias, tk);
                }
            } else {
                LOG.info("No need to inject credentials.");
            }
            runningJob = jobClient.submitJob(launcherJobConf);
            if (runningJob == null) {
                throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
                        "Error submitting launcher for action [{0}]", action.getId());
            }
            launcherId = runningJob.getID().toString();
            LOG.debug("After submission get the launcherId " + launcherId);
        }

        String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
        String consoleUrl = runningJob.getTrackingURL();
        context.setStartData(launcherId, jobTracker, consoleUrl);
    } catch (Exception ex) {
        exception = true;
        throw convertException(ex);
    } finally {
        if (jobClient != null) {
            try {
                jobClient.close();
            } catch (Exception e) {
                if (exception) {
                    LOG.error("JobClient error: ", e);
                } else {
                    throw convertException(e);
                }
            }
        }
    }
}

From source file:org.apache.oozie.action.hadoop.KerberosAuthHelper.java

License:Open Source License

public void set(JobClient jobClient, JobConf launcherJobConf) throws IOException, InterruptedException {
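    // Obtain an MR delegation token from the JobTracker and stash it in the launcher job's Credentials.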
    Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("mr token"));
    launcherJobConf.getCredentials().addToken(new Text("mr token"), mrdt);
}

From source file:org.apache.oozie.action.hadoop.TestJavaActionExecutor.java

License:Apache License

public void testCredentialsModule() throws Exception {
    String actionXml = "<workflow-app xmlns='uri:oozie:workflow:0.2.5' name='pig-wf'>" + "<credentials>"
            + "<credential name='abcname' type='abc'>" + "<property>" + "<name>property1</name>"
            + "<value>value1</value>" + "</property>" + "<property>" + "<name>property2</name>"
            + "<value>value2</value>" + "</property>" + "<property>" + "<name>${property3}</name>"
            + "<value>${value3}</value>" + "</property>" + "</credential>" + "</credentials>"
            + "<start to='pig1' />" + "<action name='pig1' cred='abcname'>" + "<pig>" + "</pig>"
            + "<ok to='end' />" + "<error to='fail' />" + "</action>" + "<kill name='fail'>"
            + "<message>Pig failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>" + "</kill>"
            + "<end name='end' />" + "</workflow-app>";

    JavaActionExecutor ae = new JavaActionExecutor();
    WorkflowJobBean wfBean = addRecordToWfJobTable("test1", actionXml);
    WorkflowActionBean action = (WorkflowActionBean) wfBean.getActions().get(0);
    action.setType(ae.getType());
    action.setCred("abcname");
    String actionxml = "<pig>" + "<job-tracker>${jobTracker}</job-tracker>"
            + "<name-node>${nameNode}</name-node>" + "<prepare>" + "<delete path='outputdir' />" + "</prepare>"
            + "<configuration>" + "<property>" + "<name>mapred.compress.map.output</name>"
            + "<value>true</value>" + "</property>" + "<property>" + "<name>mapred.job.queue.name</name>"
            + "<value>${queueName}</value>" + "</property>" + "</configuration>"
            + "<script>org/apache/oozie/examples/pig/id.pig</script>" + "<param>INPUT=${inputDir}</param>"
            + "<param>OUTPUT=${outputDir}/pig-output</param>" + "</pig>";
    action.setConf(actionxml);
    Context context = new Context(wfBean, action);

    Element actionXmlconf = XmlUtils.parseXml(action.getConf());
    // action job configuration
    Configuration actionConf = ae.createBaseHadoopConf(context, actionXmlconf);

    // Setting the credential properties in launcher conf
    HashMap<String, CredentialsProperties> credProperties = ae.setCredentialPropertyToActionConf(context,
            action, actionConf);

    assertNotNull(credProperties);
    CredentialsProperties prop = credProperties.get("abcname");
    assertEquals("value1", prop.getProperties().get("property1"));
    assertEquals("value2", prop.getProperties().get("property2"));
    assertEquals("val3", prop.getProperties().get("prop3"));

    // Try to load the token without it being defined in oozie-site; should get an exception
    JobConf credentialsConf = new JobConf();
    Configuration launcherConf = ae.createBaseHadoopConf(context, actionXmlconf);
    XConfiguration.copy(launcherConf, credentialsConf);
    try {
        ae.setCredentialTokens(credentialsConf, context, action, credProperties);
        fail("Should have gotten an exception but did not");
    } catch (ActionExecutorException aee) {
        assertEquals("JA020", aee.getErrorCode());
        assertTrue(aee.getMessage().contains("type [abc]"));
        assertTrue(aee.getMessage().contains("name [abcname]"));
    }

    // Define 'abc' token type in oozie-site
    ConfigurationService.set("oozie.credentials.credentialclasses",
            "abc=org.apache.oozie.action.hadoop.InsertTestToken");

    // Try to load the token after being defined in oozie-site; should work correctly
    credentialsConf = new JobConf();
    launcherConf = ae.createBaseHadoopConf(context, actionXmlconf);
    XConfiguration.copy(launcherConf, credentialsConf);
    ae.setCredentialTokens(credentialsConf, context, action, credProperties);
    Token<? extends TokenIdentifier> tk = credentialsConf.getCredentials().getToken(new Text("ABC Token"));
    assertNotNull(tk);
}

From source file:org.apache.oozie.action.hadoop.TestJavaActionExecutor.java

License:Apache License

private void _testCredentialsSkip(boolean skipSite, String skipJob, String skipAction, boolean expectingTokens)
        throws Exception {
    String actionLevelSkipConf = (skipAction == null) ? ""
            : "<property><name>oozie.credentials.skip</name><value>" + skipAction + "</value></property>";
    String actionxml = "<pig>" + "<job-tracker>${jobTracker}</job-tracker>"
            + "<name-node>${nameNode}</name-node>" + "<prepare>" + "<delete path='outputdir' />" + "</prepare>"
            + "<configuration>" + "<property>" + "<name>mapred.compress.map.output</name>"
            + "<value>true</value>" + "</property>" + "<property>" + "<name>mapred.job.queue.name</name>"
            + "<value>${queueName}</value>" + "</property>" + actionLevelSkipConf + "</configuration>"
            + "<script>org/apache/oozie/examples/pig/id.pig</script>" + "<param>INPUT=${inputDir}</param>"
            + "<param>OUTPUT=${outputDir}/pig-output</param>" + "</pig>";
    String workflowXml = "<workflow-app xmlns='uri:oozie:workflow:0.2.5' name='pig-wf'>" + "<credentials>"
            + "<credential name='abcname' type='abc'>" + "<property>" + "<name>property1</name>"
            + "<value>value1</value>" + "</property>" + "<property>" + "<name>property2</name>"
            + "<value>value2</value>" + "</property>" + "<property>" + "<name>${property3}</name>"
            + "<value>${value3}</value>" + "</property>" + "</credential>" + "</credentials>"
            + "<start to='pig1' />" + "<action name='pig1' cred='abcname'>" + actionxml + "<ok to='end' />"
            + "<error to='fail' />" + "</action>" + "<kill name='fail'>"
            + "<message>Pig failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>" + "</kill>"
            + "<end name='end' />" + "</workflow-app>";

    JavaActionExecutor ae = new JavaActionExecutor();
    WorkflowJobBean wfBean = addRecordToWfJobTable("test1", workflowXml,
            (skipJob == null) ? null : Collections.singletonMap("oozie.credentials.skip", skipJob));
    WorkflowActionBean action = (WorkflowActionBean) wfBean.getActions().get(0);
    action.setType(ae.getType());
    action.setCred("abcname");
    action.setConf(actionxml);
    Context context = new Context(wfBean, action);

    Element actionXmlconf = XmlUtils.parseXml(action.getConf());
    // action job configuration
    Configuration actionConf = ae.createBaseHadoopConf(context, actionXmlconf);
    actionConf = ae.setupActionConf(actionConf, context, actionXmlconf, new Path("/tmp/foo"));

    // Define 'abc' token type in oozie-site
    ConfigurationService.set("oozie.credentials.credentialclasses",
            "abc=org.apache.oozie.action.hadoop.InsertTestToken");
    ConfigurationService.setBoolean("oozie.credentials.skip", skipSite);

    // Setting the credential properties in launcher conf
    HashMap<String, CredentialsProperties> credProperties = ae.setCredentialPropertyToActionConf(context,
            action, actionConf);

    // Load the tokens; whether the test token is actually added depends on the skip settings under test
    JobConf credentialsConf = new JobConf();
    Configuration launcherConf = ae.createBaseHadoopConf(context, actionXmlconf);
    XConfiguration.copy(launcherConf, credentialsConf);
    ae.setCredentialTokens(credentialsConf, context, action, credProperties);
    Token<? extends TokenIdentifier> tk = credentialsConf.getCredentials().getToken(new Text("ABC Token"));
    if (expectingTokens) {
        assertNotNull(tk);
    } else {
        assertNull(tk);
    }
}

From source file:org.apache.oozie.service.HadoopAccessorService.java

License:Apache License

/**
 * Return a JobClient created with the provided user.
 *
 * @param user user name.
 * @param conf JobConf with all necessary information to create the
 *        JobClient.
 * @return JobClient created with the provided user.
 * @throws HadoopAccessorException if the client could not be created.
 */
public JobClient createJobClient(String user, final JobConf conf) throws HadoopAccessorException {
    ParamChecker.notEmpty(user, "user");
    if (!conf.getBoolean(OOZIE_HADOOP_ACCESSOR_SERVICE_CREATED, false)) {
        throw new HadoopAccessorException(ErrorCode.E0903);
    }
    String jobTracker = conf.get(JavaActionExecutor.HADOOP_JOB_TRACKER);
    validateJobTracker(jobTracker);
    try {
        UserGroupInformation ugi = getUGI(user);
        JobClient jobClient = ugi.doAs(new PrivilegedExceptionAction<JobClient>() {
            public JobClient run() throws Exception {
                return new JobClient(conf);
            }
        });
        Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(getMRDelegationTokenRenewer(conf));
        conf.getCredentials().addToken(MR_TOKEN_ALIAS, mrdt);
        return jobClient;
    } catch (InterruptedException ex) {
        throw new HadoopAccessorException(ErrorCode.E0902, ex.getMessage(), ex);
    } catch (IOException ex) {
        throw new HadoopAccessorException(ErrorCode.E0902, ex.getMessage(), ex);
    }
}

From source file:org.apache.oozie.service.KerberosHadoopAccessorService.java

License:Open Source License

/**
 * Return a JobClient created with the provided user/group.
 *
 * @param conf JobConf with all necessary information to create the JobClient.
 * @return JobClient created with the provided user/group.
 * @throws HadoopAccessorException if the client could not be created.
 */
public JobClient createJobClient(String user, String group, final JobConf conf) throws HadoopAccessorException {
    ParamChecker.notEmpty(user, "user");
    ParamChecker.notEmpty(group, "group");
    validateJobTracker(conf.get("mapred.job.tracker"));
    try {
        UserGroupInformation ugi = getUGI(user);
        JobClient jobClient = ugi.doAs(new PrivilegedExceptionAction<JobClient>() {
            public JobClient run() throws Exception {
                return new JobClient(conf);
            }
        });
        Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("mr token"));
        conf.getCredentials().addToken(new Text("mr token"), mrdt);
        return jobClient;
    } catch (InterruptedException ex) {
        throw new HadoopAccessorException(ErrorCode.E0902, ex);
    } catch (IOException ex) {
        throw new HadoopAccessorException(ErrorCode.E0902, ex);
    }
}

From source file:org.apache.sqoop.credentials.TestPassingSecurePassword.java

License:Apache License

public void testPasswordInDBConfiguration() throws Exception {
    JobConf jobConf = new JobConf(getConf());
    DBConfiguration.configureDB(jobConf, "org.hsqldb.jdbcDriver", getConnectString(), "username", "password",
            null, null);

    assertNotNull(jobConf.getCredentials().getSecretKey(new Text(DBConfiguration.PASSWORD_PROPERTY)));
    assertEquals("password",
            new String(jobConf.getCredentials().getSecretKey(new Text(DBConfiguration.PASSWORD_PROPERTY))));

    // necessary to wipe the state of previous call to configureDB
    jobConf = new JobConf();
    DBConfiguration.configureDB(jobConf, "org.hsqldb.jdbcDriver", getConnectString(), null, null, null, null);
    DBConfiguration dbConfiguration = new DBConfiguration(jobConf);
    Connection connection = dbConfiguration.getConnection();
    assertNotNull(connection);
}

From source file:org.apache.sqoop.job.mr.MRConfigurationUtils.java

License:Apache License

/**
 * Load configuration instance serialized in Hadoop credentials cache.
 *
 * @param configuration JobConf object associated with the job
 * @param classProperty Property with stored configuration class name
 * @param valueProperty Property with stored JSON representation of the
 *                      configuration object
 * @return New instance with loaded data
 */
private static Object loadConfiguration(JobConf configuration, String classProperty, Text valueProperty) {
    // Create new instance of configuration class
    Object object = ClassUtils.instantiate(configuration.get(classProperty));
    if (object == null) {
        return null;
    }

    String json = new String(configuration.getCredentials().getSecretKey(valueProperty));

    // Fill it with JSON data
    ConfigUtils.fillValues(json, object);

    // And give it back
    return object;
}
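
loadConfiguration covers only the read side. For context, here is a hedged sketch of what the matching write side could look like; storeConfiguration is a hypothetical name and not part of MRConfigurationUtils:

// Hypothetical counterpart to loadConfiguration, shown for illustration only:
// the class name travels in the ordinary job configuration, while the JSON
// payload is kept in the credentials cache.
private static void storeConfiguration(JobConf configuration, String classProperty, Text valueProperty,
        Object object, String json) {
    // Class name goes into the plain (world-readable) job configuration...
    configuration.set(classProperty, object.getClass().getName());
    // ...while the serialized values go into the credentials cache.
    configuration.getCredentials().addSecretKey(valueProperty, json.getBytes());
}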

From source file:org.apache.sqoop.mapreduce.mainframe.TestMainframeDatasetFTPRecordReader.java

License:Apache License

@Before
public void setUp() throws IOException {
    mockFTPClient = mock(FTPClient.class);
    MainframeFTPClientUtils.setMockFTPClient(mockFTPClient);
    try {
        when(mockFTPClient.login("user", "pssword")).thenReturn(true);
        when(mockFTPClient.logout()).thenReturn(true);
        when(mockFTPClient.isConnected()).thenReturn(true);
        when(mockFTPClient.completePendingCommand()).thenReturn(true);
        when(mockFTPClient.changeWorkingDirectory(anyString())).thenReturn(true);
        when(mockFTPClient.getReplyCode()).thenReturn(200);
        when(mockFTPClient.noop()).thenReturn(200);
        when(mockFTPClient.setFileType(anyInt())).thenReturn(true);

        FTPFile ftpFile1 = new FTPFile();
        ftpFile1.setType(FTPFile.FILE_TYPE);
        ftpFile1.setName("test1");
        FTPFile ftpFile2 = new FTPFile();
        ftpFile2.setType(FTPFile.FILE_TYPE);
        ftpFile2.setName("test2");
        FTPFile[] ftpFiles = { ftpFile1, ftpFile2 };
        when(mockFTPClient.listFiles()).thenReturn(ftpFiles);

        when(mockFTPClient.retrieveFileStream("test1"))
                .thenReturn(new ByteArrayInputStream("123\n456\n".getBytes()));
        when(mockFTPClient.retrieveFileStream("test2"))
                .thenReturn(new ByteArrayInputStream("789\n".getBytes()));
        when(mockFTPClient.retrieveFileStream("NotComplete"))
                .thenReturn(new ByteArrayInputStream("NotComplete\n".getBytes()));
    } catch (IOException e) {
        fail("No IOException should be thrown!");
    }

    JobConf conf = new JobConf();
    conf.set(DBConfiguration.URL_PROPERTY, "localhost:" + "11111");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "user");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "pssword");
    // set the password in the secure credentials object
    Text PASSWORD_SECRET_KEY = new Text(DBConfiguration.PASSWORD_PROPERTY);
    conf.getCredentials().addSecretKey(PASSWORD_SECRET_KEY, "pssword".getBytes());
    conf.setClass(DBConfiguration.INPUT_CLASS_PROPERTY, DummySqoopRecord.class, DBWritable.class);

    Job job = new Job(conf);
    mfDIS = new MainframeDatasetInputSplit();
    mfDIS.addDataset("test1");
    mfDIS.addDataset("test2");
    context = mock(TaskAttemptContext.class);
    when(context.getConfiguration()).thenReturn(job.getConfiguration());
    mfDFTPRR = new MainframeDatasetFTPRecordReader();
}

From source file:org.apache.sqoop.mapreduce.mainframe.TestMainframeDatasetInputFormat.java

License:Apache License

@Test
public void testRetrieveDatasets() throws IOException {
    JobConf conf = new JobConf();
    conf.set(DBConfiguration.URL_PROPERTY, "localhost:12345");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "user");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "pssword");
    // set the password in the secure credentials object
    Text PASSWORD_SECRET_KEY = new Text(DBConfiguration.PASSWORD_PROPERTY);
    conf.getCredentials().addSecretKey(PASSWORD_SECRET_KEY, "pssword".getBytes());

    String dsName = "dsName1";
    conf.set(MainframeConfiguration.MAINFRAME_INPUT_DATASET_NAME, dsName);
    Job job = new Job(conf);
    ConfigurationHelper.setJobNumMaps(job, 2);

    List<InputSplit> splits = ((MainframeDatasetInputFormat<SqoopRecord>) format).getSplits(job);
    Assert.assertEquals("test1", ((MainframeDatasetInputSplit) splits.get(0)).getNextDataset().toString());
    Assert.assertEquals("test2", ((MainframeDatasetInputSplit) splits.get(1)).getNextDataset().toString());
}