Example usage for org.apache.hadoop.mapred RunningJob isSuccessful

Introduction

On this page you can find example usage of org.apache.hadoop.mapred RunningJob isSuccessful.

Prototype

public boolean isSuccessful() throws IOException;

Document

Check if the job completed successfully.
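
Before the full test cases below, here is a minimal sketch of the typical pattern: submit a job, poll isComplete(), and only then check isSuccessful(). This is a sketch under assumptions, not code taken from the source files below: the JobConf setup (input/output paths, mapper/reducer classes, cluster settings) is elided, and the job name is a placeholder.

import java.io.IOException;

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;

public class IsSuccessfulExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Assumed: mapred-site.xml (or equivalent) on the classpath supplies the
        // cluster settings; input/output paths and mapper/reducer classes would
        // also need to be configured before a real submission.
        JobConf conf = new JobConf();
        conf.setJobName("is-successful-example"); // placeholder name

        JobClient jobClient = new JobClient(conf);
        RunningJob job = jobClient.submitJob(conf);

        // isSuccessful() is only meaningful once the job has finished,
        // so poll isComplete() first.
        while (!job.isComplete()) {
            Thread.sleep(1000);
        }

        if (job.isSuccessful()) {
            System.out.println("Job " + job.getID() + " succeeded");
        } else {
            System.out.println("Job " + job.getID() + " failed or was killed");
        }
    }
}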

Usage

From source file:org.apache.oozie.action.hadoop.TestMapReduceActionExecutor.java

License:Apache License

/**
 * Test "oozie.launcher.mapred.job.name" and "mapred.job.name" can be set in
 * the action configuration and not overridden by the action executor
 *
 * @throws Exception
 */
public void testSetMapredJobName() throws Exception {
    final String launcherJobName = "MapReduceLauncherTest";
    final String mapredJobName = "MapReduceTest";

    FileSystem fs = getFileSystem();

    Path inputDir = new Path(getFsTestCaseDir(), "input");
    Path outputDir = new Path(getFsTestCaseDir(), "output");

    Writer w = new OutputStreamWriter(fs.create(new Path(inputDir, "data.txt")));
    w.write("dummy\n");
    w.write("dummy\n");
    w.close();

    XConfiguration mrConfig = getMapReduceConfig(inputDir.toString(), outputDir.toString());
    mrConfig.set("oozie.launcher.mapred.job.name", launcherJobName);
    mrConfig.set("mapred.job.name", mapredJobName);

    StringBuilder sb = new StringBuilder("<map-reduce>").append("<job-tracker>").append(getJobTrackerUri())
            .append("</job-tracker>").append("<name-node>").append(getNameNodeUri()).append("</name-node>")
            .append(mrConfig.toXmlString(false)).append("</map-reduce>");
    String actionXml = sb.toString();

    Context context = createContext("map-reduce", actionXml);
    final RunningJob launcherJob = submitAction(context);
    String launcherId = context.getAction().getExternalId();
    waitFor(120 * 2000, new Predicate() {
        public boolean evaluate() throws Exception {
            return launcherJob.isComplete();
        }
    });

    assertTrue(launcherJob.isSuccessful());
    Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
            context.getProtoActionConf());
    assertTrue(LauncherMapperHelper.hasIdSwap(actionData));
    // Assert launcher job name has been set
    System.out.println("Launcher job name: " + launcherJob.getJobName());
    assertTrue(launcherJob.getJobName().equals(launcherJobName));

    MapReduceActionExecutor ae = new MapReduceActionExecutor();
    ae.check(context, context.getAction());
    assertTrue(launcherId.equals(context.getAction().getExternalId()));

    JobConf conf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
    String user = conf.get("user.name");

    JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);
    final RunningJob mrJob = jobClient.getJob(JobID.forName(context.getAction().getExternalChildIDs()));

    waitFor(120 * 1000, new Predicate() {
        public boolean evaluate() throws Exception {
            return mrJob.isComplete();
        }
    });
    assertTrue(mrJob.isSuccessful());
    ae.check(context, context.getAction());

    assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
    assertNull(context.getAction().getData());

    ae.end(context, context.getAction());
    assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());

    // Assert Mapred job name has been set
    System.out.println("Mapred job name: " + mrJob.getJobName());
    assertTrue(mrJob.getJobName().equals(mapredJobName));

    // Assert for stats info stored in the context.
    assertNull(context.getExecutionStats());

    // External child IDs used to be null, but since 4.0 they are non-null for MR actions.
    assertNotNull(context.getExternalChildIDs());

    // hadoop.counters will always be set in case of MR action.
    assertNotNull(context.getVar("hadoop.counters"));
    String counters = context.getVar("hadoop.counters");
    assertTrue(counters.contains("Counter"));
}

From source file:org.apache.oozie.action.hadoop.TestPigActionExecutor.java

License:Apache License

private void _testSubmit(String actionXml, boolean checkForSuccess) throws Exception {

    Context context = createContext(actionXml);
    final RunningJob launcherJob = submitAction(context);
    String launcherId = context.getAction().getExternalId();
    evaluateLauncherJob(launcherJob);
    assertTrue(launcherJob.isSuccessful());

    sleep(2000);

    PigActionExecutor ae = new PigActionExecutor();
    ae.check(context, context.getAction());
    ae.end(context, context.getAction());

    if (checkForSuccess) {
        assertFalse(context.getExternalChildIDs().equals(launcherId));
        assertNotNull(context.getAction().getStats());
    }

    assertTrue(launcherId.equals(context.getAction().getExternalId()));
    if (checkForSuccess) {
        assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
        assertNull(context.getAction().getData());

    } else {
        assertEquals("FAILED/KILLED", context.getAction().getExternalStatus());
        assertNotNull(context.getAction().getErrorMessage());
    }
    if (checkForSuccess) {
        assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
    } else {
        assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
    }
}

From source file:org.apache.oozie.action.hadoop.TestPigActionExecutor.java

License:Apache License

public void testExecutionStats() throws Exception {
    // Set the action xml with the option for retrieving stats to true
    String actionXml = setPigActionXml(PIG_SCRIPT, true);
    Context context = createContext(actionXml);
    final RunningJob launcherJob = submitAction(context);
    evaluateLauncherJob(launcherJob);
    assertTrue(launcherJob.isSuccessful());

    Configuration conf = new XConfiguration();
    conf.set("user.name", getTestUser());
    Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
            conf);
    assertTrue(LauncherMapperHelper.hasStatsData(actionData));

    PigActionExecutor ae = new PigActionExecutor();
    WorkflowAction wfAction = context.getAction();
    ae.check(context, wfAction);
    ae.end(context, wfAction);

    assertEquals("SUCCEEDED", wfAction.getExternalStatus());
    String stats = wfAction.getStats();
    assertNotNull(stats);
    // check for some of the expected key values in the stats
    Map m = (Map) JSONValue.parse(stats);
    // check for expected 1st level JSON keys
    assertTrue(m.containsKey("PIG_VERSION"));

    String expectedChildIDs = wfAction.getExternalChildIDs();
    String[] childIDs = expectedChildIDs.split(",");
    assertTrue(m.containsKey(childIDs[0]));

    Map q = (Map) m.get(childIDs[0]);
    // check for expected 2nd level JSON keys
    assertTrue(q.containsKey("HADOOP_COUNTERS"));
}

From source file:org.apache.oozie.action.hadoop.TestPigActionExecutor.java

License:Apache License

public void testExternalChildIds() throws Exception {
    // Set the action xml with the option for retrieving stats to false
    String actionXml = setPigActionXml(PIG_SCRIPT, false);
    Context context = createContext(actionXml);
    final RunningJob launcherJob = submitAction(context);
    evaluateLauncherJob(launcherJob);
    assertTrue(launcherJob.isSuccessful());

    PigActionExecutor ae = new PigActionExecutor();
    WorkflowAction wfAction = context.getAction();
    ae.check(context, wfAction);
    ae.end(context, wfAction);

    assertEquals("SUCCEEDED", wfAction.getExternalStatus());
    String externalIds = wfAction.getExternalChildIDs();
    assertNotNull(externalIds);
    assertNotSame("", externalIds);
    // check for the expected prefix of hadoop jobIDs
    assertTrue(externalIds.contains("job_"));

}

From source file:org.apache.oozie.action.hadoop.TestPigActionExecutor.java

License:Apache License

public void testExecutionStatsWithMaxStatsSizeLimit() throws Exception {
    Services.get().destroy();
    // Set a very small value for max size of stats
    setSystemProperty(JavaActionExecutor.MAX_EXTERNAL_STATS_SIZE, "1");
    new Services().init();
    // Set the action xml with the option for retrieving stats to true
    String actionXml = setPigActionXml(PIG_SCRIPT, true);
    Context context = createContext(actionXml);
    final RunningJob launcherJob = submitAction(context);
    evaluateLauncherJob(launcherJob);
    assertTrue(launcherJob.isSuccessful());

    PigActionExecutor ae = new PigActionExecutor();
    WorkflowAction wfAction = context.getAction();
    ae.check(context, wfAction);
    ae.end(context, wfAction);

    // action should fail as the size of pig stats will always be greater
    // than 1 byte
    assertEquals("FAILED/KILLED", wfAction.getExternalStatus());
    assertNull(wfAction.getStats());
}

From source file:org.apache.oozie.action.hadoop.TestPigActionExecutor.java

License:Apache License

public void testExecutionStatsWithRetrieveStatsFalse() throws Exception {
    // Set the action xml with the option for retrieving stats to false
    String actionXml = setPigActionXml(PIG_SCRIPT, false);
    Context context = createContext(actionXml);
    final RunningJob launcherJob = submitAction(context);
    evaluateLauncherJob(launcherJob);
    assertTrue(launcherJob.isSuccessful());

    Configuration conf = new XConfiguration();
    conf.set("user.name", getTestUser());
    Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
            conf);
    assertFalse(LauncherMapperHelper.hasStatsData(actionData));

    PigActionExecutor ae = new PigActionExecutor();
    WorkflowAction wfAction = context.getAction();
    ae.check(context, wfAction);
    ae.end(context, wfAction);

    assertEquals("SUCCEEDED", wfAction.getExternalStatus());
    assertNotNull(wfAction.getExternalChildIDs());
}

From source file:org.apache.oozie.action.hadoop.TestPyspark.java

License:Apache License

private void testPysparkHelper(String sparkOpts, WorkflowJobBean wf, String externalStatus,
        WorkflowAction.Status wfStatus) throws Exception {
    Context context = createContext(getActionXml(sparkOpts), wf);
    final RunningJob launcherJob = submitAction(context);
    waitFor(200 * 1000, new Predicate() {
        public boolean evaluate() throws Exception {
            return launcherJob.isComplete();
        }
    });
    assertTrue(launcherJob.isSuccessful());
    SparkActionExecutor ae = new SparkActionExecutor();
    ae.check(context, context.getAction());
    assertEquals(externalStatus, context.getAction().getExternalStatus());
    ae.end(context, context.getAction());
    assertEquals(wfStatus, context.getAction().getStatus());
}

From source file:org.apache.oozie.action.hadoop.TestShellActionExecutor.java

License:Apache License

/**
 * Submit the WF with a Shell action and verify that the job succeeds
 *
 * @param actionXml
 * @param checkForSuccess
 * @throws Exception
 */
private WorkflowAction _testSubmit(String actionXml, boolean checkForSuccess, String capture_output)
        throws Exception {

    Context context = createContext(actionXml);
    final RunningJob launcherJob = submitAction(context); // Submit the action
    String launcherId = context.getAction().getExternalId(); // Get LM id
    waitFor(180 * 1000, new Predicate() { // Wait for the external job to finish
        public boolean evaluate() throws Exception {
            return launcherJob.isComplete();
        }
    });
    // Thread.sleep(2000);
    assertTrue(launcherJob.isSuccessful());

    sleep(2000);// Wait more to make sure no ID swap happens
    Configuration conf = new XConfiguration();
    conf.set("user.name", getTestUser());
    Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
            conf);
    assertFalse(LauncherMapperHelper.hasIdSwap(actionData));

    ShellActionExecutor ae = new ShellActionExecutor();
    ae.check(context, context.getAction());
    ae.end(context, context.getAction());
    assertTrue(launcherId.equals(context.getAction().getExternalId()));

    if (checkForSuccess) { // Positive test cases
        assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
        // Testing capture output
        if (capture_output != null && capture_output.length() > 0) {
            assertEquals(capture_output,
                    PropertiesUtils.stringToProperties(context.getAction().getData()).getProperty("MY_VAR"));
        }
    } else { // Negative test cases
        assertEquals("FAILED/KILLED", context.getAction().getExternalStatus());
        assertNotNull(context.getAction().getErrorMessage());
    }
    if (checkForSuccess) { // Positive test cases
        assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
    } else {// Negative test cases
        assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
    }
    return context.getAction();
}

From source file:org.apache.oozie.action.hadoop.TestSparkActionExecutor.java

License:Apache License

public void testSparkAction() throws Exception {
    FileSystem fs = getFileSystem();
    Path file = new Path(getAppPath(), SPARK_FILENAME);
    Writer scriptWriter = new OutputStreamWriter(fs.create(file));
    scriptWriter.write("1,2,3");
    scriptWriter.write("\n");
    scriptWriter.write("2,3,4");
    scriptWriter.close();

    Context context = createContext(getActionXml());
    final RunningJob launcherJob = submitAction(context);
    waitFor(200 * 1000, new Predicate() {
        public boolean evaluate() throws Exception {
            return launcherJob.isComplete();
        }
    });
    assertTrue(launcherJob.isSuccessful());

    SparkActionExecutor ae = new SparkActionExecutor();
    ae.check(context, context.getAction());
    assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
    assertTrue(fs.exists(new Path(getAppPath() + "/" + OUTPUT)));
    ae.end(context, context.getAction());
    assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());

}

From source file:org.apache.oozie.action.hadoop.TestSqoopActionExecutor.java

License:Apache License

public void testSqoopAction() throws Exception {
    createDB();

    Context context = createContext(getActionXml());
    final RunningJob launcherJob = submitAction(context);
    String launcherId = context.getAction().getExternalId();
    waitFor(120 * 1000, new Predicate() {
        public boolean evaluate() throws Exception {
            return launcherJob.isComplete();
        }
    });
    assertTrue(launcherJob.isSuccessful());
    Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
            context.getProtoActionConf());
    assertFalse(LauncherMapperHelper.hasIdSwap(actionData));

    SqoopActionExecutor ae = new SqoopActionExecutor();
    ae.check(context, context.getAction());
    assertTrue(launcherId.equals(context.getAction().getExternalId()));
    assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
    assertNotNull(context.getAction().getData());
    assertNotNull(context.getAction().getExternalChildIDs());
    ae.end(context, context.getAction());
    assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());

    String hadoopCounters = context.getVar(MapReduceActionExecutor.HADOOP_COUNTERS);
    assertNotNull(hadoopCounters);
    assertFalse(hadoopCounters.isEmpty());

    FileSystem fs = getFileSystem();
    BufferedReader br = new BufferedReader(
            new InputStreamReader(fs.open(new Path(getSqoopOutputDir(), "part-m-00000"))));
    int count = 0;
    String line = br.readLine();
    while (line != null) {
        assertTrue(line.contains("a"));
        count++;
        line = br.readLine();
    }
    br.close();
    assertEquals(3, count);

    assertNotNull(context.getAction().getData());
    Properties outputData = new Properties();
    outputData.load(new StringReader(context.getAction().getData()));
    assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
    assertTrue(outputData.getProperty(LauncherMain.HADOOP_JOBS).trim().length() > 0);
}