Example usage for org.apache.hadoop.mapred JobStatus getJobRunState

List of usage examples for org.apache.hadoop.mapred JobStatus getJobRunState

Introduction

On this page you can find example usage for org.apache.hadoop.mapred JobStatus getJobRunState.

Prototype

public static String getJobRunState(int state) 

Source Link

Document

Helper method to get human-readable state of the job.
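The helper simply maps the numeric run-state constants defined on JobStatus (RUNNING, SUCCEEDED, FAILED, PREP, KILLED) to their names. Below is a minimal sketch of calling it directly, assuming the classic org.apache.hadoop.mapred API is on the classpath; JobRunStateDemo is an illustrative class name, not part of Hadoop.

import org.apache.hadoop.mapred.JobStatus;

public class JobRunStateDemo {
    public static void main(String[] args) {
        // JobStatus defines the run states as public static int constants.
        int[] states = { JobStatus.PREP, JobStatus.RUNNING, JobStatus.SUCCEEDED,
                JobStatus.FAILED, JobStatus.KILLED };
        for (int state : states) {
            // getJobRunState(int) returns the human-readable name,
            // e.g. "RUNNING" or "SUCCEEDED"; out-of-range values fall back to "UNKNOWN".
            System.out.println(state + " -> " + JobStatus.getJobRunState(state));
        }
    }
}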

Usage

From source file:cascading.flow.hadoop.planner.HadoopFlowStepJob.java

License:Open Source License

protected void dumpDebugInfo() {
    try {
        if (runningJob == null)
            return;

        int jobState = runningJob.getJobState(); // may throw an NPE internally

        flowStep.logWarn(
                "hadoop job " + runningJob.getID() + " state at " + JobStatus.getJobRunState(jobState));
        flowStep.logWarn("failure info: " + runningJob.getFailureInfo());

        TaskCompletionEvent[] events = runningJob.getTaskCompletionEvents(0);
        flowStep.logWarn("task completion events identify failed tasks");
        flowStep.logWarn("task completion events count: " + events.length);

        for (TaskCompletionEvent event : events)
            flowStep.logWarn("event = " + event);
    } catch (Throwable throwable) {
        flowStep.logError("failed reading task completion events", throwable);
    }
}

From source file:com.impetus.ankush2.hadoop.monitor.JobStatusProvider.java

License:Open Source License

/**
 * Builds a report of job details for the given job status.
 *
 * @param jobSts the status of the job to report on
 * @return a map of job attributes keyed by name
 * @throws IOException if job details cannot be retrieved
 */
private Map<String, Object> getJobReport(JobStatus jobSts) throws IOException {
    // Creating an empty map for storing job information
    Map<String, Object> jobReport = new HashMap<String, Object>();
    // Get the JobID of the job
    org.apache.hadoop.mapred.JobID jobId = jobSts.getJobID();
    // Get a RunningJob object to track the ongoing Map-Reduce job.
    RunningJob job = jobClient.getJob(jobId);
    String jobName = "";
    if (job != null) {
        // Get the name of the job.
        jobName = job.getJobName();
    }
    // Percentage of progress in maps
    float mapProgress = jobSts.mapProgress() * 100;
    // Percentage of progress in reduce
    float reduceProgress = jobSts.reduceProgress() * 100;

    int mapTotal = 0;
    int reduceTotal = 0;
    int mapComp = 0;
    int reduceComp = 0;

    // Counters for completed map and reduce tasks
    try {
        // Get the information of the current state of the map
        // tasks of a job
        TaskReport[] mapTaskReports = jobClient.getMapTaskReports(jobId);
        // Total number of map tasks
        mapTotal = mapTaskReports.length;
        // Iterating over the map tasks
        for (TaskReport taskReport : mapTaskReports) {
            // The current state of a map TaskInProgress as seen
            // by the JobTracker.
            TIPStatus currentStatus = taskReport.getCurrentStatus();
            if (currentStatus == TIPStatus.COMPLETE) {
                mapComp++;
            }
        }

        // Get the information of the current state of the
        // reduce tasks of a job.
        TaskReport[] reduceTaskReport = jobClient.getReduceTaskReports(jobId);
        // Total number of reduce tasks
        reduceTotal = reduceTaskReport.length;
        // Iterating over the reduce tasks
        for (TaskReport taskReport : reduceTaskReport) {
            // The current state of a reduce TaskInProgress as
            // seen by the JobTracker.
            TIPStatus currentStatus = taskReport.getCurrentStatus();
            if (currentStatus == TIPStatus.COMPLETE) {
                reduceComp++;
            }
        }
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
    }
    // Percentage of progress in setup
    float setupProgress = jobSts.setupProgress() * 100;
    // Percentage of progress in cleanup
    float cleanupProgress = jobSts.cleanupProgress() * 100;
    // Get any available diagnostic information on why the job
    // might have failed.
    String failureInfo = jobSts.getFailureInfo();

    // Putting job status information in the map
    jobReport.put("jobId", jobId.toString());
    jobReport.put("jobName", jobName);
    jobReport.put("jobPriority", jobSts.getJobPriority().toString());
    jobReport.put("jobStartTime", jobSts.getStartTime());

    jobReport.put("userName", jobSts.getUsername());
    jobReport.put("jobComplete", jobSts.isJobComplete());

    jobReport.put("mapProgress", mapProgress);
    jobReport.put("reduceProgress", reduceProgress);

    jobReport.put("mapTotal", mapTotal);
    jobReport.put("reduceTotal", reduceTotal);
    jobReport.put("mapCompleted", mapComp);
    jobReport.put("reduceCompleted", reduceComp);

    jobReport.put("setupProgress", setupProgress);
    jobReport.put("cleanupProgress", cleanupProgress);

    jobReport.put("schedulingInfo", jobSts.getSchedulingInfo());
    jobReport.put("jobState", JobStatus.getJobRunState(jobSts.getRunState()));
    jobReport.put("failureInfo", failureInfo);
    jobReport.put("jobFile", job.getJobFile());
    jobReport.put("trackingURL", job.getTrackingURL());

    jobReport.putAll(getDetailedJobReport(jobId));
    return jobReport;
}

From source file:com.jackbe.mapreduce.LocalJobManager.java

License:Open Source License

@Override
public String getJobState(RunningJob job) throws IOException {
    if (log.isTraceEnabled())
        log.trace("called.");

    if (job == null)
        return JOB_NOT_FOUND;

    return JobStatus.getJobRunState(job.getJobState());
}

From source file:com.jackbe.mapreduce.LocalJobManager.java

License:Open Source License

@Override
public String getAllJobs() {
    RunningJob[] jobsInfo = null;

    try {
        if (jobClient == null)
            return "<Exception> NULL jobClient. </Exception>";
        //FIXME need to fix for remote jobs
        //jobsInfo = jobClient.getAllJobs();
    } catch (Exception e) {
        log.error("Exception getting jobs from jobClient: " + e, e);
        return "Exception getting jobs: " + e.getMessage();
    }
    // If this is running local, we need to use the Jobs in our map.
    // TODO: remove old entries eventually.

    RunningJob[] temp = new RunningJob[statusMap.size()];
    jobsInfo = statusMap.values().toArray(temp);

    StringBuffer xml = new StringBuffer();
    xml.append("<jobs>\n");
    for (RunningJob job : jobsInfo) {
        try {
            xml.append("<job>\n\t<id>" + job.getID().toString() + "</id>\n\t<state>"
                    + JobStatus.getJobRunState(job.getJobState()));
        } catch (IOException e) {
            log.error("Exception apending job status info: " + e);
            // The XML string is now screwed up, just return null.
            return null;
        }
        xml.append("</state>\n\t<progress>" + getJobProgress(job.getID().toString()) + "</progress>\n</job>\n");
    }
    xml.append("\n</jobs>");
    return xml.toString();
}

From source file:org.apache.hive.hcatalog.templeton.tool.LogRetriever.java

License:Apache License

private void logJob(String logDir, String jobID, PrintWriter listWriter) throws IOException {
    RunningJob rj = jobClient.getJob(JobID.forName(jobID));
    String jobURLString = rj.getTrackingURL();

    Path jobDir = new Path(logDir, jobID);
    fs.mkdirs(jobDir);

    // Log the job configuration
    try {
        logJobConf(jobID, jobURLString, jobDir.toString());
    } catch (IOException e) {
        System.err.println("Cannot retrieve job.xml.html for " + jobID);
        e.printStackTrace();
    }

    listWriter.println("job: " + jobID + "(" + "name=" + rj.getJobName() + "," + "status="
            + JobStatus.getJobRunState(rj.getJobState()) + ")");

    // Get completed attempts
    List<AttemptInfo> attempts = new ArrayList<AttemptInfo>();
    for (String type : new String[] { "map", "reduce", "setup", "cleanup" }) {
        try {
            List<AttemptInfo> successAttempts = getCompletedAttempts(jobID, jobURLString, type);
            attempts.addAll(successAttempts);
        } catch (IOException e) {
            System.err.println("Cannot retrieve " + type + " tasks for " + jobID);
            e.printStackTrace();
        }
    }

    // Get failed attempts
    try {
        List<AttemptInfo> failedAttempts = getFailedAttempts(jobID, jobURLString);
        attempts.addAll(failedAttempts);
    } catch (IOException e) {
        System.err.println("Cannot retrieve failed attempts for " + jobID);
        e.printStackTrace();
    }

    // Log each attempt
    for (AttemptInfo attempt : attempts) {
        try {
            logAttempt(jobID, attempt, jobDir.toString());
            listWriter.println("  attempt:" + attempt.id + "(" + "type=" + attempt.type + "," + "status="
                    + attempt.status + "," + "starttime=" + attempt.startTime + "," + "endtime="
                    + attempt.endTime + ")");
        } catch (IOException e) {
            System.err.println("Cannot log attempt " + attempt.id);
            e.printStackTrace();
        }
    }

    listWriter.println();
}