Example usage for org.apache.hadoop.mapreduce.jobhistory JobHistoryParser JobHistoryParser

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce.jobhistory JobHistoryParser JobHistoryParser.

Prototype

public JobHistoryParser(FileSystem fs, Path historyFile) throws IOException 

Document

Creates the job history parser for the given history file, using the given file system.
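
A minimal sketch of constructing the parser with this prototype and reading back the parsed job summary (the class name and history file path below are hypothetical placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;

public class JobHistoryParserExample {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Hypothetical location; point this at a real .jhist file.
        Path historyFile = new Path("/tmp/sample.jhist");
        JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
        JobHistoryParser.JobInfo jobInfo = parser.parse();
        System.out.println("Job status: " + jobInfo.getJobStatus());
    }
}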

Usage

From source file:com.linkedin.drelephant.mapreduce.fetchers.MapReduceFSFetcherHadoop2.java

License:Apache License

@Override
public MapReduceApplicationData fetchData(AnalyticJob job) throws IOException {
    DataFiles files = getHistoryFiles(job);
    String confFile = files.getJobConfPath();
    String histFile = files.getJobHistPath();
    String appId = job.getAppId();
    String jobId = Utils.getJobIdFromApplicationId(appId);

    MapReduceApplicationData jobData = new MapReduceApplicationData();
    jobData.setAppId(appId).setJobId(jobId);

    // Fetch job config
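    // Configuration(false) skips Hadoop's default resources (core-default.xml, etc.),
    // so only the job's own configuration file is loaded below.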
    Configuration jobConf = new Configuration(false);
    jobConf.addResource(_fs.open(new Path(confFile)), confFile);
    Properties jobConfProperties = new Properties();
    for (Map.Entry<String, String> entry : jobConf) {
        jobConfProperties.put(entry.getKey(), entry.getValue());
    }
    jobData.setJobConf(jobConfProperties);

    // Check if job history file is too large and should be throttled
    if (_fs.getFileStatus(new Path(histFile)).getLen() > _maxLogSizeInMB * FileUtils.ONE_MB) {
        String errMsg = "The history log of MapReduce application: " + appId + " is over the limit size of "
                + _maxLogSizeInMB + " MB, the parsing process gets throttled.";
        logger.warn(errMsg);
        jobData.setDiagnosticInfo(errMsg);
        jobData.setSucceeded(false); // set succeeded to false to avoid heuristic analysis
        return jobData;
    }

    // Analyze job history file
    JobHistoryParser parser = new JobHistoryParser(_fs, new Path(histFile));
    JobHistoryParser.JobInfo jobInfo = parser.parse();
    IOException parseException = parser.getParseException();
    if (parseException != null) {
        throw new RuntimeException("Could not parse history file " + histFile, parseException);
    }

    jobData.setSubmitTime(jobInfo.getSubmitTime());
    jobData.setStartTime(jobInfo.getLaunchTime());
    jobData.setFinishTime(jobInfo.getFinishTime());

    String state = jobInfo.getJobStatus();
    if (state.equals("SUCCEEDED")) {

        jobData.setSucceeded(true);

        // Fetch job counter
        MapReduceCounterData jobCounter = getCounterData(jobInfo.getTotalCounters());

        // Fetch task data
        Map<TaskID, JobHistoryParser.TaskInfo> allTasks = jobInfo.getAllTasks();
        List<JobHistoryParser.TaskInfo> mapperInfoList = new ArrayList<JobHistoryParser.TaskInfo>();
        List<JobHistoryParser.TaskInfo> reducerInfoList = new ArrayList<JobHistoryParser.TaskInfo>();
        for (JobHistoryParser.TaskInfo taskInfo : allTasks.values()) {
            if (taskInfo.getTaskType() == TaskType.MAP) {
                mapperInfoList.add(taskInfo);
            } else {
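                // Anything that is not a map task is treated as a reducer here.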
                reducerInfoList.add(taskInfo);
            }
        }
        if (jobInfo.getTotalMaps() > MAX_SAMPLE_SIZE) {
            logger.debug(jobId + " total mappers: " + mapperInfoList.size());
        }
        if (jobInfo.getTotalReduces() > MAX_SAMPLE_SIZE) {
            logger.debug(jobId + " total reducers: " + reducerInfoList.size());
        }
        MapReduceTaskData[] mapperList = getTaskData(jobId, mapperInfoList);
        MapReduceTaskData[] reducerList = getTaskData(jobId, reducerInfoList);

        jobData.setCounters(jobCounter).setMapperData(mapperList).setReducerData(reducerList);
    } else if (state.equals("FAILED")) {

        jobData.setSucceeded(false);
        jobData.setDiagnosticInfo(jobInfo.getErrorInfo());
    } else {
        // Should not reach here
        throw new RuntimeException("Job state not supported. Should be either SUCCEEDED or FAILED");
    }

    return jobData;
}
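
Note the error-handling pattern above: parse() stops on encountering a partial record and returns whatever it could read, recording any failure internally, so the caller must check getParseException() to tell a complete parse from a truncated one.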

From source file:com.netflix.bdp.inviso.history.TraceJobHistoryLoader.java

License:Apache License

public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.newInstanceLocal(new Configuration());

    JobHistoryParser parser = new JobHistoryParser(fs, new Path("/tmp/job_1405808155709_124465.history"));

    //JobInfo jobInfo = parser.parse();
    TraceJobHistoryLoader loader = new TraceJobHistoryLoader(new PropertiesConfiguration());
    parser.parse(loader);

    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(Feature.INDENT_OUTPUT, true);
    mapper.writeValue(new File("/tmp/mr2-hist.json"), loader.getJob());
}
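
The Netflix example above streams events into a custom handler instead of materializing a JobInfo. A minimal sketch of such a handler, implementing Hadoop's HistoryEventHandler interface (the class name EventCounter is a hypothetical placeholder):

import java.io.IOException;

import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.HistoryEventHandler;

public class EventCounter implements HistoryEventHandler {
    private int eventCount = 0;

    @Override
    public void handleEvent(HistoryEvent event) throws IOException {
        // Called once per record in the history file, in file order;
        // event.getEventType() identifies the concrete event class.
        eventCount++;
    }

    public int getEventCount() {
        return eventCount;
    }
}

Passing an instance to parser.parse(handler), as the example does with its loader, pushes each event through handleEvent without building the full JobInfo, which keeps memory use flat on very large history files.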