Example usage for org.apache.hadoop.mapred JobClient getJob

Introduction

This page collects example usages of org.apache.hadoop.mapred.JobClient#getJob from the open-source files listed below.

Prototype

@Deprecated
public RunningJob getJob(String jobid) throws IOException 
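
Before the collected examples, here is a minimal, self-contained sketch of the call pattern. The default JobConf and the job id string are placeholders for illustration, not values taken from any example below; note that the String overload is deprecated, and the JobID overload used by most of the examples is generally preferred.

import java.io.IOException;

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;

public class GetJobExample {
    public static void main(String[] args) throws IOException {
        // Assumes cluster settings are picked up from the default Hadoop configuration.
        JobClient client = new JobClient(new JobConf());
        try {
            // JobID.forName parses a job id string such as "job_201201011200_0001" (placeholder).
            RunningJob job = client.getJob(JobID.forName("job_201201011200_0001"));
            if (job == null) {
                // getJob returns null when the job tracker does not know the id.
                System.out.println("Job not found");
            } else {
                System.out.println(job.getJobName() + " complete=" + job.isComplete());
            }
        } finally {
            client.close();
        }
    }
}

As the examples below show, a null check on the returned RunningJob is important: getJob returns null rather than throwing when the job id is unknown.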

Usage

From source file:DataJoinJob.java

License:Apache License

/**
 * Submit a map/reduce job and wait for it to complete.
 * 
 * @param job the configured job to submit
 * @return true if the job completed successfully
 * @throws IOException if submitting or polling the job fails
 */
public static boolean runJob(JobConf job) throws IOException {
    JobClient jc = new JobClient(job);
    boolean success = true;
    RunningJob running = null;
    try {
        running = jc.submitJob(job);
        JobID jobId = running.getID();
        System.out.println("Job " + jobId + " is submitted");
        while (!running.isComplete()) {
            System.out.println("Job " + jobId + " is still running.");
            try {
                Thread.sleep(60000); // poll once a minute
            } catch (InterruptedException e) {
                // ignored; the loop simply polls again
            }
            running = jc.getJob(jobId);
        }
        success = running.isSuccessful();
    } finally {
        if (!success && (running != null)) {
            running.killJob();
        }
        jc.close();
    }
    return success;
}

From source file:azkaban.jobtype.AzkabanPigListener.java

License:Apache License

@SuppressWarnings("deprecation")
private void addMapReduceJobState(PigJobDagNode node) {
    JobClient jobClient = PigStats.get().getJobClient();

    try {
        RunningJob runningJob = jobClient.getJob(node.getJobId());
        if (runningJob == null) {
            logger.warn("Couldn't find job status for jobId=" + node.getJobId());
            return;
        }

        JobID jobID = runningJob.getID();
        TaskReport[] mapTaskReport = jobClient.getMapTaskReports(jobID);
        TaskReport[] reduceTaskReport = jobClient.getReduceTaskReports(jobID);
        node.setMapReduceJobState(new MapReduceJobState(runningJob, mapTaskReport, reduceTaskReport));

        if (node.getJobConfiguration() == null) {
            Properties jobConfProperties = StatsUtils.getJobConf(runningJob);
            if (jobConfProperties != null && jobConfProperties.size() > 0) {
                node.setJobConfiguration(jobConfProperties);
            }
        }
    } catch (IOException e) {
        logger.error("Error getting job info.", e);
    }
}

From source file:boa.io.BoaOutputCommitter.java

License:Apache License

@Override
public void abortJob(JobContext context, JobStatus.State runState) throws java.io.IOException {
    super.abortJob(context, runState);

    final JobClient jobClient = new JobClient(new JobConf(context.getConfiguration()));
    final RunningJob job = jobClient.getJob(
            (org.apache.hadoop.mapred.JobID) JobID.forName(context.getConfiguration().get("mapred.job.id")));
    String diag = "";
    for (final TaskCompletionEvent event : job.getTaskCompletionEvents(0))
        switch (event.getTaskStatus()) {
        case SUCCEEDED:
            break;
        case FAILED:
        case KILLED:
        case OBSOLETE:
        case TIPFAILED:
            diag += "Diagnostics for: " + event.getTaskTrackerHttp() + "\n";
            for (final String s : job.getTaskDiagnostics(event.getTaskAttemptId()))
                diag += s + "\n";
            diag += "\n";
            break;
        }
    updateStatus(diag, context.getConfiguration().getInt("boa.hadoop.jobid", 0));
}

From source file:co.cask.cdap.app.mapreduce.MRJobClient.java

License:Apache License

/**
 * @param runId for which information will be returned.
 * @return a {@link MRJobInfo} containing information about a particular MapReduce program run.
 * @throws IOException if there is failure to communicate through the JobClient.
 * @throws NotFoundException if a Job with the given runId is not found.
 */
public MRJobInfo getMRJobInfo(Id.Run runId) throws IOException, NotFoundException {
    Preconditions.checkArgument(ProgramType.MAPREDUCE.equals(runId.getProgram().getType()));

    JobClient jobClient = new JobClient(hConf);
    JobStatus[] jobs = jobClient.getAllJobs();

    JobStatus thisJob = findJobForRunId(jobs, runId);

    RunningJob runningJob = jobClient.getJob(thisJob.getJobID());
    if (runningJob == null) {
        throw new IllegalStateException(String.format("JobClient returned null for RunId: '%s', JobId: '%s'",
                runId, thisJob.getJobID()));
    }
    Counters counters = runningJob.getCounters();

    TaskReport[] mapTaskReports = jobClient.getMapTaskReports(thisJob.getJobID());
    TaskReport[] reduceTaskReports = jobClient.getReduceTaskReports(thisJob.getJobID());

    return new MRJobInfo(runningJob.mapProgress(), runningJob.reduceProgress(),
            groupToMap(counters.getGroup(TaskCounter.class.getName())), toMRTaskInfos(mapTaskReports),
            toMRTaskInfos(reduceTaskReports), true);
}

From source file:com.atlantbh.jmeter.plugins.hadooputilities.jobstatistics.JobLayer.java

License:Apache License

private Map<String, String> getJobCounters(String jobTracker, String jobId) throws IOException {
    JobClient client = prepareJobClient(jobTracker);

    JobID id = this.convertToJobId(jobId);

    Map<String, String> counters = new HashMap<String, String>();

    RunningJob job = client.getJob(id);
    Counters counter = job.getCounters();
    Iterator<Group> iter = counter.iterator();

    if (!getGroupName().equalsIgnoreCase("")) {
        while (iter.hasNext()) {
            Group group = iter.next();

            if (group.getDisplayName().equalsIgnoreCase(getGroupName())) {
                Iterator<Counter> cIter = group.iterator();
                while (cIter.hasNext()) {
                    Counter c = cIter.next();
                    counters.put(c.getDisplayName(), String.valueOf(c.getValue()));
                }
            }
        }
    } else {
        while (iter.hasNext()) {
            Group group = iter.next();
            Iterator<Counter> cIter = group.iterator();
            while (cIter.hasNext()) {
                Counter c = cIter.next();
                counters.put(c.getDisplayName(), String.valueOf(c.getValue()));
            }
        }
    }
    return counters;
}

From source file:com.atlantbh.jmeter.plugins.hadooputilities.jobstatistics.JobLayer.java

License:Apache License

public String getJobStatisticsByJobId(String jobTracker, String jobId) throws IOException {
    StringBuilder jobStatistics = new StringBuilder();

    JobClient client = prepareJobClient(jobTracker);
    JobID id = convertToJobId(jobId);

    RunningJob job = client.getJob(id);

    double mapProgress = job.mapProgress() * 100;
    double reduceProgress = job.reduceProgress() * 100;
    String mapPercentage = Double.toString(mapProgress) + "%";
    String reducePercentage = Double.toString(reduceProgress) + "%";

    jobStatistics.append("<job id='").append(jobId).append("'" + " name='").append(job.getJobName())
            .append("'>\n");
    jobStatistics.append(" <mapProgress>").append(mapPercentage).append("</mapProgress>\n");
    jobStatistics.append(" <reduceProgress>").append(reducePercentage).append("</reduceProgress>\n");
    jobStatistics.append(" <complete>").append(job.isComplete()).append("</complete>\n");
    jobStatistics.append(" <successful>").append(job.isSuccessful()).append("</successful>\n");
    jobStatistics.append(" <url>").append(job.getTrackingURL()).append("</url>\n");
    jobStatistics.append("</job>");

    return jobStatistics.toString();
}

From source file:com.atlantbh.jmeter.plugins.hadooputilities.jobstatistics.TaskLayer.java

License:Apache License

public String getTaskLevelCountersByJobId(String jobTracker, String jobId) throws IOException {
    StringBuilder taskCounters = new StringBuilder();

    JobID id = this.convertToJobId(jobId);
    JobClient client = this.prepareJobClient(jobTracker);
    RunningJob job = client.getJob(id);
    TaskReport[] mapTaskReports = client.getMapTaskReports(id);
    TaskReport[] reduceTaskReports = client.getReduceTaskReports(id);

    taskCounters.append("<job id='").append(jobId).append("' name='").append(job.getJobName()).append("'>\n");
    taskCounters.append(" <mapTasks>\n");

    for (TaskReport mapTaskReport : mapTaskReports) {
        taskCounters.append("  <task id='").append(mapTaskReport.getTaskID().toString()).append("'\n");
        taskCounters.append("   <counters>\n");

        Counters counter = mapTaskReport.getCounters();

        Iterator<Group> iter = counter.iterator();

        while (iter.hasNext()) {
            Group group = iter.next();

            Iterator<Counter> cIter = group.iterator();
            while (cIter.hasNext()) {
                Counter c = cIter.next();
                taskCounters.append("    <counter name='").append(c.getDisplayName()).append("' value='")
                        .append(c.getValue()).append("'>\n");
            }/*from   w w w  . j  a  va 2 s  .c  o m*/
        }

        taskCounters.append("   </counters>\n");
        taskCounters.append("  </task>\n");
    }

    taskCounters.append(" </mapTasks>\n");

    taskCounters.append(" <reduceTasks>\n");

    for (TaskReport reduceTaskReport : reduceTaskReports) {
        taskCounters.append("  <task id='").append(reduceTaskReport.getTaskID().toString()).append("'\n");
        taskCounters.append("   <counters>\n");

        Counters counter = reduceTaskReport.getCounters();

        Iterator<Group> iter = counter.iterator();

        while (iter.hasNext()) {
            Group group = iter.next();

            Iterator<Counter> cIter = group.iterator();
            while (cIter.hasNext()) {
                Counter c = cIter.next();
                taskCounters.append("    <counter name='").append(c.getDisplayName()).append("' value='")
                        .append(c.getValue()).append("'>\n");
            }
        }

        taskCounters.append("   </counters>\n");
        taskCounters.append("  </task>\n");
    }

    taskCounters.append(" </reduceTasks>\n");
    taskCounters.append("</job>");

    return taskCounters.toString();
}

From source file:com.atlantbh.jmeter.plugins.hadooputilities.jobstatistics.TaskLayer.java

License:Apache License

public String getTaskStatisticsByJobId(String jobTracker, String jobId) throws IOException {
    StringBuilder taskStatistics = new StringBuilder();
    long taskDuration;
    String duration;

    JobID id = this.convertToJobId(jobId);
    JobClient client = this.prepareJobClient(jobTracker);
    RunningJob job = client.getJob(id);

    TaskReport[] mapTaskReports = client.getMapTaskReports(id);
    TaskReport[] reduceTaskReports = client.getReduceTaskReports(id);

    taskStatistics.append("<job id='").append(jobId).append("' name='").append(job.getJobName()).append("'>\n");
    taskStatistics.append(" <mapTasks>\n");

    for (TaskReport mapTaskReport : mapTaskReports) {
        taskDuration = mapTaskReport.getFinishTime() - mapTaskReport.getStartTime();

        if (taskDuration < 0) {
            duration = "N/A";
        } else {
            duration = String.valueOf(taskDuration);
        }

        double progress = mapTaskReport.getProgress() * 100;
        String taskProgress = Double.toString(progress) + "%";

        taskStatistics.append("  <task id='").append(mapTaskReport.getTaskID().toString()).append("'\n");
        taskStatistics.append("   <progress>").append(taskProgress).append("</progress>\n");
        taskStatistics.append("   <duration>").append(duration).append("</duration>\n");
        taskStatistics.append("   <status>").append(mapTaskReport.getCurrentStatus().toString())
                .append("</status>\n");
        taskStatistics.append("  </task>\n");
    }

    taskStatistics.append(" </mapTasks>\n");

    taskStatistics.append(" <reduceTasks>\n");

    for (TaskReport reduceTaskReport : reduceTaskReports) {
        taskDuration = reduceTaskReport.getFinishTime() - reduceTaskReport.getStartTime();

        if (taskDuration < 0) {
            duration = "N/A";
        } else {
            duration = String.valueOf(taskDuration);
        }

        double progress = reduceTaskReport.getProgress() * 100;
        String taskProgress = Double.toString(progress) + "%";

        taskStatistics.append("  <task id='").append(reduceTaskReport.getTaskID().toString()).append("'\n");
        taskStatistics.append("   <progress>").append(taskProgress).append("</progress>\n");
        taskStatistics.append("   <duration>").append(duration).append("</duration>\n");
        taskStatistics.append("   <status>").append(reduceTaskReport.getCurrentStatus().toString())
                .append("</status>\n");
        taskStatistics.append("  </task>\n");
    }

    taskStatistics.append(" </reduceTasks>\n");
    taskStatistics.append("</job>");

    return taskStatistics.toString();
}

From source file:com.ikanow.infinit.e.processing.custom.CustomProcessingController.java

License:Open Source License

public boolean killRunningJob(CustomMapReduceJobPojo jobToKillInfo) {
    try {
        Configuration conf = new Configuration();
        JobClient jc = new JobClient(InfiniteHadoopUtils.getJobClientConnection(prop_custom), conf);
        jc.setConf(conf); // (doesn't seem to be set by the above call)

        RunningJob jobToKill = jc.getJob(new JobID(jobToKillInfo.jobidS, jobToKillInfo.jobidN));
        if (null == jobToKill) {
            _logger.error("Couldn't find this job: " + jobToKillInfo.jobidS + "_" + jobToKillInfo.jobidN + " / "
                    + new JobID(jobToKillInfo.jobidS, jobToKillInfo.jobidN).toString());
            return false;
        }
        jobToKill.killJob();

        int nRuns = 0;
        while (!checkRunningJobs(jobToKillInfo)) {
            try {
                Thread.sleep(5000);
            } catch (Exception e) {
            }
            if (++nRuns > 24) { // bail out after 2 minutes 
                _logger.error("Killed job: " + jobToKillInfo.jobidS + "_" + jobToKillInfo.jobidN
                        + ", but job failed to stop within time allowed");
                return false;
            }
        }
        if (null != jobToKillInfo.derivedFromSourceKey) { // Update the derived source, if one exists
            BasicDBObject query = new BasicDBObject(SourcePojo.key_, jobToKillInfo.derivedFromSourceKey);
            BasicDBObject setUpdate = new BasicDBObject(SourceHarvestStatusPojo.sourceQuery_harvest_status_,
                    HarvestEnum.error.toString());
            setUpdate.put(SourceHarvestStatusPojo.sourceQuery_harvest_message_, "Manually stopped");
            BasicDBObject srcUpdate = new BasicDBObject(DbManager.set_, setUpdate);
            DbManager.getIngest().getSource().update(query, srcUpdate, false, false);
        } //TESTED (actually a bit pointless usually because is then overwritten by the source publish)
        return true;
    } catch (Exception e) {
        _logger.error("Failed to kill job: " + jobToKillInfo.jobidS + "_" + jobToKillInfo.jobidN + " / "
                + e.getMessage(), e);
        return false;
    }
}

From source file:com.netflix.lipstick.pigtolipstick.BasicP2LClient.java

License:Apache License

/**
 * Build a P2jJobStatus object for the map/reduce job with id jobId.
 *
 * @param jobId the id of the map/reduce job
 * @return the newly created P2jJobStatus
 */
@SuppressWarnings("deprecation")
protected P2jJobStatus buildJobStatusMap(String jobId) {
    JobClient jobClient = PigStats.get().getJobClient();
    P2jJobStatus js = jobIdToJobStatusMap.get(jobId);

    try {
        RunningJob rj = jobClient.getJob(jobId);
        if (rj == null) {
            LOG.warn("Couldn't find job status for jobId=" + jobId);
            return js;
        }

        JobID jobID = rj.getID();
        Counters counters = rj.getCounters();
        Map<String, P2jCounters> cMap = Maps.newHashMap();
        for (Group g : counters) {
            P2jCounters countersObj = new P2jCounters();
            cMap.put(g.getDisplayName(), countersObj);
            for (Counter c : g) {
                countersObj.getCounters().put(c.getDisplayName(), c.getValue());
            }
        }

        js.setCounters(cMap);
        TaskReport[] mapTaskReport = jobClient.getMapTaskReports(jobID);
        TaskReport[] reduceTaskReport = jobClient.getReduceTaskReports(jobID);
        js.setJobName(rj.getJobName());
        js.setTrackingUrl(rj.getTrackingURL());
        js.setIsComplete(rj.isComplete());
        js.setIsSuccessful(rj.isSuccessful());
        js.setMapProgress(rj.mapProgress());
        js.setReduceProgress(rj.reduceProgress());
        js.setTotalMappers(mapTaskReport.length);
        js.setTotalReducers(reduceTaskReport.length);
        return js;
    } catch (IOException e) {
        LOG.error("Error getting job info.", e);
    }

    return null;
}