Example usage for org.apache.hadoop.mapreduce Cluster getJob

List of usage examples for org.apache.hadoop.mapreduce Cluster getJob

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce Cluster getJob.

Prototype

public Job getJob(JobID jobId) throws IOException, InterruptedException 

Document

Get job corresponding to jobid.
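
Before the full examples below, here is a minimal sketch of the typical call pattern. The default Configuration and the job ID string ("job_1400000000000_0001") are placeholders, not taken from any of the examples; getJob returns null if the cluster does not know the job, so callers should check the result before using it.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;

public class GetJobSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Connect to the cluster described by the default configuration
        Cluster cluster = new Cluster(new Configuration());
        try {
            // Placeholder job ID; in practice this comes from arguments or a JobStatus
            Job job = cluster.getJob(JobID.forName("job_1400000000000_0001"));
            if (job == null) {
                System.err.println("Job not found");
            } else {
                System.out.printf("%s: %s%n", job.getJobName(), job.getJobState());
            }
        } finally {
            // Release the connection to the cluster
            cluster.close();
        }
    }
}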

Usage

From source file:com.cloudera.oryx.computation.common.DistributedGenerationRunner.java

License:Open Source License

private static Collection<String> find(String instanceDir) throws IOException, InterruptedException {
    Collection<String> result = Lists.newArrayList();
    // This is where we will see Hadoop config problems first, so log extra info
    Cluster cluster;
    try {
        cluster = new Cluster(OryxConfiguration.get());
    } catch (IOException ioe) {
        log.error("Unable to init the Hadoop cluster. Check that an MR2, not MR1, cluster is configured.");
        throw ioe;
    }
    try {
        JobStatus[] statuses = cluster.getAllJobStatuses();
        if (statuses != null) {
            for (JobStatus jobStatus : statuses) {
                JobStatus.State state = jobStatus.getState();
                if (state == JobStatus.State.RUNNING || state == JobStatus.State.PREP) {
                    Job job = cluster.getJob(jobStatus.getJobID());
                    if (job != null) {
                        String jobName = job.getJobName();
                        log.info("Found running job {}", jobName);
                        if (jobName.startsWith("Oryx-" + instanceDir + '-')) {
                            result.add(jobName);
                        }
                    }
                }
            }
        }
    } finally {
        cluster.close();
    }
    return result;
}

From source file:com.cloudera.oryx.computation.common.JobStep.java

License:Open Source License

private StepStatus determineStatus() throws IOException, InterruptedException {
    JobContext job = getJob();
    if (job == null) {
        return StepStatus.COMPLETED;
    }
    Cluster cluster = new Cluster(getConf());
    try {
        JobID jobID = job.getJobID();
        if (jobID == null) {
            return StepStatus.PENDING;
        }
        Job runningJob = cluster.getJob(jobID);
        if (runningJob == null) {
            return StepStatus.PENDING;
        }
        JobStatus.State state = runningJob.getJobState();
        switch (state) {
        case PREP:
            return StepStatus.PENDING;
        case RUNNING:
            return StepStatus.RUNNING;
        case SUCCEEDED:
            return StepStatus.COMPLETED;
        case FAILED:
            return StepStatus.FAILED;
        case KILLED:
            return StepStatus.CANCELLED;
        }
        throw new IllegalArgumentException("Unknown Hadoop job state " + state);
    } finally {
        cluster.close();
    }
}

From source file:com.cloudera.oryx.computation.common.JobStep.java

License:Open Source License

/**
 * @return three progress values, in [0,1], as a {@code float[]}, representing setup, mapper and reducer progress
 */
private float[] determineProgresses() throws IOException, InterruptedException {
    if (exec == null) {
        return null;
    }
    Cluster cluster = new Cluster(getConf());
    try {
        JobID jobID = getJob().getJobID();
        if (jobID == null) {
            return null;
        }
        Job runningJob = cluster.getJob(jobID);
        if (runningJob == null) {
            return null;
        }

        return new float[] { runningJob.setupProgress(), runningJob.mapProgress(),
                runningJob.reduceProgress() };
    } finally {
        cluster.close();
    }
}

From source file:com.ikanow.aleph2.analytics.hadoop.services.HadoopTechnologyService.java

License:Apache License

@Override
public CompletableFuture<BasicMessageBean> stopAnalyticJob(DataBucketBean analytic_bucket,
        Collection<AnalyticThreadJobBean> jobs, AnalyticThreadJobBean job_to_stop, IAnalyticsContext context) {
    try {
        final Cluster cluster = new Cluster(_config.get());
        final String job_name = BucketUtils.getUniqueSignature(analytic_bucket.full_name(),
                Optional.ofNullable(job_to_stop.name()));
        return Arrays.stream(cluster.getAllJobStatuses())
                .filter(job_status -> job_status.getJobName().equals(job_name)).findFirst()
                .map(Lambdas.wrap_u(job_status -> {
                    final Job job = cluster.getJob(job_status.getJobID());
                    job.killJob();
                    return CompletableFuture
                            .completedFuture(ErrorUtils.buildSuccessMessage(this.getClass().getSimpleName(),
                                    "stopAnalyticJob", analytic_bucket.full_name() + ":" + job_to_stop.name()));
                })).get() // (Will throw if not found falling through to catch below)
        ;
    } catch (Throwable t) {
        return CompletableFuture.completedFuture(
                ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(), "stopAnalyticJob",
                        HadoopErrorUtils.JOB_STOP_ERROR, job_to_stop.name(), analytic_bucket.full_name()));
    }
}

From source file:crunch.MaxTemperature.java

License:Apache License

@Override
    public int run(String[] args) throws Exception {
        if (args.length != 1) {
            JobBuilder.printUsage(this, "<job ID>");
            return -1;
        }
        String jobID = args[0];
        // vv NewMissingTemperatureFields
        Cluster cluster = new Cluster(getConf());
        Job job = cluster.getJob(JobID.forName(jobID));
        // ^^ NewMissingTemperatureFields
        if (job == null) {
            System.err.printf("No job with ID %s found.\n", jobID);
            return -1;
        }
        if (!job.isComplete()) {
            System.err.printf("Job %s is not complete.\n", jobID);
            return -1;
        }

        // vv NewMissingTemperatureFields
        Counters counters = job.getCounters();
        long missing = counters.findCounter(MaxTemperatureWithCounters.Temperature.MISSING).getValue();
        long total = counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue();
        // ^^ NewMissingTemperatureFields

        System.out.printf("Records with missing temperature fields: %.2f%%\n", 100.0 * missing / total);
        return 0;
    }

From source file:org.apache.falcon.logging.TaskLogRetrieverYarn.java

License:Apache License

@Override
public List<String> retrieveTaskLogURL(String jobIdStr) throws IOException {
    List<String> taskLogUrls = new ArrayList<String>();
    Configuration conf = getConf();
    Cluster cluster = getCluster(conf);
    JobID jobID = JobID.forName(jobIdStr);
    if (jobID == null) {
        LOG.warn("External id for workflow action is null");
        return null;
    }

    if (conf.get(YARN_LOG_SERVER_URL) == null) {
        LOG.warn("YARN log Server is null");
        return null;
    }

    try {
        Job job = cluster.getJob(jobID);
        if (job != null) {
            TaskCompletionEvent[] events = job.getTaskCompletionEvents(0);
            for (TaskCompletionEvent event : events) {
                LogParams params = cluster.getLogParams(jobID, event.getTaskAttemptId());
                String url = (conf.get(YARN_LOG_SERVER_URL).startsWith(SCHEME) ? conf.get(YARN_LOG_SERVER_URL)
                        : SCHEME + conf.get(YARN_LOG_SERVER_URL)) + "/" + event.getTaskTrackerHttp() + "/"
                        + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner()
                        + "?start=0";
                LOG.info("Task Log URL for the job {} is {}" + jobIdStr, url);
                taskLogUrls.add(url);
            }
            return taskLogUrls;
        }
        LOG.warn("Unable to find the job in cluster {}" + jobIdStr);
        return null;
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}

From source file:org.apache.falcon.logging.v2.TaskLogRetrieverYarn.java

License:Apache License

@Override
public List<String> retrieveTaskLogURL(String jobIdStr) throws IOException {
    List<String> taskLogUrls = new ArrayList<String>();
    Configuration conf = getConf();
    Cluster cluster = getCluster(conf);
    JobID jobID = JobID.forName(jobIdStr);
    if (jobID == null) {
        LOG.warn("External id for workflow action is null");
        return null;
    }
    try {
        Job job = cluster.getJob(jobID);
        if (job != null) {
            TaskCompletionEvent[] events = job.getTaskCompletionEvents(0);
            for (TaskCompletionEvent event : events) {
                LogParams params = cluster.getLogParams(jobID, event.getTaskAttemptId());
                String url = SCHEME + conf.get(YARN_LOG_SERVER_URL) + "/" + event.getTaskTrackerHttp() + "/"
                        + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner()
                        + "?start=0";
                LOG.info("Task Log URL for the job {} is {}" + jobIdStr, url);
                taskLogUrls.add(url);
            }
            return taskLogUrls;
        }
        LOG.warn("Unable to find the job in cluster {}" + jobIdStr);
        return null;
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}

From source file:org.apache.falcon.logging.v2.TaskLogRetrieverYarnTest.java

License:Apache License

@DataProvider(name = "testData")
public Object[][] testData() throws IOException, InterruptedException {
    int samples = getRandomValueInRange(10) + 1;
    Object[][] resultSet = new Object[samples][2];
    for (int count = 0; count < samples; count++) {
        List<String> expectedResult = new ArrayList<String>();
        Cluster cluster = getCluster(getConf());
        String jobId = new JobID("job", RANDOM.nextInt(1000)).toString();
        boolean success = RANDOM.nextBoolean();
        JobID jobID = JobID.forName(jobId);
        int numEvents = getRandomValueInRange(10) + 1;
        TaskCompletionEvent[] events = getTaskCompletionEvents(numEvents, jobID);
        Job job = mock(Job.class);
        when(cluster.getJob(jobID)).thenReturn(job);
        when(job.getTaskCompletionEvents(0)).thenReturn(events);
        for (TaskCompletionEvent event : events) {
            if (success) {
                LogParams params = getLogParams();
                when(cluster.getLogParams(jobID, event.getTaskAttemptId())).thenReturn(params);
                String url = SCHEME + getConf().get(YARN_LOG_SERVER_URL) + "/" + event.getTaskTrackerHttp()
                        + "/" + params.getContainerId() + "/" + params.getApplicationId() + "/"
                        + params.getOwner() + "?start=0";
                expectedResult.add(url);
            } else {
                when(cluster.getJob(jobID)).thenReturn(null);
                expectedResult = null;
            }
            resultSet[count] = new Object[] { jobId, expectedResult };
        }
    }
    return resultSet;
}

From source file:org.apache.falcon.oozie.logging.TaskLogRetrieverYarnTest.java

License:Apache License

@DataProvider(name = "testData")
public Object[][] testData() throws IOException, InterruptedException {
    int samples = getRandomValueInRange(10) + 1;
    Object[][] resultSet = new Object[samples][2];
    for (int count = 0; count < samples; count++) {
        List<String> expectedResult = new ArrayList<String>();
        Cluster cluster = getCluster(getConf());
        String jobId = new JobID("job", count).toString();
        boolean success = random.nextBoolean();
        JobID jobID = JobID.forName(jobId);
        int numEvents = getRandomValueInRange(10) + 1;
        TaskCompletionEvent[] events = getTaskCompletionEvents(numEvents, jobID);
        Job job = mock(Job.class);
        when(cluster.getJob(jobID)).thenReturn(job);
        when(job.getTaskCompletionEvents(0)).thenReturn(events);
        for (TaskCompletionEvent event : events) {
            if (success) {
                LogParams params = getLogParams();
                when(cluster.getLogParams(jobID, event.getTaskAttemptId())).thenReturn(params);
                String url = SCHEME + getConf().get(YARN_LOG_SERVER_URL) + "/" + event.getTaskTrackerHttp()
                        + "/" + params.getContainerId() + "/" + params.getApplicationId() + "/"
                        + params.getOwner() + "?start=0";
                expectedResult.add(url);
            } else {
                when(cluster.getJob(jobID)).thenReturn(null);
                expectedResult = null;
                break;
            }
        }
        resultSet[count] = new Object[] { jobId, expectedResult };
    }
    return resultSet;
}

From source file:org.huahinframework.manager.rest.service.JobService.java

License:Apache License

/**
 * @param jobId
 * @return {@link JSONObject}
 * @throws JSONException
 */
@Path("/detail/{" + JOBID + "}")
@GET
@Produces(MediaType.APPLICATION_JSON)
public JSONObject detail(@PathParam(JOBID) String jobId) throws JSONException {
    JSONObject jsonObject = null;
    JobConf conf = getJobConf();
    try {
        final Map<String, Object> job = getStatus(jobId);
        if (job != null) {
            Cluster cluster = new Cluster(conf);
            Job j = cluster.getJob(JobID.forName(jobId));
            if (j != null) {
                String jobFile = j.getJobFile();
                job.put(Response.JOB_FILE, jobFile);
                job.put(Response.TRACKING_URL, j.getTrackingURL());

                Map<String, String> jobConf = JobUtils.getJobConfiguration(jobFile, conf);
                if (jobConf != null) {
                    job.put(Response.CONFIGURATION, jobConf);
                }
            }
            jsonObject = new JSONObject(job);
        }
    } catch (Exception e) {
        e.printStackTrace();
        log.error(e);
        Map<String, String> status = new HashMap<String, String>();
        status.put(Response.STATUS, e.getMessage());
        jsonObject = new JSONObject(status);
    }

    if (jsonObject == null) {
        Map<String, String> status = new HashMap<String, String>();
        status.put(Response.STATUS, "Could not find job " + jobId);
        jsonObject = new JSONObject(status);
    }

    return jsonObject;
}