List of usage examples for org.apache.hadoop.mapred TaskCompletionEvent getTaskTrackerHttp
public String getTaskTrackerHttp()
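Before the project examples, a minimal sketch of the common pattern: iterate a job's TaskCompletionEvents and read each event's task-tracker HTTP address, typically as the base for a task-log URL. The class and method names here (TaskTrackerHttpSketch, printTrackerUrls) are illustrative only; the Hadoop calls are the standard org.apache.hadoop.mapred API.

import java.io.IOException;

import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TaskCompletionEvent;

// Hypothetical helper; not part of any of the projects listed below.
public class TaskTrackerHttpSketch {
    // Fetches completion events starting from index 0 for an already-submitted job
    // and prints the HTTP address of the tracker that ran each attempt.
    public static void printTrackerUrls(RunningJob job) throws IOException {
        for (TaskCompletionEvent event : job.getTaskCompletionEvents(0)) {
            // getTaskTrackerHttp() returns a base URL such as "http://<tracker-host>:<http-port>"
            System.out.println(event.getTaskAttemptId() + " -> " + event.getTaskTrackerHttp());
        }
    }
}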
From source file:boa.io.BoaOutputCommitter.java
License:Apache License
@Override
public void abortJob(JobContext context, JobStatus.State runState) throws java.io.IOException {
    super.abortJob(context, runState);

    final JobClient jobClient = new JobClient(new JobConf(context.getConfiguration()));
    final RunningJob job = jobClient.getJob(
            (org.apache.hadoop.mapred.JobID) JobID.forName(context.getConfiguration().get("mapred.job.id")));

    String diag = "";
    for (final TaskCompletionEvent event : job.getTaskCompletionEvents(0))
        switch (event.getTaskStatus()) {
        case SUCCEEDED:
            break;
        case FAILED:
        case KILLED:
        case OBSOLETE:
        case TIPFAILED:
            diag += "Diagnostics for: " + event.getTaskTrackerHttp() + "\n";
            for (final String s : job.getTaskDiagnostics(event.getTaskAttemptId()))
                diag += s + "\n";
            diag += "\n";
            break;
        }

    updateStatus(diag, context.getConfiguration().getInt("boa.hadoop.jobid", 0));
}
From source file:com.ibm.jaql.lang.expr.hadoop.Util.java
License:Apache License
public static void logAllTaskSyslogs(RunningJob rj, boolean onlySuccessful) throws Exception {
    String fetch = System.getProperty(FETCH_SYSLOG_PROP, "false");
    if (fetch.equals("false"))
        return;

    TaskCompletionEvent[] events = rj.getTaskCompletionEvents(0);
    for (TaskCompletionEvent event : events) {
        if (onlySuccessful && (event.getTaskStatus() == TaskCompletionEvent.Status.SUCCEEDED)) {
            // print the syslog into the main log
            STATUS_LOG.info(event.toString());
            logTaskSyslogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        } else {
            STATUS_LOG.info(event.toString());
            logTaskSyslogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        }
    }
}
From source file:com.liveramp.cascading_ext.flow.LoggingFlow.java
License:Apache License
private static String getFailureLog(TaskCompletionEvent event) {
    LOG.info("Getting errors for attempt " + event.getTaskAttemptId());
    String exception = "";
    try {
        String fullLog = retrieveTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
        exception = extractErrorFromLogString(fullLog);
    } catch (IOException e) {
        LOG.info("Regex Error!", e);
    }
    return "\nCluster Log Exception:\n" + exception;
}
From source file:edu.stolaf.cs.wmrserver.HadoopEngine.java
License:Apache License
private Pair<ArrayList<TaskLog>, ArrayList<TaskLog>> getLogsFromCompletionEvents(TaskCompletionEvent[] events) {
    ArrayList<TaskLog> mapFailures = new ArrayList<TaskLog>();
    ArrayList<TaskLog> reduceFailures = new ArrayList<TaskLog>();

    for (TaskCompletionEvent event : events) {
        if (event.getTaskStatus() != TaskCompletionEvent.Status.SUCCEEDED) {
            TaskLog log = new TaskLog(event.getTaskTrackerHttp(), event.getTaskAttemptId());
            if (event.isMapTask())
                mapFailures.add(log);
            else
                reduceFailures.add(log);
        }
    }

    return new Pair<ArrayList<TaskLog>, ArrayList<TaskLog>>(mapFailures, reduceFailures);
}
From source file:org.apache.falcon.logging.TaskLogRetrieverYarn.java
License:Apache License
@Override
public List<String> retrieveTaskLogURL(String jobIdStr) throws IOException {
    List<String> taskLogUrls = new ArrayList<String>();
    Configuration conf = getConf();
    Cluster cluster = getCluster(conf);
    JobID jobID = JobID.forName(jobIdStr);
    if (jobID == null) {
        LOG.warn("External id for workflow action is null");
        return null;
    }
    if (conf.get(YARN_LOG_SERVER_URL) == null) {
        LOG.warn("YARN log Server is null");
        return null;
    }
    try {
        Job job = cluster.getJob(jobID);
        if (job != null) {
            TaskCompletionEvent[] events = job.getTaskCompletionEvents(0);
            for (TaskCompletionEvent event : events) {
                LogParams params = cluster.getLogParams(jobID, event.getTaskAttemptId());
                String url = (conf.get(YARN_LOG_SERVER_URL).startsWith(SCHEME) ? conf.get(YARN_LOG_SERVER_URL)
                        : SCHEME + conf.get(YARN_LOG_SERVER_URL)) + "/" + event.getTaskTrackerHttp() + "/"
                        + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner()
                        + "?start=0";
                LOG.info("Task Log URL for the job {} is {}", jobIdStr, url);
                taskLogUrls.add(url);
            }
            return taskLogUrls;
        }
        LOG.warn("Unable to find the job in cluster {}", jobIdStr);
        return null;
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
From source file:org.apache.falcon.logging.v2.TaskLogRetrieverYarn.java
License:Apache License
@Override
public List<String> retrieveTaskLogURL(String jobIdStr) throws IOException {
    List<String> taskLogUrls = new ArrayList<String>();
    Configuration conf = getConf();
    Cluster cluster = getCluster(conf);
    JobID jobID = JobID.forName(jobIdStr);
    if (jobID == null) {
        LOG.warn("External id for workflow action is null");
        return null;
    }
    try {
        Job job = cluster.getJob(jobID);
        if (job != null) {
            TaskCompletionEvent[] events = job.getTaskCompletionEvents(0);
            for (TaskCompletionEvent event : events) {
                LogParams params = cluster.getLogParams(jobID, event.getTaskAttemptId());
                String url = SCHEME + conf.get(YARN_LOG_SERVER_URL) + "/" + event.getTaskTrackerHttp() + "/"
                        + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner()
                        + "?start=0";
                LOG.info("Task Log URL for the job {} is {}", jobIdStr, url);
                taskLogUrls.add(url);
            }
            return taskLogUrls;
        }
        LOG.warn("Unable to find the job in cluster {}", jobIdStr);
        return null;
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
From source file:org.apache.falcon.logging.v2.TaskLogRetrieverYarnTest.java
License:Apache License
@DataProvider(name = "testData") public Object[][] testData() throws IOException, InterruptedException { int samples = getRandomValueInRange(10) + 1; Object[][] resultSet = new Object[samples][2]; for (int count = 0; count < samples; count++) { List<String> expectedResult = new ArrayList<String>(); Cluster cluster = getCluster(getConf()); String jobId = new JobID("job", RANDOM.nextInt(1000)).toString(); boolean success = RANDOM.nextBoolean(); JobID jobID = JobID.forName(jobId); int numEvents = getRandomValueInRange(10) + 1; TaskCompletionEvent[] events = getTaskCompletionEvents(numEvents, jobID); Job job = mock(Job.class); when(cluster.getJob(jobID)).thenReturn(job); when(job.getTaskCompletionEvents(0)).thenReturn(events); for (TaskCompletionEvent event : events) { if (success) { LogParams params = getLogParams(); when(cluster.getLogParams(jobID, event.getTaskAttemptId())).thenReturn(params); String url = SCHEME + getConf().get(YARN_LOG_SERVER_URL) + "/" + event.getTaskTrackerHttp() + "/" + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner() + "?start=0"; expectedResult.add(url); } else { when(cluster.getJob(jobID)).thenReturn(null); expectedResult = null;//from w w w . j av a2 s . c om } resultSet[count] = new Object[] { jobId, expectedResult }; } } return resultSet; }
From source file:org.apache.falcon.oozie.logging.TaskLogRetrieverYarnTest.java
License:Apache License
@DataProvider(name = "testData") public Object[][] testData() throws IOException, InterruptedException { int samples = getRandomValueInRange(10) + 1; Object[][] resultSet = new Object[samples][2]; for (int count = 0; count < samples; count++) { List<String> expectedResult = new ArrayList<String>(); Cluster cluster = getCluster(getConf()); String jobId = new JobID("job", count).toString(); boolean success = random.nextBoolean(); JobID jobID = JobID.forName(jobId); int numEvents = getRandomValueInRange(10) + 1; TaskCompletionEvent[] events = getTaskCompletionEvents(numEvents, jobID); Job job = mock(Job.class); when(cluster.getJob(jobID)).thenReturn(job); when(job.getTaskCompletionEvents(0)).thenReturn(events); for (TaskCompletionEvent event : events) { if (success) { LogParams params = getLogParams(); when(cluster.getLogParams(jobID, event.getTaskAttemptId())).thenReturn(params); String url = SCHEME + getConf().get(YARN_LOG_SERVER_URL) + "/" + event.getTaskTrackerHttp() + "/" + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner() + "?start=0"; expectedResult.add(url); } else { when(cluster.getJob(jobID)).thenReturn(null); expectedResult = null;/*from w w w .j ava2 s.com*/ break; } } resultSet[count] = new Object[] { jobId, expectedResult }; } return resultSet; }
From source file:org.estado.core.JobStatusChecker.java
License:Apache License
private List<TaskStatus> getTaskDetails(RunningJob job) {
    TaskCompletionEvent[] tasks = new TaskCompletionEvent[0];
    List<TaskStatus> taskStatusList = new ArrayList<TaskStatus>();
    try {
        tasks = job.getTaskCompletionEvents(0);
        for (TaskCompletionEvent task : tasks) {
            TaskStatus taskStatus = new TaskStatus();
            taskStatus.setTaskId(task.getTaskAttemptId().toString());
            taskStatus.setStatus(task.getTaskStatus().toString());
            taskStatus.setDuration(task.getTaskRunTime() * 1L); // change to long
            taskStatus.setTaskType(task.isMapTask() ? "Map" : "Reduce");
            if (!task.getTaskStatus().equals(TaskCompletionEvent.Status.SUCCEEDED)) {
                String url = task.getTaskTrackerHttp() + "/tasklog?attemptid=" + task.getTaskAttemptId()
                        + "&all=true";
                URLConnection connection = new URL(url).openConnection();
                connection.setDoOutput(true);
                connection.connect();
                Scanner s = new java.util.Scanner(connection.getInputStream()).useDelimiter("\\A");
                String log = s.hasNext() ? s.next() : "";
                taskStatus.setLog(log);
            }
            taskStatusList.add(taskStatus);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return taskStatusList;
}