List of usage examples for org.apache.hadoop.mapreduce Cluster getLogParams
public LogParams getLogParams(JobID jobID, TaskAttemptID taskAttemptID) throws IOException, InterruptedException
From source file: org.apache.falcon.logging.TaskLogRetrieverYarn.java
License: Apache License
@Override public List<String> retrieveTaskLogURL(String jobIdStr) throws IOException { List<String> taskLogUrls = new ArrayList<String>(); Configuration conf = getConf(); Cluster cluster = getCluster(conf); JobID jobID = JobID.forName(jobIdStr); if (jobID == null) { LOG.warn("External id for workflow action is null"); return null; }//from w w w . j a v a 2 s.c o m if (conf.get(YARN_LOG_SERVER_URL) == null) { LOG.warn("YARN log Server is null"); return null; } try { Job job = cluster.getJob(jobID); if (job != null) { TaskCompletionEvent[] events = job.getTaskCompletionEvents(0); for (TaskCompletionEvent event : events) { LogParams params = cluster.getLogParams(jobID, event.getTaskAttemptId()); String url = (conf.get(YARN_LOG_SERVER_URL).startsWith(SCHEME) ? conf.get(YARN_LOG_SERVER_URL) : SCHEME + conf.get(YARN_LOG_SERVER_URL)) + "/" + event.getTaskTrackerHttp() + "/" + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner() + "?start=0"; LOG.info("Task Log URL for the job {} is {}" + jobIdStr, url); taskLogUrls.add(url); } return taskLogUrls; } LOG.warn("Unable to find the job in cluster {}" + jobIdStr); return null; } catch (InterruptedException e) { throw new IOException(e); } }
From source file: org.apache.falcon.logging.v2.TaskLogRetrieverYarn.java
License: Apache License
/**
 * Builds the YARN aggregated-log web URLs for every task attempt of the given job.
 *
 * @param jobIdStr external job id string (e.g. "job_1234_0001")
 * @return one log URL per task completion event, or {@code null} when the id cannot
 *         be parsed or the job is not found in the cluster
 * @throws IOException on cluster communication failure, or wrapping an interrupt
 */
@Override
public List<String> retrieveTaskLogURL(String jobIdStr) throws IOException {
    List<String> taskLogUrls = new ArrayList<String>();
    Configuration conf = getConf();
    Cluster cluster = getCluster(conf);
    JobID jobID = JobID.forName(jobIdStr);
    if (jobID == null) {
        LOG.warn("External id for workflow action is null");
        return null;
    }
    try {
        Job job = cluster.getJob(jobID);
        if (job == null) {
            // Parameterized logging: the original concatenated jobIdStr into the
            // format string, leaving the "{}" placeholder printed literally.
            LOG.warn("Unable to find the job in cluster {}", jobIdStr);
            return null;
        }
        TaskCompletionEvent[] events = job.getTaskCompletionEvents(0);
        for (TaskCompletionEvent event : events) {
            LogParams params = cluster.getLogParams(jobID, event.getTaskAttemptId());
            String url = SCHEME + conf.get(YARN_LOG_SERVER_URL) + "/"
                    + event.getTaskTrackerHttp() + "/" + params.getContainerId() + "/"
                    + params.getApplicationId() + "/" + params.getOwner() + "?start=0";
            // Two placeholders need two arguments; the original passed only `url`.
            LOG.info("Task Log URL for the job {} is {}", jobIdStr, url);
            taskLogUrls.add(url);
        }
        return taskLogUrls;
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore interrupt status before wrapping
        throw new IOException(e);
    }
}
From source file: org.apache.falcon.logging.v2.TaskLogRetrieverYarnTest.java
License: Apache License
@DataProvider(name = "testData") public Object[][] testData() throws IOException, InterruptedException { int samples = getRandomValueInRange(10) + 1; Object[][] resultSet = new Object[samples][2]; for (int count = 0; count < samples; count++) { List<String> expectedResult = new ArrayList<String>(); Cluster cluster = getCluster(getConf()); String jobId = new JobID("job", RANDOM.nextInt(1000)).toString(); boolean success = RANDOM.nextBoolean(); JobID jobID = JobID.forName(jobId); int numEvents = getRandomValueInRange(10) + 1; TaskCompletionEvent[] events = getTaskCompletionEvents(numEvents, jobID); Job job = mock(Job.class); when(cluster.getJob(jobID)).thenReturn(job); when(job.getTaskCompletionEvents(0)).thenReturn(events); for (TaskCompletionEvent event : events) { if (success) { LogParams params = getLogParams(); when(cluster.getLogParams(jobID, event.getTaskAttemptId())).thenReturn(params); String url = SCHEME + getConf().get(YARN_LOG_SERVER_URL) + "/" + event.getTaskTrackerHttp() + "/" + params.getContainerId() + "/" + params.getApplicationId() + "/" + params.getOwner() + "?start=0"; expectedResult.add(url); } else { when(cluster.getJob(jobID)).thenReturn(null); expectedResult = null;// ww w .java 2s. c o m } resultSet[count] = new Object[] { jobId, expectedResult }; } } return resultSet; }
From source file: org.apache.falcon.oozie.logging.TaskLogRetrieverYarnTest.java
License: Apache License
/**
 * Supplies (jobId, expectedUrls) pairs for the log-retriever test: a random count of
 * samples, each stubbing the mocked {@code Cluster} for either a successful lookup
 * (expected list of task-log URLs) or a failed one (expected {@code null}).
 */
@DataProvider(name = "testData")
public Object[][] testData() throws IOException, InterruptedException {
    int sampleCount = getRandomValueInRange(10) + 1;
    Object[][] dataRows = new Object[sampleCount][2];
    for (int sample = 0; sample < sampleCount; sample++) {
        List<String> expectedUrls = new ArrayList<String>();
        Cluster mockedCluster = getCluster(getConf());
        String jobIdString = new JobID("job", sample).toString();
        boolean lookupSucceeds = random.nextBoolean();
        JobID parsedJobId = JobID.forName(jobIdString);
        int eventCount = getRandomValueInRange(10) + 1;
        TaskCompletionEvent[] completionEvents = getTaskCompletionEvents(eventCount, parsedJobId);
        Job mockedJob = mock(Job.class);
        when(mockedCluster.getJob(parsedJobId)).thenReturn(mockedJob);
        when(mockedJob.getTaskCompletionEvents(0)).thenReturn(completionEvents);
        if (lookupSucceeds) {
            // One expected URL per completion event, mirroring the retriever's format.
            for (TaskCompletionEvent completionEvent : completionEvents) {
                LogParams logParams = getLogParams();
                when(mockedCluster.getLogParams(parsedJobId, completionEvent.getTaskAttemptId()))
                        .thenReturn(logParams);
                String expectedUrl = SCHEME + getConf().get(YARN_LOG_SERVER_URL)
                        + "/" + completionEvent.getTaskTrackerHttp()
                        + "/" + logParams.getContainerId()
                        + "/" + logParams.getApplicationId()
                        + "/" + logParams.getOwner() + "?start=0";
                expectedUrls.add(expectedUrl);
            }
        } else if (completionEvents.length > 0) {
            // Failure path: re-stub the job lookup to miss and expect a null result.
            when(mockedCluster.getJob(parsedJobId)).thenReturn(null);
            expectedUrls = null;
        }
        dataRows[sample] = new Object[] { jobIdString, expectedUrls };
    }
    return dataRows;
}