List of usage examples for org.apache.hadoop.mapred.Counters#fromEscapedCompactString
public static Counters fromEscapedCompactString(String compactString) throws ParseException
From source file: azkaban.jobtype.MapReduceJobState.java
License: Apache License
@SuppressWarnings("unchecked") public static MapReduceJobState fromJson(Object obj) throws Exception { Map<String, Object> jsonObj = (HashMap<String, Object>) obj; String jobId = (String) jsonObj.get("jobId"); String jobName = (String) jsonObj.get("jobName"); String trackingUrl = (String) jsonObj.get("trackingURL"); boolean isComplete = Boolean.parseBoolean((String) jsonObj.get("isComplete")); boolean isSuccessful = Boolean.parseBoolean((String) jsonObj.get("isSuccessful")); String failureInfo = (String) jsonObj.get("failureInfo"); float mapProgress = Float.parseFloat((String) jsonObj.get("mapProgress")); float reduceProgress = Float.parseFloat((String) jsonObj.get("reduceProgress")); long jobStartTime = Long.parseLong((String) jsonObj.get("jobStartTime")); long jobLastUpdateTime = Long.parseLong((String) jsonObj.get("jobLastUpdateTime")); int totalMappers = Integer.parseInt((String) jsonObj.get("totalMappers")); int finishedMappersCount = Integer.parseInt((String) jsonObj.get("finishedMappersCount")); int totalReducers = Integer.parseInt((String) jsonObj.get("totalReducers")); int finishedReducersCount = Integer.parseInt((String) jsonObj.get("finishedReducersCount")); String countersString = (String) jsonObj.get("countersString"); Counters counters = Counters.fromEscapedCompactString(countersString); return new MapReduceJobState(jobId, jobName, trackingUrl, failureInfo, isComplete, isSuccessful, mapProgress, reduceProgress, jobStartTime, jobLastUpdateTime, totalMappers, finishedMappersCount, totalReducers, finishedReducersCount, counters); }
From source file: com.cloudera.training.metrics.JobHistoryHelper.java
License: Apache License
public static String extractCounter(String counterFromHist, String... counterNames) throws ParseException { Counters counters = Counters.fromEscapedCompactString(counterFromHist); for (Counters.Group group : counters) { for (Counters.Counter counter : group) { for (String counterName : counterNames) { if (counterName.equals(counter.getName())) { return String.valueOf(counter.getCounter()); }//w ww . j a va 2s . c o m } } } return null; }
From source file: com.twitter.hraven.etl.AssertHistoryListener.java
License: Apache License
/**
 * Asserts that every counter encoded in {@code expectedEncodedCounters} (escaped
 * compact-string form) is present in {@code foundCounterMap} with the same
 * group, name, and value.
 *
 * @param jobId job id, used only in assertion-failure messages
 * @param expectedEncodedCounters expected counters as an escaped compact string
 * @param foundCounterMap actual counters recorded for the job
 */
private void assertCounters(String jobId, String expectedEncodedCounters,
        CounterMap foundCounterMap) {
    assertNotNull(foundCounterMap);
    Counters expCounters = null;
    try {
        expCounters = Counters.fromEscapedCompactString(expectedEncodedCounters);
    } catch (ParseException e) {
        // Fixed message typo: was "Excetion".
        fail("Exception trying to parse counters: " + e.getMessage());
    }
    for (Counters.Group group : expCounters) {
        String expGroupName = group.getName();
        for (Counters.Counter counter : group) {
            String expName = counter.getName();
            long expValue = counter.getValue();
            Counter foundCounter = foundCounterMap.getCounter(expGroupName, expName);
            assertNotNull(String.format("Counter not found for job=%s, group=%s, name=%s",
                    jobId, expGroupName, expName), foundCounter);
            // Plain literal: the original wrapped a no-arg String.format around it.
            assertEquals("Unexpected counter group", expGroupName, foundCounter.getGroup());
            assertEquals(String.format("Unexpected counter name for job=%s, group=%s",
                    jobId, expGroupName), expName, foundCounter.getKey());
            assertEquals(String.format("Unexpected counter value for job=%s, group=%s, name=%s",
                    jobId, expGroupName, expName), expValue, foundCounter.getValue());
        }
    }
}
From source file: com.twitter.hraven.mapreduce.JobHistoryListener.java
License: Apache License
private void addKeyValues(Put p, byte[] family, JobHistoryKeys key, String value) { if (key == JobHistoryKeys.COUNTERS || key == JobHistoryKeys.MAP_COUNTERS || key == JobHistoryKeys.REDUCE_COUNTERS) { try {//from w w w. ja v a2 s . c om Counters counters = Counters.fromEscapedCompactString(value); /* * Name counter columns as: * g!groupname!countername */ byte[] counterPrefix = null; if (key == JobHistoryKeys.COUNTERS) { counterPrefix = Bytes.add(Constants.COUNTER_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES); } else if (key == JobHistoryKeys.MAP_COUNTERS) { counterPrefix = Bytes.add(Constants.MAP_COUNTER_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES); } else if (key == JobHistoryKeys.REDUCE_COUNTERS) { counterPrefix = Bytes.add(Constants.REDUCE_COUNTER_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES); } else { throw new IllegalArgumentException("Unknown counter type " + key.toString()); } for (Counters.Group group : counters) { byte[] groupPrefix = Bytes.add(counterPrefix, Bytes.toBytes(group.getName()), Constants.SEP_BYTES); for (Counters.Counter counter : group) { String counterName = counter.getName(); long counterValue = counter.getValue(); byte[] qualifier = Bytes.add(groupPrefix, Bytes.toBytes(counterName)); p.add(family, qualifier, Bytes.toBytes(counterValue)); // get the map and reduce slot millis for megabytemillis calculations if (Constants.SLOTS_MILLIS_MAPS.equals(counterName)) { this.jobDetails.setMapSlotMillis(counterValue); } if (Constants.SLOTS_MILLIS_REDUCES.equals(counterName)) { this.jobDetails.setReduceSlotMillis(counterValue); } } } } catch (ParseException pe) { LOG.error("Counters could not be parsed from string'" + value + "'", pe); } } else { @SuppressWarnings("rawtypes") Class clazz = JobHistoryKeys.KEY_TYPES.get(key); byte[] valueBytes = null; if (Integer.class.equals(clazz)) { try { valueBytes = (value != null && value.trim().length() > 0) ? 
Bytes.toBytes(Integer.parseInt(value)) : Constants.ZERO_INT_BYTES; } catch (NumberFormatException nfe) { // us a default value valueBytes = Constants.ZERO_INT_BYTES; } } else if (Long.class.equals(clazz)) { try { long valueLong = (value != null && value.trim().length() > 0) ? Long.parseLong(value) : 0L; valueBytes = Bytes.toBytes(valueLong); if (key == JobHistoryKeys.TOTAL_MAPS) { jobDetails.setTotalMaps(valueLong); } else if (key == JobHistoryKeys.TOTAL_REDUCES) { jobDetails.setTotalReduces(valueLong); } } catch (NumberFormatException nfe) { // us a default value valueBytes = Constants.ZERO_LONG_BYTES; } } else { // keep the string representation by default valueBytes = Bytes.toBytes(value); } byte[] qualifier = Bytes.toBytes(key.toString().toLowerCase()); p.add(family, qualifier, valueBytes); } }
From source file: org.apache.pig.piggybank.storage.HadoopJobHistoryLoader.java
License: Apache License
@SuppressWarnings("deprecation") private static void parseAndAddJobCounters(Map<String, String> job, String counters) { try {//from w w w.j a v a2 s. c o m Counters counterGroups = Counters.fromEscapedCompactString(counters); for (Group otherGroup : counterGroups) { Group group = counterGroups.getGroup(otherGroup.getName()); for (Counter otherCounter : otherGroup) { Counter counter = group.getCounterForName(otherCounter.getName()); job.put(otherCounter.getName(), String.valueOf(counter.getValue())); } } } catch (ParseException e) { LOG.warn("Failed to parse job counters", e); } }
From source file: org.apache.pig.piggybank.storage.HadoopJobHistoryLoader.java
License: Apache License
@SuppressWarnings("deprecation") private static void populateMapReduceTaskLists(MRJobInfo value, Map<String, JobHistory.Task> taskMap) { Map<String, String> mapT = value.mapTask; Map<String, String> reduceT = value.reduceTask; long minMapRows = Long.MAX_VALUE; long maxMapRows = 0; long minMapTime = Long.MAX_VALUE; long maxMapTime = 0; long avgMapTime = 0; long totalMapTime = 0; int numberMaps = 0; long minReduceRows = Long.MAX_VALUE; long maxReduceRows = 0; long minReduceTime = Long.MAX_VALUE; long maxReduceTime = 0; long avgReduceTime = 0; long totalReduceTime = 0; int numberReduces = 0; int num_tasks = taskMap.entrySet().size(); Iterator<Map.Entry<String, JobHistory.Task>> ti = taskMap.entrySet().iterator(); for (int i = 0; i < num_tasks; i++) { Map.Entry<String, JobHistory.Task> entry = (Map.Entry<String, JobHistory.Task>) ti.next(); JobHistory.Task task = entry.getValue(); if (task.get(Keys.TASK_TYPE).equals("MAP")) { Map<JobHistory.Keys, String> mapTask = task.getValues(); Map<JobHistory.Keys, String> successTaskAttemptMap = getLastSuccessfulTaskAttempt(task); // NOTE: Following would lead to less number of actual tasks collected in the tasklist array if (successTaskAttemptMap != null) { mapTask.putAll(successTaskAttemptMap); } else { LOG.info("Task:<" + task.get(Keys.TASKID) + "> is not successful - SKIPPING"); }// w w w . j a v a 2s. 
co m long duration = 0; long startTime = 0; long endTime = 0; int size = mapTask.size(); numberMaps++; Iterator<Map.Entry<JobHistory.Keys, String>> kv = mapTask.entrySet().iterator(); for (int j = 0; j < size; j++) { Map.Entry<JobHistory.Keys, String> mtc = kv.next(); JobHistory.Keys key = mtc.getKey(); String val = mtc.getValue(); switch (key) { case START_TIME: startTime = Long.valueOf(val); break; case FINISH_TIME: endTime = Long.valueOf(val); break; case COUNTERS: { try { Counters counters = Counters.fromEscapedCompactString(val); long rows = counters.getGroup(TASK_COUNTER_GROUP).getCounterForName(MAP_INPUT_RECORDS) .getCounter(); if (rows < minMapRows) minMapRows = rows; if (rows > maxMapRows) maxMapRows = rows; } catch (ParseException e) { LOG.warn("Failed to parse job counters", e); } } break; default: LOG.warn("JobHistory.Keys." + key + " : NOT INCLUDED IN PERFORMANCE ADVISOR MAP COUNTERS"); break; } } duration = endTime - startTime; if (minMapTime > duration) minMapTime = duration; if (maxMapTime < duration) maxMapTime = duration; totalMapTime += duration; } else if (task.get(Keys.TASK_TYPE).equals("REDUCE")) { Map<JobHistory.Keys, String> reduceTask = task.getValues(); Map<JobHistory.Keys, String> successTaskAttemptMap = getLastSuccessfulTaskAttempt(task); // NOTE: Following would lead to less number of actual tasks collected in the tasklist array if (successTaskAttemptMap != null) { reduceTask.putAll(successTaskAttemptMap); } else { LOG.warn("Task:<" + task.get(Keys.TASKID) + "> is not successful - SKIPPING"); } long duration = 0; long startTime = 0; long endTime = 0; int size = reduceTask.size(); numberReduces++; Iterator<Map.Entry<JobHistory.Keys, String>> kv = reduceTask.entrySet().iterator(); for (int j = 0; j < size; j++) { Map.Entry<JobHistory.Keys, String> rtc = kv.next(); JobHistory.Keys key = rtc.getKey(); String val = rtc.getValue(); switch (key) { case START_TIME: startTime = Long.valueOf(val); break; case FINISH_TIME: endTime = 
Long.valueOf(val); break; case COUNTERS: { try { Counters counters = Counters.fromEscapedCompactString(val); long rows = counters.getGroup(TASK_COUNTER_GROUP) .getCounterForName(REDUCE_INPUT_RECORDS).getCounter(); if (rows < minReduceRows) minReduceRows = rows; if (rows > maxReduceRows) maxReduceRows = rows; } catch (ParseException e) { LOG.warn("Failed to parse job counters", e); } } break; default: LOG.warn("JobHistory.Keys." + key + " : NOT INCLUDED IN PERFORMANCE ADVISOR REDUCE COUNTERS"); break; } } duration = endTime - startTime; if (minReduceTime > duration) minReduceTime = duration; if (maxReduceTime < duration) maxReduceTime = duration; totalReduceTime += duration; } else if (task.get(Keys.TASK_TYPE).equals("CLEANUP")) { LOG.info("IGNORING TASK TYPE : " + task.get(Keys.TASK_TYPE)); } else { LOG.warn("UNKNOWN TASK TYPE : " + task.get(Keys.TASK_TYPE)); } } if (numberMaps > 0) { avgMapTime = (totalMapTime / numberMaps); mapT.put("MIN_MAP_TIME", String.valueOf(minMapTime)); mapT.put("MAX_MAP_TIME", String.valueOf(maxMapTime)); mapT.put("MIN_MAP_INPUT_ROWS", String.valueOf(minMapRows)); mapT.put("MAX_MAP_INPUT_ROWS", String.valueOf(maxMapRows)); mapT.put("AVG_MAP_TIME", String.valueOf(avgMapTime)); mapT.put("NUMBER_MAPS", String.valueOf(numberMaps)); } if (numberReduces > 0) { avgReduceTime = (totalReduceTime / numberReduces); reduceT.put("MIN_REDUCE_TIME", String.valueOf(minReduceTime)); reduceT.put("MAX_REDUCE_TIME", String.valueOf(maxReduceTime)); reduceT.put("AVG_REDUCE_TIME", String.valueOf(avgReduceTime)); reduceT.put("MIN_REDUCE_INPUT_ROWS", String.valueOf(minReduceTime)); reduceT.put("MAX_REDUCE_INPUT_ROWS", String.valueOf(maxReduceTime)); reduceT.put("NUMBER_REDUCES", String.valueOf(numberReduces)); } else { reduceT.put("NUMBER_REDUCES", String.valueOf(0)); } }