List of usage examples for org.apache.hadoop.mapred.Counters.getGroupNames()
@SuppressWarnings("unchecked") public synchronized Collection<String> getGroupNames()
From source file:azkaban.jobtype.javautils.AbstractHadoopJob.java
License:Apache License
public void run() throws Exception {
    JobConf conf = getJobConf();

    if (System.getenv(HADOOP_TOKEN_FILE_LOCATION) != null) {
        conf.set(MAPREDUCE_JOB_CREDENTIALS_BINARY, System.getenv(HADOOP_TOKEN_FILE_LOCATION));
    }

    jobClient = new JobClient(conf);
    runningJob = jobClient.submitJob(conf);
    logger.info("See " + runningJob.getTrackingURL() + " for details.");
    jobClient.monitorAndPrintJob(conf, runningJob);

    if (!runningJob.isSuccessful()) {
        throw new Exception("Hadoop job:" + getJobName() + " failed!");
    }

    // dump all counters
    Counters counters = runningJob.getCounters();
    for (String groupName : counters.getGroupNames()) {
        Counters.Group group = counters.getGroup(groupName);
        logger.info("Group: " + group.getDisplayName());
        for (Counter counter : group)
            logger.info(counter.getDisplayName() + ":\t" + counter.getValue());
    }

    updateMapReduceJobState(conf);
}
From source file:azkaban.jobtype.StatsUtils.java
License:Apache License
public static Object countersToJson(Counters counters) {
    Map<String, Object> jsonObj = new HashMap<String, Object>();
    if (counters == null) {
        return jsonObj;
    }

    Collection<String> counterGroups = counters.getGroupNames();
    for (String groupName : counterGroups) {
        Map<String, String> counterStats = new HashMap<String, String>();
        Group group = counters.getGroup(groupName);
        Iterator<Counters.Counter> it = group.iterator();
        while (it.hasNext()) {
            Counter counter = it.next();
            counterStats.put(counter.getDisplayName(), String.valueOf(counter.getCounter()));
        }
        jsonObj.put(groupName, counterStats);
    }
    return jsonObj;
}
From source file:cascading.flow.hadoop.HadoopStepStats.java
License:Open Source License
@Override
public Collection<String> getCounterGroups() {
    try {
        RunningJob runningJob = getRunningJob();

        if (runningJob == null)
            return Collections.emptySet();

        Counters counters = runningJob.getCounters();

        if (counters == null)
            return Collections.emptySet();

        return Collections.unmodifiableCollection(counters.getGroupNames());
    } catch (IOException exception) {
        throw new FlowException("unable to get remote counter groups");
    }
}
From source file:cascading.flow.hadoop.HadoopStepStats.java
License:Open Source License
@Override
public Collection<String> getCounterGroupsMatching(String regex) {
    try {
        RunningJob runningJob = getRunningJob();

        if (runningJob == null)
            return Collections.emptySet();

        Counters counters = runningJob.getCounters();

        if (counters == null)
            return Collections.emptySet();

        Set<String> results = new HashSet<String>();

        for (String counter : counters.getGroupNames()) {
            if (counter.matches(regex))
                results.add(counter);
        }

        return Collections.unmodifiableCollection(results);
    } catch (IOException exception) {
        throw new FlowException("unable to get remote counter groups");
    }
}
From source file:com.twitter.pig.backend.hadoop.executionengine.tez.TezJobControlCompiler.java
License:Apache License
/**
 * Reads the global counters produced by a job on the group labeled with PIG_MAP_RANK_NAME.
 * Then the cumulative sum is calculated: the previous cumulative sum plus the previous
 * global counter value.
 * @param job the job with the global counters collected.
 * @param operationID after being collected on global counters (POCounter),
 * these values are passed via configuration file to PORank, using the unique
 * operation identifier
 */
private void saveCounters(Job job, String operationID) {
    Counters counters;
    Group groupCounters;

    Long previousValue = 0L;
    Long previousSum = 0L;
    ArrayList<Pair<String, Long>> counterPairs;

    try {
        counters = HadoopShims.getCounters(job);
        groupCounters = counters.getGroup(getGroupName(counters.getGroupNames()));

        Iterator<Counter> it = groupCounters.iterator();
        HashMap<Integer, Long> counterList = new HashMap<Integer, Long>();

        while (it.hasNext()) {
            try {
                Counter c = it.next();
                counterList.put(Integer.valueOf(c.getDisplayName()), c.getValue());
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }

        counterSize = counterList.size();
        counterPairs = new ArrayList<Pair<String, Long>>();

        for (int i = 0; i < counterSize; i++) {
            previousSum += previousValue;
            previousValue = counterList.get(Integer.valueOf(i));
            counterPairs.add(new Pair<String, Long>(TezJobControlCompiler.PIG_MAP_COUNTER + operationID
                    + TezJobControlCompiler.PIG_MAP_SEPARATOR + i, previousSum));
        }

        globalCounters.put(operationID, counterPairs);
    } catch (Exception e) {
        String msg = "Error to read counters into Rank operation counterSize " + counterSize;
        throw new RuntimeException(msg, e);
    }
}
From source file:org.apache.oozie.action.hadoop.MapReduceActionExecutor.java
License:Apache License
@SuppressWarnings("unchecked") private JSONObject counterstoJson(Counters counters) { if (counters == null) { return null; }/* w ww.j ava 2 s . c o m*/ JSONObject groups = new JSONObject(); for (String gName : counters.getGroupNames()) { JSONObject group = new JSONObject(); for (Counters.Counter counter : counters.getGroup(gName)) { String cName = counter.getName(); Long cValue = counter.getCounter(); group.put(cName, cValue); } groups.put(gName, group); } return groups; }
From source file:org.apache.oozie.action.hadoop.OoziePigStats.java
License:Apache License
@SuppressWarnings("unchecked") private static JSONObject toJSONFromCounters(Counters counters) { if (counters == null) { return null; }// w w w.jav a 2 s.c om JSONObject groups = new JSONObject(); for (String gName : counters.getGroupNames()) { JSONObject group = new JSONObject(); for (Counter counter : counters.getGroup(gName)) { String cName = counter.getName(); Long cValue = counter.getValue(); group.put(cName, Long.toString(cValue)); } groups.put(gName, group); } return groups; }
From source file:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler.java
License:Apache License
/**
 * Reads the global counters produced by a job on the group labeled with PIG_MAP_RANK_NAME.
 * Then the cumulative sum is calculated: the previous cumulative sum plus the previous
 * global counter value.
 * @param job the job with the global counters collected.
 * @param operationID after being collected on global counters (POCounter),
 * these values are passed via configuration file to PORank, using the unique
 * operation identifier
 */
private void saveCounters(Job job, String operationID) {
    Counters counters;
    Group groupCounters;

    Long previousValue = 0L;
    Long previousSum = 0L;
    ArrayList<Pair<String, Long>> counterPairs;

    try {
        counters = HadoopShims.getCounters(job);
        String groupName = getGroupName(counters.getGroupNames());

        // In case the counter group was not found, we need to find out why.
        // The only acceptable state is that the relation has been empty.
        if (groupName == null) {
            Counter outputRecords = counters.getGroup(MRPigStatsUtil.TASK_COUNTER_GROUP)
                    .getCounterForName(MRPigStatsUtil.MAP_OUTPUT_RECORDS);

            if (outputRecords.getCounter() == 0) {
                globalCounters.put(operationID, new ArrayList<Pair<String, Long>>());
                return;
            } else {
                throw new RuntimeException("Did not found RANK counter group for operationId: " + operationID);
            }
        }

        groupCounters = counters.getGroup(groupName);

        Iterator<Counter> it = groupCounters.iterator();
        HashMap<Integer, Long> counterList = new HashMap<Integer, Long>();

        while (it.hasNext()) {
            try {
                Counter c = it.next();
                counterList.put(Integer.valueOf(c.getDisplayName()), c.getValue());
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }

        counterSize = counterList.size();
        counterPairs = new ArrayList<Pair<String, Long>>();

        for (int i = 0; i < counterSize; i++) {
            previousSum += previousValue;
            previousValue = counterList.get(Integer.valueOf(i));
            counterPairs.add(new Pair<String, Long>(
                    JobControlCompiler.PIG_MAP_COUNTER + operationID + JobControlCompiler.PIG_MAP_SEPARATOR + i,
                    previousSum));
        }

        globalCounters.put(operationID, counterPairs);
    } catch (Exception e) {
        String msg = "Error to read counters into Rank operation counterSize " + counterSize;
        throw new RuntimeException(msg, e);
    }
}
From source file:org.godhuli.rhipe.FileUtils.java
License:Apache License
public static REXP buildlistFromOldCounter(org.apache.hadoop.mapred.Counters c, double dur) {
    String[] groupnames = c.getGroupNames().toArray(new String[] {});
    String[] groupdispname = new String[groupnames.length + 1];
    Vector<REXP> cn = new Vector<REXP>();

    for (int i = 0; i < groupnames.length; i++) {
        org.apache.hadoop.mapred.Counters.Group cgroup = c.getGroup(groupnames[i]);
        groupdispname[i] = cgroup.getDisplayName();

        REXP.Builder cvalues = REXP.newBuilder();
        Vector<String> cnames = new Vector<String>();
        cvalues.setRclass(REXP.RClass.REAL);

        for (org.apache.hadoop.mapred.Counters.Counter counter : cgroup) {
            cvalues.addRealValue((double) counter.getValue());
            cnames.add(counter.getDisplayName());
        }

        cvalues.addAttrName("names");
        cvalues.addAttrValue(RObjects.makeStringVector(cnames.toArray(new String[] {})));
        cn.add(cvalues.build());
    }

    groupdispname[groupnames.length] = "job_time";
    REXP.Builder cvalues = REXP.newBuilder();
    cvalues.setRclass(REXP.RClass.REAL);
    cvalues.addRealValue(dur);
    cn.add(cvalues.build());

    return (RObjects.makeList(groupdispname, cn));
}
From source file:voldemort.store.readonly.mr.azkaban.AbstractHadoopJob.java
License:Apache License
public void run(JobConf conf) throws Exception {
    _runningJob = new JobClient(conf).submitJob(conf);
    info("See " + _runningJob.getTrackingURL() + " for details.");
    _runningJob.waitForCompletion();

    if (!_runningJob.isSuccessful()) {
        throw new Exception("Hadoop job:" + getId() + " failed!");
    }

    // dump all counters
    Counters counters = _runningJob.getCounters();
    for (String groupName : counters.getGroupNames()) {
        Counters.Group group = counters.getGroup(groupName);
        info("Group: " + group.getDisplayName());

        for (Counter counter : group)
            info(counter.getDisplayName() + ":\t" + counter.getValue());
    }
}