Usage examples for org.apache.hadoop.mapreduce.CounterGroup#getName
String getName();
From source file: cascading.stats.hadoop.HadoopNodeCounterCache.java
License: Open Source License
protected Map<String, Map<String, Long>> getCounters(FlowNodeStats flowNodeStats) throws IOException { // will use final or cached remote stats flowNodeStats.captureDetail(CascadingStats.Type.SLICE); Map<String, Map<String, Long>> allCounters = new HashMap<>(); Collection<FlowSliceStats> children = flowNodeStats.getChildren(); for (FlowSliceStats sliceStats : children) { TaskReport taskReport = ((HadoopSliceStats) sliceStats).getTaskReport(); Counters counters = taskReport.getTaskCounters(); for (CounterGroup group : counters) { Map<String, Long> values = allCounters.get(group.getName()); if (values == null) { values = new HashMap<>(); allCounters.put(group.getName(), values); }/* w ww . jav a 2s. c o m*/ for (Counter counter : group) { Long value = values.get(counter.getName()); if (value == null) value = 0L; value += counter.getValue(); values.put(counter.getName(), value); } } } return allCounters; }
From source file: cascading.stats.hadoop.HadoopSliceStats.java
License: Open Source License
private void setCounters(TaskReport taskReport) { this.counters = new HashMap<>(); Counters hadoopCounters = taskReport.getTaskCounters(); for (CounterGroup group : hadoopCounters) { Map<String, Long> values = new HashMap<String, Long>(); this.counters.put(group.getName(), values); for (Counter counter : group) values.put(counter.getName(), counter.getValue()); }//ww w .j ava 2 s . c o m }
From source file: co.cask.cdap.internal.app.runtime.workflow.BasicWorkflowToken.java
License: Apache License
public synchronized void setMapReduceCounters(Counters counters) { ImmutableMap.Builder<String, Map<String, Long>> countersBuilder = ImmutableMap.builder(); for (CounterGroup group : counters) { ImmutableMap.Builder<String, Long> groupBuilder = ImmutableMap.builder(); for (Counter counter : group) { groupBuilder.put(counter.getName(), counter.getValue()); // Also put the counter to system scope. put(group.getName() + "." + counter.getName(), Value.of(counter.getValue()), WorkflowToken.Scope.SYSTEM); }//from w ww.j a va 2 s . c o m countersBuilder.put(group.getName(), groupBuilder.build()); } this.mapReduceCounters = countersBuilder.build(); }
From source file: co.cask.cdap.internal.app.runtime.workflow.MapReduceProgramWorkflowRunner.java
License: Apache License
private void updateWorkflowToken(MapReduceContext context) throws Exception { Map<String, Map<String, Long>> mapReduceCounters = Maps.newHashMap(); WorkflowToken workflowTokenFromContext = context.getWorkflowToken(); if (workflowTokenFromContext == null) { throw new IllegalStateException( "WorkflowToken cannot be null when the " + "MapReduce program is started by Workflow."); }// w ww .j a v a 2s. c o m Counters counters = ((Job) context.getHadoopJob()).getCounters(); for (CounterGroup group : counters) { mapReduceCounters.put(group.getName(), new HashMap<String, Long>()); for (Counter counter : group) { mapReduceCounters.get(group.getName()).put(counter.getName(), counter.getValue()); ((BasicWorkflowToken) workflowTokenFromContext).put(group.getName() + "." + counter.getName(), Value.of(counter.getValue()), WorkflowToken.Scope.SYSTEM); } } ((BasicWorkflowToken) workflowTokenFromContext).setMapReduceCounters(mapReduceCounters); ((BasicWorkflowToken) token).mergeToken((BasicWorkflowToken) workflowTokenFromContext); }
From source file: com.google.appengine.tools.mapreduce.MapReduceState.java
License: Apache License
/**
 * Flattens the given counters into a JSON object whose keys are
 * "groupName:counterName" and whose values are the counter values.
 *
 * @param counters Hadoop counters to serialize
 * @return flat JSON object of all counters
 * @throws JSONException if a key/value cannot be added
 */
private static JSONObject toJson(Counters counters) throws JSONException {
    JSONObject json = new JSONObject();

    for (CounterGroup group : counters) {
        String groupName = group.getName();

        for (Counter counter : group) {
            json.put(groupName + ":" + counter.getName(), counter.getValue());
        }
    }

    return json;
}
From source file: com.inmobi.conduit.distcp.tools.mapred.CounterProvider.java
License: Apache License
/**
 * Converts this provider's new-API counters into an old-API
 * {@code org.apache.hadoop.mapred.Counters} instance.
 *
 * @param ignore job id, unused
 * @return old-API counters holding the same group/counter/value triples
 * @throws IOException declared for interface compatibility; not thrown here
 */
public org.apache.hadoop.mapred.Counters getJobCounters(JobID ignore) throws IOException {
    org.apache.hadoop.mapred.Counters converted = new org.apache.hadoop.mapred.Counters();

    for (CounterGroup group : counters) {
        String groupName = group.getName();

        for (Counter counter : group) {
            converted.incrCounter(groupName, counter.getName(), counter.getValue());
        }
    }

    return converted;
}
From source file: com.linkedin.drelephant.mapreduce.fetchers.MapReduceFSFetcherHadoop2.java
License: Apache License
/**
 * Copies every (group, counter, value) triple from the given Hadoop counters
 * into a new {@link MapReduceCounterData} holder.
 *
 * @param counters Hadoop counters to copy
 * @return populated counter-data holder
 */
private MapReduceCounterData getCounterData(Counters counters) {
    MapReduceCounterData data = new MapReduceCounterData();

    for (CounterGroup group : counters) {
        for (Counter counter : group) {
            data.set(group.getName(), counter.getName(), counter.getValue());
        }
    }

    return data;
}
From source file: org.apache.blur.mapreduce.lib.BlurInputFormatTest.java
License: Apache License
/**
 * Asserts that exactly one map task was launched, according to the
 * TOTAL_LAUNCHED_MAPS counter found in either the new-API or old-API
 * job-counter group. Fails the test if neither group contains the counter.
 *
 * @param i        unused index (kept for caller compatibility)
 * @param counters job counters to inspect
 */
private void assertMapTask(int i, Counters counters) {
    for (CounterGroup counterGroup : counters) {
        String name = counterGroup.getName();

        // Accept either the mapreduce (new) or mapred (old) job-counter group name.
        boolean jobCounterGroup = name.equals("org.apache.hadoop.mapreduce.JobCounter")
            || name.equals("org.apache.hadoop.mapred.JobInProgress$Counter");

        if (!jobCounterGroup) {
            continue;
        }

        for (Counter counter : counterGroup) {
            if (counter.getName().equals("TOTAL_LAUNCHED_MAPS")) {
                assertEquals(1, counter.getValue());
                return;
            }
        }
    }
    fail();
}
From source file: org.apache.crunch.impl.mem.CountersWrapper.java
License: Apache License
/**
 * Adds every counter value from {@code other} into this wrapper's
 * corresponding counters, creating them via {@code findCounter} as needed.
 *
 * @param other counters whose values are merged in
 */
public synchronized void incrAllCounters(Counters other) {
    for (CounterGroup group : other) {
        String groupName = group.getName();

        for (Counter counter : group) {
            findCounter(groupName, counter.getName()).increment(counter.getValue());
        }
    }
}
From source file: org.apache.ignite.client.hadoop.counter.GridHadoopClientCounters.java
License: Apache License
/** {@inheritDoc} */
@Override
public synchronized CounterGroup addGroup(CounterGroup grp) {
    // Delegate to the (name, displayName) overload: only the group's identity
    // is copied here, not the counters it contains.
    return addGroup(grp.getName(), grp.getDisplayName());
}