Example usage for org.apache.hadoop.mapreduce Counter getName

List of usage examples for org.apache.hadoop.mapreduce Counter getName

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce Counter getName.

Prototype

String getName();

Source Link

Usage

From source file:cascading.stats.hadoop.HadoopNodeCounterCache.java

License:Open Source License

/**
 * Aggregates slice-level counters for the given node into a map keyed by
 * counter group name, then counter name, summing values across all slices.
 *
 * @param flowNodeStats node whose child slice stats are read.
 * @return map of group name to (counter name to summed value).
 * @throws IOException if capturing the remote slice detail fails.
 */
protected Map<String, Map<String, Long>> getCounters(FlowNodeStats flowNodeStats) throws IOException {
    // will use final or cached remote stats
    flowNodeStats.captureDetail(CascadingStats.Type.SLICE);

    Map<String, Map<String, Long>> allCounters = new HashMap<>();

    for (FlowSliceStats sliceStats : flowNodeStats.getChildren()) {
        TaskReport taskReport = ((HadoopSliceStats) sliceStats).getTaskReport();

        for (CounterGroup group : taskReport.getTaskCounters()) {
            String groupName = group.getName();
            Map<String, Long> groupValues = allCounters.get(groupName);

            if (groupValues == null) {
                groupValues = new HashMap<>();
                allCounters.put(groupName, groupValues);
            }

            // sum counters that appear in more than one slice
            for (Counter counter : group) {
                Long current = groupValues.get(counter.getName());
                groupValues.put(counter.getName(), (current == null ? 0L : current) + counter.getValue());
            }
        }
    }

    return allCounters;
}

From source file:cascading.stats.hadoop.HadoopSliceStats.java

License:Open Source License

/**
 * Populates the counters field from the given task report: one inner map
 * per counter group, keyed by counter name.
 *
 * @param taskReport report whose task counters are copied.
 */
private void setCounters(TaskReport taskReport) {
    this.counters = new HashMap<>();

    for (CounterGroup group : taskReport.getTaskCounters()) {
        Map<String, Long> groupValues = new HashMap<String, Long>();

        for (Counter counter : group)
            groupValues.put(counter.getName(), counter.getValue());

        this.counters.put(group.getName(), groupValues);
    }
}

From source file:cascading.stats.hadoop.HadoopStepCounterCache.java

License:Open Source License

/**
 * Returns the names of all counters contained in the named counter group.
 *
 * @param counters counters to search.
 * @param group name of the counter group to enumerate.
 * @return set of counter names found in the group.
 */
@Override
protected Set<String> getCountersFor(Counters counters, String group) {
    Set<String> names = new HashSet<>();

    for (Counter counter : counters.getGroup(group))
        names.add(counter.getName());

    return names;
}

From source file:co.cask.cdap.internal.app.runtime.workflow.BasicWorkflowToken.java

License:Apache License

/**
 * Records the given MapReduce counters on this token as an immutable nested
 * map, and mirrors each counter into the system scope under the key
 * "group.counter".
 *
 * @param counters job counters to record.
 */
public synchronized void setMapReduceCounters(Counters counters) {
    ImmutableMap.Builder<String, Map<String, Long>> allGroups = ImmutableMap.builder();

    for (CounterGroup group : counters) {
        String groupName = group.getName();
        ImmutableMap.Builder<String, Long> groupValues = ImmutableMap.builder();

        for (Counter counter : group) {
            String counterName = counter.getName();
            long value = counter.getValue();

            groupValues.put(counterName, value);
            // Also put the counter to system scope.
            put(groupName + "." + counterName, Value.of(value), WorkflowToken.Scope.SYSTEM);
        }

        allGroups.put(groupName, groupValues.build());
    }

    this.mapReduceCounters = allGroups.build();
}

From source file:co.cask.cdap.internal.app.runtime.workflow.MapReduceProgramWorkflowRunner.java

License:Apache License

/**
 * Copies the Hadoop job's counters into the workflow token, both as a
 * nested map (group -> counter -> value) and as individual system-scope
 * token entries keyed "group.counter", then merges the context token into
 * this runner's token.
 *
 * @param context MapReduce context whose job supplies the counters.
 * @throws Exception if the counters cannot be read from the job.
 */
private void updateWorkflowToken(MapReduceContext context) throws Exception {
    WorkflowToken workflowTokenFromContext = context.getWorkflowToken();

    if (workflowTokenFromContext == null) {
        throw new IllegalStateException(
                "WorkflowToken cannot be null when the " + "MapReduce program is started by Workflow.");
    }

    // hoist the cast that was previously repeated at every use
    BasicWorkflowToken contextToken = (BasicWorkflowToken) workflowTokenFromContext;

    Map<String, Map<String, Long>> mapReduceCounters = Maps.newHashMap();
    Counters counters = ((Job) context.getHadoopJob()).getCounters();
    for (CounterGroup group : counters) {
        String groupName = group.getName();
        // keep a direct reference instead of a per-counter map lookup
        Map<String, Long> groupValues = new HashMap<String, Long>();
        mapReduceCounters.put(groupName, groupValues);
        for (Counter counter : group) {
            groupValues.put(counter.getName(), counter.getValue());
            contextToken.put(groupName + "." + counter.getName(), Value.of(counter.getValue()),
                    WorkflowToken.Scope.SYSTEM);
        }
    }

    contextToken.setMapReduceCounters(mapReduceCounters);
    ((BasicWorkflowToken) token).mergeToken(contextToken);
}

From source file:com.cloudera.accumulo.upgrade.compatibility.DataCompatibilityVerify.java

License:Open Source License

/**
 * Submits one verification MapReduce job per table, waits for all of them,
 * and validates the counters each job reports: per-row and per-family cell
 * counts, the total row count, and the bad-checksum counter.
 *
 * @param args command line arguments, parsed into {@code options}.
 * @return 0 when every job succeeds and all counter checks pass, 1 otherwise.
 * @throws Exception if job configuration, submission, or completion fails.
 */
@Override
public int run(String[] args) throws Exception {
    final String jobName = this.getClass().getName();
    options.parseArgs(jobName, args);
    try {
        final int totalMapSlots = getConf().getInt("mapred.map.tasks",
                DataCompatibilityTestCli.DEFAULT_NUM_ROWS);
        if (-1 == options.test.numRows) {
            options.test.numRows = totalMapSlots;
        }
        final TableOperations ops = options.connection.getConnector().tableOperations();
        final List<String> names = options.test.getTableNames(ops);
        int totalReduceSlots = getConf().getInt("mapred.reduce.tasks", 0);
        if (-1 != options.test.numReduceSlots) {
            totalReduceSlots = options.test.numReduceSlots;
        }
        if (0 == totalReduceSlots) {
            totalReduceSlots = names.size();
        }
        final int reducesPerJob = Math.max(1, totalReduceSlots / names.size());

        // was a raw ArrayList; parameterize to avoid an unchecked assignment
        final List<Job> jobs = new ArrayList<Job>();
        for (String name : names) {
            final Job job = new Job(getConf(), jobName + " " + name);
            job.setJarByClass(this.getClass());
            options.input.useAccumuloInputFormat(job, name);
            job.setMapperClass(DataVerifyMapper.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(LongWritable.class);
            job.setReducerClass(LongSumReducer.class);
            job.setCombinerClass(LongSumReducer.class);
            job.setOutputFormatClass(TextOutputFormat.class);
            TextOutputFormat.setOutputPath(job, new Path(options.test.output, name));
            job.setNumReduceTasks(reducesPerJob);
            job.submit();
            jobs.add(job);
        }

        boolean success = true;
        final long numCellsPerRow = options.test.qualifiers * DataCompatibilityLoad.FAMILIES.length;
        final long numCellsPerFamily = options.test.qualifiers * options.test.numRows;
        for (Job job : jobs) {
            success &= job.waitForCompletion(true);
            final CounterGroup group = job.getCounters().getGroup(DataVerifyMapper.class.getName());
            if (null == group) {
                log.error("Job '" + job.getJobName() + "' doesn't have counters for the verification mapper.");
                success = false;
            } else {
                final Counter badCounter = group.findCounter(BAD_COUNTER);
                if (null != badCounter && 0 < badCounter.getValue()) {
                    log.error("Job '" + job.getJobName() + "' has " + badCounter.getValue()
                            + " entries with bad checksums.");
                    success = false;
                }
                int numRows = 0;
                int numFamilies = 0;
                // every per-row counter must equal the expected cells per row,
                // and every per-family counter the expected cells per family
                for (Counter counter : group) {
                    if (counter.getName().startsWith(ROW_COUNTER_PREFIX)) {
                        numRows++;
                        if (numCellsPerRow != counter.getValue()) {
                            log.error("Job '" + job.getJobName() + "', counter '" + counter.getName()
                                    + "' should have " + numCellsPerRow + " cells, but instead has "
                                    + counter.getValue());
                            success = false;
                        }
                    } else if (counter.getName().startsWith(FAMILY_COUNTER_PREFIX)) {
                        numFamilies++;
                        if (numCellsPerFamily != counter.getValue()) {
                            log.error("Job '" + job.getJobName() + "', counter '" + counter.getName()
                                    + "' should have " + numCellsPerFamily + " cells, but instead has "
                                    + counter.getValue());
                            success = false;
                        }
                    }
                }
                if (options.test.numRows != numRows) {
                    log.error("Job '" + job.getJobName() + "' is supposed to have " + options.test.numRows
                            + " rows, but has " + numRows);
                    success = false;
                }
                if (DataCompatibilityLoad.FAMILIES.length != numFamilies) {
                    log.error("Job '" + job.getJobName() + "' is supposed to have "
                            + DataCompatibilityLoad.FAMILIES.length + " families, but has " + numFamilies);
                    success = false;
                }
            }
        }
        if (success) {
            log.info("All internal checks passed.");
        } else {
            log.info("Some checks failed. see log.");
        }
        return success ? 0 : 1;
    } finally {
        options.input.close();
    }
}

From source file:com.google.appengine.tools.mapreduce.MapReduceState.java

License:Apache License

/**
 * Flattens the given counters into a JSON object whose keys are
 * "group:counter" and whose values are the counter totals.
 *
 * @param counters counters to serialize.
 * @return flat JSON representation of all counters.
 * @throws JSONException if a key/value pair cannot be added.
 */
private static JSONObject toJson(Counters counters) throws JSONException {
    JSONObject json = new JSONObject();
    for (CounterGroup group : counters) {
        String groupName = group.getName();
        for (Counter counter : group)
            json.put(groupName + ":" + counter.getName(), counter.getValue());
    }
    return json;
}

From source file:com.inmobi.conduit.distcp.tools.mapred.CounterProvider.java

License:Apache License

/**
 * Converts the new-API counters into an old-API
 * {@code org.apache.hadoop.mapred.Counters} object, copying every counter
 * value group by group.
 *
 * @param ignore job id; unused.
 * @return old-API counters mirroring the wrapped counters.
 * @throws IOException declared for interface compatibility.
 */
public org.apache.hadoop.mapred.Counters getJobCounters(JobID ignore) throws IOException {
    org.apache.hadoop.mapred.Counters result = new org.apache.hadoop.mapred.Counters();
    for (CounterGroup group : counters) {
        String groupName = group.getName();
        for (Counter counter : group)
            result.incrCounter(groupName, counter.getName(), counter.getValue());
    }
    return result;
}

From source file:com.linkedin.drelephant.mapreduce.fetchers.MapReduceFSFetcherHadoop2.java

License:Apache License

/**
 * Copies every counter from the Hadoop counters into a new
 * {@code MapReduceCounterData} holder, preserving group and counter names.
 *
 * @param counters Hadoop counters to copy.
 * @return holder containing all counter values.
 */
private MapReduceCounterData getCounterData(Counters counters) {
    MapReduceCounterData data = new MapReduceCounterData();
    for (CounterGroup group : counters) {
        String groupName = group.getName();
        for (Counter counter : group)
            data.set(groupName, counter.getName(), counter.getValue());
    }
    return data;
}

From source file:com.moz.fiji.mapreduce.framework.JobHistoryFijiTable.java

License:Apache License

/**
 * Extract the counters from a Job./*from  www .j a v a 2s  .  c o  m*/
 *
 * @param job Job from which to get counters.
 * @return a map from counters to their counts. Keys are group:name.
 * @throws IOException in case of an error getting the counters.
 */
private static Map<String, Long> getCounters(final Job job) throws IOException {
    final Counters counters = job.getCounters();
    final Map<String, Long> countersMap = Maps.newHashMap();
    for (String group : counters.getGroupNames()) {
        for (Counter counter : counters.getGroup(group)) {
            countersMap.put(String.format("%s:%s", group, counter.getName()), counter.getValue());
        }
    }
    return countersMap;
}