Example usage for org.apache.hadoop.mapreduce.Counter.getValue()

Introduction

On this page you can find example usages for org.apache.hadoop.mapreduce.Counter.getValue().

Prototype

long getValue();

Document

What is the current value of this counter?
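
Before the full examples below, a minimal sketch of the call itself: run a job, then read a built-in counter. This sketch is not from any of the source files on this page; TaskCounter is the standard Hadoop enum, and the job configuration is elided.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskCounter;

public class CounterValueSketch {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "counter sketch");
        // ... set mapper, input and output paths here ...
        job.waitForCompletion(true);

        // getValue() returns the counter's accumulated total as a long
        long mapInputRecords = job.getCounters()
                .findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue();
        System.out.println("map input records = " + mapInputRecords);
    }
}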

Usage

From source file:EggContext.java

License:Open Source License

/** Get the current value of the Hadoop metric counter of the
 *  passed group and name.
 *  @param group   The counter group
 *  @param name    The counter name
 *  @return        The current value of the counter
 */
@JSFunction
public Double counter(String group, String name) {
    Counter counter = task.getCounter(group, name);
    return Double.valueOf(counter.getValue());
}

From source file:Analysis.A10_Weekday_v_Weekend_Listens.Listen_History_Weekday_Weekend_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Listen History - Weekday v Weekend");
    job.setJarByClass(Listen_History_Weekday_Weekend_Driver.class);

    job.setMapperClass(Listen_History_Weekday_Weekend_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    int code = job.waitForCompletion(true) ? 0 : 1;

    if (code == 0) {
        for (Counter counter : job.getCounters()
                .getGroup(Listen_History_Weekday_Weekend_Mapper.DAY_COUNTER_GROUP)) {
            System.out.println(counter.getDisplayName() + "\t" + counter.getValue());
        }
    }

    FileSystem.get(conf).delete(new Path(args[1]), true);

    System.exit(code);
}
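
Iterating the CounterGroup, as in the loop above, yields every Counter recorded under that group name; for generic (non-enum) counters, getDisplayName() falls back to the plain counter name, so the printed labels match the names the mapper incremented.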

From source file:Analysis.A9_Max_Activity_By_Time_of_Day.Most_Listens_By_Time_of_Day_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Most listens by Time of the Day");
    job.setJarByClass(Most_Listens_By_Time_of_Day_Driver.class);

    job.setMapperClass(Most_Listens_By_Time_of_Day_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    int code = job.waitForCompletion(true) ? 0 : 1;

    if (code == 0) {
        for (Counter counter : job.getCounters()
                .getGroup(Most_Listens_By_Time_of_Day_Mapper.HOUR_COUNTER_GROUP)) {
            System.out.println(counter.getDisplayName() + "\t" + counter.getValue());
        }
    }

    FileSystem.get(conf).delete(new Path(args[1]), true);

    System.exit(code);
}

From source file:bb.BranchAndBound.java

License:Apache License

public static void main(String[] args) throws Exception {
    /*Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
       System.err.println("Usage: branchandbound <input> <output>");
       System.exit(2);
    }
    Job job = new Job(conf, "branch and bound");
    job.setJarByClass(BranchAndBound.class);
    job.setMapperClass(BBMapper.class);
    //      job.setCombinerClass(IntSumReducer.class);
    //      job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);*/
    int n;
    String[] inputargs = new GenericOptionsParser(new Configuration(), args).getRemainingArgs();
    if (inputargs.length != 2) {
        System.err.println("Usage: branchandbound <data directory> <n>");
        System.exit(2);
    }
    n = Integer.parseInt(inputargs[1]);
    String dataDir = inputargs[0];
    String prev_output = dataDir + "/input";
    /*      for( int i = 1 ; i <= n ; i++ ) {
             for( int j = 0 ; j < 2 ; j++ ) {
    String input = prev_output ;
    String output = inputargs[1] + "/iteration" + i + "_" + j ;
    Job job = getJob(input, output, i, j) ;
    job.waitForCompletion(true) ; // if failed ????
    prev_output = output;
             }
          }
    */
    //prev_output = dataDir + "/output" + "/iteration" + 17;
    long totalNodes = 0;
    long searchedNodes = 0;
    long cutbyDEE = 0;
    int mapTotal = 768;
    for (int i = 0; i <= n; i++) {
        iterRound = i;
        String input = prev_output;
        String output = dataDir + "/output" + "/iteration" + i;
        Job job = getJob(input, output, dataDir, i);
        if (i == n) {
            numReduceTasks = 1;
        }
        //job.setNumMapTasks(200);
        if (numOutput > mapTotal) {
            FileInputFormat.setMaxInputSplitSize(job, 10 * (8 * n + 10) + numOutput * (8 * n + 10) / 3000);
            FileInputFormat.setMinInputSplitSize(job, Math.max((8 * n + 10), numOutput * (8 * n + 10) / 5000));
        } else {
            FileInputFormat.setMaxInputSplitSize(job, (8 * n + 10));
        }
        /*
        if( i == 0 ) {
        job.setNumReduceTasks(1);
        } else {
        job.setNumReduceTasks(0);
        }
         */
        job.setNumReduceTasks(0);
        job.waitForCompletion(true); // TODO: handle job failure
        prev_output = output;
        Counters counters = job.getCounters();
        Counter counter = counters.findCounter("MyCounter", "Map Output Counter");
        numOutput = counter.getValue();
        totalNodes += numOutput;
        cutbyDEE += counters.findCounter("MyCounter", "Cut By DEE").getValue();
        searchedNodes += totalNodes + cutbyDEE + counters.findCounter("MyCounter", "Cut By Bound").getValue();
        System.out.println(numOutput + " " + (8 * n + 10) + " " + (numOutput * (8 * n + 10) / 768));
    }
    System.out.println("searchedNodes " + searchedNodes);
    System.out.println(totalNodes);
    System.out.println("cut by dee " + cutbyDEE);
}
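
A note on the counter lookup above: in the mapreduce API, Counters.findCounter(String group, String name) creates the named counter on first access rather than returning null, which is why the driver can read the "MyCounter" totals without a null check.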

From source file:cascading.stats.hadoop.HadoopNodeCounterCache.java

License:Open Source License

protected Map<String, Map<String, Long>> getCounters(FlowNodeStats flowNodeStats) throws IOException {
    // will use final or cached remote stats
    flowNodeStats.captureDetail(CascadingStats.Type.SLICE);

    Map<String, Map<String, Long>> allCounters = new HashMap<>();

    Collection<FlowSliceStats> children = flowNodeStats.getChildren();

    for (FlowSliceStats sliceStats : children) {
        TaskReport taskReport = ((HadoopSliceStats) sliceStats).getTaskReport();

        Counters counters = taskReport.getTaskCounters();

        for (CounterGroup group : counters) {
            Map<String, Long> values = allCounters.get(group.getName());

            if (values == null) {
                values = new HashMap<>();
                allCounters.put(group.getName(), values);
            }

            for (Counter counter : group) {
                Long value = values.get(counter.getName());

                if (value == null)
                    value = 0L;

                value += counter.getValue();

                values.put(counter.getName(), value);
            }
        }
    }

    return allCounters;
}
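
The null check and accumulation in the inner loop above can be written more compactly on Java 8+; a sketch of the equivalent loop using Map.computeIfAbsent and Map.merge, not taken from the Cascading source:

for (CounterGroup group : counters) {
    Map<String, Long> values = allCounters.computeIfAbsent(group.getName(), k -> new HashMap<>());

    // merge() adds this task's value to any total already accumulated
    for (Counter counter : group)
        values.merge(counter.getName(), counter.getValue(), Long::sum);
}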

From source file:cascading.stats.hadoop.HadoopSliceStats.java

License:Open Source License

private void setCounters(TaskReport taskReport) {
    this.counters = new HashMap<>();

    Counters hadoopCounters = taskReport.getTaskCounters();

    for (CounterGroup group : hadoopCounters) {
        Map<String, Long> values = new HashMap<String, Long>();

        this.counters.put(group.getName(), values);

        for (Counter counter : group)
            values.put(counter.getName(), counter.getValue());
    }
}

From source file:cascading.stats.hadoop.HadoopStepCounterCache.java

License:Open Source License

@Override
protected long getCounterValue(Counters counters, Enum counter) {
    Counter result = counters.findCounter(counter);

    if (result == null)
        return 0;

    return result.getValue();
}

From source file:cascading.stats.hadoop.HadoopStepCounterCache.java

License:Open Source License

@Override
protected long getCounterValue(Counters counters, String groupName, String counterName) {
    CounterGroup counterGroup = counters.getGroup(groupName);

    if (counterGroup == null)
        return 0;

    // getCounter actually searches the display name, wtf
    // in theory this is lazily created if does not exist, but don't rely on it
    Counter counterValue = counterGroup.findCounter(counterName);

    if (counterValue == null)
        return 0;

    return counterValue.getValue();
}

From source file:co.cask.cdap.internal.app.runtime.workflow.BasicWorkflowToken.java

License:Apache License

public synchronized void setMapReduceCounters(Counters counters) {
    ImmutableMap.Builder<String, Map<String, Long>> countersBuilder = ImmutableMap.builder();

    for (CounterGroup group : counters) {
        ImmutableMap.Builder<String, Long> groupBuilder = ImmutableMap.builder();
        for (Counter counter : group) {
            groupBuilder.put(counter.getName(), counter.getValue());
            // Also put the counter to system scope.
            put(group.getName() + "." + counter.getName(), Value.of(counter.getValue()),
                    WorkflowToken.Scope.SYSTEM);
        }
        countersBuilder.put(group.getName(), groupBuilder.build());
    }

    this.mapReduceCounters = countersBuilder.build();
}

From source file:co.cask.cdap.internal.app.runtime.workflow.MapReduceProgramWorkflowRunner.java

License:Apache License

private void updateWorkflowToken(MapReduceContext context) throws Exception {
    Map<String, Map<String, Long>> mapReduceCounters = Maps.newHashMap();
    WorkflowToken workflowTokenFromContext = context.getWorkflowToken();

    if (workflowTokenFromContext == null) {
        throw new IllegalStateException(
                "WorkflowToken cannot be null when the " + "MapReduce program is started by Workflow.");
    }

    Counters counters = ((Job) context.getHadoopJob()).getCounters();
    for (CounterGroup group : counters) {
        mapReduceCounters.put(group.getName(), new HashMap<String, Long>());
        for (Counter counter : group) {
            mapReduceCounters.get(group.getName()).put(counter.getName(), counter.getValue());
            ((BasicWorkflowToken) workflowTokenFromContext).put(group.getName() + "." + counter.getName(),
                    Value.of(counter.getValue()), WorkflowToken.Scope.SYSTEM);
        }
    }

    ((BasicWorkflowToken) workflowTokenFromContext).setMapReduceCounters(mapReduceCounters);
    ((BasicWorkflowToken) token).mergeToken((BasicWorkflowToken) workflowTokenFromContext);
}