Example usage for org.apache.hadoop.mapreduce Counters Counters

Introduction

On this page you can find example usage of the default constructor of org.apache.hadoop.mapreduce.Counters.

Prototype

public Counters() 

Document

Default constructor
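
Every usage example on this page follows the same pattern: a Counters instance created with the default constructor backs a mocked Mapper or Reducer context, so that context.getCounter(...) returns a live Counter whose value can be asserted after the call under test. Below is a minimal sketch of that pattern, assuming Mockito and JUnit static imports; MyCounters.RECORDS is a hypothetical enum constant used only for illustration and does not appear in the examples that follow.

    // Sketch only: MyCounters.RECORDS is a placeholder enum constant.
    Counters counters = new Counters();                           // default constructor: empty counter set
    Counter counter = counters.findCounter(MyCounters.RECORDS);   // counter is created on first lookup
    Mapper.Context context = mock(Mapper.Context.class);
    when(context.getCounter(MyCounters.RECORDS)).thenReturn(counter);

    context.getCounter(MyCounters.RECORDS).increment(1);          // code under test increments via the context
    assertEquals(1, counter.getValue());                          // the assertion sees the increment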

Usage

From source file: co.nubetech.hiho.merge.TestMergeKeyReducer.java

License: Apache License

@Test
public void testReducerForIntWritableKeyAndValue() throws IOException, InterruptedException {
    IntWritable key = new IntWritable(123);
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    HihoValue hihoValue1 = new HihoValue();
    HihoValue hihoValue2 = new HihoValue();
    IntWritable value1 = new IntWritable(456);
    IntWritable value2 = new IntWritable(789);
    hihoValue1.setVal(value1);
    hihoValue2.setVal(value2);
    hihoValue1.setIsOld(true);
    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();
    values.add(hihoValue1);
    values.add(hihoValue2);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeKeyReducer mergeReducer = new MergeKeyReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(key, value2);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}

From source file: co.nubetech.hiho.merge.TestMergeValueMapper.java

License: Apache License

@Test(expected = IOException.class)
public final void testMapperForNullKeyValue() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.BAD_RECORD);
    when(context.getCounter(MergeRecordCounter.BAD_RECORD)).thenReturn(counter);
    MergeValueMapper mapper = new MergeValueMapper();
    Text val = new Text("valueOfKey");
    mapper.map(null, val, context);
}

From source file: co.nubetech.hiho.merge.TestMergeValueMapper.java

License: Apache License

@Test
public final void testMapperValidValues() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.TOTAL_RECORDS_NEW);
    when(context.getCounter(MergeRecordCounter.TOTAL_RECORDS_NEW)).thenReturn(counter);
    MergeValueMapper mapper = new MergeValueMapper();
    Text key = new Text("abc123");
    Text val = new Text("valueOfKey");
    mapper.isOld = false;
    mapper.map(key, val, context);

    HihoValue hihoValue = new HihoValue();
    hihoValue.setVal(key);
    hihoValue.setIsOld(false);
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(val);
    verify(context).write(hihoTuple, hihoValue);
    assertEquals(1, context.getCounter(MergeRecordCounter.TOTAL_RECORDS_NEW).getValue());
}

From source file: co.nubetech.hiho.merge.TestMergeValueReducer.java

License: Apache License

@Test
public void testReducerValidValues() throws IOException, InterruptedException {
    Text key = new Text("key123");
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    HihoValue hihoValue1 = new HihoValue();
    HihoValue hihoValue2 = new HihoValue();
    Text value1 = new Text("value1");
    Text value2 = new Text("value2");
    hihoValue1.setVal(value1);
    hihoValue2.setVal(value2);
    hihoValue1.setIsOld(true);
    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();
    values.add(hihoValue1);
    values.add(hihoValue2);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(value2, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}

From source file: co.nubetech.hiho.merge.TestMergeValueReducer.java

License: Apache License

@Test
public void testReducerNullValues() throws IOException, InterruptedException {
    Text key = new Text("key123");
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(null, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}

From source file: co.nubetech.hiho.merge.TestMergeValueReducer.java

License: Apache License

@Test
public void testReducerForLongWritableKey() throws IOException, InterruptedException {
    LongWritable key = new LongWritable(Long.parseLong("123"));
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    HihoValue hihoValue1 = new HihoValue();
    HihoValue hihoValue2 = new HihoValue();
    Text value1 = new Text("value1");
    Text value2 = new Text("value2");
    hihoValue1.setVal(value1);
    hihoValue2.setVal(value2);
    hihoValue1.setIsOld(true);
    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();
    values.add(hihoValue1);
    values.add(hihoValue2);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(value2, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}

From source file: co.nubetech.hiho.merge.TestMergeValueReducer.java

License: Apache License

@Test
public void testReducerForBytesWritableKeyAndValue() throws IOException, InterruptedException {
    BytesWritable key = new BytesWritable("abc123".getBytes());
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    HihoValue hihoValue1 = new HihoValue();
    HihoValue hihoValue2 = new HihoValue();
    BytesWritable value1 = new BytesWritable("value1".getBytes());
    BytesWritable value2 = new BytesWritable("value2".getBytes());
    hihoValue1.setVal(value1);
    hihoValue2.setVal(value2);
    hihoValue1.setIsOld(true);
    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();
    values.add(hihoValue1);
    values.add(hihoValue2);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(value2, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}

From source file: co.nubetech.hiho.merge.TestMergeValueReducer.java

License: Apache License

@Test
public void testReducerForIntWritableKeyAndValue() throws IOException, InterruptedException {
    IntWritable key = new IntWritable(123);
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    HihoValue hihoValue1 = new HihoValue();
    HihoValue hihoValue2 = new HihoValue();
    IntWritable value1 = new IntWritable(456);
    IntWritable value2 = new IntWritable(789);
    hihoValue1.setVal(value1);
    hihoValue2.setVal(value2);
    hihoValue1.setIsOld(true);
    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();
    values.add(hihoValue1);
    values.add(hihoValue2);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(value2, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}

From source file: com.google.appengine.tools.mapreduce.MapReduceServletTest.java

License: Apache License

/**
 * Test that handleController has reasonable behavior when there are still
 * active workers.
 *
 * @throws EntityNotFoundException
 */
public void testHandleController_withContinue() throws EntityNotFoundException {
    JobID jobId = new JobID("foo", 1);
    HttpServletRequest request = createMockControllerRequest(0, jobId);
    replay(request);

    Configuration sampleConf = getSampleMapReduceConfiguration();

    persistMRState(jobId, sampleConf);

    ShardState shardState1 = ShardState.generateInitializedShardState(ds,
            new TaskAttemptID(new TaskID(jobId, true, 1), 1));
    Counters counters1 = new Counters();
    counters1.findCounter("a", "z").increment(1);
    shardState1.setCounters(counters1);
    shardState1.setInputSplit(sampleConf, new StubInputSplit(1));
    shardState1.setRecordReader(sampleConf, new StubRecordReader());
    shardState1.persist();

    ShardState shardState2 = ShardState.generateInitializedShardState(ds,
            new TaskAttemptID(new TaskID(jobId, true, 2), 1));
    Counters counters2 = new Counters();
    counters2.findCounter("a", "z").increment(1);
    shardState2.setCounters(counters2);
    shardState2.setInputSplit(sampleConf, new StubInputSplit(2));
    shardState2.setRecordReader(sampleConf, new StubRecordReader());
    shardState2.setDone();
    shardState2.persist();

    // doPost should call handleCallback()
    // resp is never used
    servlet.doPost(request, null);

    MapReduceState mrState = MapReduceState.getMapReduceStateFromJobID(ds, jobId);

    // Check result of aggregateState()
    assertEquals(2, mrState.getCounters().findCounter("a", "z").getValue());

    // Check the result of refillQuota()
    // Should fill the active thread but not the done one.
    assertEquals(1000, new QuotaManager(MemcacheServiceFactory.getMemcacheService())
            .get("" + shardState1.getTaskAttemptID()));
    assertEquals(0, new QuotaManager(MemcacheServiceFactory.getMemcacheService())
            .get("" + shardState2.getTaskAttemptID()));

    // Check that the next controller task got enqueued.
    QueueStateInfo defaultQueue = getDefaultQueueInfo();
    assertEquals(1, defaultQueue.getCountTasks());
    TaskStateInfo firstTask = defaultQueue.getTaskInfo().get(0);
    assertEquals("/mapreduce/" + MapReduceServlet.CONTROLLER_PATH, firstTask.getUrl());
    assertTrue(firstTask.getBody(), firstTask.getBody().indexOf("jobID=job_foo_0001") != -1);

    assertEquals(1, mrState.getActiveShardCount());
    assertEquals(2, mrState.getShardCount());

    verify(request);
}

From source file: com.google.appengine.tools.mapreduce.MapReduceState.java

License: Apache License

/**
 * Generates a MapReduceState that's configured with the given parameters, is
 * set as active, and has made no progress as of yet.
 *
 * The MapReduceState needs to have a configuration set via
 * {@code #setConfigurationXML(String)} before it can be persisted.
 * 
 * @param service the datastore to persist the MapReduceState to
 * @param name user visible name for this MapReduce
 * @param jobId the JobID this MapReduceState corresponds to
 * @param time start time for this MapReduce, in milliseconds from the epoch
 * @return the initialized MapReduceState
 */
public static MapReduceState generateInitializedMapReduceState(DatastoreService service, String name,
        JobID jobId, long time) {
    MapReduceState state = new MapReduceState(service);
    state.entity = new Entity("MapReduceState", jobId.toString());
    state.setName(name);
    state.entity.setProperty(PROGRESS_PROPERTY, 0.0);
    state.entity.setProperty(STATUS_PROPERTY, "" + Status.ACTIVE);
    state.entity.setProperty(START_TIME_PROPERTY, time);
    state.entity.setUnindexedProperty(CHART_PROPERTY, new Text(""));
    state.setCounters(new Counters());
    state.setActiveShardCount(0);
    state.setShardCount(0);
    return state;
}
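
As the Javadoc above notes, the returned state still needs its configuration attached via setConfigurationXML(String) before it can be persisted. A minimal usage sketch follows, assuming configXml already holds a Hadoop Configuration serialized as XML and that persist() behaves like ShardState.persist() in the servlet test above; both assumptions go beyond what this listing shows.

    // Sketch only: configXml is a placeholder for the job configuration serialized as XML;
    // persist() is assumed by analogy with ShardState.persist() and may differ in the real API.
    DatastoreService ds = DatastoreServiceFactory.getDatastoreService();
    JobID jobId = new JobID("foo", 1);
    MapReduceState state = MapReduceState.generateInitializedMapReduceState(
            ds, "example MapReduce", jobId, System.currentTimeMillis());
    state.setConfigurationXML(configXml); // required before the state can be persisted
    state.persist();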