Example usage for org.apache.hadoop.mapreduce TaskCounter MAP_INPUT_RECORDS

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce TaskCounter MAP_INPUT_RECORDS.

Prototype

TaskCounter MAP_INPUT_RECORDS

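Before the full examples, here is a minimal sketch (not taken from any of the projects below) of the usual driver-side way to read this counter: wait for the job to finish, fetch its aggregated Counters, and look up TaskCounter.MAP_INPUT_RECORDS. The class and method names are illustrative placeholders. Inside a Mapper, the same counter can instead be read through the task context, e.g. context.getCounter(TaskCounter.MAP_INPUT_RECORDS).getValue(), as the Schedoscope mappers below do.

// Minimal sketch, not from the examples below; names are illustrative placeholders.
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskCounter;

public class MapInputRecordsSketch {

    // Driver side: run the job, then read the aggregated map input record count.
    public static long readMapInputRecords(Job job) throws Exception {
        job.waitForCompletion(true);
        Counters counters = job.getCounters();
        return counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue();
    }
}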

Usage

From source file: org.apache.kylin.job.common.HadoopCmdOutput.java

License: Apache License

public void updateJobCounter() {
    try {
        Counters counters = job.getCounters();
        if (counters == null) {
            String errorMsg = "no counters for job " + getMrJobId();
            log.warn(errorMsg);
            output.append(errorMsg);
            return;
        }
        this.output.append(counters.toString()).append("\n");
        log.debug(counters.toString());

        // Read the map input record count from the job's aggregated counters
        mapInputRecords = String.valueOf(counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
        hdfsBytesWritten = String
                .valueOf(counters.findCounter("FileSystemCounters", "HDFS_BYTES_WRITTEN").getValue());
        hdfsBytesRead = String
                .valueOf(counters.findCounter("FileSystemCounters", "HDFS_BYTES_READ").getValue());
    } catch (Exception e) {
        log.error(e.getLocalizedMessage(), e);
        output.append(e.getLocalizedMessage());
    }
}

From source file: org.schedoscope.export.ftp.FtpExportCSVMapper.java

License: Apache License

@Override
protected void map(WritableComparable<?> key, HCatRecord value, Context context)
        throws IOException, InterruptedException {

    List<TextPairWritable> items = new ArrayList<TextPairWritable>();

    for (String f : inputSchema.getFieldNames()) {

        String fieldValue = "";

        Object obj = value.get(f, inputSchema);
        if (obj != null) {

            if (inputSchema.get(f).isComplex()) {
                fieldValue = serializer.getFieldAsJson(value, f);
            } else {
                fieldValue = obj.toString();
                fieldValue = HCatUtils.getHashValueIfInList(f, fieldValue, anonFields, salt);
            }
        }

        TextPairWritable item = new TextPairWritable(f, fieldValue);
        items.add(item);
    }

    TextPairArrayWritable record = new TextPairArrayWritable(Iterables.toArray(items, TextPairWritable.class));

    // Key each output record by the number of records this map task has read so far
    LongWritable localKey = new LongWritable(context.getCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
    context.write(localKey, record);
}

From source file: org.schedoscope.export.ftp.FtpExportJsonMapper.java

License: Apache License

@Override
protected void map(WritableComparable<?> key, HCatRecord value, Context context)
        throws IOException, InterruptedException {

    GenericRecord record = converter.convert(value, avroSchema);
    AvroValue<GenericRecord> recordWrapper = new AvroValue<GenericRecord>(record);

    LongWritable localKey = new LongWritable(context.getCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
    context.write(localKey, recordWrapper);
}

From source file: org.schedoscope.export.jdbc.JdbcExportMapper.java

License: Apache License

@Override
protected void map(WritableComparable<?> key, HCatRecord value, Context context)
        throws IOException, InterruptedException {

    List<Pair<String, String>> record = new ArrayList<Pair<String, String>>();

    for (String f : inputSchema.getFieldNames()) {

        String fieldValue = "NULL";
        String fieldType = typeMapping.get(columnTypes[inputSchema.getPosition(f)]);

        Object obj = value.get(f, inputSchema);
        if (obj != null) {

            if (inputSchema.get(f).isComplex()) {
                fieldValue = serializer.getFieldAsJson(value, f);
            } else {
                fieldValue = obj.toString();
                fieldValue = HCatUtils.getHashValueIfInList(f, fieldValue, anonFields, salt);
            }
        }
        record.add(Pair.of(fieldType, fieldValue));
    }

    String filterType = typeMapping.get(columnTypes[columnTypes.length - 1]);
    if (inputFilter == null) {
        record.add(Pair.of(filterType, "NULL"));
    } else {
        record.add(Pair.of(filterType, inputFilter));
    }

    LongWritable localKey = new LongWritable(context.getCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
    context.write(localKey, new JdbcOutputWritable(record));
}