Example usage for org.apache.hadoop.mapreduce TaskID getId

List of usage examples for org.apache.hadoop.mapreduce TaskID getId

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce TaskID getId.

Prototype

public int getId() 

Document

Returns the int which represents the identifier.
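
As a quick sketch of how the returned identifier is typically used, the snippet below derives a zero-padded part name for a task's output file, in the same spirit as the examples that follow. The class TaskIdExample, the partName helper, and the "part-%05d" pattern are illustrative choices, not part of the Hadoop API.

import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskID;

public class TaskIdExample {

    // Builds a zero-padded part name such as "part-00003" for the task
    // that owns the given TaskAttemptContext.
    public static String partName(TaskAttemptContext context) {
        TaskID taskId = context.getTaskAttemptID().getTaskID();
        int id = taskId.getId(); // the task's numeric index within the job, starting at 0
        return String.format("part-%05d", id); // illustrative naming pattern
    }
}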

Usage

From source file: parquet.hadoop.ParquetMultiOutputFormat.java

License: Apache License

/**
 * {@inheritDoc}
 */
@Override
public RecordWriter<K, T> getRecordWriter(TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {
    final Configuration conf = getConfiguration(taskAttemptContext);

    CompressionCodecName codec = getCodec(taskAttemptContext);
    String extension = codec.getExtension() + ".parquet";
    TaskID taskId = taskAttemptContext.getTaskAttemptID().getTaskID();

    Path workPath = ((ParquetMultiOutputCommitter) getOutputCommitter(taskAttemptContext)).getWorkPath();

    // format the numeric task id as a zero-padded suffix so each task writes a unique file
    return getRecordWriter(conf, workPath, extension, String.format("%05d", taskId.getId()), codec);
}

From source file: simsql.runtime.RecordOutputFormat.java

License: Apache License

public RecordWriter<WritableKey, WritableValue> getRecordWriter(TaskAttemptContext job)
        throws IOException, InterruptedException {

    Configuration conf = job.getConfiguration();

    // here's what we do -- if we have a map-only job and a value for
    // lastInputSplit as given to us by RecordInputFormat, then we
    // will get our part number from that file. otherwise, we'll use
    // the one we get from the job.

    // get the part from the job.
    TaskID taskId = job.getTaskAttemptID().getTaskID();
    int part = taskId.getId();
    if (RecordOutputFormat.lastInputSplit != null && taskId.getTaskType() == TaskType.MAP) {

        part = RecordOutputFormat.getPartNumber(RecordOutputFormat.lastInputSplit);
        System.out.println("MAP-ONLY JOB: USING PART NUMBER " + part + " FROM INPUT SPLIT");

        // set it back to null
        RecordOutputFormat.lastInputSplit = null;
    }

    FileOutputCommitter committer = (FileOutputCommitter) getOutputCommitter(job);
    Path file = new Path(committer.getWorkPath(), RecordOutputFormat.getFileNumber(part));

    /* Path file = getDefaultWorkFile (job, ".tbl"); */
    FileSystem fs = file.getFileSystem(conf);
    FSDataOutputStream fileOut = fs.create(file, false);
    return new OutputFileSerializer(fileOut);
}