Example usage for org.apache.hadoop.mapreduce OutputCommitter commitTask

Introduction

On this page you can find usage examples for the org.apache.hadoop.mapreduce OutputCommitter method commitTask.

Prototype

public abstract void commitTask(TaskAttemptContext taskContext) throws IOException;

Document

Promotes the task's temporary output to its final output location.
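
For orientation, the following is a minimal sketch of the call sequence that normally surrounds commitTask, assuming a FileOutputCommitter and a TaskAttemptContext supplied by the framework; the class name CommitTaskSketch and the method commitIfNeeded are illustrative, not part of Hadoop.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;

public class CommitTaskSketch {
    static void commitIfNeeded(Path outputDir, TaskAttemptContext context) throws IOException {
        OutputCommitter committer = new FileOutputCommitter(outputDir, context);
        committer.setupTask(context);            // per-task setup (a no-op for FileOutputCommitter)
        // ... the task writes its output to the temporary attempt location here ...
        if (committer.needsTaskCommit(context))  // some attempts have nothing to promote
            committer.commitTask(context);       // move temporary output to the final location
    }
}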

Usage

From source file: org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2Task.java

License: Open Source License

/**
 * Commit task.
 *
 * @param outputFormat Output format.
 * @throws GridException In case of Grid exception.
 * @throws IOException In case of IO exception.
 * @throws InterruptedException In case of interrupt.
 */
protected void commit(@Nullable OutputFormat outputFormat)
        throws GridException, IOException, InterruptedException {
    if (hadoopCtx.writer() != null) {
        assert outputFormat != null;

        OutputCommitter outputCommitter = outputFormat.getOutputCommitter(hadoopCtx);

        if (outputCommitter.needsTaskCommit(hadoopCtx))
            outputCommitter.commitTask(hadoopCtx);
    }
}
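
The helper above only handles the success path; a failed attempt would be discarded through abortTask instead. The counterpart below is a sketch, not part of the GridHadoopV2Task source, and reuses the same hadoopCtx field purely for illustration.

/**
 * Illustrative counterpart to commit(): discard a failed task's temporary output.
 */
protected void abort(@Nullable OutputFormat outputFormat) throws IOException, InterruptedException {
    if (hadoopCtx.writer() != null) {
        assert outputFormat != null;

        // abortTask removes whatever the attempt wrote to its temporary location.
        outputFormat.getOutputCommitter(hadoopCtx).abortTask(hadoopCtx);
    }
}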

From source file: org.mrgeo.hadoop.multipleoutputs.DirectoryMultipleOutputsCommitter.java

License: Apache License

@Override
public void commitTask(final TaskAttemptContext taskContext) throws IOException {
    // Fan the task commit out to every wrapped committer.
    for (final OutputCommitter that : committers) {
        that.commitTask(taskContext);
    }
}
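
commitTask here simply fans out to every wrapped committer. Such a composite would typically answer needsTaskCommit by polling its delegates as well; the override below is a hedged sketch, not the actual DirectoryMultipleOutputsCommitter source.

// Sketch only: report a pending commit if any wrapped committer has one.
@Override
public boolean needsTaskCommit(final TaskAttemptContext taskContext) throws IOException {
    for (final OutputCommitter that : committers) {
        if (that.needsTaskCommit(taskContext)) {
            return true;
        }
    }
    return false;
}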

From source file: org.tensorflow.hadoop.io.TFRecordFileTest.java

License: Open Source License

@Test
public void testInputOutputFormat() throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf);

    Path outdir = new Path(System.getProperty("test.build.data", "/tmp"), "tfr-test");

    TFRecordFileOutputFormat.setOutputPath(job, outdir);

    TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
    OutputFormat<BytesWritable, NullWritable> outputFormat = new TFRecordFileOutputFormat();
    OutputCommitter committer = outputFormat.getOutputCommitter(context);
    committer.setupJob(job);
    RecordWriter<BytesWritable, NullWritable> writer = outputFormat.getRecordWriter(context);

    // Write Example records containing random numbers
    Random rand = new Random();
    Map<Long, Long> records = new TreeMap<Long, Long>();
    try {
        for (int i = 0; i < RECORDS; ++i) {
            long randValue = rand.nextLong();
            records.put((long) i, randValue);
            Int64List data = Int64List.newBuilder().addValue(i).addValue(randValue).build();
            Feature feature = Feature.newBuilder().setInt64List(data).build();
            Features features = Features.newBuilder().putFeature("data", feature).build();
            Example example = Example.newBuilder().setFeatures(features).build();
            BytesWritable key = new BytesWritable(example.toByteArray());
            writer.write(key, NullWritable.get());
        }
    } finally {
        writer.close(context);
    }
    committer.commitTask(context);
    committer.commitJob(job);

    // Read and compare
    TFRecordFileInputFormat.setInputPaths(job, outdir);
    InputFormat<BytesWritable, NullWritable> inputFormat = new TFRecordFileInputFormat();
    for (InputSplit split : inputFormat.getSplits(job)) {
        RecordReader<BytesWritable, NullWritable> reader = inputFormat.createRecordReader(split, context);
        MapContext<BytesWritable, NullWritable, BytesWritable, NullWritable> mcontext = new MapContextImpl<BytesWritable, NullWritable, BytesWritable, NullWritable>(
                job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        try {
            while (reader.nextKeyValue()) {
                BytesWritable bytes = reader.getCurrentKey();
                Example example = Example.parseFrom(bytes.copyBytes()); // copyBytes() trims to the valid length
                Int64List data = example.getFeatures().getFeatureMap().get("data").getInt64List();
                Long key = data.getValue(0);
                Long value = data.getValue(1);
                assertEquals(records.get(key), value);
                records.remove(key);
            }
        } finally {
            reader.close();
        }
    }
    assertEquals(0, records.size());
}
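
The test exercises only the success path. If the write phase failed, the attempt's output would normally be discarded through the abort calls rather than committed; a minimal sketch, assuming the same committer, context, and job variables as in the test above:

// Illustrative failure handling (not part of TFRecordFileTest): discard the
// attempt's output instead of committing when the write phase throws.
try {
    // ... write records as above ...
    committer.commitTask(context);
    committer.commitJob(job);
} catch (Exception e) {
    committer.abortTask(context);
    committer.abortJob(job, org.apache.hadoop.mapreduce.JobStatus.State.FAILED);
    throw e;
}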