Example usage for org.apache.hadoop.mapreduce TaskAttemptContext getOutputValueClass

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.TaskAttemptContext.getOutputValueClass.

Prototype

public Class<?> getOutputValueClass();

Document

Get the value class for job outputs.
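What this method returns is whatever the job driver declared through Job.setOutputValueClass. A minimal driver fragment, using illustrative Text and LongWritable types:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

Job job = Job.getInstance(new Configuration(), "example");
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
// Inside an OutputFormat, context.getOutputValueClass() now returns
// LongWritable.class, which is what the examples below rely on.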

Usage

From source file: org.apache.avro.mapreduce.AvroKeyValueOutputFormat.java

License: Apache License

/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
    Configuration conf = context.getConfiguration();

    AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(conf);

    AvroDatumConverter<K, ?> keyConverter = converterFactory.create((Class<K>) context.getOutputKeyClass());
    AvroDatumConverter<V, ?> valueConverter = converterFactory.create((Class<V>) context.getOutputValueClass());

    GenericData dataModel = AvroSerialization.createDataModel(conf);

    return new AvroKeyValueRecordWriter<K, V>(keyConverter, valueConverter, dataModel,
            getCompressionCodec(context), getAvroFileOutputStream(context));
}
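
For context, a driver fragment that would exercise this writer could look as follows; the Text key and DOUBLE value schema are illustrative choices, not taken from the Avro source:

import org.apache.avro.Schema;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyValueOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

Job job = Job.getInstance();
job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
// A plain Writable key: the converter factory maps Text to an Avro string.
job.setOutputKeyClass(Text.class);
// An Avro-typed value: registering a schema makes the job's output value
// class AvroValue, which the converter factory unwraps.
AvroJob.setOutputValueSchema(job, Schema.create(Schema.Type.DOUBLE));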

From source file: org.apache.avro.mapreduce.AvroSequenceFileOutputFormat.java

License: Apache License

/** {@inheritDoc} */
@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();

    // Configure compression if requested.
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // Find the kind of compression to do.
        compressionType = getOutputCompressionType(conf);

        // Find the right codec.
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }

    // Get the path of the output file.
    Path outputFile = getDefaultWorkFile(context, "");
    FileSystem fs = outputFile.getFileSystem(conf);

    // Configure the writer.
    AvroSequenceFile.Writer.Options options = new AvroSequenceFile.Writer.Options().withFileSystem(fs)
            .withConfiguration(conf).withOutputPath(outputFile).withKeyClass(context.getOutputKeyClass())
            .withValueClass(context.getOutputValueClass()).withProgressable(context)
            .withCompressionType(compressionType).withCompressionCodec(codec);
    Schema keySchema = AvroJob.getOutputKeySchema(conf);
    if (null != keySchema) {
        options.withKeySchema(keySchema);
    }
    Schema valueSchema = AvroJob.getOutputValueSchema(conf);
    if (null != valueSchema) {
        options.withValueSchema(valueSchema);
    }
    final SequenceFile.Writer out = AvroSequenceFile.createWriter(options);

    return new RecordWriter<K, V>() {
        @Override
        public void write(K key, V value) throws IOException {
            out.append(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}
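
A possible driver-side setup for this format is sketched below; it assumes a setOutputCompressionType helper mirroring the getOutputCompressionType call above, and the output path and key/value types are illustrative:

import org.apache.avro.mapreduce.AvroSequenceFileOutputFormat;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

Job job = Job.getInstance();
job.setOutputFormatClass(AvroSequenceFileOutputFormat.class);
FileOutputFormat.setOutputPath(job, new Path("/tmp/avro-seqfile"));  // illustrative path
FileOutputFormat.setCompressOutput(job, true);
AvroSequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
// Plain Writable types work too; keys and values only become Avro datums
// when a schema is registered through AvroJob.
job.setOutputKeyClass(LongWritable.class);
job.setOutputValueClass(Text.class);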

From source file: org.apache.avro.mapreduce.DirectAvroKeyValueOutputFormat.java

License: Apache License

/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
    Configuration conf = context.getConfiguration();

    AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(conf);

    AvroDatumConverter<K, ?> keyConverter = converterFactory.create((Class<K>) context.getOutputKeyClass());
    AvroDatumConverter<V, ?> valueConverter = converterFactory.create((Class<V>) context.getOutputValueClass());

    GenericData dataModel = AvroSerialization.createDataModel(conf);

    return new AvroKeyValueRecordWriter<K, V>(keyConverter, valueConverter, dataModel,
            getCompressionCodec(context), getAvroFileOutputStream(context), getSyncInterval(context));
}
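
All of the getRecordWriter implementations above lean on the same converter lookup. A standalone sketch of that resolution, with an illustrative IntWritable input class:

import org.apache.avro.Schema;
import org.apache.avro.hadoop.io.AvroDatumConverter;
import org.apache.avro.hadoop.io.AvroDatumConverterFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;

AvroDatumConverterFactory factory = new AvroDatumConverterFactory(new Configuration());
// The factory picks a converter by inspecting the class, here mapping
// IntWritable to an Avro "int" writer schema.
AvroDatumConverter<IntWritable, ?> converter = factory.create(IntWritable.class);
Schema writerSchema = converter.getWriterSchema();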

From source file: org.apache.trevni.avro.mapreduce.AvroTrevniKeyValueRecordWriter.java

License: Apache License

/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
protected Schema initSchema(TaskAttemptContext context) {
    AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(context.getConfiguration());

    keyConverter = converterFactory.create((Class<K>) context.getOutputKeyClass());
    valueConverter = converterFactory.create((Class<V>) context.getOutputValueClass());

    // Create the generic record schema for the key/value pair.
    return AvroKeyValue.getSchema(keyConverter.getWriterSchema(), valueConverter.getWriterSchema());
}
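
The AvroKeyValue.getSchema call wraps the two writer schemas into a generic key/value record. A small illustration with primitive schemas:

import org.apache.avro.Schema;
import org.apache.avro.hadoop.io.AvroKeyValue;

Schema keySchema = Schema.create(Schema.Type.STRING);
Schema valueSchema = Schema.create(Schema.Type.LONG);
// Builds a record schema with two fields, "key" and "value", typed by the
// supplied schemas.
Schema pairSchema = AvroKeyValue.getSchema(keySchema, valueSchema);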

From source file: org.kiji.avro.mapreduce.AvroKeyValueOutputFormat.java

License: Apache License

/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
    AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(context.getConfiguration());

    AvroDatumConverter<K, ?> keyConverter = converterFactory.create((Class<K>) context.getOutputKeyClass());
    AvroDatumConverter<V, ?> valueConverter = converterFactory.create((Class<V>) context.getOutputValueClass());

    return new AvroKeyValueRecordWriter<K, V>(keyConverter, valueConverter, getCompressionCodec(context),
            getAvroFileOutputStream(context));
}

From source file: org.mrgeo.hdfs.output.image.HdfsMrsPyramidOutputFormat.java

License: Apache License

protected MapFile.Writer createMapFileWriter(TaskAttemptContext context, CompressionCodec codec,
        SequenceFile.CompressionType compressionType, Path file) throws IOException {
    return new MapFile.Writer(context.getConfiguration(), file,
            MapFile.Writer.keyClass(context.getOutputKeyClass().asSubclass(WritableComparable.class)),
            MapFile.Writer.valueClass(context.getOutputValueClass().asSubclass(Writable.class)),
            MapFile.Writer.compression(compressionType, codec), MapFile.Writer.progressable(context));
}
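
The asSubclass calls only succeed when the driver declared Writable-compatible output types. A fragment that satisfies them, with illustrative concrete classes:

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;

Job job = Job.getInstance();
// The key must implement WritableComparable and the value Writable;
// otherwise the asSubclass(...) calls above throw ClassCastException.
job.setOutputKeyClass(LongWritable.class);
job.setOutputValueClass(BytesWritable.class);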

From source file: org.mrgeo.hdfs.output.MapFileOutputFormat.java

License: Apache License

@Override
public RecordWriter<WritableComparable<?>, Writable> getRecordWriter(TaskAttemptContext context)
        throws IOException {
    Configuration conf = context.getConfiguration();
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = SequenceFileOutputFormat.getOutputCompressionType(context);

        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }

    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);
    // ignore the progress parameter, since MapFile is local
    final MapFile.Writer out = new MapFile.Writer(conf, fs, file.toString(),
            context.getOutputKeyClass().asSubclass(WritableComparable.class),
            context.getOutputValueClass().asSubclass(Writable.class), compressionType, codec, context);

    return new RecordWriter<WritableComparable<?>, Writable>() {
        @Override
        public void write(WritableComparable<?> key, Writable value) throws IOException {
            out.append(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}
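
Reading such output back is the natural counterpart; a sketch using MapFile.Reader, where the path and key/value types are illustrative and must match what the job actually wrote:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapFile;

Configuration conf = new Configuration();
try (MapFile.Reader reader = new MapFile.Reader(new Path("/tmp/mapfile-out/part-r-00000"), conf)) {
    LongWritable key = new LongWritable(42L);
    BytesWritable value = new BytesWritable();
    // get() positions on the key and fills in the value, returning null
    // when the key is absent.
    if (reader.get(key, value) != null) {
        System.out.println("found " + value.getLength() + " bytes");
    }
}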