Example usage for org.apache.hadoop.mapreduce TaskAttemptContext getOutputValueClass


Introduction

This page lists example usages of org.apache.hadoop.mapreduce.TaskAttemptContext.getOutputValueClass(), collected from open-source projects.

Prototype

public Class<?> getOutputValueClass();


Document

Get the value class for job outputs.
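
Before the project examples, here is a minimal sketch of how the call is typically consumed. The class name ValueClassLoggingOutputFormat, the .txt extension, and the header line are illustrative assumptions, not taken from any project below; real formats use the value class to pick a serializer, as the examples show.

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Hypothetical format for illustration only.
public class ValueClassLoggingOutputFormat<V> extends FileOutputFormat<NullWritable, V> {

    @Override
    public RecordWriter<NullWritable, V> getRecordWriter(TaskAttemptContext context)
            throws IOException, InterruptedException {
        // The class configured via Job.setOutputValueClass(...).
        Class<?> valueClass = context.getOutputValueClass();

        Path file = getDefaultWorkFile(context, ".txt");
        FileSystem fs = file.getFileSystem(context.getConfiguration());
        final FSDataOutputStream out = fs.create(file, false);

        // Record the configured class, then write one line per value.
        out.writeBytes("# value class: " + valueClass.getName() + "\n");

        return new RecordWriter<NullWritable, V>() {
            @Override
            public void write(NullWritable key, V value) throws IOException {
                out.writeBytes(value.toString() + "\n");
            }

            @Override
            public void close(TaskAttemptContext ctx) throws IOException {
                out.close();
            }
        };
    }
}

On the job side this pairs with job.setOutputValueClass(...), which is the value getOutputValueClass() reports back.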

Usage

From source file:com.asakusafw.runtime.stage.output.TemporaryOutputFormat.java

License:Apache License
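
Here the configured output value class is cast to the writer's type parameter and passed, together with a file name read from the configuration, to createRecordWriter.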

@Override
public RecordWriter<NullWritable, T> getRecordWriter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    @SuppressWarnings("unchecked")
    Class<T> valueClass = (Class<T>) context.getOutputValueClass();
    String name = context.getConfiguration().get(KEY_FILE_NAME, DEFAULT_FILE_NAME);
    return createRecordWriter(context, name, valueClass);
}

From source file:com.linkedin.cubert.io.rubix.RubixOutputFormat.java

License:Open Source License
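
This format resolves an optional compression codec from the configuration, then hands both getOutputKeyClass() and getOutputValueClass() to the RubixRecordWriter constructor.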

@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    String extension = RubixConstants.RUBIX_EXTENSION;

    CompressionCodec codec = null;
    boolean isCompressed = getCompressOutput(context);

    if (isCompressed) {
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
        extension += codec.getDefaultExtension();
    }

    Path file = getDefaultWorkFile(context, extension);
    FileSystem fs = file.getFileSystem(conf);

    FSDataOutputStream fileOut = fs.create(file, false);
    return new RubixRecordWriter<K, V>(conf, fileOut, context.getOutputKeyClass(),
            context.getOutputValueClass(), codec);
}

From source file:com.linkedin.json.JsonSequenceFileOutputFormat.java

License:Apache License
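
Adapted from Hadoop's SequenceFileOutputFormat, this version stores JSON key/value schemas in SequenceFile metadata; the output key and value classes are passed to SequenceFile.createWriter, and the returned RecordWriter serializes each pair to BytesWritable.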

@Override
public RecordWriter<Object, Object> getRecordWriter(final TaskAttemptContext context)
        throws IOException, InterruptedException {
    // Shamelessly copy in hadoop code to allow us to set the metadata with our schema

    Configuration conf = context.getConfiguration();

    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = SequenceFileOutputFormat.getOutputCompressionType(context);

        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }
    // get the path of the temporary output file
    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);

    final String keySchema = getSchema("output.key.schema", conf);
    final String valueSchema = getSchema("output.value.schema", conf);

    /* begin cheddar's stealing of jay's code */
    SequenceFile.Metadata meta = new SequenceFile.Metadata();

    meta.set(new Text("key.schema"), new Text(keySchema));
    meta.set(new Text("value.schema"), new Text(valueSchema));

    final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, context.getOutputKeyClass(),
            context.getOutputValueClass(), compressionType, codec, context, meta);
    /* end cheddar's stealing of jay's code */

    final JsonTypeSerializer keySerializer = new JsonTypeSerializer(keySchema);
    final JsonTypeSerializer valueSerializer = new JsonTypeSerializer(valueSchema);

    return new RecordWriter<Object, Object>() {

        public void write(Object key, Object value) throws IOException {

            out.append(new BytesWritable(keySerializer.toBytes(key)),
                    new BytesWritable(valueSerializer.toBytes(value)));
            context.progress();
        }

        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}

From source file:com.scaleoutsoftware.soss.hserver.GridOutputFormat.java

License:Apache License
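
Depending on the configuration, this format writes either to a NamedMap or a NamedCache; on the NamedCache branch, getOutputValueClass() decides whether a WritableSerializer should be installed.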

@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {

    Configuration configuration = taskAttemptContext.getConfiguration();

    if (configuration.getBoolean(outputIsNamedMapProperty, false)) { //This is a NamedMap
        String mapName = configuration.get(outputNamedMapProperty);
        Class<CustomSerializer<K>> keySerializerClass = (Class<CustomSerializer<K>>) configuration
                .getClass(outputNamedMapKeySerializerProperty, null);
        Class<CustomSerializer<V>> valueSerializerClass = (Class<CustomSerializer<V>>) configuration
                .getClass(outputNamedMapValueSerializerProperty, null);
        int smOrdinal = configuration.getInt(SERIALIZATION_MODE, SerializationMode.DEFAULT.ordinal());
        int amOrdinal = configuration.getInt(AVAILABILITY_MODE, AvailabilityMode.USE_REPLICAS.ordinal());
        SerializationMode serializationMode = SerializationMode.values()[smOrdinal];
        AvailabilityMode availabilityMode = AvailabilityMode.values()[amOrdinal];

        if (mapName == null || mapName.length() == 0 || keySerializerClass == null
                || valueSerializerClass == null) {
            throw new IOException("Input format is not configured with a valid NamedMap.");
        }

        CustomSerializer<K> keySerializer = ReflectionUtils.newInstance(keySerializerClass, configuration);
        keySerializer.setObjectClass((Class<K>) configuration.getClass(outputNamedMapKeyProperty, null));
        CustomSerializer<V> valueSerializer = ReflectionUtils.newInstance(valueSerializerClass, configuration);
        valueSerializer.setObjectClass((Class<V>) configuration.getClass(outputNamedMapValueProperty, null));
        NamedMap<K, V> namedMap = NamedMapFactory.getMap(mapName, keySerializer, valueSerializer);
        namedMap.setSerializationMode(serializationMode);
        namedMap.setAvailabilityMode(availabilityMode);
        return new NamedMapRecordWriter<K, V>(namedMap);
    } else { //This is a NamedCache
        String cacheName = configuration.get(outputNamedCacheProperty);
        if (cacheName == null || cacheName.length() == 0)
            throw new IOException("Output NamedCache not specified.");

        NamedCache cache;

        try {
            cache = CacheFactory.getCache(cacheName);
        } catch (NamedCacheException e) {
            throw new IOException("Cannot initialize NamedCache.", e);
        }

        Class valueClass = taskAttemptContext.getOutputValueClass();
        if (Writable.class.isAssignableFrom(valueClass)) {
            cache.setCustomSerialization(new WritableSerializer(valueClass));
        }

        return new NamedCacheRecordWriter<K, V>(cache);
    }
}

From source file:com.skp.experiment.common.mapreduce.MapFileOutputFormat.java

License:Apache License
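
The output key and value classes are narrowed with asSubclass to WritableComparable and Writable before constructing a MapFile.Writer.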

public RecordWriter<WritableComparable<?>, Writable> getRecordWriter(TaskAttemptContext context)
        throws IOException {
    Configuration conf = context.getConfiguration();
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = SequenceFileOutputFormat.getOutputCompressionType(context);

        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }

    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);
    // ignore the progress parameter, since MapFile is local
    final MapFile.Writer out = new MapFile.Writer(conf, fs, file.toString(),
            context.getOutputKeyClass().asSubclass(WritableComparable.class),
            context.getOutputValueClass().asSubclass(Writable.class), compressionType, codec, context);

    return new RecordWriter<WritableComparable<?>, Writable>() {
        public void write(WritableComparable<?> key, Writable value) throws IOException {
            out.append(key, value);
        }

        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}

From source file:datafu.hourglass.avro.AvroKeyValueWithMetadataOutputFormat.java

License:Apache License
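
Both output classes feed an AvroDatumConverterFactory, which supplies the converters the Avro record writer uses for keys and values.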

/** {@inheritDoc} */
@Override
@SuppressWarnings("unchecked")
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
    AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(context.getConfiguration());

    AvroDatumConverter<K, ?> keyConverter = converterFactory.create((Class<K>) context.getOutputKeyClass());
    AvroDatumConverter<V, ?> valueConverter = converterFactory.create((Class<V>) context.getOutputValueClass());

    return new AvroKeyValueWithMetadataRecordWriter<K, V>(keyConverter, valueConverter,
            getCompressionCodec(context), getAvroFileOutputStream(context), context.getConfiguration());
}

From source file:edu.arizona.cs.hadoop.fs.irods.output.HirodsMapFileOutputFormat.java

License:Apache License
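
An iRODS-backed variant of the MapFile example above, using the same asSubclass narrowing for the MapFile.Writer.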

@Override
public RecordWriter<WritableComparable<?>, Writable> getRecordWriter(TaskAttemptContext context)
        throws IOException {
    Configuration conf = context.getConfiguration();
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = HirodsSequenceFileOutputFormat.getOutputCompressionType(context);

        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }

    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);
    // ignore the progress parameter, since MapFile is local
    final MapFile.Writer out = new MapFile.Writer(conf, fs, file.toString(),
            context.getOutputKeyClass().asSubclass(WritableComparable.class),
            context.getOutputValueClass().asSubclass(Writable.class), compressionType, codec, context);

    return new RecordWriter<WritableComparable<?>, Writable>() {
        public void write(WritableComparable<?> key, Writable value) throws IOException {
            out.append(key, value);
        }

        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}

From source file:edu.arizona.cs.hadoop.fs.irods.output.HirodsSequenceFileOutputFormat.java

License:Apache License
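
The SequenceFile counterpart of the previous example: the output key and value classes select the writer's serialization.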

@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();

    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = getOutputCompressionType(context);

        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }
    // get the path of the temporary output file 
    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);
    final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, context.getOutputKeyClass(),
            context.getOutputValueClass(), compressionType, codec, context);

    return new RecordWriter<K, V>() {

        @Override
        public void write(K key, V value) throws IOException {

            out.append(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}

From source file:edu.purdue.cs.HSPGiST.HadoopClasses.LocalHSPGiSTOutputFormat.java

License:Apache License
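
A compact case: both output classes go straight into a getWriter helper, and the returned RecordWriter simply appends pairs.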

public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    final Writer out = getWriter(context, context.getOutputKeyClass(), context.getOutputValueClass());

    return new RecordWriter<K, V>() {

        public void write(K key, V value) throws IOException {
            out.append(key, value);
        }

        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}

From source file:kogiri.common.hadoop.io.format.map.BloomMapFileOutputFormat.java

License:Apache License
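
Structurally identical to the MapFile examples, but producing a BloomMapFile.Writer.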

@Override
public RecordWriter<WritableComparable<?>, Writable> getRecordWriter(TaskAttemptContext context)
        throws IOException {
    Configuration conf = context.getConfiguration();
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = SequenceFileOutputFormat.getOutputCompressionType(context);

        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }

    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);
    // ignore the progress parameter, since MapFile is local
    final BloomMapFile.Writer out = new BloomMapFile.Writer(conf, fs, file.toString(),
            context.getOutputKeyClass().asSubclass(WritableComparable.class),
            context.getOutputValueClass().asSubclass(Writable.class), compressionType, codec, context);

    return new RecordWriter<WritableComparable<?>, Writable>() {
        @Override
        public void write(WritableComparable<?> key, Writable value) throws IOException {
            out.append(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}