Example usage for org.apache.hadoop.mapreduce TaskAttemptContext getConfiguration

List of usage examples for org.apache.hadoop.mapreduce TaskAttemptContext getConfiguration

Introduction

On this page you can find example usages for org.apache.hadoop.mapreduce TaskAttemptContext getConfiguration.

Prototype

public Configuration getConfiguration();

Source Link

Document

Return the configuration for the job.

Usage

From source file:com.scaleoutsoftware.soss.hserver.DatasetInputFormat.java

License:Apache License

/**
 * Creates a record reader for the given split. If the split is an
 * {@link ImageInputSplit}, the underlying input format's reader is created from
 * the split's fallback split and wrapped in a {@link DatasetRecordReader};
 * otherwise the split is passed to the underlying input format unchanged.
 *
 * @param split   the input split to read
 * @param context task attempt context supplying the job configuration
 * @return a record reader for the split
 * @throws IOException          if the underlying input format fails to create a reader
 * @throws InterruptedException if reader creation is interrupted
 */
@Override
public RecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    // Both branches delegate to the underlying input format; resolve it once.
    InputFormat<K, V> underlyingInputFormat = getUnderlyingInputFormat(context.getConfiguration());
    if (split instanceof ImageInputSplit) {
        RecordReader<K, V> underlyingRecordReader = underlyingInputFormat
                .createRecordReader(((ImageInputSplit) split).getFallbackInputSplit(), context);
        return new DatasetRecordReader<K, V>(underlyingRecordReader);
    } else {
        LOG.error("Input split is of unknown type, falling back to underlying input format.");
        return underlyingInputFormat.createRecordReader(split, context);
    }
}

From source file:com.scaleoutsoftware.soss.hserver.GridOutputFormat.java

License:Apache License

/**
 * Returns a record writer targeting either a {@link NamedMap} or a NamedCache,
 * selected by the {@code outputIsNamedMapProperty} flag in the job configuration.
 *
 * @param taskAttemptContext context supplying the job configuration
 * @return a writer for the configured output store
 * @throws IOException          if the output map/cache is not fully configured,
 *                              a mode ordinal is out of range, or the cache
 *                              cannot be initialized
 * @throws InterruptedException declared by the base class contract
 */
@Override
@SuppressWarnings("unchecked")
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {

    Configuration configuration = taskAttemptContext.getConfiguration();

    if (configuration.getBoolean(outputIsNamedMapProperty, false)) { // Output target is a NamedMap
        String mapName = configuration.get(outputNamedMapProperty);
        Class<CustomSerializer<K>> keySerializerClass = (Class<CustomSerializer<K>>) configuration
                .getClass(outputNamedMapKeySerializerProperty, null);
        Class<CustomSerializer<V>> valueSerializerClass = (Class<CustomSerializer<V>>) configuration
                .getClass(outputNamedMapValueSerializerProperty, null);
        int smOrdinal = configuration.getInt(SERIALIZATION_MODE, SerializationMode.DEFAULT.ordinal());
        int amOrdinal = configuration.getInt(AVAILABILITY_MODE, AvailabilityMode.USE_REPLICAS.ordinal());

        if (mapName == null || mapName.length() == 0 || keySerializerClass == null
                || valueSerializerClass == null) {
            // BUGFIX: message previously said "Input format", but this is the output format.
            throw new IOException("Output format is not configured with a valid NamedMap.");
        }

        // Guard against corrupt ordinal values before indexing the enum arrays;
        // otherwise a bad config value surfaces as ArrayIndexOutOfBoundsException.
        SerializationMode[] serializationModes = SerializationMode.values();
        AvailabilityMode[] availabilityModes = AvailabilityMode.values();
        if (smOrdinal < 0 || smOrdinal >= serializationModes.length || amOrdinal < 0
                || amOrdinal >= availabilityModes.length) {
            throw new IOException("Invalid serialization or availability mode ordinal in configuration.");
        }
        SerializationMode serializationMode = serializationModes[smOrdinal];
        AvailabilityMode availabilityMode = availabilityModes[amOrdinal];

        CustomSerializer<K> keySerializer = ReflectionUtils.newInstance(keySerializerClass, configuration);
        keySerializer.setObjectClass((Class<K>) configuration.getClass(outputNamedMapKeyProperty, null));
        CustomSerializer<V> valueSerializer = ReflectionUtils.newInstance(valueSerializerClass, configuration);
        valueSerializer.setObjectClass((Class<V>) configuration.getClass(outputNamedMapValueProperty, null));
        NamedMap<K, V> namedMap = NamedMapFactory.getMap(mapName, keySerializer, valueSerializer);
        namedMap.setSerializationMode(serializationMode);
        namedMap.setAvailabilityMode(availabilityMode);
        return new NamedMapRecordWriter<K, V>(namedMap);
    } else { // Output target is a NamedCache
        String cacheName = configuration.get(outputNamedCacheProperty);
        if (cacheName == null || cacheName.length() == 0)
            throw new IOException("Output NamedCache not specified.");

        NamedCache cache;

        try {
            cache = CacheFactory.getCache(cacheName);
        } catch (NamedCacheException e) {
            throw new IOException("Cannot initialize NamedCache.", e);
        }

        // Writable output values get a serializer that understands the Writable contract.
        Class valueClass = taskAttemptContext.getOutputValueClass();
        if (Writable.class.isAssignableFrom(valueClass)) {
            cache.setCustomSerialization(new WritableSerializer(valueClass));
        }

        return new NamedCacheRecordWriter<K, V>(cache);
    }
}

From source file:com.scaleoutsoftware.soss.hserver.NamedCacheInputFormat.java

License:Apache License

/**
 * Creates a reader that iterates cached objects from a NamedCache bucket. The
 * concrete type of the cached objects must have been recorded in the job
 * configuration under {@code inputObjectClassProperty}.
 *
 * @throws IOException if the cached object type was never configured
 */
@Override
@SuppressWarnings("unchecked")
public BucketRecordReader<V> createRecordReader(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {
    final Class cachedObjectType = taskAttemptContext.getConfiguration().getClass(inputObjectClassProperty,
            null);
    if (cachedObjectType == null) {
        throw new IOException("Type of the input objects in the NamedCache cache is not set.");
    }
    return new BucketRecordReader<V>(cachedObjectType);
}

From source file:com.scaleoutsoftware.soss.hserver.NamedMapInputFormat.java

License:Apache License

/**
 * Builds a {@link NamedMapReader} for the configured NamedMap. Key and value
 * serializers are instantiated reflectively from classes recorded in the job
 * configuration; the map id and both serializer classes must all be present.
 *
 * @throws IOException if the NamedMap configuration is incomplete
 */
@Override
@SuppressWarnings("unchecked")
public RecordReader<K, V> createRecordReader(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {
    final Configuration conf = taskAttemptContext.getConfiguration();

    final int appId = conf.getInt(inputAppIdProperty, 0);
    final Class<CustomSerializer<K>> keySerializerType = (Class<CustomSerializer<K>>) conf
            .getClass(inputNamedMapKeySerializerProperty, null);
    final Class<CustomSerializer<V>> valueSerializerType = (Class<CustomSerializer<V>>) conf
            .getClass(inputNamedMapValueSerializerProperty, null);

    if (appId == 0 || keySerializerType == null || valueSerializerType == null) {
        throw new IOException("Input format is not configured with a valid NamedMap.");
    }

    final CustomSerializer<K> keySerializer = ReflectionUtils.newInstance(keySerializerType, conf);
    keySerializer.setObjectClass((Class<K>) conf.getClass(inputNamedMapKeyProperty, null));

    final CustomSerializer<V> valueSerializer = ReflectionUtils.newInstance(valueSerializerType, conf);
    valueSerializer.setObjectClass((Class<V>) conf.getClass(inputNamedMapValueProperty, null));

    final SerializationMode serializationMode = SerializationMode.values()[conf.getInt(SERIALIZATION_MODE,
            SerializationMode.DEFAULT.ordinal())];

    return new NamedMapReader<K, V>(conf, appId, keySerializer, valueSerializer, serializationMode);
}

From source file:com.scaleoutsoftware.soss.hserver.ReplayingRecordReader.java

License:Apache License

/**
 * Prepares this reader to replay a previously recorded {@link ImageInputSplit}.
 * Looks up the bucket store for the split's image, reconstructs the key/value
 * prototype pair (either a fixed-size pair from the configuration, or
 * reflective instances of the classes recorded in the split), and starts a
 * {@link BucketReader} over the split's bucket.
 *
 * @throws IOException if the split was not recorded, the state server is
 *                     unreachable, or the key/value classes cannot be loaded
 *                     or instantiated
 */
@Override
@SuppressWarnings("unchecked")
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {

    inputSplit = (ImageInputSplit) split;

    // Fail fast: no point contacting the state server for an unrecorded split.
    if (!inputSplit.isRecorded()) {
        throw new IOException("Cannot replay split, it was not recorded");
    }

    BucketStore bucketStore;
    try {
        bucketStore = BucketStoreFactory.getBucketStore(inputSplit.getImageIdString());
    } catch (StateServerException e) {
        throw new IOException("Cannot access ScaleOut StateServer", e);
    }

    // Reuse the configuration instead of re-fetching it from the context below.
    Configuration conf = context.getConfiguration();

    KeyValuePair<K, V> keyValuePair;
    try {
        keyValuePair = DatasetInputFormat.createFixedKeyValueSizePair(conf);
        if (keyValuePair == null) { // No fixed-size KVP specified; build prototypes reflectively.
            Class keyClass = Class.forName(inputSplit.getKeyClass());
            Class valueClass = Class.forName(inputSplit.getValueClass());
            keyValuePair = new KeyValuePair<K, V>((K) ReflectionUtils.newInstance(keyClass, conf),
                    (V) ReflectionUtils.newInstance(valueClass, conf));
        }
    } catch (Exception e) {
        // Covers ClassNotFoundException from Class.forName as well as reflective
        // instantiation failures; the old message only mentioned lookup.
        throw new IOException("Cannot load or instantiate key or value class.", e);
    }

    bucketReader = new BucketReader<K, V>(bucketStore, inputSplit.getBucketId(),
            inputSplit.getNumberOfChunks(), conf, keyValuePair);
    bucketReader.startReading();

    numberProcessed = 0;
}

From source file:com.skp.experiment.common.mapreduce.MapFileOutputFormat.java

License:Apache License

/**
 * Opens a {@link MapFile.Writer} on the task's default work file and wraps it
 * in a record writer. Output compression settings are honored when compression
 * is enabled for the job.
 *
 * @param context the task attempt context
 * @return a record writer that appends (key, value) pairs to the map file
 * @throws IOException if the map file cannot be created
 */
public RecordWriter<WritableComparable<?>, Writable> getRecordWriter(TaskAttemptContext context)
        throws IOException {
    final Configuration conf = context.getConfiguration();

    CompressionType compressionType = CompressionType.NONE;
    CompressionCodec codec = null;
    if (getCompressOutput(context)) {
        // Resolve the compression type and codec configured for this job.
        compressionType = SequenceFileOutputFormat.getOutputCompressionType(context);
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }

    final Path outputFile = getDefaultWorkFile(context, "");
    final FileSystem fs = outputFile.getFileSystem(conf);
    // The progress parameter (context) is effectively ignored, since MapFile is local.
    final MapFile.Writer out = new MapFile.Writer(conf, fs, outputFile.toString(),
            context.getOutputKeyClass().asSubclass(WritableComparable.class),
            context.getOutputValueClass().asSubclass(Writable.class), compressionType, codec, context);

    return new RecordWriter<WritableComparable<?>, Writable>() {
        @Override
        public void write(WritableComparable<?> key, Writable value) throws IOException {
            out.append(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}

From source file:com.soteradefense.dga.io.formats.HBSEOutputFormatTest.java

License:Apache License

/**
 * Verifies that the vertex writer created with the default configuration emits
 * one (id, score) text pair per vertex through the underlying record writer.
 * Relies on test fixtures declared elsewhere in this class: {@code conf},
 * {@code tw} (mocked record writer), {@code vertex} and {@code vertex2}.
 */
@Test
public void testWriterDefault() throws IOException, InterruptedException {
    // Stub the task attempt context so the writer sees the shared test configuration.
    TaskAttemptContext tac = mock(TaskAttemptContext.class);
    when(tac.getConfiguration()).thenReturn(conf);
    TextVertexWriter writer = createVertexWriter(tac);
    writer.setConf(conf);
    writer.initialize(tac);
    writer.writeVertex(vertex);
    verify(tw).write(new Text("1"), new Text("0.0"));
    writer.writeVertex(vertex2);
    verify(tw).write(new Text("2"), new Text("1.0"));
}

From source file:com.sourcecode.TextInputFormat.java

License:Apache License

/**
 * Creates a {@link LineRecordReader}, honoring a custom record delimiter when
 * one is configured under {@code textinputformat.record.delimiter}.
 */
@Override
public RecordReader<LongWritable, Text> createRecordReader(InputSplit split, TaskAttemptContext context) {
    final String delimiter = context.getConfiguration().get("textinputformat.record.delimiter");
    final byte[] recordDelimiterBytes = (delimiter == null) ? null : delimiter.getBytes(Charsets.UTF_8);
    return new LineRecordReader(recordDelimiterBytes);
}

From source file:com.splicemachine.derby.stream.spark.fake.FakeOutputCommitter.java

License:Apache License

/**
 * Marks the task attempt as aborted by creating a marker file, named after the
 * task id, inside the directory configured under {@code abort.directory}.
 *
 * @throws IOException if the marker file cannot be created
 */
@Override
public void abortTask(TaskAttemptContext taskAttemptContext) throws IOException {
    String abortDirectory = taskAttemptContext.getConfiguration().get("abort.directory");
    File marker = new File(abortDirectory, taskAttemptContext.getTaskAttemptID().getTaskID().toString());
    // createNewFile() returns false when the file already exists, which still
    // records the abort; only fail if the marker is genuinely absent.
    boolean created = marker.createNewFile();
    if (!created && !marker.exists()) {
        throw new IOException("Could not create abort marker file: " + marker);
    }
}

From source file:com.splicemachine.mrio.api.core.SMInputFormat.java

License:Apache License

/**
 * Returns the cached record reader if one has already been created, otherwise
 * builds one from the split and the job configuration.
 */
@Override
public RecordReader<RowLocation, ExecRow> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    if (LOG.isDebugEnabled()) {
        SpliceLogUtils.debug(LOG, "createRecordReader for split=%s, context %s", split, context);
    }
    if (rr == null) {
        return getRecordReader(split, context.getConfiguration());
    }
    return rr;
}