Example usage for org.apache.hadoop.mapreduce TaskAttemptContext getConfiguration

Introduction

On this page you can find usage examples of the org.apache.hadoop.mapreduce.TaskAttemptContext#getConfiguration() method.

Prototype

public Configuration getConfiguration();

Document

Return the configuration for the job.
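
Before the per-project examples, here is a minimal sketch of the pattern they all share: the task obtains the job Configuration from the TaskAttemptContext (typically in initialize() or getRecordWriter()) and reads its settings from there. The MinimalRecordReader class and the "example.max.records" property are made up for this illustration.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Hypothetical reader illustrating the pattern used by the examples
// below: obtain the job Configuration from the task attempt context.
public class MinimalRecordReader extends RecordReader<NullWritable, LongWritable> {

    private long maxRecords;
    private long emitted;
    private final LongWritable current = new LongWritable();

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context)
            throws IOException, InterruptedException {
        // getConfiguration() returns the configuration for the job.
        Configuration conf = context.getConfiguration();
        // "example.max.records" is a made-up property for this sketch.
        this.maxRecords = conf.getLong("example.max.records", 10L);
    }

    @Override
    public boolean nextKeyValue() {
        if (emitted >= maxRecords) {
            return false;
        }
        current.set(emitted++);
        return true;
    }

    @Override
    public NullWritable getCurrentKey() {
        return NullWritable.get();
    }

    @Override
    public LongWritable getCurrentValue() {
        return current;
    }

    @Override
    public float getProgress() {
        return maxRecords == 0L ? 1.0f : Math.min(1.0f, (float) emitted / maxRecords);
    }

    @Override
    public void close() {
    }
}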

Usage

From source file: com.asakusafw.bridge.hadoop.directio.DirectFileInputFormat.java

License: Apache License

private <T> RecordReader<NullWritable, Object> createRecordReader(DataDefinition<T> definition,
        DirectFileInputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
    assert definition != null;
    assert split != null;
    assert context != null;
    Configuration conf = context.getConfiguration();
    T buffer = ReflectionUtils.newInstance(definition.getDataClass(), conf);
    Counter counter = new Counter();
    DirectInputFragment fragment = split.getInputFragment();
    ModelInput<T> input = createInput(context, split.getContainerPath(), definition, counter, fragment);
    return new ModelInputRecordReader<>(input, buffer, counter, fragment.getSize());
}

From source file: com.asakusafw.lang.compiler.mapreduce.testing.mock.WritableOutputFormat.java

License: Apache License

@Override
public RecordWriter<NullWritable, T> getRecordWriter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    Path path = getDefaultWorkFile(context, null);
    FileSystem fs = path.getFileSystem(context.getConfiguration());
    return new Writer<>(new WritableModelOutput<T>(fs.create(path, true)));
}

From source file: com.asakusafw.runtime.stage.input.BridgeInputFormat.java

License: Apache License

@Override
public RecordReader<NullWritable, Object> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {
    if (split instanceof BridgeInputSplit) {
        BridgeInputSplit bridgeInfo = (BridgeInputSplit) split;
        DataDefinition<?> definition = createDataDefinition(context.getConfiguration(), bridgeInfo.group);
        return createRecordReader(definition, bridgeInfo, context);
    } else if (split instanceof NullInputSplit) {
        return createNullRecordReader(context);
    } else {
        throw new IOException(MessageFormat.format("Unknown input split: {0}", split));
    }
}

From source file: com.asakusafw.runtime.stage.input.BridgeInputFormat.java

License: Apache License

private <T> RecordReader<NullWritable, Object> createRecordReader(DataDefinition<T> definition,
        BridgeInputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
    assert definition != null;
    assert split != null;
    assert context != null;
    Configuration conf = context.getConfiguration();
    T buffer = ReflectionUtils.newInstance(definition.getDataClass(), conf);
    Counter counter = new Counter();
    ModelInput<T> input = createInput(context, split.group.containerPath, definition, counter, split.fragment);
    return new BridgeRecordReader<>(input, buffer, counter, split.fragment.getSize());
}

From source file: com.asakusafw.runtime.stage.output.StageOutputDriver.java

License: Apache License

private ResultOutput<?> buildNormalSink(String name,
        @SuppressWarnings("rawtypes") Class<? extends OutputFormat> formatClass, Class<?> keyClass,
        Class<?> valueClass, List<Counter> counters) throws IOException, InterruptedException {
    assert context != null;
    assert name != null;
    assert formatClass != null;
    assert keyClass != null;
    assert valueClass != null;
    assert counters != null;
    Job job = JobCompatibility.newJob(context.getConfiguration());
    job.setOutputFormatClass(formatClass);
    job.setOutputKeyClass(keyClass);
    job.setOutputValueClass(valueClass);
    TaskAttemptContext localContext = JobCompatibility.newTaskAttemptContext(job.getConfiguration(),
            context.getTaskAttemptID());
    if (FileOutputFormat.class.isAssignableFrom(formatClass)) {
        setOutputFilePrefix(localContext, name);
    }
    OutputFormat<?, ?> format = ReflectionUtils.newInstance(formatClass, localContext.getConfiguration());
    RecordWriter<?, ?> writer = format.getRecordWriter(localContext);
    return new ResultOutput<Writable>(localContext, writer);
}

From source file: com.asakusafw.runtime.stage.output.TemporaryOutputFormat.java

License: Apache License

@Override
public RecordWriter<NullWritable, T> getRecordWriter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    @SuppressWarnings("unchecked")
    Class<T> valueClass = (Class<T>) context.getOutputValueClass();
    String name = context.getConfiguration().get(KEY_FILE_NAME, DEFAULT_FILE_NAME);
    return createRecordWriter(context, name, valueClass);
}

From source file: com.asakusafw.runtime.stage.output.TemporaryOutputFormat.java

License: Apache License

/**
 * Creates a new {@link RecordWriter} to output temporary data.
 * @param <V> value type
 * @param context current context
 * @param name output name
 * @param dataType value type
 * @return the created writer
 * @throws IOException if failed to create a new {@link RecordWriter}
 * @throws InterruptedException if interrupted
 * @throws IllegalArgumentException if some parameters were {@code null}
 */
public <V> RecordWriter<NullWritable, V> createRecordWriter(TaskAttemptContext context, String name,
        Class<V> dataType) throws IOException, InterruptedException {
    if (context == null) {
        throw new IllegalArgumentException("context must not be null"); //$NON-NLS-1$
    }
    if (name == null) {
        throw new IllegalArgumentException("name must not be null"); //$NON-NLS-1$
    }
    if (dataType == null) {
        throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$
    }
    CompressionCodec codec = null;
    Configuration conf = context.getConfiguration();
    if (FileOutputFormat.getCompressOutput(context)) {
        Class<?> codecClass = FileOutputFormat.getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }
    FileOutputCommitter committer = getOutputCommitter(context);
    final Path file = new Path(committer.getWorkPath(), FileOutputFormat.getUniqueFile(context, name, "")); //$NON-NLS-1$
    final ModelOutput<V> out = TemporaryStorage.openOutput(conf, dataType, file, codec);
    return new RecordWriter<NullWritable, V>() {

        @Override
        public void write(NullWritable key, V value) throws IOException {
            out.write(value);
        }

        @Override
        public void close(TaskAttemptContext ignored) throws IOException {
            out.close();
        }

        @Override
        public String toString() {
            return String.format("TemporaryOutput(%s)", file); //$NON-NLS-1$
        }
    };
}
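
As a rough illustration of how createRecordWriter might be called from client code, here is a short sketch; it is not code from the project, and the output name "example", the Text value type, and the surrounding helper method are assumptions made for the example.

// Hypothetical helper, assuming a TaskAttemptContext is in scope
// (for instance inside an OutputFormat). "example" is a made-up
// output name and Text is a stand-in value type.
private void writeTemporary(TaskAttemptContext context) throws IOException, InterruptedException {
    TemporaryOutputFormat<Text> format = new TemporaryOutputFormat<>();
    RecordWriter<NullWritable, Text> writer = format.createRecordWriter(context, "example", Text.class);
    try {
        writer.write(NullWritable.get(), new Text("hello"));
    } finally {
        writer.close(context);
    }
}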

From source file: com.ashishpaliwal.hadoop.utils.inputformat.CsvRecordReader.java

License: Apache License

public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
    FileSplit split = (FileSplit) genericSplit;

    Configuration job = context.getConfiguration();
    this.maxLineLength = job.getInt(MAX_LINE_LENGTH, Integer.MAX_VALUE);
    this.start = split.getStart();
    this.end = this.start + split.getLength();
    Path file = split.getPath();
    this.compressionCodecs = new CompressionCodecFactory(job);
    this.codec = this.compressionCodecs.getCodec(file);

    FileSystem fs = file.getFileSystem(job);
    this.fileIn = fs.open(file);
    if (isCompressedInput()) {
        this.decompressor = CodecPool.getDecompressor(this.codec);
        if (this.codec instanceof SplittableCompressionCodec) {
            SplitCompressionInputStream cIn = ((SplittableCompressionCodec) this.codec).createInputStream(
                    this.fileIn, this.decompressor, this.start, this.end,
                    SplittableCompressionCodec.READ_MODE.BYBLOCK);

            this.in = new CsvLineReader(cIn, job);
            this.start = cIn.getAdjustedStart();
            this.end = cIn.getAdjustedEnd();
            this.filePosition = cIn;
        } else {
            this.in = new CsvLineReader(this.codec.createInputStream(this.fileIn, this.decompressor), job);
            this.filePosition = this.fileIn;
        }
    } else {
        this.fileIn.seek(this.start);
        this.in = new CsvLineReader(this.fileIn, job);
        this.filePosition = this.fileIn;
    }

    if (this.start != 0L) {
        this.start += this.in.readLine(new Text(), 0, maxBytesToConsume(this.start));
    }
    this.pos = this.start;
}

From source file: com.avira.couchdoop.exp.CouchbaseOutputFormat.java

License: Apache License

public RecordWriter<String, CouchbaseAction> getRecordWriter(TaskAttemptContext context)
        throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    ExportArgs args;
    try {
        args = new ExportArgs(conf);
    } catch (ArgsException e) {
        throw new IllegalArgumentException(e);
    }

    CouchbaseRecordWriter couchbaseRecordWriter = new CouchbaseRecordWriter(args.getUrls(), args.getBucket(),
            args.getPassword());

    couchbaseRecordWriter
            .setExpBackoffMaxTries(conf.getInt(CONF_EXP_BACKOFF_MAX_TRIES_PER_TASK, EXP_BACKOFF_MAX_TRIES));
    couchbaseRecordWriter.setExpBackoffMaxRetryInterval(
            conf.getInt(CONF_EXP_BACKOFF_MAX_RETRY_INTERVAL_PER_TASK, EXP_BACKOFF_MAX_RETRY_INTERVAL));
    couchbaseRecordWriter.setExpBackoffMaxTotalTimeout(
            conf.getInt(CONF_EXP_BACKOFF_MAX_TOTAL_TIMEOUT_PER_TASK, EXP_BACKOFF_MAX_TOTAL_TIMEOUT));

    return couchbaseRecordWriter;
}

From source file: com.awcoleman.BouncyCastleGenericCDRHadoop.RawFileRecordReader.java

License: Apache License

@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    path = ((FileSplit) split).getPath();
    FileSystem fs = path.getFileSystem(conf);
    FSDataInputStream fsin = fs.open(path);
    is = decompressStream(fsin);
    asnin = new ASN1InputStream(is);
}