Example usage for org.apache.hadoop.mapreduce TaskAttemptContext getTaskAttemptID

Introduction

This page lists example usages of org.apache.hadoop.mapreduce TaskAttemptContext.getTaskAttemptID() collected from open source projects.

Prototype

public TaskAttemptID getTaskAttemptID();

Document

Get the unique name for this task attempt.
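
As a quick, self-contained sketch of the call itself (the mapper class, field, and path prefix here are illustrative and do not come from any of the source files below):

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.TaskAttemptID;

// Hypothetical mapper, for illustration only.
public class AttemptIdMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

    private String scratchName;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Mapper.Context extends TaskAttemptContext, so getTaskAttemptID()
        // is available wherever the framework hands us a context.
        TaskAttemptID attemptId = context.getTaskAttemptID();

        // toString() yields a globally unique name of the form
        // attempt_200707121733_0003_m_000005_0; the examples below use it
        // to build per-attempt temporary paths and cache keys.
        scratchName = "_temporary/" + attemptId.toString();
    }
}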

Usage

From source file:alluxio.hadoop.mapreduce.KeyValueOutputFormat.java

License:Apache License

/**
 * @param taskContext MapReduce task context
 * @return the task's temporary output path ${job output directory}/_temporary/${task attempt id}
 */
public static AlluxioURI getTaskOutputURI(TaskAttemptContext taskContext) {
    return getJobOutputURI(taskContext).join(KeyValueOutputCommitter.getPendingDirName())
            .join(taskContext.getTaskAttemptID().toString());
}
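
Because attempt IDs are unique even across speculative attempts of the same task, each attempt gets its own pending directory here, and the committer can later promote exactly one of them into the job output.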

From source file:co.cask.cdap.internal.app.runtime.batch.dataset.output.MultipleOutputs.java

License:Apache License

static TaskAttemptContext getNamedTaskContext(TaskAttemptContext context, String namedOutput)
        throws IOException {
    Job job = getNamedJob(context, namedOutput);
    return new TaskAttemptContextImpl(job.getConfiguration(), context.getTaskAttemptID(),
            new WrappedStatusReporter(context));
}
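
The new TaskAttemptContextImpl is built from the named output's job configuration but reuses the caller's attempt ID, so work done against the named output is still attributed to the same task attempt.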

From source file:co.cask.cdap.internal.app.runtime.batch.dataset.partitioned.DynamicPartitionerWriterWrapper.java

License:Apache License

private TaskAttemptContext getTaskAttemptContext(TaskAttemptContext context, String newOutputName)
        throws IOException {
    Job job = new Job(context.getConfiguration());
    DynamicPartitioningOutputFormat.setOutputName(job, newOutputName);
    // CDAP-4806 We must set this parameter in addition to calling FileOutputFormat#setOutputName, because
    // AvroKeyOutputFormat/AvroKeyValueOutputFormat use a different parameter for the output name than FileOutputFormat.
    if (isAvroOutputFormat(getFileOutputFormat(context))) {
        job.getConfiguration().set("avro.mo.config.namedOutput", newOutputName);
    }

    Path jobOutputPath = DynamicPartitioningOutputFormat
            .createJobSpecificPath(FileOutputFormat.getOutputPath(job), context);
    DynamicPartitioningOutputFormat.setOutputPath(job, jobOutputPath);

    return new TaskAttemptContextImpl(job.getConfiguration(), context.getTaskAttemptID());
}

From source file:co.cask.cdap.internal.app.runtime.batch.dataset.partitioned.DynamicPartitioningOutputFormat.java

License:Apache License

private TaskAttemptContext getTaskAttemptContext(TaskAttemptContext context, String newOutputName)
        throws IOException {
    Job job = new Job(context.getConfiguration());
    FileOutputFormat.setOutputName(job, newOutputName);
    // CDAP-4806 We must set this parameter in addition to calling FileOutputFormat#setOutputName, because
    // AvroKeyOutputFormat/AvroKeyValueOutputFormat use a different parameter for the output name than FileOutputFormat.
    if (isAvroOutputFormat(getFileOutputFormat(context))) {
        job.getConfiguration().set("avro.mo.config.namedOutput", newOutputName);
    }

    Path jobOutputPath = createJobSpecificPath(FileOutputFormat.getOutputPath(job), context);
    FileOutputFormat.setOutputPath(job, jobOutputPath);

    return new TaskAttemptContextImpl(job.getConfiguration(), context.getTaskAttemptID());
}
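
As in the previous example, the rebuilt context carries the original context.getTaskAttemptID() while swapping in a configuration whose output name and path have been rewritten for the new output.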

From source file:co.nubetech.hiho.mapreduce.lib.output.AppendSequenceFileOutputFormat.java

License:Apache License

public synchronized static String getUniqueFile(TaskAttemptContext context, String name, String extension) {

    TaskID taskId = context.getTaskAttemptID().getTaskID();
    int partition = taskId.getId();
    // Offset the partition number by fileCount (a field of this class, not
    // shown here) so appended output does not collide with earlier files.
    partition = partition + (int) fileCount;
    StringBuilder result = new StringBuilder();
    result.append(name);
    result.append('-');
    // The usual m/r marker is commented out in this source, so generated
    // names contain a double '-' (e.g. part--00000) rather than part-m-00000.
    // result.append(taskId.isMap() ? 'm' : 'r');
    result.append('-');
    result.append(NUMBER_FORMAT.format(partition));
    result.append(extension);
    return result.toString();
}

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java

License:Apache License

private static String getAttemptId(TaskAttemptContext taskContext, String datasourceId) {
    assert taskContext != null;
    assert datasourceId != null;
    return taskContext.getTaskAttemptID().toString();
}

From source file:com.asakusafw.runtime.mapreduce.simple.SimpleJobRunner.java

License:Apache License

private void doAbortTask(TaskAttemptContext context, OutputCommitter committer) {
    try {
        committer.abortTask(context);
    } catch (IOException e) {
        LOG.error(MessageFormat.format("error occurred while aborting task: {0} ({1})",
                context.getTaskAttemptID(), context.getJobName()), e);
    }
}

From source file:com.asakusafw.runtime.stage.output.BridgeOutputFormat.java

License:Apache License

@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    synchronized (this) {
        TaskAttemptID id = context.getTaskAttemptID();
        OutputCommitter committer = commiterCache.get(id);
        if (committer == null) {
            committer = createOutputCommitter(context);
        }
        commiterCache.put(id, committer);
        return committer;
    }
}

From source file:com.asakusafw.runtime.stage.output.TemporaryOutputFormat.java

License:Apache License

@Override
public FileOutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    synchronized (this) {
        TaskAttemptID id = context.getTaskAttemptID();
        FileOutputCommitter committer = commiterCache.get(id);
        if (committer == null) {
            committer = createOutputCommitter(context);
        }
        commiterCache.put(id, committer);
        return committer;
    }
}
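
In both of these Asakusa snippets the attempt ID serves as the cache key, so repeated getOutputCommitter calls within one attempt hand back the same committer instance instead of constructing a new one.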

From source file:com.baynote.kafka.hadoop.MultipleKafkaInputFormat.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
public RecordReader<LongWritable, BytesWritable> createRecordReader(final InputSplit split,
        final TaskAttemptContext context) throws IOException, InterruptedException {
    final TaggedInputSplit taggedInputSplit = (TaggedInputSplit) split;
    final TaskAttemptContext taskAttemptContextClone = new TaskAttemptContextImpl(taggedInputSplit.getConf(),
            context.getTaskAttemptID());
    taskAttemptContextClone.setStatus(context.getStatus());
    return new DelegatingRecordReader<LongWritable, BytesWritable>(split, taskAttemptContextClone);
}
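
The clone keeps the original attempt ID and status but substitutes the split-specific configuration from the TaggedInputSplit, letting the delegate record reader run with per-split settings while still reporting under the same attempt.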