List of usage examples for org.apache.hadoop.mapreduce.OutputCommitter#abortTask(TaskAttemptContext)
public abstract void abortTask(TaskAttemptContext taskContext) throws IOException;
From source file:com.asakusafw.runtime.mapreduce.simple.SimpleJobRunner.java
License:Apache License
/**
 * Best-effort task abort: an abort failure is logged and suppressed so it
 * cannot mask the original task failure that triggered the abort.
 *
 * @param context the task attempt being aborted
 * @param committer the committer whose task output should be discarded
 */
private void doAbortTask(TaskAttemptContext context, OutputCommitter committer) {
    try {
        committer.abortTask(context);
    } catch (IOException ioe) {
        String message = MessageFormat.format("error occurred while aborting task: {0} ({1})",
                context.getTaskAttemptID(), context.getJobName());
        LOG.error(message, ioe);
    }
}
From source file:org.apache.giraph.io.internal.WrappedEdgeOutputFormat.java
License:Apache License
/**
 * Returns an {@link OutputCommitter} wrapping the original output format's
 * committer. Every callback re-wraps the framework-supplied context so the
 * delegate always sees this format's own Configuration.
 */
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    // Resolve the real committer once, against a context carrying our conf.
    final OutputCommitter delegate = originalOutputFormat
            .getOutputCommitter(HadoopUtils.makeTaskAttemptContext(getConf(), context));
    return new OutputCommitter() {
        @Override
        public void setupJob(JobContext jobContext) throws IOException {
            delegate.setupJob(HadoopUtils.makeJobContext(getConf(), jobContext));
        }

        @Override
        public void setupTask(TaskAttemptContext taskContext) throws IOException {
            delegate.setupTask(HadoopUtils.makeTaskAttemptContext(getConf(), taskContext));
        }

        @Override
        public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
            return delegate.needsTaskCommit(HadoopUtils.makeTaskAttemptContext(getConf(), taskContext));
        }

        @Override
        public void commitTask(TaskAttemptContext taskContext) throws IOException {
            delegate.commitTask(HadoopUtils.makeTaskAttemptContext(getConf(), taskContext));
        }

        @Override
        public void abortTask(TaskAttemptContext taskContext) throws IOException {
            delegate.abortTask(HadoopUtils.makeTaskAttemptContext(getConf(), taskContext));
        }

        @Override
        public void cleanupJob(JobContext jobContext) throws IOException {
            delegate.cleanupJob(HadoopUtils.makeJobContext(getConf(), jobContext));
        }

        // NOTE: the markers below are munge preprocessor directives used by the
        // build to strip these methods on old Hadoop versions — do not edit them.
        /*if_not[HADOOP_NON_COMMIT_JOB]*/
        @Override
        public void commitJob(JobContext jobContext) throws IOException {
            delegate.commitJob(HadoopUtils.makeJobContext(getConf(), jobContext));
        }

        @Override
        public void abortJob(JobContext jobContext, JobStatus.State state) throws IOException {
            delegate.abortJob(HadoopUtils.makeJobContext(getConf(), jobContext), state);
        }
        /*end[HADOOP_NON_COMMIT_JOB]*/
    };
}
From source file:org.apache.giraph.io.internal.WrappedVertexOutputFormat.java
License:Apache License
@Override public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException { final OutputCommitter outputCommitter = originalOutputFormat .getOutputCommitter(HadoopUtils.makeTaskAttemptContext(getConf(), context)); return new OutputCommitter() { @Override//from w ww . j a va2s .c o m public void setupJob(JobContext context) throws IOException { outputCommitter.setupJob(HadoopUtils.makeJobContext(getConf(), context)); } @Override public void setupTask(TaskAttemptContext context) throws IOException { outputCommitter.setupTask(HadoopUtils.makeTaskAttemptContext(getConf(), context)); } @Override public boolean needsTaskCommit(TaskAttemptContext context) throws IOException { return outputCommitter.needsTaskCommit(HadoopUtils.makeTaskAttemptContext(getConf(), context)); } @Override public void commitTask(TaskAttemptContext context) throws IOException { outputCommitter.commitTask(HadoopUtils.makeTaskAttemptContext(getConf(), context)); } @Override public void abortTask(TaskAttemptContext context) throws IOException { outputCommitter.abortTask(HadoopUtils.makeTaskAttemptContext(getConf(), context)); } @Override public void cleanupJob(JobContext context) throws IOException { outputCommitter.cleanupJob(HadoopUtils.makeJobContext(getConf(), context)); } /*if_not[HADOOP_NON_COMMIT_JOB]*/ @Override public void commitJob(JobContext context) throws IOException { outputCommitter.commitJob(HadoopUtils.makeJobContext(getConf(), context)); } @Override public void abortJob(JobContext context, JobStatus.State state) throws IOException { outputCommitter.abortJob(HadoopUtils.makeJobContext(getConf(), context), state); } /*end[HADOOP_NON_COMMIT_JOB]*/ }; }
From source file:org.apache.hcatalog.data.transfer.impl.HCatOutputFormatWriter.java
License:Apache License
/**
 * Writes all records from the iterator through an HCatOutputFormat record
 * writer for this writer's split, committing the task output on success and
 * aborting it on any failure.
 *
 * @param recordItr records to write; each record is written with a null key
 * @throws HCatException if writing fails, or if the subsequent abort fails
 */
@Override
public void write(Iterator<HCatRecord> recordItr) throws HCatException {
    int id = sp.getId();
    setVarsInConf(id);
    HCatOutputFormat outFormat = new HCatOutputFormat();
    TaskAttemptContext cntxt = HCatHadoopShims.Instance.get().createTaskAttemptContext(conf,
            new TaskAttemptID(HCatHadoopShims.Instance.get().createTaskID(), id));
    OutputCommitter committer = null;
    RecordWriter<WritableComparable<?>, HCatRecord> writer;
    try {
        committer = outFormat.getOutputCommitter(cntxt);
        committer.setupTask(cntxt);
        writer = outFormat.getRecordWriter(cntxt);
        while (recordItr.hasNext()) {
            HCatRecord rec = recordItr.next();
            writer.write(null, rec);
        }
        // NOTE(review): if write() throws, the writer is never closed; relying on
        // abortTask to clean up its output — confirm against the output format.
        writer.close(cntxt);
        if (committer.needsTaskCommit(cntxt)) {
            committer.commitTask(cntxt);
        }
    } catch (IOException e) {
        abortTaskAttempt(committer, cntxt);
        throw new HCatException("Failed while writing", e);
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers up the stack can observe it.
        Thread.currentThread().interrupt();
        abortTaskAttempt(committer, cntxt);
        throw new HCatException("Failed while writing", e);
    }
}

/**
 * Aborts the task attempt if a committer was obtained; a no-op when the
 * failure happened before the committer existed. Wraps an abort failure in
 * an internal-error HCatException.
 */
private static void abortTaskAttempt(OutputCommitter committer, TaskAttemptContext cntxt) throws HCatException {
    if (null != committer) {
        try {
            committer.abortTask(cntxt);
        } catch (IOException e1) {
            throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e1);
        }
    }
}
From source file:org.apache.hive.hcatalog.data.transfer.impl.HCatOutputFormatWriter.java
License:Apache License
@Override public void write(Iterator<HCatRecord> recordItr) throws HCatException { int id = sp.getId(); setVarsInConf(id);// w w w . ja va 2 s . com HCatOutputFormat outFormat = new HCatOutputFormat(); TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, new TaskAttemptID(ShimLoader.getHadoopShims().getHCatShim().createTaskID(), id)); OutputCommitter committer = null; RecordWriter<WritableComparable<?>, HCatRecord> writer; try { committer = outFormat.getOutputCommitter(cntxt); committer.setupTask(cntxt); writer = outFormat.getRecordWriter(cntxt); while (recordItr.hasNext()) { HCatRecord rec = recordItr.next(); writer.write(null, rec); } writer.close(cntxt); if (committer.needsTaskCommit(cntxt)) { committer.commitTask(cntxt); } } catch (IOException e) { if (null != committer) { try { committer.abortTask(cntxt); } catch (IOException e1) { throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e1); } } throw new HCatException("Failed while writing", e); } catch (InterruptedException e) { if (null != committer) { try { committer.abortTask(cntxt); } catch (IOException e1) { throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e1); } } throw new HCatException("Failed while writing", e); } }
From source file:org.mrgeo.hadoop.multipleoutputs.DirectoryMultipleOutputsCommitter.java
License:Apache License
/**
 * Aborts the task attempt on every wrapped committer.
 *
 * The original loop stopped at the first committer whose abort threw,
 * leaving the remaining committers' task output un-aborted. This version
 * attempts the abort on ALL committers and rethrows the first failure
 * afterwards, so one bad committer cannot block cleanup of the others.
 *
 * @param taskContext context of the task attempt being aborted
 * @throws IOException the first abort failure, after all committers were tried
 */
@Override
public void abortTask(final TaskAttemptContext taskContext) throws IOException {
    IOException firstFailure = null;
    for (final OutputCommitter that : committers) {
        try {
            that.abortTask(taskContext);
        } catch (IOException e) {
            if (firstFailure == null) {
                firstFailure = e;
            }
        }
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}