Usage examples for org.apache.hadoop.mapreduce.lib.output.FileOutputFormat#getRecordWriter
public abstract RecordWriter<K, V> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException;
From source file:com.asakusafw.testdriver.file.FileOutputFormatDriver.java
License:Apache License
/**
 * Creates a new instance and eagerly opens the underlying {@link RecordWriter}.
 * @param <K> type of the common output key
 * @param context target context with destination information
 * @param format the output format to emulate
 * @param key the common key to output for every record
 * @throws IOException if obtaining the record writer failed
 * @throws IllegalArgumentException if some parameters were {@code null}
 */
<K> FileOutputFormatDriver(TaskAttemptContext context, FileOutputFormat<? super K, ? super V> format, K key)
        throws IOException {
    if (context == null) {
        throw new IllegalArgumentException("context must not be null"); //$NON-NLS-1$
    }
    if (format == null) {
        throw new IllegalArgumentException("format must not be null"); //$NON-NLS-1$
    }
    if (key == null) {
        throw new IllegalArgumentException("key must not be null"); //$NON-NLS-1$
    }
    LOG.debug("Emulating OutputFormat: {}", format.getClass().getName());
    this.context = context;
    this.format = format;
    this.key = key;
    try {
        this.writer = format.getRecordWriter(context);
    } catch (InterruptedException e) {
        // FIX: restore the interrupt flag before translating to an I/O
        // exception, so callers further up the stack can still observe
        // that the thread was interrupted. The original swallowed it.
        Thread.currentThread().interrupt();
        throw (InterruptedIOException) new InterruptedIOException().initCause(e);
    }
}
From source file:org.goldenorb.OrbPartition.java
License:Apache License
private void dumpData() { Configuration conf = new Configuration(); Job job = null;/*from w w w .j a v a 2 s .com*/ JobContext jobContext = null; TaskAttemptContext tao = null; RecordWriter rw; VertexWriter vw; FileOutputFormat outputFormat; boolean tryAgain = true; int count = 0; while (tryAgain && count < 15) try { count++; tryAgain = false; if (job == null) { job = new Job(conf); job.setOutputFormatClass(TextOutputFormat.class); FileOutputFormat.setOutputPath(job, new Path(new String(getOrbConf().getNameNode() + getOrbConf().getFileOutputPath()))); } if (jobContext == null) { jobContext = new JobContext(job.getConfiguration(), new JobID()); } System.out.println(jobContext.getConfiguration().get("mapred.output.dir")); tao = new TaskAttemptContext(jobContext.getConfiguration(), new TaskAttemptID(new TaskID(jobContext.getJobID(), true, getPartitionID()), 0)); outputFormat = (FileOutputFormat) tao.getOutputFormatClass().newInstance(); rw = outputFormat.getRecordWriter(tao); vw = (VertexWriter) getOrbConf().getVertexOutputFormatClass().newInstance(); for (Vertex v : vertices.values()) { OrbContext oc = vw.vertexWrite(v); rw.write(oc.getKey(), oc.getValue()); // orbLogger.info("Partition: " + Integer.toString(partitionId) + "writing: " + // oc.getKey().toString() + ", " + oc.getValue().toString()); } rw.close(tao); FileOutputCommitter cm = (FileOutputCommitter) outputFormat.getOutputCommitter(tao); if (cm.needsTaskCommit(tao)) { cm.commitTask(tao); cm.cleanupJob(jobContext); } else { cm.cleanupJob(jobContext); tryAgain = true; } } catch (IOException e) { tryAgain = true; e.printStackTrace(); } catch (InstantiationException e) { tryAgain = true; e.printStackTrace(); } catch (IllegalAccessException e) { tryAgain = true; e.printStackTrace(); } catch (ClassNotFoundException e) { tryAgain = true; e.printStackTrace(); } catch (InterruptedException e) { tryAgain = true; e.printStackTrace(); } if (tryAgain) { synchronized (this) { try { wait(1000); } catch 
(InterruptedException e) { e.printStackTrace(); } } } }