List of usage examples for org.apache.hadoop.mapred JobConf getOutputValueClass
public Class<?> getOutputValueClass()
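
getOutputValueClass() returns the value class configured for the job's output, falling back to Text when nothing has been set. Below is a minimal sketch of the setter/getter pair; the class name OutputValueClassDemo and the chosen value types are only illustrative:

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class OutputValueClassDemo {
    public static void main(String[] args) {
        JobConf job = new JobConf();

        // Before any explicit configuration the getter falls back to the
        // framework default, which is Text in stock Hadoop
        System.out.println(job.getOutputValueClass() == Text.class); // true

        // Configure the job's output value type; the getter now reflects it
        job.setOutputValueClass(BytesWritable.class);
        System.out.println(job.getOutputValueClass()); // class org.apache.hadoop.io.BytesWritable
    }
}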
From source file: voldemort.store.readonly.mr.serialization.JsonSequenceFileOutputFormat.java
License: Apache License
public RecordWriter<BytesWritable, BytesWritable> getRecordWriter(FileSystem ignored,
                                                                  JobConf job,
                                                                  String name,
                                                                  Progressable progress) throws IOException {
    // Shamelessly copy in hadoop code to allow us to set the metadata with
    // our schema

    // get the path of the temporary output file
    Path file = FileOutputFormat.getTaskOutputPath(job, name);

    FileSystem fs = file.getFileSystem(job);
    CompressionType compressionType = CompressionType.BLOCK;

    // find the right codec
    Class<?> codecClass = getOutputCompressorClass(job, DefaultCodec.class);
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, job);

    // set the schema metadata
    /* begin jays code */
    SequenceFile.Metadata meta = new SequenceFile.Metadata();
    meta.set(new Text("key.schema"), new Text(getSchema("reducer.output.key.schema", job)));
    meta.set(new Text("value.schema"), new Text(getSchema("reducer.output.value.schema", job)));

    final SequenceFile.Writer out = SequenceFile.createWriter(fs,
                                                              job,
                                                              file,
                                                              job.getOutputKeyClass(),
                                                              job.getOutputValueClass(),
                                                              compressionType,
                                                              codec,
                                                              progress,
                                                              meta);
    /* end jays code */

    return new RecordWriter<BytesWritable, BytesWritable>() {

        public void write(BytesWritable key, BytesWritable value) throws IOException {
            out.append(key, value);
        }

        public void close(Reporter reporter) throws IOException {
            out.close();
        }
    };
}
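
In this example the class returned by job.getOutputValueClass() is passed to SequenceFile.createWriter together with job.getOutputKeyClass(), so the SequenceFile header records the job's configured output key and value types alongside the JSON schema metadata set just above.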