Example usage for org.apache.hadoop.io NullWritable get

Introduction

On this page you can find example usage for org.apache.hadoop.io.NullWritable.get().

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
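Because NullWritable has no state, get() is the only way to obtain an instance, and every call returns the same shared object. A minimal sketch of what that means in practice (the demo class name is ours; assumes hadoop-common on the classpath):

import org.apache.hadoop.io.NullWritable;

public class NullWritableGetDemo {
    public static void main(String[] args) {
        // The constructor is private; get() hands out the singleton.
        NullWritable a = NullWritable.get();
        NullWritable b = NullWritable.get();
        System.out.println(a == b); // prints "true"
        // write() and readFields() are no-ops, so NullWritable is a
        // zero-byte placeholder for an unused key or value slot.
    }
}

This is why the examples below pass NullWritable.get() wherever the MapReduce or Spark API demands a key or value that the job never actually uses.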

Usage

From source file: com.cloudera.dataflow.hadoop.NullWritableCoder.java

License: Open Source License

@Override
public NullWritable decode(InputStream inStream, Context context) {
    // NullWritable carries no data, so decoding consumes nothing from the stream.
    return NullWritable.get();
}

From source file: com.cloudera.dataflow.hadoop.WritableCoderTest.java

License: Open Source License

@Test
public void testNullWritableEncoding() throws Exception {
    WritableCoder<NullWritable> coder = WritableCoder.of(NullWritable.class);

    CoderProperties.coderDecodeEncodeEqual(coder, NullWritable.get());
}

From source file: com.cloudera.dataflow.spark.TransformTranslator.java

License: Open Source License

private static <T> TransformEvaluator<AvroIO.Write.Bound<T>> writeAvro() {
    return new TransformEvaluator<AvroIO.Write.Bound<T>>() {
        @Override
        public void evaluate(AvroIO.Write.Bound<T> transform, EvaluationContext context) {
            Job job;
            try {
                job = Job.getInstance();
            } catch (IOException e) {
                throw new IllegalStateException(e);
            }
            AvroJob.setOutputKeySchema(job, transform.getSchema());
            @SuppressWarnings("unchecked")
            JavaPairRDD<AvroKey<T>, NullWritable> last = ((JavaRDDLike<WindowedValue<T>, ?>) context
                    .getInputRDD(transform)).map(WindowingHelpers.<T>unwindowFunction())
                            .mapToPair(new PairFunction<T, AvroKey<T>, NullWritable>() {
                                @Override
                                public Tuple2<AvroKey<T>, NullWritable> call(T t) throws Exception {
                                    // The Avro output format writes only the key; the
                                    // NullWritable singleton fills the unused value slot.
                                    return new Tuple2<>(new AvroKey<>(t), NullWritable.get());
                                }
                            });
            ShardTemplateInformation shardTemplateInfo = new ShardTemplateInformation(transform.getNumShards(),
                    transform.getShardTemplate(), transform.getFilenamePrefix(), transform.getFilenameSuffix());
            writeHadoopFile(last, job.getConfiguration(), shardTemplateInfo, AvroKey.class, NullWritable.class,
                    TemplatedAvroKeyOutputFormat.class);
        }
    };
}

From source file: com.cloudera.sa.ExcelRecordReader.java

License: Apache License

@Override
public NullWritable getCurrentKey() throws IOException, InterruptedException {
    // This reader exposes no meaningful key, so it returns the singleton.
    return NullWritable.get();
}

From source file: com.cloudera.sa.hbase.to.hdfs.utils.NMapInputFormat.java

License: Apache License

@Override
public RecordReader<NullWritable, NullWritable> createRecordReader(InputSplit split, TaskAttemptContext tac)
        throws IOException, InterruptedException {
    // Key and value are both placeholders; the reader exists only to drive map tasks.
    return new SingleRecordReader<NullWritable, NullWritable>(NullWritable.get(), NullWritable.get());
}

From source file: com.cloudera.seismic.segy.SegyUnloader.java

License: Open Source License

private void write(Path path, DataOutputStream out, Configuration conf) throws Exception {
    System.out.println("Reading: " + path);
    SequenceFile.Reader reader = new SequenceFile.Reader(FileSystem.get(conf), path, conf);
    BytesWritable value = new BytesWritable();
    // NullWritable.get() serves as the throwaway key while iterating the records.
    while (reader.next(NullWritable.get(), value)) {
        out.write(value.getBytes(), 0, value.getLength());
    }
    reader.close();
}

From source file: com.cloudera.sqoop.mapreduce.AvroImportMapper.java

License: Apache License

@Override
protected void map(LongWritable key, SqoopRecord val, Context context)
        throws IOException, InterruptedException {
    wrapper.datum(toGenericRecord(val));
    context.write(wrapper, NullWritable.get());
}

From source file: com.cloudera.sqoop.mapreduce.HBaseImportMapper.java

License: Apache License

@Override
public void map(LongWritable key, SqoopRecord val, Context context) throws IOException, InterruptedException {
    context.write(val, NullWritable.get());
}

From source file: com.cloudera.sqoop.mapreduce.MergeReducer.java

License: Apache License

@Override
public void reduce(Text key, Iterable<MergeRecord> vals, Context c) throws IOException, InterruptedException {
    SqoopRecord bestRecord = null;
    try {
        for (MergeRecord val : vals) {
            if (null == bestRecord && !val.isNewRecord()) {
                // Use an old record if we don't have a new record.
                bestRecord = (SqoopRecord) val.getSqoopRecord().clone();
            } else if (val.isNewRecord()) {
                bestRecord = (SqoopRecord) val.getSqoopRecord().clone();
            }
        }
    } catch (CloneNotSupportedException cnse) {
        throw new IOException(cnse);
    }

    if (null != bestRecord) {
        c.write(bestRecord, NullWritable.get());
    }
}

From source file: com.cloudera.sqoop.mapreduce.SequenceFileExportMapper.java

License: Apache License

public void map(LongWritable key, SqoopRecord val, Context context) throws IOException, InterruptedException {
    context.write(val, NullWritable.get());
}