Example usage for org.apache.hadoop.io NullWritable get

Introduction

This page collects example usage of org.apache.hadoop.io.NullWritable.get() from open-source projects.

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
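
The sketch below is a hypothetical, minimal illustration rather than code from the projects listed under Usage: it checks that get() returns a shared singleton, then uses that instance as a placeholder key when writing a SequenceFile. The output path and value text are made up for the example.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class NullWritableDemo {
    public static void main(String[] args) throws Exception {
        // get() always hands back the same shared instance; NullWritable
        // has no state and serializes to zero bytes.
        NullWritable a = NullWritable.get();
        NullWritable b = NullWritable.get();
        System.out.println(a == b); // prints: true

        // Typical use: a placeholder key when only the values matter.
        // The output path here is made up for illustration.
        Configuration conf = new Configuration();
        Path path = new Path("/tmp/nullwritable-demo.seq");
        SequenceFile.Writer writer = SequenceFile.createWriter(path.getFileSystem(conf), conf, path,
                NullWritable.class, Text.class);
        try {
            writer.append(NullWritable.get(), new Text("value-only record"));
        } finally {
            writer.close();
        }
    }
}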

Usage

From source file: org.apache.crunch.io.hbase.HBaseValueConverter.java

License: Apache License

@Override
public Object outputKey(V value) {
    return NullWritable.get();
}

From source file: org.apache.crunch.io.hbase.HFileUtils.java

License: Apache License

private static void writePartitionInfo(Configuration conf, Path path, List<KeyValue> splitPoints)
        throws IOException {
    LOG.info("Writing " + splitPoints.size() + " split points to " + path);
    SequenceFile.Writer writer = SequenceFile.createWriter(path.getFileSystem(conf), conf, path,
            NullWritable.class, KeyValue.class);
    for (KeyValue key : splitPoints) {
        writer.append(NullWritable.get(), writables(KeyValue.class).getOutputMapFn().map(key));
    }
    writer.close();
}

From source file: org.apache.crunch.io.orc.OrcFileReaderFactory.java

License: Apache License

@Override
public Iterator<T> read(FileSystem fs, final Path path) {
    try {
        if (!fs.isFile(path)) {
            throw new CrunchRuntimeException("Not a file: " + path);
        }

        inputFn.initialize();

        FileStatus status = fs.getFileStatus(path);
        FileSplit split = new FileSplit(path, 0, status.getLen(), new String[0]);

        JobConf conf = new JobConf();
        if (readColumns != null) {
            conf.setBoolean(OrcFileSource.HIVE_READ_ALL_COLUMNS, false);
            conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR,
                    OrcFileSource.getColumnIdsStr(readColumns));
        }
        final RecordReader<NullWritable, OrcStruct> reader = inputFormat.getRecordReader(split, conf,
                Reporter.NULL);

        return new UnmodifiableIterator<T>() {

            // Whether the next record has already been fetched from the reader;
            // hasNext() sets it, next() clears it.
            private boolean checked = false;
            private boolean hasNext;
            private OrcStruct value;
            private OrcWritable writable = new OrcWritable();

            @Override
            public boolean hasNext() {
                try {
                    if (value == null) {
                        value = reader.createValue();
                    }
                    if (!checked) {
                        hasNext = reader.next(NullWritable.get(), value);
                        checked = true;
                    }
                    return hasNext;
                } catch (Exception e) {
                    throw new CrunchRuntimeException("Error while reading local file: " + path, e);
                }
            }

            @Override
            public T next() {
                try {
                    if (value == null) {
                        value = reader.createValue();
                    }
                    if (!checked) {
                        reader.next(NullWritable.get(), value);
                    }
                    checked = false;
                    writable.set(value);
                    return inputFn.map(writable);
                } catch (Exception e) {
                    throw new CrunchRuntimeException("Error while reading local file: " + path, e);
                }
            }

        };
    } catch (Exception e) {
        throw new CrunchRuntimeException("Error while reading local file: " + path, e);
    }
}

From source file: org.apache.crunch.io.orc.OrcFileWriter.java

License: Apache License

public void write(T t) throws IOException {
    OrcWritable ow = (OrcWritable) mapFn.map(t);
    if (ow.get() == null) {
        throw new NullPointerException("Cannot write null records to orc file");
    }
    writer.write(NullWritable.get(), serde.serialize(ow.get(), ow.getObjectInspector()));
}

From source file: org.apache.crunch.io.seq.SeqFileReaderFactory.java

License: Apache License

public SeqFileReaderFactory(PType<T> ptype) {
    this.converter = ptype.getConverter();
    this.mapFn = ptype.getInputMapFn();
    if (ptype instanceof PTableType) {
        PTableType ptt = (PTableType) ptype;
        this.key = SeqFileHelper.newInstance(ptt.getKeyType(), null);
        this.value = SeqFileHelper.newInstance(ptt.getValueType(), null);
    } else {
        this.key = NullWritable.get();
        this.value = SeqFileHelper.newInstance(ptype, null);
    }
}

From source file: org.apache.crunch.io.seq.SeqFileReaderFactory.java

License: Apache License

public SeqFileReaderFactory(Class clazz) {
    PType<T> ptype = Writables.writables(clazz);
    this.converter = ptype.getConverter();
    this.mapFn = ptype.getInputMapFn();
    this.key = NullWritable.get();
    this.value = (Writable) ReflectionUtils.newInstance(clazz, null);
}

From source file: org.apache.crunch.lib.TupleWritablePartitionerTest.java

License: Apache License

@Test
public void testGetPartition() {
    IntWritable intWritable = new IntWritable(3);
    TupleWritable key = new TupleWritable(new Writable[] { intWritable });
    assertEquals(3, tupleWritableParitioner.getPartition(key, NullWritable.get(), 5));
    assertEquals(1, tupleWritableParitioner.getPartition(key, NullWritable.get(), 2));
}

From source file: org.apache.crunch.lib.TupleWritablePartitionerTest.java

License: Apache License

@Test
public void testGetPartition_NegativeHashValue() {
    IntWritable intWritable = new IntWritable(-3);
    // Sanity check, if this doesn't work then the premise of this test is wrong
    assertEquals(-3, intWritable.hashCode());

    TupleWritable key = new TupleWritable(new Writable[] { intWritable });
    assertEquals(3, tupleWritableParitioner.getPartition(key, NullWritable.get(), 5));
    assertEquals(1, tupleWritableParitioner.getPartition(key, NullWritable.get(), 2));
}

From source file: org.apache.crunch.lib.TupleWritablePartitionerTest.java

License: Apache License

@Test
public void testGetPartition_IntegerMinValue() {
    IntWritable intWritable = new IntWritable(Integer.MIN_VALUE);
    // Sanity check, if this doesn't work then the premise of this test is wrong
    assertEquals(Integer.MIN_VALUE, intWritable.hashCode());

    TupleWritable key = new TupleWritable(new Writable[] { intWritable });
    assertEquals(0, tupleWritableParitioner.getPartition(key, NullWritable.get(), Integer.MAX_VALUE));
}

From source file: org.apache.crunch.types.avro.AvroKeyConverter.java

License: Apache License

@Override
public NullWritable outputValue(K value) {
    return NullWritable.get();
}