Example usage for org.apache.hadoop.io NullWritable get

Introduction

This page collects example usages of org.apache.hadoop.io.NullWritable#get().

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
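
Because the constructor is private, get() is the only way to obtain a NullWritable, and every call returns the same shared object. The following minimal sketch is our own illustration, not taken from the usage examples below (the class name NullWritableDemo is made up):

import org.apache.hadoop.io.NullWritable;

public class NullWritableDemo {
    public static void main(String[] args) {
        // get() always returns the same shared instance.
        NullWritable a = NullWritable.get();
        NullWritable b = NullWritable.get();
        System.out.println(a == b); // true: there is only one instance
        // NullWritable serializes to zero bytes, which is why it serves as the
        // placeholder key (or value) when only one side of a key-value pair
        // carries data, as in the SequenceFile examples below.
    }
}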

Usage

From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java

License: Apache License

@Test
public void testReadInt() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, IntWritable.class);
    NullWritable keyWritable = NullWritable.get();
    IntWritable valueWritable = new IntWritable();
    int value = 314159265;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}

From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java

License: Apache License

@Test
public void testReadLong() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-long").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, LongWritable.class);
    NullWritable keyWritable = NullWritable.get();
    LongWritable valueWritable = new LongWritable();
    long value = 31415926535L;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}

From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java

License: Apache License

@Test
public void testReadBytes() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-bytes").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BytesWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BytesWritable valueWritable = new BytesWritable();
    String value = "CIAO!";
    valueWritable.set(value.getBytes(), 0, value.getBytes().length);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}

From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java

License: Apache License

@Test
public void testReadString() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-string").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, Text.class);
    NullWritable keyWritable = NullWritable.get();
    Text valueWritable = new Text();
    String value = "CIAO!";
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}

From source file: org.apache.camel.itest.osgi.hdfs.HdfsRouteTest.java

License: Apache License

@Test
public void testReadString() throws Exception {
    if (SKIP) {
        return;
    }

    final Path file = new Path(new File("../../../../target/test/test-camel-string").getAbsolutePath());
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    //conf.setClassLoader(this.getClass().getClassLoader());
    // add the default configuration file (core-default.xml) as a resource
    conf.addResource(HdfsRouteTest.class.getResourceAsStream("/core-default.xml"));
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, Text.class);
    NullWritable keyWritable = NullWritable.get();
    Text valueWritable = new Text();
    String value = "CIAO!";
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
}

From source file: org.apache.carbondata.sdk.file.AvroCarbonWriter.java

License: Apache License

/**
 * Writes a single row of data; the input row is an Avro Record.
 */
@Override
public void write(Object object) throws IOException {
    try {
        GenericData.Record record = (GenericData.Record) object;

        // convert Avro record to CSV String[]
        Object[] csvRecord = avroToCsv(record);
        writable.set(csvRecord);
        recordWriter.write(NullWritable.get(), writable);
    } catch (Exception e) {
        close();
        throw new IOException(e);
    }
}

From source file: org.apache.carbondata.sdk.file.CSVCarbonWriter.java

License: Apache License

/**
 * Writes a single row of data; the input row is of type String[].
 */
@Override
public void write(Object object) throws IOException {
    try {
        writable.set((String[]) object);
        recordWriter.write(NullWritable.get(), writable);
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file: org.apache.carbondata.sdk.file.JsonCarbonWriter.java

License: Apache License

/**
 * Writes a single row of data, accepted as a JSON string.
 *
 * @param object one row of data as a JSON string
 * @throws IOException if the row cannot be written
 */
@Override
public void write(Object object) throws IOException {
    Objects.requireNonNull(object, "Input cannot be null");
    try {
        String[] jsonString = new String[1];
        jsonString[0] = (String) object;
        writable.set(jsonString);
        recordWriter.write(NullWritable.get(), writable);
    } catch (Exception e) {
        close();
        throw new IOException(e);
    }
}

From source file: org.apache.crunch.impl.mem.MemPipeline.java

License: Apache License

private void writeSequenceFileFromPCollection(final FileSystem fs, final Path path,
        final PCollection collection) throws IOException {
    final PType pType = collection.getPType();
    final Converter converter = pType.getConverter();
    final Class valueClass = converter.getValueClass();

    final SequenceFile.Writer writer = new SequenceFile.Writer(fs, fs.getConf(), path, NullWritable.class,
            valueClass);

    for (final Object o : collection.materialize()) {
        final Object value = pType.getOutputMapFn().map(o);
        writer.append(NullWritable.get(), value);
    }

    writer.close();
}

From source file: org.apache.crunch.impl.mem.MemPipelineFileReadingWritingIT.java

License: Apache License

@Test
public void testMemPipelineWriteSequenceFile_PCollection() throws IOException {
    // write
    PCollection<String> collection = MemPipeline.typedCollectionOf(Writables.strings(), EXPECTED_COLLECTION);
    final Target target = To.sequenceFile(outputFile.toString());
    MemPipeline.getInstance().write(collection, target);

    // read
    final SequenceFile.Reader reader = new Reader(FileSystem.getLocal(baseTmpDir.getDefaultConfiguration()),
            new Path(outputFile.toString()), baseTmpDir.getDefaultConfiguration());
    final List<String> actual = Lists.newArrayList();
    final NullWritable key = NullWritable.get();
    final Text value = new Text();
    while (reader.next(key, value)) {
        actual.add(value.toString());
    }
    reader.close();

    // assert read same as written
    assertEquals(EXPECTED_COLLECTION, actual);
}