List of usage examples for org.apache.hadoop.io.NullWritable.get()
public static NullWritable get()
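NullWritable serializes to zero bytes and has no public constructor; get() returns its shared singleton instance, which is what the tests below pass as the SequenceFile key when only the value carries data. A minimal standalone sketch of the call, assuming hadoop-common is on the classpath (the class name is illustrative, not taken from the examples below):

import org.apache.hadoop.io.NullWritable;

public class NullWritableGetExample {
    public static void main(String[] args) {
        // get() always returns the same shared instance; NullWritable cannot be constructed directly.
        NullWritable key = NullWritable.get();
        System.out.println(key == NullWritable.get()); // true: singleton
        System.out.println(key);                       // prints "(null)"
    }
}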
From source file: org.apache.camel.component.hdfs.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadDouble() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, DoubleWritable.class);
    NullWritable keyWritable = NullWritable.get();
    DoubleWritable valueWritable = new DoubleWritable();
    double value = 3.1415926535;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadInt() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, IntWritable.class);
    NullWritable keyWritable = NullWritable.get();
    IntWritable valueWritable = new IntWritable();
    int value = 314159265;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadLong() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-long").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, LongWritable.class);
    NullWritable keyWritable = NullWritable.get();
    LongWritable valueWritable = new LongWritable();
    long value = 31415926535L;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadBytes() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-bytes").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BytesWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BytesWritable valueWritable = new BytesWritable();
    String value = "CIAO!";
    valueWritable.set(value.getBytes(), 0, value.getBytes().length);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadString() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-string").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, Text.class);
    NullWritable keyWritable = NullWritable.get();
    Text valueWritable = new Text();
    String value = "CIAO!";
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadBoolean() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BooleanWritable valueWritable = new BooleanWritable();
    valueWritable.set(true);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadByte() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, ByteWritable.class);
    NullWritable keyWritable = NullWritable.get();
    ByteWritable valueWritable = new ByteWritable();
    byte value = 3;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.message(0).body(byte.class).isEqualTo(3);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
From source file: org.apache.camel.component.hdfs2.HdfsConsumerTest.java
License: Apache License
@Test
public void testReadDouble() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, DoubleWritable.class);
    NullWritable keyWritable = NullWritable.get();
    DoubleWritable valueWritable = new DoubleWritable();
    double value = 3.1415926535;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:///" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0")
                    .to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}