List of usage examples for org.apache.hadoop.io.NullWritable.get()
public static NullWritable get()
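NullWritable is an immutable, zero-length Writable; get() returns its single shared instance, which is the idiomatic placeholder whenever a MapReduce key or value slot must be filled but carries no data. Below is a minimal, self-contained sketch (the LineOnlyMapper class name is illustrative and not taken from any of the source files listed on this page) showing the most common pattern in the examples that follow: writing NullWritable.get() as the output key so that only the values are emitted.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper: discards the byte-offset key and emits each input line
// with the NullWritable singleton in the key slot (it serializes to zero bytes).
public class LineOnlyMapper extends Mapper<LongWritable, Text, NullWritable, Text> {

    @Override
    protected void map(LongWritable offset, Text line, Context context)
            throws IOException, InterruptedException {
        // NullWritable.get() always returns the same shared instance.
        context.write(NullWritable.get(), line);
    }
}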
From source file: com.stratio.deep.es.utils.UtilES.java
License: Apache License
/**
 * Returns the plain Java object wrapped inside a Writable.
 *
 * @param writable the Writable to unwrap
 * @return the wrapped value, or null if the Writable type is not recognized
 * @throws IllegalAccessException
 * @throws InstantiationException
 * @throws InvocationTargetException
 */
private static Object getObjectFromWritable(Writable writable)
        throws IllegalAccessException, InstantiationException, InvocationTargetException {
    Object object = null;
    if (writable instanceof NullWritable) {
        object = NullWritable.get();
    } else if (writable instanceof BooleanWritable) {
        object = ((BooleanWritable) writable).get();
    } else if (writable instanceof Text) {
        object = writable.toString();
    } else if (writable instanceof ByteWritable) {
        object = ((ByteWritable) writable).get();
    } else if (writable instanceof IntWritable) {
        object = ((IntWritable) writable).get();
    } else if (writable instanceof LongWritable) {
        object = ((LongWritable) writable).get();
    } else if (writable instanceof BytesWritable) {
        object = ((BytesWritable) writable).getBytes();
    } else if (writable instanceof DoubleWritable) {
        object = ((DoubleWritable) writable).get();
    } else if (writable instanceof FloatWritable) {
        object = ((FloatWritable) writable).get();
    } else {
        // TODO: unrecognized Writable type, fall through and return null
    }
    return object;
}
From source file: com.talis.mapreduce.dicenc.ThirdReducer.java
License: Apache License
@Override
public void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    String s = null;
    String p = null;
    String o = null;
    String g = null;
    for (Text value : values) {
        String[] v = value.toString().split("-");
        if (v[1].equals("s"))
            s = v[0];
        if (v[1].equals("p"))
            p = v[0];
        if (v[1].equals("o"))
            o = v[0];
        if (v[1].equals("g"))
            g = v[0];
    }
    if (g != null) {
        context.write(NullWritable.get(), new Text(s + " " + p + " " + o + " " + g));
    } else if ((s != null) && (p != null) && (o != null)) {
        context.write(NullWritable.get(), new Text(s + " " + p + " " + o));
    }
}
From source file: com.tdunning.plume.local.lazy.MSCRReducer.java
License: Apache License
@SuppressWarnings("unchecked")
protected void reduce(final PlumeObject arg0, java.lang.Iterable<PlumeObject> values,
        Reducer<PlumeObject, PlumeObject, NullWritable, NullWritable>.Context arg2)
        throws IOException, InterruptedException {
    PCollection col = mscr.getChannelByNumber().get(arg0.sourceId);
    OutputChannel oC = mscr.getOutputChannels().get(col);
    if (oC.reducer != null) {
        // apply reducer
        ParallelDo pDo = oC.reducer;
        DoFn reducer = pDo.getFunction(); // TODO how to check / report this
        List<WritableComparable> vals = Lists.newArrayList();
        for (PlumeObject val : values) {
            vals.add(val.obj);
        }
        reducer.process(Pair.create(arg0.obj, vals), new EmitFn() {
            @Override
            public void emit(Object v) {
                try {
                    if (v instanceof Pair) {
                        Pair p = (Pair) v;
                        mos.write(arg0.sourceId + "", p.getKey(), p.getValue());
                    } else {
                        mos.write(arg0.sourceId + "", NullWritable.get(), (WritableComparable) v);
                    }
                } catch (Exception e) {
                    e.printStackTrace(); // TODO How to report this
                }
            }
        });
    } else {
        // direct writing - write all key, value pairs
        for (PlumeObject val : values) {
            if (oC.output instanceof PTable) {
                mos.write(arg0.sourceId + "", arg0.obj, val.obj);
            } else {
                mos.write(arg0.sourceId + "", NullWritable.get(), val.obj);
            }
        }
    }
}
From source file: com.telefonica.iot.tidoop.mrlib.reducers.LinesJoiner.java
License: Open Source License
@Override
public void reduce(Text key, Iterable<Text> filteredLines, Context context)
        throws IOException, InterruptedException {
    for (Text filteredLine : filteredLines) {
        context.write(NullWritable.get(), filteredLine);
    } // for
}
From source file: com.teradata.benchto.generator.HiveTypesGeneratorTest.java
License: Apache License
@Test
public void testMapper() throws Exception {
    Configuration serializationConfiguration = new Configuration();
    MapDriver mapDriver = MapDriver.newMapDriver(new HiveTypesGenerator.HiveTypesMapper())
            .withInput(new LongWritable(0L), NullWritable.get())
            .withInput(new LongWritable(1L), NullWritable.get())
            .withInput(new LongWritable(2L), NullWritable.get())
            .withInput(new LongWritable(3L), NullWritable.get())
            .withInput(new LongWritable(4L), NullWritable.get())
            .withOutputFormat(HiveTypesGenerator.getOutputFormatClass(format),
                    HiveTypesGenerator.getInputFormatClass(format))
            .withOutputSerializationConfiguration(serializationConfiguration);
    mapDriver.getConfiguration().set(HiveTypesGenerator.FORMAT_PROPERTY_NAME, format);
    mapDriver.getConfiguration().set(HiveTypesGenerator.HIVE_TYPE_PROPERTY_NAME, type);
    mapDriver.getConfiguration().setLong(HiveTypesGenerator.NUM_ROWS_PROPERTY_NAME, 5L);
    mapDriver.getConfiguration().setInt(NUM_MAPS, 1);

    List<Pair<NullWritable, Writable>> output = mapDriver.run();
    extractMapperProperties(mapDriver);

    assertEquals(expectedSerializedRow(0), output.get(0).getSecond());
    assertEquals(expectedSerializedRow(1), output.get(1).getSecond());
    assertEquals(expectedSerializedRow(2), output.get(2).getSecond());
    assertEquals(expectedSerializedRow(3), output.get(3).getSecond());
    assertEquals(expectedSerializedRow(4), output.get(4).getSecond());
}
From source file: com.tfm.utad.reducerdata.ReducerDataPigMapperReducerTest.java
@Test
public void testReducer() throws IOException, ParseException {
    List<ReducerPigKey> values = new ArrayList<>();
    Date date = sdf.parse("2014-12-06 17:43:21");
    ReducerPigKey pigKey = new ReducerPigKey(new Long("123456"), Double.valueOf("40.48989"),
            Double.valueOf("-3.65754"), "User189", date, "20141206-34567-189");
    values.add(pigKey);

    reduceDriver.withInput(new LongWritable(new Long("123456")), values);
    reduceDriver.withOutput(new Text(pigKey.toString()), NullWritable.get());
    reduceDriver.runTest();
}
From source file: com.tfm.utad.reducerdata.ReducerDataPigReducer.java
@Override
public void reduce(LongWritable key, Iterable<ReducerPigKey> values, Context context)
        throws IOException, InterruptedException {
    for (ReducerPigKey value : values) {
        String str = value.toString().replace("\n", "");
        context.write(new Text(str), NullWritable.get());
    }
}
From source file: com.tfm.utad.reducerdata.ReducerDataVerticaMapperReducerTest.java
@Test
public void testReducer() throws IOException, ParseException {
    List<ReducerVerticaValue> values = new ArrayList<>();
    Date date = sdf.parse("2014-12-06 17:43:21");
    ReducerVerticaValue verticaValue = new ReducerVerticaValue(new LongWritable((long) 123456),
            new Text("User189"), date, new Text("20141206-34567-189"),
            new DoubleWritable(Double.valueOf("40.48989")),
            new DoubleWritable(Double.valueOf("-3.65754")), new LongWritable(new Long("189")));
    values.add(verticaValue);

    reduceDriver.withInput(new Text("User189" + "20141206-34567-189"), values);
    reduceDriver.withOutput(new Text(values.get(0).toString()), NullWritable.get());
    reduceDriver.runTest();
}
From source file: com.tfm.utad.reducerdata.ReducerDataVerticaReducer.java
@Override
public void reduce(Text key, Iterable<ReducerVerticaValue> values, Context context)
        throws IOException, InterruptedException {
    int count = 0;
    for (ReducerVerticaValue value : values) {
        if (count == 0) {
            context.write(new Text(value.toString()), NullWritable.get());
        } else {
            if (count % 10 == 0) {
                context.write(new Text(value.toString()), NullWritable.get());
            }
        }
        count++;
    }
}
From source file: com.tomslabs.grid.avro.JSONTextToAvroRecordReducer.java
License: Apache License
public void reduce(Text key, Iterator<Text> values,
        OutputCollector<AvroWrapper<GenericRecord>, NullWritable> output, Reporter reporter)
        throws IOException {
    GenericRecord record = from(key.toString(), schema);
    AvroWrapper<GenericRecord> wrapper = new AvroWrapper<GenericRecord>(record);
    output.collect(wrapper, NullWritable.get());
}