List of usage examples for org.apache.hadoop.io.NullWritable.get()
public static NullWritable get()
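NullWritable is a zero-length, singleton Writable; get() returns the shared instance, which callers emit wherever a key or value carries no information. Below is a minimal sketch (hypothetical ValuesOnlyMapper class, not taken from the source files that follow) that emits each input line as a key with NullWritable.get() as the value:

import java.io.IOException;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper for illustration: the output value type is NullWritable,
// so every record is written with the shared instance from NullWritable.get().
public class ValuesOnlyMapper extends Mapper<Object, Text, Text, NullWritable> {
    @Override
    protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
        context.write(value, NullWritable.get());
    }
}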
From source file:br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterReducer.java
protected void reduce(Text key, Iterable<Text> values, Context context)
        throws java.io.IOException, InterruptedException {
    BigInteger partialCounter = BigInteger.valueOf(0);
    String lastResult = "";
    for (Text value : values) {
        NQueenCountRecord record = NQueenCountRecord.parse(value.toString(),
                NQueenIncrementalCounterMapper.QTD_POSICOES_PRESOLUCAO);
        if (record.isBranchSolved()) {
            // Accumulate the solution count of every solved branch under this key.
            if (record.getPartialSolutionText().compareTo(lastResult) > 0) {
                lastResult = record.getPartialSolutionText();
            }
            partialCounter = partialCounter.add(record.getSolutionsCount());
        } else {
            // Unsolved branches are passed through unchanged, keyed by NullWritable.
            context.write(NullWritable.get(), value);
        }
    }
    if (partialCounter.compareTo(BigInteger.valueOf(0)) > 0) {
        String sumValue = key.toString() + "=" + partialCounter.toString() + ";";
        context.write(NullWritable.get(), new Text(sumValue));
    }
}
From source file:br.com.lassal.nqueens.grid.mapreduce.NQueenIncrementalCounterResultReducer.java
protected void reduce(Text key, Iterable<Text> values, Context context)
        throws java.io.IOException, InterruptedException {
    BigInteger partialCount = BigInteger.ZERO;
    long numPartialSolutions = 0;
    for (Text item : values) {
        String value = item.toString();
        if (NQueenIncrementalCounterResultMapper.PARTIAL_SOLUTION_ID.equals(value)
                && numPartialSolutions < Long.MAX_VALUE) {
            // Count markers for branches that are still open.
            numPartialSolutions++;
        } else {
            BigInteger solutionCount = new BigInteger(value);
            partialCount = partialCount.add(solutionCount);
        }
    }
    context.write(NullWritable.get(), new Text(key.toString() + "=" + partialCount.toString() + ";"));
    if (numPartialSolutions > 0) {
        context.write(NullWritable.get(), new Text(
                key.toString() + " NÃO FINALIZADO - EXISTEM " + numPartialSolutions + " SOLUÇÕES ABERTAS."));
    }
}
From source file:br.com.lassal.nqueens.grid.mapreduce.NQueenPartialShotMapperReducerTest.java
@Test
public void testMapper() throws IOException {
    mapDriver.getConfiguration().set(NQueenPartialShotMapper.NQueenRowSize_PROP, "4");
    mapDriver.withInput(new LongWritable(), new Text("4#"));
    mapDriver.withOutput(new Text("4:1,3,0,2"), NullWritable.get());
    mapDriver.withOutput(new Text("4:2,0,3,1"), NullWritable.get());
    mapDriver.runTest();
}
From source file:br.com.lassal.nqueens.grid.mapreduce.NQueenPartialShotMapperReducerTest.java
/**
 * 3,0,4,1 => 5,2
 * 1,3,5,0 => 2,4
 * 2,5,1,4 => 0,3
 * 4,2,0,5 => 3,1
 */
@Test
public void testMapper2ndStepBoard6() throws IOException {
    mapDriver.withInput(new LongWritable(), new Text("6:1,3,5,0"));
    mapDriver.withInput(new LongWritable(), new Text("6:2,5,1,4"));
    mapDriver.withInput(new LongWritable(), new Text("6:3,0,4,1"));
    mapDriver.withInput(new LongWritable(), new Text("6:4,2,0,5"));
    mapDriver.withOutput(new Text("6:1,3,5,0,2,4"), NullWritable.get());
    mapDriver.withOutput(new Text("6:2,5,1,4,0,3"), NullWritable.get());
    mapDriver.withOutput(new Text("6:3,0,4,1,5,2"), NullWritable.get());
    mapDriver.withOutput(new Text("6:4,2,0,5,3,1"), NullWritable.get());
    mapDriver.runTest();
}
From source file:br.com.lassal.nqueens.grid.mapreduce.NQueenPartialShotMapperReducerTest.java
/**
 * 3,0,4,1 => 5,2
 * 1,3,5,0 => 2,4
 * 2,5,1,4 => 0,3
 * 4,2,0,5 => 3,1
 */
@Test
public void testMapper2ndStepBoard8() throws IOException {
    mapDriver.withInput(new LongWritable(), new Text("6:1,3,5,0"));
    mapDriver.withInput(new LongWritable(), new Text("6:2,5,1,4"));
    mapDriver.withInput(new LongWritable(), new Text("6:3,0,4,1"));
    mapDriver.withInput(new LongWritable(), new Text("6:4,2,0,5"));
    mapDriver.withOutput(new Text("6:1,3,5,0,2,4"), NullWritable.get());
    mapDriver.withOutput(new Text("6:2,5,1,4,0,3"), NullWritable.get());
    mapDriver.withOutput(new Text("6:3,0,4,1,5,2"), NullWritable.get());
    mapDriver.withOutput(new Text("6:4,2,0,5,3,1"), NullWritable.get());
    mapDriver.runTest();
}
From source file:br.com.lassal.nqueens.grid.mapreduce.NQueensIncrementalSolutionCounterTest.java
@Test
public void testReduceFunction() throws IOException {
    List<Text> values = new ArrayList<Text>();
    values.add(new Text("12:0,2,4,1,0,0,0,0=5;"));
    values.add(new Text("12:0,2,4,1,0,0,0,1=3;"));
    values.add(new Text("12:0,2,4,1,0,0,0,2,5,5=3"));
    reduceDriver.addInput(new Text("12:0,2,4,1"), values);
    reduceDriver.addOutput(NullWritable.get(), new Text("12:0,2,4,1,0,0,0,2,5,5=3"));
    reduceDriver.addOutput(NullWritable.get(), new Text("12:0,2,4,1=8;"));
    reduceDriver.runTest();
}
From source file:ca.nlap.giraphsociallayout.ConnectedComponentsComputationTestInMemory.java
License:Apache License
public static Entry<IntWritable, NullWritable>[] makeEdges(int... args) {
    Entry<IntWritable, NullWritable>[] result = new Entry[args.length];
    for (int i = 0; i < args.length; i++) {
        result[i] = new SimpleEntry<IntWritable, NullWritable>(new IntWritable(args[i]), NullWritable.get());
    }
    return result;
}
From source file:cascading.avro.AvroScheme.java
License:Apache License
/**
 * Sink method to take an outgoing tuple and write it to Avro.
 *
 * @param flowProcess The cascading FlowProcess object. Should be passed in by cascading automatically.
 * @param sinkCall    The cascading SinkCall object. Should be passed in by cascading automatically.
 * @throws IOException
 */
@Override
public void sink(FlowProcess<JobConf> flowProcess, SinkCall<Object[], OutputCollector> sinkCall)
        throws IOException {
    TupleEntry tupleEntry = sinkCall.getOutgoingEntry();
    IndexedRecord record = new Record((Schema) sinkCall.getContext()[0]);
    Object[] objectArray = CascadingToAvro.parseTupleEntry(tupleEntry, (Schema) sinkCall.getContext()[0]);
    for (int i = 0; i < objectArray.length; i++) {
        record.put(i, objectArray[i]);
    }
    //noinspection unchecked
    sinkCall.getOutput().collect(new AvroWrapper<IndexedRecord>(record), NullWritable.get());
}
From source file:cascading.scheme.DeprecatedAvroScheme.java
License:Apache License
/**
 * Sink method to take an outgoing tuple and write it to Avro.
 *
 * @param flowProcess The cascading FlowProcess object. Should be passed in by cascading automatically.
 * @param sinkCall    The cascading SinkCall object. Should be passed in by cascading automatically.
 * @throws IOException
 */
@Override
public void sink(FlowProcess<? extends Configuration> flowProcess, SinkCall<Object[], OutputCollector> sinkCall)
        throws IOException {
    TupleEntry tupleEntry = sinkCall.getOutgoingEntry();
    IndexedRecord record = new Record((Schema) sinkCall.getContext()[0]);
    Object[] objectArray = CascadingToAvro.parseTupleEntry(tupleEntry, (Schema) sinkCall.getContext()[0]);
    for (int i = 0; i < objectArray.length; i++) {
        record.put(i, objectArray[i]);
    }
    //noinspection unchecked
    sinkCall.getOutput().collect(new AvroWrapper<IndexedRecord>(record), NullWritable.get());
}
From source file:cascading.scheme.hadoop.WritableSequenceFile.java
License:Open Source License
@Override
public void sink(FlowProcess<? extends Configuration> flowProcess, SinkCall<Void, OutputCollector> sinkCall)
        throws IOException {
    TupleEntry tupleEntry = sinkCall.getOutgoingEntry();
    // Default both slots to the NullWritable singleton; only the configured side(s) are replaced below.
    Writable keyValue = NullWritable.get();
    Writable valueValue = NullWritable.get();
    if (keyType == null) {
        valueValue = (Writable) tupleEntry.getObject(0);
    } else if (valueType == null) {
        keyValue = (Writable) tupleEntry.getObject(0);
    } else {
        keyValue = (Writable) tupleEntry.getObject(0);
        valueValue = (Writable) tupleEntry.getObject(1);
    }
    sinkCall.getOutput().collect(keyValue, valueValue);
}