List of usage examples for the org.apache.hadoop.io.LongWritable constructor:
public LongWritable(long value)
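Before the project-specific examples below, a minimal self-contained sketch of what this constructor does: it wraps a primitive long in Hadoop's Writable serialization framework, with get() to read the value back and write()/readFields() for the serialization round-trip. The LongWritable API calls shown are standard Hadoop I/O; the demo class name and values are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.LongWritable;

public class LongWritableDemo {
    public static void main(String[] args) throws IOException {
        // Construct with an initial value; get() returns the wrapped primitive.
        LongWritable count = new LongWritable(42L);
        System.out.println(count.get()); // 42

        // Writable round-trip: serialize, then deserialize into a fresh instance.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        count.write(new DataOutputStream(bytes));

        LongWritable copy = new LongWritable(); // no-arg constructor, value 0
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.equals(count)); // true
    }
}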
From source file: co.nubetech.hiho.merge.TestMergeValueReducer.java
License: Apache License

@Test
public void testReducerForLongWritableKey() throws IOException, InterruptedException {
    LongWritable key = new LongWritable(Long.parseLong("123"));
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);
    HihoValue hihoValue1 = new HihoValue();
    HihoValue hihoValue2 = new HihoValue();
    Text value1 = new Text("value1");
    Text value2 = new Text("value2");
    hihoValue1.setVal(value1);
    hihoValue2.setVal(value2);
    hihoValue1.setIsOld(true);
    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();
    values.add(hihoValue1);
    values.add(hihoValue2);
    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(value2, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}
From source file: co.nubetech.hiho.similarity.ngram.ScoreReducer.java
License: Apache License

@Override
public void reduce(ValuePair key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    if (key == null) {
        throw new IOException("Key is null");
    }
    logger.info("Key in ScoreReducer is: " + key);
    long valInLong = 0;
    Iterator<IntWritable> iterator = values.iterator();
    while (iterator.hasNext()) {
        logger.info("Value in ScoreReducer is: " + iterator.next());
        valInLong = valInLong + 1L;
    }
    context.write(key, new LongWritable(valInLong));
}
From source file: co.nubetech.hiho.similarity.ngram.TestScoreJob.java
License: Apache License

@Test
public void testScoreJobForValidValues() throws Exception {
    ValuePair valuePair = new ValuePair();
    valuePair.setValue1(new Text("This is a bookdelimiterBetweenKeyAndValuevalue1"));
    valuePair.setValue2(new Text("This is not a bookdelimiterBetweenKeyAndValuevalue2"));
    HashMap<ValuePair, IntWritable> inputData1 = new HashMap<ValuePair, IntWritable>();
    inputData1.put(valuePair, new IntWritable(1));
    createSequenceFileInHdfs(inputData1, "outputOfNGramJob", "part-r-00000");
    HashMap<ValuePair, IntWritable> inputData2 = new HashMap<ValuePair, IntWritable>();
    inputData2.put(valuePair, new IntWritable(1));
    createSequenceFileInHdfs(inputData2, "outputOfNGramJob", "part-r-00001");
    String[] args = new String[] {};
    ScoreJob job = runScoreJob(args);
    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "outputOfScoreJob/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<ValuePair> expectedOutputForKey = new ArrayList<ValuePair>();
    expectedOutputForKey.add(valuePair);
    List<LongWritable> expectedOutputForValue = new ArrayList<LongWritable>();
    expectedOutputForValue.add(new LongWritable(2));
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
        logger.debug("Key and value is: " + writableKey + ", " + writableValue);
        assertTrue("Matched output " + writableKey, expectedOutputForKey.contains(writableKey));
        assertTrue("Matched output " + writableValue, expectedOutputForValue.contains(writableValue));
        count++;
    }
    IOUtils.closeStream(reader);
    assertEquals(1, count);
}
From source file: co.nubetech.hiho.similarity.ngram.TestScoreReducer.java
License: Apache License

@Test
public void testReducerValidValues() throws IOException, InterruptedException {
    ArrayList<IntWritable> values = new ArrayList<IntWritable>();
    values.add(new IntWritable(1));
    values.add(new IntWritable(1));
    ValuePair key = new ValuePair();
    key.setValue1(new Text("This is a bookdelimiterBetweenKeyAndValuevalue1"));
    key.setValue2(new Text("This is not a bookdelimiterBetweenKeyAndValuevalue2"));
    Reducer.Context context = mock(Reducer.Context.class);
    ScoreReducer scoreReducer = new ScoreReducer();
    scoreReducer.reduce(key, values, context);
    verify(context).write(key, new LongWritable(2));
}
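The same assertion could also be written with MRUnit's ReduceDriver instead of a hand-mocked Reducer.Context, in the style the MoabLicenseInfoTest examples below use for mappers. A sketch, assuming MRUnit (org.apache.hadoop.mrunit) is on the test classpath; the test class name is illustrative, not part of the original source.

import java.util.Arrays;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.junit.Test;

// Hypothetical MRUnit variant of the mock-based test above.
public class TestScoreReducerWithDriver {
    @Test
    public void testReducerValidValues() throws Exception {
        ValuePair key = new ValuePair();
        key.setValue1(new Text("This is a bookdelimiterBetweenKeyAndValuevalue1"));
        key.setValue2(new Text("This is not a bookdelimiterBetweenKeyAndValuevalue2"));

        // Two occurrences of the pair should reduce to a score of 2.
        ReduceDriver.newReduceDriver(new ScoreReducer())
                .withInput(key, Arrays.asList(new IntWritable(1), new IntWritable(1)))
                .withOutput(key, new LongWritable(2))
                .runTest();
    }
}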
From source file: co.nubetech.hiho.similarity.ngram.TestSimilarityJob.java
License: Apache License

@Test
public void testSimilarityJobForValidValues() throws Exception {
    final String inputData = "This is a book value1\nThis is not a book value2";
    createTextFileInHDFS(inputData, "input", "testFile1.txt");
    String[] args = new String[] { "-inputPath", "input" };
    SimilarityJob.main(args);
    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "outputOfScoreJob/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<ValuePair> expectedOutputForKey = new ArrayList<ValuePair>();
    ValuePair valuePair = new ValuePair();
    valuePair.setValue1(new Text("This is a bookdelimiterBetweenKeyAndValuevalue1"));
    valuePair.setValue2(new Text("This is not a bookdelimiterBetweenKeyAndValuevalue2"));
    expectedOutputForKey.add(valuePair);
    List<LongWritable> expectedOutputForValue = new ArrayList<LongWritable>();
    expectedOutputForValue.add(new LongWritable(2));
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
        logger.debug("Key and value is: " + writableKey + ", " + writableValue);
        assertTrue("Matched output " + writableKey, expectedOutputForKey.contains(writableKey));
        assertTrue("Matched output " + writableValue, expectedOutputForValue.contains(writableValue));
        count++;
    }
    IOUtils.closeStream(reader);
    assertEquals(1, count);
}
From source file: com.alectenharmsel.research.hadoop.MoabLicenseInfoTest.java

@Test
public void testNoOutputNeeded() throws IOException {
    List<Pair<Text, Text>> res = mapDriver
            .withInput(new LongWritable(0),
                    new Text("05/11 22:58:25 MNodeUpdateResExpression(nyx5624,FALSE,TRUE)"))
            .run();
    Assert.assertTrue(res.isEmpty());
}
From source file: com.alectenharmsel.research.hadoop.MoabLicenseInfoTest.java

@Test
public void testLicenseLine() throws IOException {
    mapDriver
            .withInput(new LongWritable(0),
                    new Text("05/11 22:58:25 INFO: License cfd_solv_ser 0 of 6 available (Idle: 33.3% Active: 66.67%)"))
            .withOutput(new Text("cfd_solv_ser-05-11"), new Text("0,6"))
            .runTest();
}
From source file: com.alectenharmsel.research.LineCountMapper.java
License: Apache License

public void map(Text key, Text contents, Context context) throws IOException, InterruptedException {
    long numLines = 0;
    String tmp = contents.toString();
    for (int i = 0; i < tmp.length(); i++) {
        if (tmp.charAt(i) == '\n') {
            numLines++;
        }
    }
    context.write(key, new LongWritable(numLines));
}
From source file: com.alectenharmsel.research.LineCountReducer.java
License: Apache License

public void reduce(Text key, Iterable<LongWritable> counts, Context context)
        throws IOException, InterruptedException {
    long total = 0;
    for (LongWritable tmp : counts) {
        total += tmp.get();
    }
    context.getCounter(LcCounters.NUM_LINES).increment(total);
    context.write(key, new LongWritable(total));
}
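For context on how LongWritable flows through a whole job, a hedged sketch of a driver that could wire the LineCountMapper and LineCountReducer above together. The driver class name, paths, and the choice of SequenceFileInputFormat are assumptions, not part of the original source; an input format supplying Text/Text pairs is needed because this mapper's map(Text, Text, ...) signature does not match the default TextInputFormat.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Hypothetical driver; LineCountMapper and LineCountReducer are the classes shown above.
public class LineCountDriver {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "line count");
        job.setJarByClass(LineCountDriver.class);
        // Assumption: input is a SequenceFile of Text/Text pairs, matching the
        // mapper's map(Text, Text, ...) signature.
        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setMapperClass(LineCountMapper.class);
        job.setReducerClass(LineCountReducer.class);
        job.setOutputKeyClass(Text.class);
        // LongWritable carries the per-key line counts out of both phases.
        job.setOutputValueClass(LongWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}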
From source file: com.alectenharmsel.research.MoabLicensesMapperTest.java

@Test
public void testNoOutputNeeded() throws IOException {
    List<Pair<Text, Text>> res = driver
            .withInput(new LongWritable(0),
                    new Text("05/11 22:58:25 MNodeUpdateResExpression(nyx5624,FALSE,TRUE)"))
            .run();
    Assert.assertTrue(res.isEmpty());
}