List of usage examples for org.apache.hadoop.io.IntWritable#get()
public int get()
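get() returns the primitive int wrapped by the IntWritable; the examples below all call it to unwrap keys or values deserialized from SequenceFiles or MapReduce records. A minimal round-trip sketch (class and variable names chosen here for illustration):

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
    public static void main(String[] args) {
        IntWritable w = new IntWritable(); // value defaults to 0
        w.set(42);                         // wrap a primitive int
        int unwrapped = w.get();           // get() returns the wrapped value
        System.out.println(unwrapped);     // prints 42
    }
}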
From source file:org.plista.kornakapi.core.training.LDATopicFactorizer.java
License:Apache License
/**
 * Lazily builds the index-to-item and item-to-index maps from the docIndex sequence file.
 * @throws IOException
 */
private void indexItem() throws IOException {
    if (indexItem == null) {
        indexItem = new HashMap<Integer, String>();
        itemIndex = new HashMap<String, Integer>();
        Reader reader = new SequenceFile.Reader(fs, new Path(this.conf.getCVBInputPath() + "/docIndex"), lconf);
        IntWritable key = new IntWritable();
        Text newVal = new Text();
        while (reader.next(key, newVal)) {
            // key.get() unwraps the document index; substring(1) strips the leading marker character.
            indexItem.put(key.get(), newVal.toString().substring(1));
            itemIndex.put(newVal.toString().substring(1), key.get());
        }
        Closeables.close(reader, false);
    }
}
From source file:org.plista.kornakapi.core.training.LDATopicFactorizer.java
License:Apache License
/**
 * Gets the topic posterior from the LDA output.
 * @throws IOException
 */
private void getAllTopicPosterior() throws IOException {
    itemFeatures = new HashMap<String, Vector>();
    Reader reader = new SequenceFile.Reader(fs, new Path(this.conf.getLDADocTopicsPath()), lconf);
    IntWritable key = new IntWritable();
    VectorWritable newVal = new VectorWritable();
    while (reader.next(key, newVal)) {
        itemFeatures.put(getIndexItem(key.get()), newVal.get());
    }
    Closeables.close(reader, false);
}
From source file:org.plista.kornakapi.core.training.SemanticModel.java
License:Apache License
/**
 * Method to load the model from sequence files.
 * @throws IOException
 */
public void read() throws IOException {
    Path indexPath = path.suffix("/indexItem.model");
    if (fs.exists(indexPath)) {
        indexItem = new HashMap<Integer, String>();
        Reader reader = new SequenceFile.Reader(fs, indexPath, lconf);
        IntWritable key = new IntWritable();
        Text val = new Text();
        while (reader.next(key, val)) {
            indexItem.put(key.get(), val.toString());
        }
        Closeables.close(reader, false);
    }
    Path itemIndexPath = path.suffix("/itemIndex.model");
    if (fs.exists(itemIndexPath)) {
        itemIndex = new HashMap<String, Integer>();
        Reader reader = new SequenceFile.Reader(fs, itemIndexPath, lconf);
        IntWritable val = new IntWritable();
        Text key = new Text();
        while (reader.next(key, val)) {
            itemIndex.put(key.toString(), val.get());
        }
        Closeables.close(reader, false);
    }
    Path featurePath = path.suffix("/itemFeature.model");
    if (fs.exists(featurePath)) {
        Reader reader = new SequenceFile.Reader(fs, featurePath, lconf);
        itemFeatures = new HashMap<String, Vector>();
        Text key = new Text();
        VectorWritable val = new VectorWritable();
        while (reader.next(key, val)) {
            itemFeatures.put(key.toString(), val.get());
        }
        Closeables.close(reader, false);
    }
    if (log.isInfoEnabled()) {
        log.info("LDA Model Read");
    }
}
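The read() method above only consumes sequence files. For context, a minimal sketch of the write side that would produce a Text-to-IntWritable file such as itemIndex.model, assuming the classic SequenceFile.createWriter(FileSystem, Configuration, Path, keyClass, valueClass) factory (deprecated in newer Hadoop releases but widely used; the path and item names here are illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class ItemIndexWriterDemo {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path itemIndexPath = new Path("itemIndex.model"); // illustrative path
        // Text keys (item names) mapped to IntWritable indices, mirroring what read() consumes.
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, itemIndexPath, Text.class, IntWritable.class);
        try {
            writer.append(new Text("item-1"), new IntWritable(0));
            writer.append(new Text("item-2"), new IntWritable(1));
        } finally {
            writer.close();
        }
    }
}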
From source file:org.qcri.pca.CompositeJobTest.java
License:Apache License
private void verifyYtX(DummyRecordWriter<IntWritable, VectorWritable> writer) {
    Assert.assertEquals("The reducer should output " + cols + " keys!", cols, writer.getKeys().size());
    for (IntWritable key : writer.getKeys()) {
        List<VectorWritable> list = writer.getValue(key);
        assertEquals("reducer produces more than one value per key!", 1, list.size());
        Vector v = list.get(0).get();
        assertEquals("reducer vector size must match the x size!", xsize, v.size());
        for (int c = 0; c < xsize; c++)
            // key.get() selects the row of the expected y'x matrix.
            Assert.assertEquals("The ytx[" + key.get() + "][" + c + "] is incorrect: ",
                    ytx[key.get()][c], v.get(c), EPSILON);
    }
}
From source file:org.qcri.pca.MeanAndSpanJobTest.java
License:Apache License
private void verifyMapperOutput(DummyRecordWriter<IntWritable, VectorWritable> writer) {
    Assert.assertEquals("Each mapper should output three keys!", 3, writer.getKeys().size());
    for (IntWritable key : writer.getKeys()) {
        List<VectorWritable> list = writer.getValue(key);
        assertEquals("Mapper did not combine the results!", 1, list.size());
        Vector v = list.get(0).get();
        switch (key.get()) {
        case MeanAndSpanJob.MEANVECTOR:
            Assert.assertEquals("MeanVector size does not match!", v.size(), cols + 1);
            Assert.assertEquals("MeanVector count does not match!", rows, v.get(0), EPSILON);
            verifySum(inputVectors, v.viewPart(1, cols));
            break;
        case MeanAndSpanJob.MINVECTOR:
            Assert.assertEquals("MinVector size does not match!", v.size(), cols);
            verifyMin(inputVectors, v);
            break;
        case MeanAndSpanJob.MAXVECTOR:
            Assert.assertEquals("MaxVector size does not match!", v.size(), cols);
            verifyMax(inputVectors, v);
            break;
        default:
            Assert.fail("Unknown key from mapper");
        }
    }
}
From source file:org.qcri.pca.MeanAndSpanJobTest.java
License:Apache License
private void verifyReducerOutput(DummyRecordWriter<IntWritable, VectorWritable> writer) {
    Assert.assertEquals("The reducer should output two keys!", 2, writer.getKeys().size());
    for (IntWritable key : writer.getKeys()) {
        List<VectorWritable> list = writer.getValue(key);
        assertEquals("Reducer did not combine the results!", 1, list.size());
        Vector v = list.get(0).get();
        switch (key.get()) {
        case MeanAndSpanJob.MEANVECTOR:
            Assert.assertEquals("MeanVector size does not match!", v.size(), cols);
            verifyMean(inputVectors, v);
            break;
        case MeanAndSpanJob.SPANVECTOR:
            Assert.assertEquals("SpanVector size does not match!", v.size(), cols);
            verifySpan(inputVectors, v);
            break;
        default:
            Assert.fail("Unknown key from reducer");
        }
    }
}
From source file:org.qcri.pca.NormalizeJobTest.java
License:Apache License
private void verifyMapperOutput(DummyRecordWriter<IntWritable, VectorWritable> writer) {
    Assert.assertEquals("The mapper should output " + rows + " keys!", rows, writer.getKeys().size());
    double[][] normalizedVectors = normalize(inputVectors);
    for (IntWritable key : writer.getKeys()) {
        List<VectorWritable> list = writer.getValue(key);
        assertEquals("Mapper produces more than one value per key!", 1, list.size());
        Vector v = list.get(0).get();
        for (int c = 0; c < cols; c++)
            // key.get() identifies the row index of the normalized input.
            Assert.assertEquals("The normalized value is incorrect: ",
                    normalizedVectors[key.get()][c], v.get(c), EPSILON);
    }
}
From source file:org.qcri.pca.ReconstructionErrJobTest.java
License:Apache License
private void verifyReducerOutput(DummyRecordWriter<IntWritable, DoubleWritable> writer) {
    Assert.assertEquals("The reducer should output three keys!", 3, writer.getKeys().size());
    for (IntWritable key : writer.getKeys()) {
        List<DoubleWritable> list = writer.getValue(key);
        assertEquals("reducer produces more than one value per key!", 1, list.size());
        Double value = list.get(0).get();
        switch (key.get()) {
        case 0:
            assertEquals("the computed reconstructionError is incorrect!", reconstructionError, value, EPSILON);
            break;
        case 1:
            assertEquals("the computed yNorm is incorrect!", yNorm, value, EPSILON);
            break;
        case 2:
            assertEquals("the computed centralizedYNorm is incorrect!", centralizedYNorm, value, EPSILON);
            break;
        default:
            fail("Unknown key in reading the results: " + key);
        }
    }
}
From source file:org.qcri.pca.TestSequenceFile.java
private static void printSequenceFile(String inputStr, int printRow) throws IOException {
    Configuration conf = new Configuration();
    Path finalNumberFile = new Path(inputStr);
    SequenceFile.Reader reader = new SequenceFile.Reader(FileSystem.get(conf), finalNumberFile, conf);
    IntWritable key = new IntWritable();
    VectorWritable value = new VectorWritable();
    Vector printVector = null;
    while (reader.next(key, value)) {
        if (key.get() == printRow)
            printVector = value.get();
        // Count the non-zero elements of the current row vector.
        int cnt = 0;
        Iterator<Element> iter = value.get().nonZeroes().iterator();
        for (; iter.hasNext(); iter.next())
            cnt++;
        System.out.println("# " + key + " " + cnt + " " + value.get().zSum());
    }
    reader.close();
    if (printVector != null)
        System.out.println("##### " + printRow + " " + printVector);
    else
        System.out.println("##### " + key + " " + value.get());
}
From source file:org.shadowmask.engine.hive.udf.UDFAge.java
License:Apache License
/**
 * Integer version.
 */
public IntWritable evaluate(IntWritable age, IntWritable level, IntWritable unit) {
    if (age == null) {
        return null;
    }
    // level and unit are assumed to be non-null here.
    int ageVal = age.get();
    int levelVal = level.get();
    int unitVal = unit.get();
    Generalizer<Integer, Integer> generalizer = new IntGeneralizer(Integer.MAX_VALUE, unitVal);
    IntWritable result = new IntWritable(generalizer.generalize(ageVal, levelVal));
    return result;
}
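For reference, a hypothetical call site for this UDF (the level and unit values are invented for illustration; their semantics are defined by IntGeneralizer, which is not shown here):

UDFAge udf = new UDFAge();
// All three arguments are IntWritable; get() unwraps the generalized result.
IntWritable generalized = udf.evaluate(new IntWritable(37), new IntWritable(1), new IntWritable(10));
if (generalized != null) {
    System.out.println(generalized.get());
}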