List of usage examples for org.apache.hadoop.io.IntWritable.get()
public int get()
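A minimal standalone sketch of the basic round trip (not taken from the sources below; the class name is illustrative):

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetExample {
    public static void main(String[] args) {
        // Wrap a primitive int in a Hadoop Writable.
        IntWritable writable = new IntWritable(42);

        // get() unboxes the current value.
        int value = writable.get();
        System.out.println(value); // prints 42

        // set() mutates the same instance in place, so get() reflects the new value.
        writable.set(value + 1);
        System.out.println(writable.get()); // prints 43
    }
}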
From source file: org.apache.nutch.scoring.orphan.TestOrphanScoringFilter.java
License: Apache License
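Here get() unwraps the orphan timestamp that the scoring filter stored in the CrawlDatum metadata as an IntWritable.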
protected int getTime(CrawlDatum datum) {
    IntWritable writable = (IntWritable) datum.getMetaData().get(OrphanScoringFilter.ORPHAN_KEY_WRITABLE);
    return writable.get();
}
From source file: org.apache.orc.impl.TestStringRedBlackTree.java
License: Apache License
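This recursive red-black invariant check threads an IntWritable through the recursion as a mutable visited-node counter; get() reads the running count before each increment.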
/**
 * Checks the red-black tree rules to make sure that we have correctly built
 * a valid tree.
 *
 * Properties:
 *   1. Red nodes must have black children
 *   2. Each node must have the same black height on both sides.
 *
 * @param node The id of the root of the subtree to check for the red-black
 *             tree properties.
 * @return The black-height of the subtree.
 */
private int checkSubtree(RedBlackTree tree, int node, IntWritable count) throws IOException {
    if (node == RedBlackTree.NULL) {
        return 1;
    }
    count.set(count.get() + 1);
    boolean is_red = tree.isRed(node);
    int left = tree.getLeft(node);
    int right = tree.getRight(node);
    if (is_red) {
        if (tree.isRed(left)) {
            printTree(tree, "", tree.root);
            throw new IllegalStateException("Left node of " + node + " is " + left + " and both are red.");
        }
        if (tree.isRed(right)) {
            printTree(tree, "", tree.root);
            throw new IllegalStateException("Right node of " + node + " is " + right + " and both are red.");
        }
    }
    int left_depth = checkSubtree(tree, left, count);
    int right_depth = checkSubtree(tree, right, count);
    if (left_depth != right_depth) {
        printTree(tree, "", tree.root);
        throw new IllegalStateException(
                "Lopsided tree at node " + node + " with depths " + left_depth + " and " + right_depth);
    }
    if (is_red) {
        return left_depth;
    } else {
        return left_depth + 1;
    }
}
From source file: org.apache.orc.impl.TestStringRedBlackTree.java
License: Apache License
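The companion entry point compares the counter's final value from get() against the tree's recorded size to detect unreachable nodes.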
/**
 * Checks the validity of the entire tree. Also ensures that the number of
 * nodes visited is the same as the size of the set.
 */
void checkTree(RedBlackTree tree) throws IOException {
    IntWritable count = new IntWritable(0);
    if (tree.isRed(tree.root)) {
        printTree(tree, "", tree.root);
        throw new IllegalStateException("root is red");
    }
    checkSubtree(tree, tree.root, count);
    if (count.get() != tree.size) {
        printTree(tree, "", tree.root);
        throw new IllegalStateException("Broken tree! visited= " + count.get() + " size=" + tree.size);
    }
}
From source file: org.apache.orc.tools.convert.CsvReader.java
License: Apache License
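buildConverter advances a shared IntWritable as it assigns column ids, so calling get() afterwards yields the total number of columns.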
/**
 * Create a CSV reader
 * @param reader the stream to read from
 * @param input the underlying file that is only used for getting the
 *              position within the file
 * @param size the number of bytes in the underlying stream
 * @param schema the schema to read into
 * @param separatorChar the character between fields
 * @param quoteChar the quote character
 * @param escapeChar the escape character
 * @param headerLines the number of header lines
 * @param nullString the string that is translated to null
 * @throws IOException
 */
public CsvReader(java.io.Reader reader, FSDataInputStream input, long size, TypeDescription schema,
        char separatorChar, char quoteChar, char escapeChar, int headerLines, String nullString)
        throws IOException {
    this.underlying = input;
    this.reader = new CSVReader(reader, separatorChar, quoteChar, escapeChar, headerLines);
    this.nullString = nullString;
    this.totalSize = size;
    IntWritable nextColumn = new IntWritable(0);
    this.converter = buildConverter(nextColumn, schema);
    this.columns = nextColumn.get();
}
From source file: org.apache.pig.builtin.TestOrcStorage.java
License: Apache License
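The test reads an ORC row back through a StructObjectInspector and uses get() to unbox the int field before asserting on its value.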
@Test
public void testSimpleStore() throws Exception {
    pigServer.registerQuery("A = load '" + INPUT1 + "' as (a0:int, a1:chararray);");
    pigServer.store("A", OUTPUT1, "OrcStorage");
    Path outputFilePath = new Path(new Path(OUTPUT1), "part-m-00000");
    Reader reader = OrcFile.createReader(fs, outputFilePath);
    assertEquals(reader.getNumberOfRows(), 2);
    RecordReader rows = reader.rows(null);
    Object row = rows.next(null);
    StructObjectInspector soi = (StructObjectInspector) reader.getObjectInspector();
    IntWritable intWritable = (IntWritable) soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(0));
    Text text = (Text) soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(1));
    assertEquals(intWritable.get(), 65536);
    assertEquals(text.toString(), "world");
    row = rows.next(null);
    intWritable = (IntWritable) soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(0));
    text = (Text) soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(1));
    assertEquals(intWritable.get(), 1);
    assertEquals(text.toString(), "hello");
    // A bug in ORC InputFormat does not allow empty file in input directory
    fs.delete(new Path(OUTPUT1, "_SUCCESS"), true);
    // Read the output file back
    pigServer.registerQuery("A = load '" + OUTPUT1 + "' using OrcStorage();");
    Schema s = pigServer.dumpSchema("A");
    assertEquals(s.toString(), "{a0: int,a1: chararray}");
    Iterator<Tuple> iter = pigServer.openIterator("A");
    Tuple t = iter.next();
    assertEquals(t.size(), 2);
    assertEquals(t.get(0), 65536);
    assertEquals(t.get(1), "world");
    t = iter.next();
    assertEquals(t.size(), 2);
    assertEquals(t.get(0), 1);
    assertEquals(t.get(1), "hello");
    assertFalse(iter.hasNext());
}
From source file: org.apache.pig.piggybank.squeal.backend.storm.oper.TriBasicPersist.java
License: Apache License
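combine() sums two count maps; get() unboxes each count both when cloning val1's entries and when folding in val2's.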
@Override
public MapIdxWritable combine(MapIdxWritable val1, MapIdxWritable val2) {
    MapIdxWritable ret = zero();
    // Copy val1's counts into the result, cloning each IntWritable so the
    // merge below never mutates val1's values.
    if (val1 != null) {
        for (Entry<Writable, Writable> ent : val1.entrySet()) {
            ret.put(ent.getKey(), new IntWritable(((IntWritable) ent.getValue()).get()));
        }
    }
    // Merge val2's counts into the copy.
    if (val2 != null) {
        for (Entry<Writable, Writable> ent : val2.entrySet()) {
            int c = ((IntWritable) ent.getValue()).get();
            IntWritable iw = (IntWritable) ret.get(ent.getKey());
            if (iw == null) {
                iw = new IntWritable(c);
                ret.put(ent.getKey(), iw);
            } else {
                iw.set(iw.get() + c);
            }
        }
    }
    return ret;
}
From source file: org.apache.pig.piggybank.squeal.backend.storm.oper.TriWindowCombinePersist.java
License: Apache License
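For non-windowed tuples this method keeps an IntWritable per tuple and uses get() to accumulate the signed count in place.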
void addTuple(MapIdxWritable s, NullableTuple t, int c) {
    int idx = t.getIndex();
    Long ws = windowSettings.get(idx);
    if (ws != null) {
        IntWritable key_tmp = new IntWritable(idx);
        // Pull the window.
        WindowBundle<NullableTuple> w = (WindowBundle<NullableTuple>) s.get(key_tmp);
        /*
         * FIXME: If we get the negative before the positive, this won't work.
         * The proper way to do this would be to count the removes in window
         * state so we can ignore adds when the matching positive values come
         * in.
         */
        if (c < 0) {
            // Remove the item for negative items.
            w.remove(t);
        } else {
            // Add it otherwise.
            w.push(t);
        }
    } else {
        // This is not a windowed element, just add like BASEPERSIST.
        IntWritable iw = (IntWritable) s.get(t);
        if (iw == null) {
            iw = new IntWritable(c);
            s.put(t, iw);
        } else {
            iw.set(iw.get() + c);
        }
    }
}
From source file: org.apache.pig.piggybank.squeal.backend.storm.state.WindowBundle.java
License: Apache License
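When merging open windows, get() extracts each signed count so it can be replayed as individual +1/-1 updates.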
public void merge(WindowBundle other) {
    // Pull the closed windows in.
    for (Object o : other.closed.values()) {
        Window w = (Window) o;
        if (closed.containsKey(w.cur_id)) {
            // Create a new closed window with a new id.
            Window nw = new Window();
            nw.closedTS = w.closedTS;
            nw.cur_id = getId();
            nw.isClosed = true;
            nw.itemCount = w.itemCount;
            nw.contents = w.contents;
            closed.put(nw.cur_id, nw);
        } else {
            closed.put(w.cur_id, w);
        }
    }
    // Merge the tuples from the other open window.
    if (other.openWin != null) {
        // If we have no open window, clone the id from the merging value.
        if (openWin == null) {
            openNewWindow();
            openWin.cur_id = other.openWin.cur_id;
        }
        for (Entry<Writable, Writable> ent : other.openWin.contents.entrySet()) {
            IntWritable c = (IntWritable) ent.getValue();
            int delta = 1;
            int count = c.get();
            if (c.get() < 0) {
                delta = -1;
                count = -count;
            }
            // Unroll things so we don't end up with windows with too much data.
            for (int i = 0; i < count; i++) {
                update(ent.getKey(), delta);
            }
        }
    }
}
From source file: org.apache.pig.piggybank.squeal.backend.storm.state.WindowBundle.java
License: Apache License
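update() uses get() both to accumulate the new count and to detect when an entry's count cancels to zero and should be dropped.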
void update(Writable o, int c) {
    if (openWin == null) {
        openNewWindow();
    }
    IntWritable iw = (IntWritable) openWin.contents.get(o);
    if (iw == null) {
        iw = new IntWritable(c);
        openWin.contents.put(o, iw);
    } else {
        iw.set(iw.get() + c);
        if (iw.get() == 0) {
            openWin.contents.remove(o);
        }
    }
    // FIXME: This is incorrect for c != +/- 1.
    if (c > 0 && iw.get() > 0) {
        openWin.itemCount += 1;
    } else if (c < 0 && openWin.itemCount > 0) {
        openWin.itemCount -= 1;
    }
    if (openWin.itemCount == maxSize) {
        closeWindow();
    }
}
From source file: org.apache.pirk.responder.wideskies.mapreduce.RowCalcReducer.java
License: Apache License
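The reducer key is itself an IntWritable; get() converts the row hash to a primitive int for both the lookup-table path and the row computation.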
@Override
public void reduce(IntWritable rowIndex, Iterable<BytesArrayWritable> dataElementPartitions, Context ctx)
        throws IOException, InterruptedException {
    logger.debug("Processing reducer for hash = " + rowIndex);
    ctx.getCounter(MRStats.NUM_HASHES_REDUCER).increment(1);

    if (queryInfo.useHDFSExpLookupTable()) {
        ComputeEncryptedRow.loadCacheFromHDFS(fs, query.getExpFile(rowIndex.get()), query);
    }

    // Compute the encrypted row elements for a query from extracted data partitions
    List<Tuple2<Long, BigInteger>> encRowValues = ComputeEncryptedRow.computeEncRow(dataElementPartitions,
            query, rowIndex.get(), limitHitsPerSelector, maxHitsPerSelector, useLocalCache);

    // Emit <colNum, colVal>
    for (Tuple2<Long, BigInteger> encRowVal : encRowValues) {
        keyOut.set(encRowVal._1);
        BigInteger val = encRowVal._2;
        valueOut.set(val.toString());
        mos.write(FileConst.PIR, keyOut, valueOut);
    }
}