Example usage for org.apache.hadoop.io IntWritable get

Introduction

On this page you can find example usages of org.apache.hadoop.io.IntWritable.get().

Prototype

public int get() 

Document

Return the value of this IntWritable.
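
Before the full examples below, here is a minimal self-contained sketch of get() in action (the class name IntWritableGetExample is illustrative, not taken from any of the source files below):

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetExample {
    public static void main(String[] args) {
        // Wrap a primitive int in a Hadoop-serializable IntWritable.
        IntWritable writable = new IntWritable(42);

        // get() returns the wrapped primitive value.
        int value = writable.get();
        System.out.println(value); // prints 42

        // The wrapped value can be replaced with set(int) and read back with get().
        writable.set(7);
        System.out.println(writable.get()); // prints 7
    }
}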

Usage

From source file: org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections.HadoopSkipListSelfTest.java

License: Apache License

/**
 * Check.
 * @param m The multimap.
 * @param mm The multimap storing expectations.
 * @param vis The multimap to store visitor results.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */
private void check(HadoopMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis,
        HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    int prevKey = Integer.MIN_VALUE;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        assertTrue(k.get() > prevKey);

        prevKey = k.get();

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    //!        assertEquals(m.keys(), keys);

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new HadoopMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            GridUnsafe.copyOffheapHeap(ptr, buf, GridUnsafe.BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections.HadoopSkipListSelfTest.java

License: Apache License

/**
 * @throws Exception if failed.
 */
public void testMultiThreaded() throws Exception {
    GridUnsafeMemory mem = new GridUnsafeMemory(0);

    X.println("___ Started");

    Random rnd = new GridRandom();

    for (int i = 0; i < 20; i++) {
        HadoopJobInfo job = new JobInfo();

        final HadoopTaskContext taskCtx = new TaskContext();

        final HadoopMultimap m = new HadoopSkipList(job, mem);

        final ConcurrentMap<Integer, Collection<Integer>> mm = new ConcurrentHashMap<>();

        X.println("___ MT");

        multithreaded(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                X.println("___ TH in");

                Random rnd = new GridRandom();

                IntWritable key = new IntWritable();
                IntWritable val = new IntWritable();

                HadoopMultimap.Adder a = m.startAdding(taskCtx);

                for (int i = 0; i < 50000; i++) {
                    int k = rnd.nextInt(32000);
                    int v = rnd.nextInt();

                    key.set(k);
                    val.set(v);

                    a.write(key, val);

                    Collection<Integer> list = mm.get(k);

                    if (list == null) {
                        list = new ConcurrentLinkedQueue<>();

                        Collection<Integer> old = mm.putIfAbsent(k, list);

                        if (old != null)
                            list = old;
                    }

                    list.add(v);
                }

                a.close();

                X.println("___ TH out");

                return null;
            }
        }, 3 + rnd.nextInt(27));

        HadoopTaskInput in = m.input(taskCtx);

        int prevKey = Integer.MIN_VALUE;

        while (in.next()) {
            IntWritable key = (IntWritable) in.key();

            assertTrue(key.get() > prevKey);

            prevKey = key.get();

            Iterator<?> valsIter = in.values();

            Collection<Integer> vals = mm.remove(key.get());

            assertNotNull(vals);

            while (valsIter.hasNext()) {
                IntWritable val = (IntWritable) valsIter.next();

                assertTrue(vals.remove(val.get()));
            }

            assertTrue(vals.isEmpty());
        }

        in.close();
        m.close();

        assertEquals(0, mem.allocatedSize());
    }
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.GridHadoopConcurrentHashMultimapSelftest.java

License: Apache License

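/**
 * Checks multimap contents against the expected mapping and collects visitor results.
 * @param m The multimap.
 * @param mm The multimap storing expectations.
 * @param vis The multimap to store visitor results.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */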
private void check(GridHadoopConcurrentHashMultimap m, Multimap<Integer, Integer> mm,
        final Multimap<Integer, Integer> vis, GridHadoopTaskContext taskCtx) throws Exception {
    final GridHadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    assertEquals(m.keys(), keys);

    X.println("keys: " + keys + " cap: " + m.capacity());

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new GridHadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.GridHadoopConcurrentHashMultimapSelftest.java

License: Apache License

/**
 * @throws Exception if failed.
 */
public void testMultiThreaded() throws Exception {
    GridUnsafeMemory mem = new GridUnsafeMemory(0);

    X.println("___ Started");

    Random rnd = new GridRandom();

    for (int i = 0; i < 20; i++) {
        GridHadoopJobInfo job = new JobInfo();

        final GridHadoopTaskContext taskCtx = new TaskContext();

        final GridHadoopConcurrentHashMultimap m = new GridHadoopConcurrentHashMultimap(job, mem, 16);

        final ConcurrentMap<Integer, Collection<Integer>> mm = new ConcurrentHashMap<>();

        X.println("___ MT");

        multithreaded(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                X.println("___ TH in");

                Random rnd = new GridRandom();

                IntWritable key = new IntWritable();
                IntWritable val = new IntWritable();

                GridHadoopMultimap.Adder a = m.startAdding(taskCtx);

                for (int i = 0; i < 50000; i++) {
                    int k = rnd.nextInt(32000);
                    int v = rnd.nextInt();

                    key.set(k);
                    val.set(v);

                    a.write(key, val);

                    Collection<Integer> list = mm.get(k);

                    if (list == null) {
                        list = new ConcurrentLinkedQueue<>();

                        Collection<Integer> old = mm.putIfAbsent(k, list);

                        if (old != null)
                            list = old;
                    }

                    list.add(v);
                }

                a.close();

                X.println("___ TH out");

                return null;
            }
        }, 3 + rnd.nextInt(27));

        X.println("___ Check: " + m.capacity());

        assertEquals(mm.size(), m.keys());

        assertTrue(m.capacity() > 32000);

        GridHadoopTaskInput in = m.input(taskCtx);

        while (in.next()) {
            IntWritable key = (IntWritable) in.key();

            Iterator<?> valsIter = in.values();

            Collection<Integer> vals = mm.remove(key.get());

            assertNotNull(vals);

            while (valsIter.hasNext()) {
                IntWritable val = (IntWritable) valsIter.next();

                assertTrue(vals.remove(val.get()));
            }

            assertTrue(vals.isEmpty());
        }

        in.close();
        m.close();

        assertEquals(0, mem.allocatedSize());
    }
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.GridHadoopHashMapSelfTest.java

License: Apache License

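/**
 * Checks multimap contents against the expected mapping.
 * @param m The multimap.
 * @param mm The multimap storing expectations.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */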
private void check(GridHadoopHashMultimap m, Multimap<Integer, Integer> mm, GridHadoopTaskContext taskCtx)
        throws Exception {
    final GridHadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        ArrayList<Integer> vs = new ArrayList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.add(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(sorted(exp), sorted(vs));
    }

    X.println("keys: " + keys + " cap: " + m.capacity());

    assertEquals(mmm.size(), keys);

    assertEquals(m.keys(), keys);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.GridHadoopSkipListSelfTest.java

License: Apache License

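/**
 * Checks multimap contents against the expected mapping and collects visitor results.
 * @param m The multimap.
 * @param mm The multimap storing expectations.
 * @param vis The multimap to store visitor results.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */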
private void check(GridHadoopMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis,
        GridHadoopTaskContext taskCtx) throws Exception {
    final GridHadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    int prevKey = Integer.MIN_VALUE;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        assertTrue(k.get() > prevKey);

        prevKey = k.get();

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    //!        assertEquals(m.keys(), keys);

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new GridHadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.GridHadoopSkipListSelfTest.java

License: Apache License

/**
 * @throws Exception if failed.
 */
public void testMultiThreaded() throws Exception {
    GridUnsafeMemory mem = new GridUnsafeMemory(0);

    X.println("___ Started");

    Random rnd = new GridRandom();

    for (int i = 0; i < 20; i++) {
        GridHadoopJobInfo job = new JobInfo();

        final GridHadoopTaskContext taskCtx = new TaskContext();

        final GridHadoopMultimap m = new GridHadoopSkipList(job, mem);

        final ConcurrentMap<Integer, Collection<Integer>> mm = new ConcurrentHashMap<>();

        X.println("___ MT");

        multithreaded(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                X.println("___ TH in");

                Random rnd = new GridRandom();

                IntWritable key = new IntWritable();
                IntWritable val = new IntWritable();

                GridHadoopMultimap.Adder a = m.startAdding(taskCtx);

                for (int i = 0; i < 50000; i++) {
                    int k = rnd.nextInt(32000);
                    int v = rnd.nextInt();

                    key.set(k);
                    val.set(v);

                    a.write(key, val);

                    Collection<Integer> list = mm.get(k);

                    if (list == null) {
                        list = new ConcurrentLinkedQueue<>();

                        Collection<Integer> old = mm.putIfAbsent(k, list);

                        if (old != null)
                            list = old;
                    }

                    list.add(v);
                }

                a.close();

                X.println("___ TH out");

                return null;
            }
        }, 3 + rnd.nextInt(27));

        GridHadoopTaskInput in = m.input(taskCtx);

        int prevKey = Integer.MIN_VALUE;

        while (in.next()) {
            IntWritable key = (IntWritable) in.key();

            assertTrue(key.get() > prevKey);

            prevKey = key.get();

            Iterator<?> valsIter = in.values();

            Collection<Integer> vals = mm.remove(key.get());

            assertNotNull(vals);

            while (valsIter.hasNext()) {
                IntWritable val = (IntWritable) valsIter.next();

                assertTrue(vals.remove(val.get()));
            }

            assertTrue(vals.isEmpty());
        }

        in.close();
        m.close();

        assertEquals(0, mem.allocatedSize());
    }
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimapSelftest.java

License: Apache License

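/**
 * Checks multimap contents against the expected mapping and collects visitor results.
 * @param m The multimap.
 * @param mm The multimap storing expectations.
 * @param vis The multimap to store visitor results.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */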
private void check(HadoopConcurrentHashMultimap m, Multimap<Integer, Integer> mm,
        final Multimap<Integer, Integer> vis, HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    assertEquals(m.keys(), keys);

    X.println("keys: " + keys + " cap: " + m.capacity());

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new HadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopSkipListSelfTest.java

License: Apache License

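/**
 * Checks multimap contents against the expected mapping and collects visitor results.
 * @param m The multimap.
 * @param mm The multimap storing expectations.
 * @param vis The multimap to store visitor results.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */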
private void check(HadoopMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis,
        HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    int prevKey = Integer.MIN_VALUE;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        assertTrue(k.get() > prevKey);

        prevKey = k.get();

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    //!        assertEquals(m.keys(), keys);

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new HadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.impala.hive.executor.TestUdf.java

License: Apache License

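/** Returns the sum of the two wrapped int values, or -1 if either argument is null. */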
public int evaluate(IntWritable a, IntWritable b) {
    if (a == null || b == null)
        return -1;
    return a.get() + b.get();
}