Example usage for org.apache.hadoop.io Writable readFields

Introduction

On this page you can find example usages of the readFields method of org.apache.hadoop.io.Writable.

Prototype

void readFields(DataInput in) throws IOException;

Document

Deserialize the fields of this object from in.
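The typical pattern pairs readFields with its counterpart write: write serializes the object's fields in a fixed order, and readFields reads them back in exactly the same order, overwriting the instance's state. Below is a minimal sketch of a custom Writable illustrating the round trip; the class PointWritable and its fields are hypothetical, not part of Hadoop.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Hypothetical example class, for illustration only.
public class PointWritable implements Writable {
    private int x;
    private int y;

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeInt(x);
        out.writeInt(y);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // Fields must be read back in exactly the order write() emitted them.
        x = in.readInt();
        y = in.readInt();
    }

    public static void main(String[] args) throws IOException {
        PointWritable before = new PointWritable();
        before.x = 1;
        before.y = 2;

        // Round trip through an in-memory byte array.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        before.write(new DataOutputStream(bytes));

        PointWritable after = new PointWritable();
        after.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        // after now holds x = 1, y = 2.
    }
}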

Usage

From source file: org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopWritableSerialization.java

License: Apache License
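In this serialization adapter, a Writable is instantiated (or an existing instance reused) and then populated from the DataInput with readFields; any IOException is wrapped in an IgniteCheckedException.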

/** {@inheritDoc} */
@Override
public Object read(DataInput in, @Nullable Object obj) throws IgniteCheckedException {
    Writable w = obj == null ? U.newInstance(cls) : cls.cast(obj);

    try {
        w.readFields(in);
    } catch (IOException e) {
        throw new IgniteCheckedException(e);
    }

    return w;
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.GridHadoopConcurrentHashMultimapSelftest.java

License: Apache License
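This self-test exercises readFields in two ways: keys and values are read through the task input iterator, and a visitor copies raw off-heap bytes into a buffer and deserializes reusable IntWritable instances from a GridUnsafeDataInput.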

private void check(GridHadoopConcurrentHashMultimap m, Multimap<Integer, Integer> mm,
        final Multimap<Integer, Integer> vis, GridHadoopTaskContext taskCtx) throws Exception {
    final GridHadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    assertEquals(m.keys(), keys);

    X.println("keys: " + keys + " cap: " + m.capacity());

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new GridHadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.GridHadoopSkipListSelfTest.java

License: Apache License
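The skip-list variant of the previous test; it additionally asserts that keys come back in ascending order, and uses readFields in the same visitor pattern.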

private void check(GridHadoopMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis,
        GridHadoopTaskContext taskCtx) throws Exception {
    final GridHadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    int prevKey = Integer.MIN_VALUE;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        assertTrue(k.get() > prevKey);

        prevKey = k.get();

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    //!        assertEquals(m.keys(), keys);

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new GridHadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimapSelftest.java

License: Apache License
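The same concurrent-hash-multimap test under its later class name, again deserializing reusable IntWritable key and value objects with readFields inside the visitor.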

private void check(HadoopConcurrentHashMultimap m, Multimap<Integer, Integer> mm,
        final Multimap<Integer, Integer> vis, HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    assertEquals(m.keys(), keys);

    X.println("keys: " + keys + " cap: " + m.capacity());

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new HadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopSkipListSelfTest.java

License: Apache License
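The skip-list self-test under its later class name; as above, readFields repopulates reusable IntWritable instances from off-heap memory while keys are checked for ascending order.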

private void check(HadoopMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis,
        HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    int prevKey = Integer.MIN_VALUE;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        assertTrue(k.get() > prevKey);

        prevKey = k.get();

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    //!        assertEquals(m.keys(), keys);

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new HadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            UNSAFE.copyMemory(null, ptr, buf, BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}

From source file: org.apache.mahout.common.DummyRecordWriter.java

License: Apache License
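Here readFields implements a deep clone: the original Writable is serialized to a byte array with write, and a freshly instantiated copy is populated from those bytes with readFields.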

private Writable cloneWritable(Writable original) throws IOException {

    Writable clone;
    try {
        clone = original.getClass().asSubclass(Writable.class).newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate writable!", e);
    }
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();

    original.write(new DataOutputStream(bytes));
    clone.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

    return clone;
}

From source file: org.apache.mahout.math.MatrixWritableTest.java

License: Apache License
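A round-trip test helper: toWrite is serialized into an in-memory byte array, and readFields populates toRead from the same bytes, with the streams closed in finally blocks.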

private static void writeAndRead(Writable toWrite, Writable toRead) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    try {
        toWrite.write(dos);
    } finally {
        Closeables.close(dos, false);
    }

    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    try {
        toRead.readFields(dis);
    } finally {
        Closeables.close(dis, true);
    }
}

From source file: org.apache.mahout.math.VectorTest.java

License: Apache License
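The same round-trip helper in a more compact form, without explicit stream cleanup.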

private static void writeAndRead(Writable toWrite, Writable toRead) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutput dos = new DataOutputStream(baos);
    toWrite.write(dos);

    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInput dis = new DataInputStream(bais);
    toRead.readFields(dis);
}

From source file: org.apache.mahout.math.VectorWritableTest.java

License: Apache License
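Another copy of the round-trip helper, closing each stream in its own finally block.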

private static void writeAndRead(Writable toWrite, Writable toRead) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    try {
        toWrite.write(dos);
    } finally {
        Closeables.close(dos, false);
    }

    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    try {
        toRead.readFields(dis);
    } finally {
        Closeables.close(dis, true);
    }
}

From source file: org.apache.nutch.crawl.TestMapWritable.java

License: Apache License
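A utility from the Hadoop test code: it writes a Writable into a DataOutputBuffer, reads it back into a newly instantiated copy with readFields, and asserts that the two are equal.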

/** Utility method for testing writables, from hadoop code */
public void testWritable(Writable before) throws Exception {
    DataOutputBuffer dob = new DataOutputBuffer();
    before.write(dob);

    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), dob.getLength());

    Writable after = (Writable) before.getClass().newInstance();
    after.readFields(dib);

    assertEquals(before, after);
}