Example usage for org.apache.hadoop.io Writable readFields

Introduction

This page collects example usages of org.apache.hadoop.io Writable readFields, drawn from open-source Apache projects.

Prototype

void readFields(DataInput in) throws IOException;

Document

Deserialize the fields of this object from in.
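
In practice, readFields is the read half of a write/readFields pair: write serializes the object's fields to a DataOutput, and readFields repopulates an existing instance from the matching DataInput. A minimal round trip, using only IntWritable and the java.io stream classes:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.IntWritable;

public class ReadFieldsRoundTrip {
    public static void main(String[] args) throws IOException {
        // write() produces exactly the bytes that readFields() consumes.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new IntWritable(42).write(new DataOutputStream(bytes));

        // readFields() repopulates an existing instance in place, which is
        // what makes Writable objects reusable across records.
        IntWritable copy = new IntWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get()); // prints 42
    }
}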

Usage

From source file:org.apache.giraph.utils.WritableUtils.java

License:Apache License

/**
 * Reads list of Writable objects from data input stream.
 * Input stream should have class information along with object data.
 * @param input input stream
 * @return deserialized list, or null if the stream marks the list as absent
 * @throws IOException on a stream read failure
 */
public static List<? extends Writable> readList(DataInput input) throws IOException {
    try {
        List<Writable> res = null;
        if (input.readBoolean()) {
            int size = input.readInt();
            res = new ArrayList<>(size);
            Class<? extends Writable> clazz = null;
            for (int i = 0; i < size; i++) {
                boolean isNull = input.readBoolean();
                if (isNull) {
                    res.add(null);
                } else {
                    boolean hasClassInfo = input.readBoolean();
                    if (hasClassInfo) {
                        // Class info is only present when the element type
                        // changes; otherwise the previous class is reused.
                        clazz = readClass(input);
                    }
                    Writable element = clazz.newInstance();
                    element.readFields(input);
                    res.add(element);
                }
            }
        }
        return res;

    } catch (InstantiationException | IllegalAccessException e) {
        throw new IllegalStateException("unable to instantiate object", e);
    }
}
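
A hedged usage sketch follows. It assumes a matching writeList helper in the same Giraph class (name and signature assumed here), which must emit exactly the framing readList consumes: a presence flag, the size, then per element a null flag, optional class info, and the element's own fields.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.Arrays;
import java.util.List;

import org.apache.giraph.utils.WritableUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class ReadListRoundTrip {
    public static void main(String[] args) throws Exception {
        List<IntWritable> values = Arrays.asList(new IntWritable(1), new IntWritable(2));

        // writeList(...) is assumed as the counterpart that produces the
        // framing readList() expects.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        WritableUtils.writeList(values, new DataOutputStream(bytes));

        List<? extends Writable> copy = WritableUtils.readList(
                new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy);
    }
}

Note that readList caches the last class it read, so the writer only needs to emit class information when the element type differs from the previous element's.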

From source file:org.apache.hama.bsp.sync.ZKSyncClient.java

License:Apache License

/**
 * Utility function to read a Writable object value from a byte array.
 *
 * @param data The byte array
 * @param valueHolder The Writable instance to populate from the bytes.
 * @return true if data was non-null and valueHolder was populated,
 *         false otherwise.
 * @throws IOException
 */
protected boolean getValueFromBytes(byte[] data, Writable valueHolder) throws IOException {
    if (data != null) {
        ByteArrayInputStream istream = new ByteArrayInputStream(data);
        DataInputStream diStream = new DataInputStream(istream);
        try {
            valueHolder.readFields(diStream);
        } finally {
            diStream.close();
        }
        return true;
    }
    return false;
}
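
The byte array passed in must be exactly what the holder's write method produced. A minimal sketch of building such an array (the toBytes helper name is ours, not Hama's):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public class WritableBytes {
    // Hypothetical helper: produces a byte[] in the form getValueFromBytes()
    // expects, i.e. the raw output of the Writable's own write() method.
    static byte[] toBytes(Writable value) throws IOException {
        ByteArrayOutputStream ostream = new ByteArrayOutputStream();
        DataOutputStream doStream = new DataOutputStream(ostream);
        value.write(doStream);
        doStream.flush();
        return ostream.toByteArray();
    }
}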

From source file:org.apache.hama.graph.GraphJobRunner.java

License:Apache License

public Iterable<Writable> getIterableMessages(final byte[] valuesBytes, final int numOfValues) {

    return new Iterable<Writable>() {
        DataInputStream dis;

        @Override
        public Iterator<Writable> iterator() {
            if (!conf.getBoolean("hama.use.unsafeserialization", false)) {
                dis = new DataInputStream(new ByteArrayInputStream(valuesBytes));
            } else {
                dis = new DataInputStream(new UnsafeByteArrayInputStream(valuesBytes));
            }

            return new Iterator<Writable>() {
                int index = 0;

                @Override
                public boolean hasNext() {
                    return index < numOfValues;
                }

                @Override
                public Writable next() {
                    Writable v = createVertexValue();
                    try {
                        v.readFields(dis);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                    index++;
                    return v;
                }

                @Override
                public void remove() {
                    // no-op: removal is not supported for this read-only view
                }
            };
        }
    };
}
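
Note that the returned Iterable shares a single DataInputStream field across iterators, so it is effectively single-pass: consume it once, in order. A hedged consumption sketch (runner and process are placeholders, not names from this source):

for (Writable message : runner.getIterableMessages(valuesBytes, numOfValues)) {
    process(message); // hypothetical per-message handler
}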

From source file:org.apache.hama.pipes.protocol.UplinkReader.java

License:Apache License

/**
 * Read the given object from the stream. If it is a Text, BytesWritable,
 * IntWritable or LongWritable, decode it directly from its compact
 * framing. Otherwise, fall back to the object's own readFields method.
 *
 * @param obj the Writable to populate
 * @throws IOException
 */
protected void readObject(Writable obj) throws IOException {
    byte[] buffer;
    // For BytesWritable and Text, use the specified length to set the length
    // this causes the "obvious" translations to work. So that if you emit
    // a string "abc" from C++, it shows up as "abc".
    if (obj instanceof Text) {
        int numBytes = WritableUtils.readVInt(this.inStream);
        buffer = new byte[numBytes];
        this.inStream.readFully(buffer);
        ((Text) obj).set(buffer);

    } else if (obj instanceof BytesWritable) {
        int numBytes = WritableUtils.readVInt(this.inStream);
        buffer = new byte[numBytes];
        this.inStream.readFully(buffer);
        ((BytesWritable) obj).set(buffer, 0, numBytes);

    } else if (obj instanceof IntWritable) {
        ((IntWritable) obj).set(WritableUtils.readVInt(this.inStream));

    } else if (obj instanceof LongWritable) {
        ((LongWritable) obj).set(WritableUtils.readVLong(this.inStream));

    } else {
        try {
            LOG.debug("reading type: " + obj.getClass().getName());

            // try reading object
            obj.readFields(this.inStream);

        } catch (IOException e) {
            throw new IOException("Hama Pipes is not able to read " + obj.getClass().getName(), e);
        }
    }
}
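
The read side implies the wire framing: Text and BytesWritable arrive as a vint length followed by the raw bytes. A hedged sketch of the matching write side for Text (the real Hama Pipes writer may differ):

import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;

public class PipesFraming {
    // Frames a Text as vint length + raw bytes, so the reader's
    // readVInt + readFully pair above recovers it exactly.
    static void writeText(DataOutput out, Text text) throws IOException {
        WritableUtils.writeVInt(out, text.getLength());
        out.write(text.getBytes(), 0, text.getLength());
    }
}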

From source file:org.apache.hama.util.WritableUtils.java

License:Apache License

public static void deserialize(byte[] bytes, Writable obj) {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    try {
        obj.readFields(in);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
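
Typical usage pairs this with a serialize counterpart, assumed here to live in the same class:

// serialize(...) is assumed as the counterpart of deserialize().
byte[] bytes = WritableUtils.serialize(new IntWritable(7));

IntWritable copy = new IntWritable();
WritableUtils.deserialize(bytes, copy); // repopulates copy in place

Beware that deserialize only prints the stack trace on IOException, so a caller cannot distinguish a failed read from a successful one; the holder may be left partially populated.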

From source file:org.apache.hama.util.WritableUtils.java

License:Apache License

public static void unsafeDeserialize(byte[] bytes, Writable obj) {
    DataInputStream in = new DataInputStream(new UnsafeByteArrayInputStream(bytes));
    try {
        obj.readFields(in);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:org.apache.ignite.internal.processors.hadoop.GridHadoopUtils.java

License:Apache License

/**
 * Unwraps native split.
 *
 * @param o Wrapper.
 * @return Split.
 */
public static Object unwrapSplit(GridHadoopSplitWrapper o) {
    try {
        Writable w = (Writable) GridHadoopUtils.class.getClassLoader().loadClass(o.className()).newInstance();

        w.readFields(new ObjectInputStream(new ByteArrayInputStream(o.bytes())));

        return w;
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
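
ObjectInputStream works here only because it implements DataInput and because the wrapped bytes start with Java's serialization stream header, which implies the wrap side wrote through an ObjectOutputStream. A hedged sketch of that write side (the wrapper class itself is not shown in this source):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;

import org.apache.hadoop.io.Writable;

public class SplitBytes {
    // ObjectOutputStream implements DataOutput, so Writable.write() can
    // target it; its stream header is what lets unwrapSplit() construct
    // an ObjectInputStream over the same bytes.
    static byte[] toBytes(Writable split) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(buf);
        split.write(out);
        out.flush();
        return buf.toByteArray();
    }
}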

From source file:org.apache.ignite.internal.processors.hadoop.HadoopUtils.java

License:Apache License

/**
 * Unwraps native split.
 *
 * @param o Wrapper.
 * @return Split.
 */
public static Object unwrapSplit(HadoopSplitWrapper o) {
    try {
        Writable w = (Writable) HadoopUtils.class.getClassLoader().loadClass(o.className()).newInstance();

        w.readFields(new ObjectInputStream(new ByteArrayInputStream(o.bytes())));

        return w;
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}

From source file:org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections.HadoopConcurrentHashMultimapSelftest.java

License:Apache License

private void check(HadoopConcurrentHashMultimap m, Multimap<Integer, Integer> mm,
        final Multimap<Integer, Integer> vis, HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    assertEquals(m.keys(), keys);

    X.println("keys: " + keys + " cap: " + m.capacity());

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new HadoopConcurrentHashMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            GridUnsafe.copyOffheapHeap(ptr, buf, GridUnsafe.BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}
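
The read helper above resets one reusable buffer and one reusable Writable per record, avoiding per-record allocation. Hadoop's own DataInputBuffer supports the same reset-and-read pattern for on-heap byte arrays; a minimal sketch:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.Writable;

public class ResetAndRead {
    // One reusable buffer, one reusable Writable, no per-record allocation.
    static void readInto(DataInputBuffer buffer, byte[] bytes, int len, Writable w)
            throws IOException {
        buffer.reset(bytes, len);
        w.readFields(buffer);
    }
}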

From source file:org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections.HadoopSkipListSelfTest.java

License:Apache License

/**
 * Check.
 * @param m The multimap.
 * @param mm The multimap storing expectations.
 * @param vis The multimap to store visitor results.
 * @param taskCtx The task context.
 * @throws Exception On error.
 */
private void check(HadoopMultimap m, Multimap<Integer, Integer> mm, final Multimap<Integer, Integer> vis,
        HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    Map<Integer, Collection<Integer>> mmm = mm.asMap();

    int keys = 0;

    int prevKey = Integer.MIN_VALUE;

    while (in.next()) {
        keys++;

        IntWritable k = (IntWritable) in.key();

        assertNotNull(k);

        assertTrue(k.get() > prevKey);

        prevKey = k.get();

        Deque<Integer> vs = new LinkedList<>();

        Iterator<?> it = in.values();

        while (it.hasNext())
            vs.addFirst(((IntWritable) it.next()).get());

        Collection<Integer> exp = mmm.get(k.get());

        assertEquals(exp, vs);
    }

    assertEquals(mmm.size(), keys);

    //!        assertEquals(m.keys(), keys);

    // Check visitor.

    final byte[] buf = new byte[4];

    final GridDataInput dataInput = new GridUnsafeDataInput();

    m.visit(false, new HadoopMultimap.Visitor() {
        /** */
        IntWritable key = new IntWritable();

        /** */
        IntWritable val = new IntWritable();

        @Override
        public void onKey(long keyPtr, int keySize) {
            read(keyPtr, keySize, key);
        }

        @Override
        public void onValue(long valPtr, int valSize) {
            read(valPtr, valSize, val);

            vis.put(key.get(), val.get());
        }

        private void read(long ptr, int size, Writable w) {
            assert size == 4 : size;

            GridUnsafe.copyOffheapHeap(ptr, buf, GridUnsafe.BYTE_ARR_OFF, size);

            dataInput.bytes(buf, size);

            try {
                w.readFields(dataInput);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    //        X.println("vis: " + vis);

    assertEquals(mm, vis);

    in.close();
}