Example usage for org.apache.hadoop.io Writable write

Introduction

On this page you can find example usages of the org.apache.hadoop.io.Writable write method.

Prototype

void write(DataOutput out) throws IOException;

Document

Serialize the fields of this object to out.
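
The snippet below is a minimal, hedged round-trip sketch (it is not taken from any of the source files listed here): it calls write(DataOutput) on two standard Hadoop Writables and then restores them with the matching readFields(DataInput) calls.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class WritableWriteRoundTrip {
    public static void main(String[] args) throws IOException {
        // Serialize two Writables by calling write(DataOutput) on each.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        new IntWritable(42).write(out);
        new Text("max-temperature").write(out);
        out.close();

        // Deserialize in the same order with readFields(DataInput).
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        IntWritable number = new IntWritable();
        Text label = new Text();
        number.readFields(in);
        label.readFields(in);
        in.close();

        System.out.println(number.get() + " / " + label);
    }
}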

Usage

From source file:crunch.MaxTemperature.java

License:Apache License

public static byte[] serialize(Writable writable) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        DataOutputStream dataOut = new DataOutputStream(out);
        writable.write(dataOut);
        dataOut.close();
        return out.toByteArray();
    }
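
A matching deserializer, sketched here for illustration only (it is not part of crunch.MaxTemperature.java and assumes the corresponding java.io imports), would wrap the bytes in a DataInputStream and call Writable.readFields:

public static void deserialize(Writable writable, byte[] bytes) throws IOException {
        // Restore the fields of an existing Writable instance from the serialized form.
        DataInputStream dataIn = new DataInputStream(new ByteArrayInputStream(bytes));
        writable.readFields(dataIn);
        dataIn.close();
    }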

From source file:crunch.MaxTemperature.java

License:Apache License

public static DataOutputBuffer serialize(Writable writable) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        DataOutputStream dataOut = new DataOutputStream(out);
        writable.write(dataOut);
        dataOut.close();
        return out;
    }

From source file:eagle.query.aggregate.raw.WritableList.java

License:Apache License

/**
 * Serialize the fields of this object to <code>out</code>.
 *
 * @param out <code>DataOutput</code> to serialize this object into.
 * @throws java.io.IOException
 */
@Override
public void write(DataOutput out) throws IOException {
    this.check();
    out.writeInt(this.size());
    for (Writable item : this) {
        item.write(out);
    }
}

From source file:eagle.storage.hbase.query.coprocessor.ProtoBufConverter.java

License:Apache License

public static ByteString writableToByteString(Writable writable) throws IOException {
    ByteArrayDataOutput dataOutput = ByteStreams.newDataOutput();
    writable.write(dataOutput);
    return ByteString.copyFrom(dataOutput.toByteArray());
}
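
For completeness, a plausible inverse can be sketched with ByteString.newInput(); this is an assumption about how such a converter might look, not the verbatim ProtoBufConverter code:

public static void byteStringToWritable(Writable writable, ByteString byteString) throws IOException {
    // Read the serialized bytes back into the given Writable instance.
    DataInputStream dataInput = new DataInputStream(byteString.newInput());
    writable.readFields(dataInput);
    dataInput.close();
}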

From source file:edu.mit.ll.graphulo.util.SerializationUtil.java

License:Apache License

public static void serializeWritable(Writable obj, OutputStream outputStream) {
    Preconditions.checkNotNull(obj);
    Preconditions.checkNotNull(outputStream);
    try (DataOutputStream out = new DataOutputStream(outputStream)) {
        obj.write(out);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
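
A read-side counterpart would follow the same null-checking and stream-handling pattern; the sketch below (deserializeWritable is an illustrative name, and the matching java.io imports are assumed) mirrors the method above:

public static void deserializeWritable(Writable obj, InputStream inputStream) {
    Preconditions.checkNotNull(obj);
    Preconditions.checkNotNull(inputStream);
    try (DataInputStream in = new DataInputStream(inputStream)) {
        // Populate the fields of the given Writable from the stream.
        obj.readFields(in);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}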

From source file:edu.uci.ics.hyracks.imru.util.SerDeUtils.java

License:Apache License

public static byte[] serialize(Writable object) throws IOException {
    ByteArrayOutputStream bbos = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(bbos);
    object.write(output);
    return bbos.toByteArray();
}

From source file:edu.uci.ics.pregelix.dataflow.util.IterationUtils.java

License:Apache License

public static void writeGlobalAggregateValue(Configuration conf, String jobId, Writable agg)
        throws HyracksDataException {
    try {
        FileSystem dfs = FileSystem.get(conf);
        String pathStr = IterationUtils.TMP_DIR + jobId + "agg";
        Path path = new Path(pathStr);
        FSDataOutputStream output = dfs.create(path, true);
        agg.write(output);
        output.flush();
        output.close();
    } catch (IOException e) {
        throw new HyracksDataException(e);
    }
}

From source file:edu.uci.ics.pregelix.runtime.function.ComputeUpdateFunctionFactory.java

License:Apache License

@Override
public IUpdateFunction createFunction() {
    return new IUpdateFunction() {
        // for writing intermediate data
        private final ArrayTupleBuilder tbMsg = new ArrayTupleBuilder(2);
        private final ArrayTupleBuilder tbAlive = new ArrayTupleBuilder(2);
        private final ArrayTupleBuilder tbTerminate = new ArrayTupleBuilder(1);
        private final ArrayTupleBuilder tbGlobalAggregate = new ArrayTupleBuilder(1);
        private final ArrayTupleBuilder tbInsert = new ArrayTupleBuilder(2);
        private final ArrayTupleBuilder tbDelete = new ArrayTupleBuilder(1);

        // for writing out to message channel
        private IFrameWriter writerMsg;
        private FrameTupleAppender appenderMsg;
        private ByteBuffer bufferMsg;

        // for writing out to alive message channel
        private IFrameWriter writerAlive;
        private FrameTupleAppender appenderAlive;
        private ByteBuffer bufferAlive;
        private boolean pushAlive;

        // for writing out termination detection control channel
        private IFrameWriter writerTerminate;
        private FrameTupleAppender appenderTerminate;
        private ByteBuffer bufferTerminate;
        private boolean terminate = true;

        // for writing out the global aggregate value
        private IFrameWriter writerGlobalAggregate;
        private FrameTupleAppender appenderGlobalAggregate;
        private ByteBuffer bufferGlobalAggregate;
        private GlobalAggregator aggregator;

        // for writing out to insert vertex channel
        private IFrameWriter writerInsert;
        private FrameTupleAppender appenderInsert;
        private ByteBuffer bufferInsert;

        // for writing out to delete vertex channel
        private IFrameWriter writerDelete;
        private FrameTupleAppender appenderDelete;
        private ByteBuffer bufferDelete;

        private Vertex vertex;
        private ResetableByteArrayOutputStream bbos = new ResetableByteArrayOutputStream();
        private DataOutput output = new DataOutputStream(bbos);

        private ArrayIterator msgIterator = new ArrayIterator();
        private final List<IFrameWriter> writers = new ArrayList<IFrameWriter>();
        private final List<FrameTupleAppender> appenders = new ArrayList<FrameTupleAppender>();
        private final List<ArrayTupleBuilder> tbs = new ArrayList<ArrayTupleBuilder>();
        private Configuration conf;
        private boolean dynamicStateLength;

        @Override
        public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
                throws HyracksDataException {
            this.conf = confFactory.createConfiguration();
            this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
            this.aggregator = BspUtils.createGlobalAggregator(conf);
            this.aggregator.init();

            this.writerMsg = writers[0];
            this.bufferMsg = ctx.allocateFrame();
            this.appenderMsg = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderMsg.reset(bufferMsg, true);
            this.writers.add(writerMsg);
            this.appenders.add(appenderMsg);

            this.writerTerminate = writers[1];
            this.bufferTerminate = ctx.allocateFrame();
            this.appenderTerminate = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderTerminate.reset(bufferTerminate, true);

            this.writerGlobalAggregate = writers[2];
            this.bufferGlobalAggregate = ctx.allocateFrame();
            this.appenderGlobalAggregate = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderGlobalAggregate.reset(bufferGlobalAggregate, true);

            this.writerInsert = writers[3];
            this.bufferInsert = ctx.allocateFrame();
            this.appenderInsert = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderInsert.reset(bufferInsert, true);
            this.writers.add(writerInsert);
            this.appenders.add(appenderInsert);

            this.writerDelete = writers[4];
            this.bufferDelete = ctx.allocateFrame();
            this.appenderDelete = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderDelete.reset(bufferDelete, true);
            this.writers.add(writerDelete);
            this.appenders.add(appenderDelete);

            if (writers.length > 5) {
                this.writerAlive = writers[5];
                this.bufferAlive = ctx.allocateFrame();
                this.appenderAlive = new FrameTupleAppender(ctx.getFrameSize());
                this.appenderAlive.reset(bufferAlive, true);
                this.pushAlive = true;
                this.writers.add(writerAlive);
                this.appenders.add(appenderAlive);
            }

            tbs.add(tbMsg);
            tbs.add(tbInsert);
            tbs.add(tbDelete);
            tbs.add(tbAlive);
        }

        @Override
        public void process(Object[] tuple) throws HyracksDataException {
            // vertex Id, msg content List, vertex Id, vertex
            tbMsg.reset();
            tbAlive.reset();

            vertex = (Vertex) tuple[3];
            vertex.setOutputWriters(writers);
            vertex.setOutputAppenders(appenders);
            vertex.setOutputTupleBuilders(tbs);

            ArrayListWritable msgContentList = (ArrayListWritable) tuple[1];
            msgContentList.reset(msgIterator);

            if (!msgIterator.hasNext() && vertex.isHalted()) {
                return;
            }
            if (vertex.isHalted()) {
                vertex.activate();
            }

            try {
                vertex.compute(msgIterator);
                vertex.finishCompute();
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }

            /**
             * this partition should not terminate
             */
            if (terminate && (!vertex.isHalted() || vertex.hasMessage() || vertex.createdNewLiveVertex()))
                terminate = false;

            aggregator.step(vertex);
        }

        @Override
        public void close() throws HyracksDataException {
            FrameTupleUtils.flushTuplesFinal(appenderMsg, writerMsg);
            FrameTupleUtils.flushTuplesFinal(appenderInsert, writerInsert);
            FrameTupleUtils.flushTuplesFinal(appenderDelete, writerDelete);

            if (pushAlive)
                FrameTupleUtils.flushTuplesFinal(appenderAlive, writerAlive);
            if (!terminate) {
                writeOutTerminationState();
            }

            /** write out global aggregate value */
            writeOutGlobalAggregate();
        }

        private void writeOutGlobalAggregate() throws HyracksDataException {
            try {
                /**
                 * get partial aggregate result and flush to the final
                 * aggregator
                 */
                Writable agg = aggregator.finishPartial();
                agg.write(tbGlobalAggregate.getDataOutput());
                tbGlobalAggregate.addFieldEndOffset();
                appenderGlobalAggregate.append(tbGlobalAggregate.getFieldEndOffsets(),
                        tbGlobalAggregate.getByteArray(), 0, tbGlobalAggregate.getSize());
                FrameTupleUtils.flushTuplesFinal(appenderGlobalAggregate, writerGlobalAggregate);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }

        private void writeOutTerminationState() throws HyracksDataException {
            try {
                tbTerminate.getDataOutput().writeLong(0);
                tbTerminate.addFieldEndOffset();
                appenderTerminate.append(tbTerminate.getFieldEndOffsets(), tbTerminate.getByteArray(), 0,
                        tbTerminate.getSize());
                FrameTupleUtils.flushTuplesFinal(appenderTerminate, writerTerminate);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }

        @Override
        public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb)
                throws HyracksDataException {
            try {
                if (vertex != null && vertex.hasUpdate()) {
                    if (!dynamicStateLength) {
                        // in-place update
                        int fieldCount = tupleRef.getFieldCount();
                        for (int i = 1; i < fieldCount; i++) {
                            byte[] data = tupleRef.getFieldData(i);
                            int offset = tupleRef.getFieldStart(i);
                            bbos.setByteArray(data, offset);
                            vertex.write(output);
                        }
                    } else {
                        // write the vertex id
                        DataOutput tbOutput = cloneUpdateTb.getDataOutput();
                        vertex.getVertexId().write(tbOutput);
                        cloneUpdateTb.addFieldEndOffset();

                        // write the vertex value
                        vertex.write(tbOutput);
                        cloneUpdateTb.addFieldEndOffset();
                    }
                }
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}

From source file:edu.uci.ics.pregelix.runtime.function.StartComputeUpdateFunctionFactory.java

License:Apache License

@Override
public IUpdateFunction createFunction() {
    return new IUpdateFunction() {
        // for writing intermediate data
        private final ArrayTupleBuilder tbMsg = new ArrayTupleBuilder(2);
        private final ArrayTupleBuilder tbAlive = new ArrayTupleBuilder(2);
        private final ArrayTupleBuilder tbTerminate = new ArrayTupleBuilder(1);
        private final ArrayTupleBuilder tbGlobalAggregate = new ArrayTupleBuilder(1);
        private final ArrayTupleBuilder tbInsert = new ArrayTupleBuilder(2);
        private final ArrayTupleBuilder tbDelete = new ArrayTupleBuilder(1);

        // for writing out to message channel
        private IFrameWriter writerMsg;
        private FrameTupleAppender appenderMsg;
        private ByteBuffer bufferMsg;

        // for writing out to alive message channel
        private IFrameWriter writerAlive;
        private FrameTupleAppender appenderAlive;
        private ByteBuffer bufferAlive;
        private boolean pushAlive;

        // for writing out the global aggregate value
        private IFrameWriter writerGlobalAggregate;
        private FrameTupleAppender appenderGlobalAggregate;
        private ByteBuffer bufferGlobalAggregate;
        private GlobalAggregator aggregator;

        // for writing out termination detection control channel
        private IFrameWriter writerTerminate;
        private FrameTupleAppender appenderTerminate;
        private ByteBuffer bufferTerminate;
        private boolean terminate = true;

        // for writing out to insert vertex channel
        private IFrameWriter writerInsert;
        private FrameTupleAppender appenderInsert;
        private ByteBuffer bufferInsert;

        // for writing out to delete vertex channel
        private IFrameWriter writerDelete;
        private FrameTupleAppender appenderDelete;
        private ByteBuffer bufferDelete;

        // dummy empty msgList
        private MsgList msgList = new MsgList();
        private ArrayIterator msgIterator = new ArrayIterator();

        private Vertex vertex;
        private ResetableByteArrayOutputStream bbos = new ResetableByteArrayOutputStream();
        private DataOutput output = new DataOutputStream(bbos);

        private final List<IFrameWriter> writers = new ArrayList<IFrameWriter>();
        private final List<FrameTupleAppender> appenders = new ArrayList<FrameTupleAppender>();
        private final List<ArrayTupleBuilder> tbs = new ArrayList<ArrayTupleBuilder>();
        private Configuration conf;
        private boolean dynamicStateLength;

        @Override
        public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
                throws HyracksDataException {
            this.conf = confFactory.createConfiguration();
            this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
            this.aggregator = BspUtils.createGlobalAggregator(conf);
            this.aggregator.init();

            this.writerMsg = writers[0];
            this.bufferMsg = ctx.allocateFrame();
            this.appenderMsg = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderMsg.reset(bufferMsg, true);
            this.writers.add(writerMsg);
            this.appenders.add(appenderMsg);

            this.writerTerminate = writers[1];
            this.bufferTerminate = ctx.allocateFrame();
            this.appenderTerminate = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderTerminate.reset(bufferTerminate, true);

            this.writerGlobalAggregate = writers[2];
            this.bufferGlobalAggregate = ctx.allocateFrame();
            this.appenderGlobalAggregate = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderGlobalAggregate.reset(bufferGlobalAggregate, true);

            this.writerInsert = writers[3];
            this.bufferInsert = ctx.allocateFrame();
            this.appenderInsert = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderInsert.reset(bufferInsert, true);
            this.writers.add(writerInsert);
            this.appenders.add(appenderInsert);

            this.writerDelete = writers[4];
            this.bufferDelete = ctx.allocateFrame();
            this.appenderDelete = new FrameTupleAppender(ctx.getFrameSize());
            this.appenderDelete.reset(bufferDelete, true);
            this.writers.add(writerDelete);
            this.appenders.add(appenderDelete);

            if (writers.length > 5) {
                this.writerAlive = writers[5];
                this.bufferAlive = ctx.allocateFrame();
                this.appenderAlive = new FrameTupleAppender(ctx.getFrameSize());
                this.appenderAlive.reset(bufferAlive, true);
                this.pushAlive = true;
                this.writers.add(writerAlive);
                this.appenders.add(appenderAlive);
            }
            msgList.reset(msgIterator);

            tbs.add(tbMsg);
            tbs.add(tbInsert);
            tbs.add(tbDelete);
            tbs.add(tbAlive);
        }

        @Override
        public void process(Object[] tuple) throws HyracksDataException {
            // vertex Id, vertex
            tbMsg.reset();
            tbAlive.reset();

            vertex = (Vertex) tuple[1];
            vertex.setOutputWriters(writers);
            vertex.setOutputAppenders(appenders);
            vertex.setOutputTupleBuilders(tbs);

            if (!msgIterator.hasNext() && vertex.isHalted()) {
                return;
            }
            if (vertex.isHalted()) {
                vertex.activate();
            }

            try {
                vertex.compute(msgIterator);
                vertex.finishCompute();
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }

            /**
             * this partition should not terminate
             */
            if (terminate && (!vertex.isHalted() || vertex.hasMessage() || vertex.createdNewLiveVertex()))
                terminate = false;

            /**
             * call the global aggregator
             */
            aggregator.step(vertex);
        }

        @Override
        public void close() throws HyracksDataException {
            FrameTupleUtils.flushTuplesFinal(appenderMsg, writerMsg);
            FrameTupleUtils.flushTuplesFinal(appenderInsert, writerInsert);
            FrameTupleUtils.flushTuplesFinal(appenderDelete, writerDelete);

            if (pushAlive)
                FrameTupleUtils.flushTuplesFinal(appenderAlive, writerAlive);
            if (!terminate) {
                writeOutTerminationState();
            }

            /** write out global aggregate value */
            writeOutGlobalAggregate();
        }

        private void writeOutGlobalAggregate() throws HyracksDataException {
            try {
                /**
                 * get partial aggregate result and flush to the final
                 * aggregator
                 */
                Writable agg = aggregator.finishPartial();
                agg.write(tbGlobalAggregate.getDataOutput());
                tbGlobalAggregate.addFieldEndOffset();
                appenderGlobalAggregate.append(tbGlobalAggregate.getFieldEndOffsets(),
                        tbGlobalAggregate.getByteArray(), 0, tbGlobalAggregate.getSize());
                FrameTupleUtils.flushTuplesFinal(appenderGlobalAggregate, writerGlobalAggregate);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }

        private void writeOutTerminationState() throws HyracksDataException {
            try {
                tbTerminate.getDataOutput().writeLong(0);
                tbTerminate.addFieldEndOffset();
                appenderTerminate.append(tbTerminate.getFieldEndOffsets(), tbTerminate.getByteArray(), 0,
                        tbTerminate.getSize());
                FrameTupleUtils.flushTuplesFinal(appenderTerminate, writerTerminate);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }

        @Override
        public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb)
                throws HyracksDataException {
            try {
                if (vertex != null && vertex.hasUpdate()) {
                    if (!dynamicStateLength) {
                        // in-place update
                        int fieldCount = tupleRef.getFieldCount();
                        for (int i = 1; i < fieldCount; i++) {
                            byte[] data = tupleRef.getFieldData(i);
                            int offset = tupleRef.getFieldStart(i);
                            bbos.setByteArray(data, offset);
                            vertex.write(output);
                        }
                    } else {
                        // write the vertex id
                        DataOutput tbOutput = cloneUpdateTb.getDataOutput();
                        vertex.getVertexId().write(tbOutput);
                        cloneUpdateTb.addFieldEndOffset();

                        // write the vertex value
                        vertex.write(tbOutput);
                        cloneUpdateTb.addFieldEndOffset();
                    }
                }
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}

From source file:eu.stratosphere.hadoopcompatibility.mapreduce.wrapper.HadoopInputSplit.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(this.splitNumber);
    out.writeUTF(this.mapreduceInputSplit.getClass().getName());
    Writable w = (Writable) this.mapreduceInputSplit;
    w.write(out);
}
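
The read side of this pattern usually reverses the same steps: read the split number, read the class name, instantiate the wrapped split reflectively, and delegate to readFields. The sketch below is a hedged illustration, not the actual Stratosphere method; the readSplit name and the use of ReflectionUtils with a fresh Configuration are assumptions.

private void readSplit(DataInput in) throws IOException {
    this.splitNumber = in.readInt();
    String className = in.readUTF();
    try {
        // Recreate the wrapped split from its class name, then let it restore its own fields.
        Class<?> splitClass = Class.forName(className);
        this.mapreduceInputSplit = (org.apache.hadoop.mapreduce.InputSplit) org.apache.hadoop.util.ReflectionUtils
                .newInstance(splitClass, new org.apache.hadoop.conf.Configuration());
        ((Writable) this.mapreduceInputSplit).readFields(in);
    } catch (ClassNotFoundException e) {
        throw new IOException("Could not recreate the wrapped InputSplit: " + className, e);
    }
}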