Example usage for org.apache.hadoop.io.WritableUtils.writeVLong

Introduction

On this page you can find example usages of org.apache.hadoop.io.WritableUtils.writeVLong.

Prototype

public static void writeVLong(DataOutput stream, long i) throws IOException 

Document

Serializes a long to a binary stream with zero-compressed encoding.
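
To see the encoding in action, the following minimal round trip (not taken from the indexed sources below; the class name is illustrative) writes two values and reads them back with the matching WritableUtils.readVLong. Small magnitudes take a single byte; larger ones take a length-marker byte followed by up to eight data bytes.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class VLongRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);

        // Values in [-112, 127] are encoded in one byte; anything else gets
        // a marker byte that records sign and length, then the data bytes.
        WritableUtils.writeVLong(dos, 42L);
        WritableUtils.writeVLong(dos, 1234567890123L);

        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        System.out.println(WritableUtils.readVLong(dis)); // 42
        System.out.println(WritableUtils.readVLong(dis)); // 1234567890123
    }
}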

Usage

From source file: org.apache.accumulo.core.file.rfile.RelativeKey.java

License: Apache License

@Override
public void write(DataOutput out) throws IOException {

    out.writeByte(fieldsSame);

    if ((fieldsSame & PREFIX_COMPRESSION_ENABLED) == PREFIX_COMPRESSION_ENABLED) {
        out.write(fieldsPrefixed);
    }

    if ((fieldsSame & ROW_SAME) == ROW_SAME) {
        // same, write nothing
    } else if ((fieldsPrefixed & ROW_COMMON_PREFIX) == ROW_COMMON_PREFIX) {
        // similar, write what's common
        writePrefix(out, key.getRowData(), rowCommonPrefixLen);
    } else {
        // write it all
        write(out, key.getRowData());
    }

    if ((fieldsSame & CF_SAME) == CF_SAME) {
        // same, write nothing
    } else if ((fieldsPrefixed & CF_COMMON_PREFIX) == CF_COMMON_PREFIX) {
        // similar, write what's common
        writePrefix(out, key.getColumnFamilyData(), cfCommonPrefixLen);
    } else {
        // write it all
        write(out, key.getColumnFamilyData());
    }

    if ((fieldsSame & CQ_SAME) == CQ_SAME) {
        // same, write nothing
    } else if ((fieldsPrefixed & CQ_COMMON_PREFIX) == CQ_COMMON_PREFIX) {
        // similar, write what's common
        writePrefix(out, key.getColumnQualifierData(), cqCommonPrefixLen);
    } else {
        // write it all
        write(out, key.getColumnQualifierData());
    }

    if ((fieldsSame & CV_SAME) == CV_SAME) {
        // same, write nothing
    } else if ((fieldsPrefixed & CV_COMMON_PREFIX) == CV_COMMON_PREFIX) {
        // similar, write what's common
        writePrefix(out, key.getColumnVisibilityData(), cvCommonPrefixLen);
    } else {
        // write it all
        write(out, key.getColumnVisibilityData());
    }

    if ((fieldsSame & TS_SAME) == TS_SAME) {
        // same, write nothing
    } else if ((fieldsPrefixed & TS_DIFF) == TS_DIFF) {
        // similar, write what's common
        WritableUtils.writeVLong(out, tsDiff);
    } else {
        // write it all
        WritableUtils.writeVLong(out, key.getTimestamp());
    }
}

From source file: org.apache.accumulo.core.iterators.aggregation.NumArraySummation.java

License: Apache License

public static byte[] longArrayToBytes(long[] la) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);

    WritableUtils.writeVInt(dos, la.length);
    for (int i = 0; i < la.length; i++) {
        WritableUtils.writeVLong(dos, la[i]);
    }

    return baos.toByteArray();
}

From source file: org.apache.accumulo.core.iterators.aggregation.NumSummation.java

License: Apache License

public static byte[] longToBytes(long l) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);

    WritableUtils.writeVLong(dos, l);

    return baos.toByteArray();
}
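
The reverse direction is symmetric. A minimal decoder sketch (not part of the NumSummation source above; the helper name is illustrative) restores the value with WritableUtils.readVLong:

public static long bytesToLong(byte[] b) throws IOException {
    // readVLong inspects the first byte to learn how many data bytes follow.
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(b));
    return WritableUtils.readVLong(dis);
}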

From source file: org.apache.accumulo.core.util.UnsynchronizedBufferTest.java

License: Apache License

@Test
public void compareWithWritableUtils() throws Exception {
    byte[] hadoopBytes;
    byte[] accumuloBytes;
    int oneByteInt = 0x7f;
    int threeByteInt = 0x7fff;
    long sixByteLong = 0x7fffffffffL;
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(baos)) {
        WritableUtils.writeVInt(dos, oneByteInt);
        WritableUtils.writeVInt(dos, threeByteInt);
        WritableUtils.writeVLong(dos, sixByteLong);
        dos.flush();
        hadoopBytes = baos.toByteArray();
    }
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(baos)) {
        UnsynchronizedBuffer.writeVInt(dos, new byte[5], oneByteInt);
        UnsynchronizedBuffer.writeVInt(dos, new byte[5], threeByteInt);
        UnsynchronizedBuffer.writeVLong(dos, new byte[9], sixByteLong);
        dos.flush();
        accumuloBytes = baos.toByteArray();
    }
    assertTrue("The byte array written to by UnsynchronizedBuffer is not equal to WritableUtils",
            Arrays.equals(hadoopBytes, accumuloBytes));
}
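
The variable names in this test mirror the encoded sizes: 0x7f still fits the single-byte range, 0x7fff needs a marker byte plus two data bytes, and 0x7fffffffffL a marker byte plus five. A quick sketch (not part of the test above) confirms the lengths with WritableUtils.getVIntSize:

// Encoded lengths of the values used in compareWithWritableUtils().
System.out.println(WritableUtils.getVIntSize(0x7f));          // 1
System.out.println(WritableUtils.getVIntSize(0x7fff));        // 3
System.out.println(WritableUtils.getVIntSize(0x7fffffffffL)); // 6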

From source file: org.apache.accumulo.server.data.ServerMutation.java

License: Apache License

@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    WritableUtils.writeVLong(out, systemTime);
}

From source file: org.apache.druid.indexer.InputRowSerde.java

License: Apache License

public static final SerializeResult toBytes(final Map<String, IndexSerdeTypeHelper> typeHelperMap,
        final InputRow row, AggregatorFactory[] aggs) {
    try {
        List<String> parseExceptionMessages = new ArrayList<>();
        ByteArrayDataOutput out = ByteStreams.newDataOutput();

        //write timestamp
        out.writeLong(row.getTimestampFromEpoch());

        //writing all dimensions
        List<String> dimList = row.getDimensions();

        WritableUtils.writeVInt(out, dimList.size());
        for (String dim : dimList) {
            IndexSerdeTypeHelper typeHelper = typeHelperMap.get(dim);
            if (typeHelper == null) {
                typeHelper = STRING_HELPER;
            }
            writeString(dim, out);

            try {
                typeHelper.serialize(out, row.getRaw(dim));
            } catch (ParseException pe) {
                parseExceptionMessages.add(pe.getMessage());
            }
        }

        //writing all metrics
        Supplier<InputRow> supplier = () -> row;
        WritableUtils.writeVInt(out, aggs.length);
        for (AggregatorFactory aggFactory : aggs) {
            String k = aggFactory.getName();
            writeString(k, out);

            try (Aggregator agg = aggFactory.factorize(IncrementalIndex
                    .makeColumnSelectorFactory(VirtualColumns.EMPTY, aggFactory, supplier, true))) {
                try {
                    agg.aggregate();
                } catch (ParseException e) {
                    // "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
                    log.debug(e, "Encountered parse error, skipping aggregator[%s].", k);
                    parseExceptionMessages.add(e.getMessage());
                }

                String t = aggFactory.getTypeName();
                if (agg.isNull()) {
                    out.writeByte(NullHandling.IS_NULL_BYTE);
                } else {
                    out.writeByte(NullHandling.IS_NOT_NULL_BYTE);
                    if ("float".equals(t)) {
                        out.writeFloat(agg.getFloat());
                    } else if ("long".equals(t)) {
                        WritableUtils.writeVLong(out, agg.getLong());
                    } else if ("double".equals(t)) {
                        out.writeDouble(agg.getDouble());
                    } else {
                        // it's a complex metric
                        Object val = agg.get();
                        ComplexMetricSerde serde = getComplexMetricSerde(t);
                        writeBytes(serde.toBytes(val), out);
                    }
                }
            }
        }

        return new SerializeResult(out.toByteArray(), parseExceptionMessages);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}

From source file: org.apache.gora.query.impl.QueryBase.java

License: Apache License

public void write(DataOutput out) throws IOException {
    //write datastore
    Text.writeString(out, dataStore.getClass().getCanonicalName());
    dataStore.write(out);

    IOUtils.writeNullFieldsInfo(out, queryString, (fields), startKey, endKey, filter);

    if (queryString != null)
        Text.writeString(out, queryString);
    if (fields != null)
        IOUtils.writeStringArray(out, fields);
    if (startKey != null)
        IOUtils.serialize(getConf(), out, startKey, dataStore.getKeyClass());
    if (endKey != null)
        IOUtils.serialize(getConf(), out, endKey, dataStore.getKeyClass());
    if (filter != null) {
        Text.writeString(out, filter.getClass().getCanonicalName());
        filter.write(out);
    }

    WritableUtils.writeVLong(out, getStartTime());
    WritableUtils.writeVLong(out, getEndTime());
    WritableUtils.writeVLong(out, getLimit());
    out.writeBoolean(localFilterEnabled);
}

From source file: org.apache.hama.pipes.protocol.BinaryProtocol.java

License: Apache License

/**
 * Write the given object to the stream. If it is an IntWritable, LongWritable,
 * FloatWritable, DoubleWritable, Text or BytesWritable, write it directly.
 * Otherwise, write it to a buffer and then write the length and data to the
 * stream.
 *
 * @param obj the object to write
 * @throws IOException
 */
protected void writeObject(Writable obj) throws IOException {
    // For basic types IntWritable, LongWritable, Text and BytesWritable,
    // encode them directly, so that they end up
    // in C++ as the natural translations.
    if (obj instanceof Text) {
        Text t = (Text) obj;
        int len = t.getLength();
        WritableUtils.writeVInt(this.outStream, len);
        this.outStream.write(t.getBytes(), 0, len);

    } else if (obj instanceof BytesWritable) {
        BytesWritable b = (BytesWritable) obj;
        int len = b.getLength();
        WritableUtils.writeVInt(this.outStream, len);
        this.outStream.write(b.getBytes(), 0, len);

    } else if (obj instanceof IntWritable) {
        WritableUtils.writeVInt(this.outStream, ((IntWritable) obj).get());

    } else if (obj instanceof LongWritable) {
        WritableUtils.writeVLong(this.outStream, ((LongWritable) obj).get());

    } else {
        // Note: FloatWritable and DoubleWritable are written here
        obj.write(this.outStream);
    }
}

From source file: org.apache.hama.pipes.protocol.UplinkReader.java

License: Apache License

public void getSuperstepCount() throws IOException {
    WritableUtils.writeVInt(this.outStream, MessageType.GET_SUPERSTEP_COUNT.code);
    WritableUtils.writeVLong(this.outStream, peer.getSuperstepCount());
    binProtocol.flush();
    LOG.debug("Responded MessageType.GET_SUPERSTEP_COUNT - SuperstepCount: " + peer.getSuperstepCount());
}

From source file: org.apache.mrql.MR_long.java

License: Apache License

final public void write(DataOutput out) throws IOException {
    out.writeByte(MRContainer.LONG);
    WritableUtils.writeVLong(out, value);
}
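
For completeness, the read side would be symmetric. A sketch of the counterpart (assuming a readFields method on the same class, with the MRContainer.LONG tag byte consumed by the caller; not shown in the indexed source):

public void readFields(DataInput in) throws IOException {
    // Decode the zero-compressed long written by write(DataOutput).
    value = WritableUtils.readVLong(in);
}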