Example usage for org.apache.hadoop.io WritableUtils writeVInt

List of usage examples for org.apache.hadoop.io WritableUtils writeVInt

Introduction

On this page you can find example usages of org.apache.hadoop.io.WritableUtils.writeVInt.

Prototype

public static void writeVInt(DataOutput stream, int i) throws IOException 

Document

Serializes an integer to a binary stream with zero-compressed encoding.
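
As a quick, self-contained illustration of the round trip (not taken from any of the projects below), the sketch writes two vints into Hadoop's in-memory DataOutputBuffer and reads them back with readVInt; small values occupy a single byte, larger magnitudes up to five bytes.

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;

public class VIntRoundTrip {
    public static void main(String[] args) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        WritableUtils.writeVInt(out, 42);      // values in [-112, 127] take one byte
        WritableUtils.writeVInt(out, 1000000); // larger magnitudes take up to 5 bytes

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        System.out.println(WritableUtils.readVInt(in)); // 42
        System.out.println(WritableUtils.readVInt(in)); // 1000000
    }
}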

Usage

From source file:com.datasalt.utils.mapred.joiner.MultiJoinPair.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    group.write(out);
    WritableUtils.writeVInt(out, channelId);
    if (secondarySortClass != null) {
        secondarySort.write(out);
    }
}
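
A matching readFields would consume the fields in the same order. The following is only a sketch inferred from the write method above, assuming group and secondarySort are Writable fields that have already been instantiated:

@Override
public void readFields(DataInput in) throws IOException {
    group.readFields(in);                   // same order as write()
    channelId = WritableUtils.readVInt(in); // vint written by writeVInt
    if (secondarySortClass != null) {
        secondarySort.readFields(in);
    }
}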

From source file:com.emadbarsoum.lib.Tuple.java

License:Apache License

/** Writes each Writable to <code>out</code>.
 * Tuple format:
 * {@code
 *  <count><type1><type2>...<typen><obj1><obj2>...<objn>
 * }
 */
public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, values.length);
    WritableUtils.writeVLong(out, written);
    for (int i = 0; i < values.length; ++i) {
        Text.writeString(out, values[i].getClass().getName());
    }
    for (int i = 0; i < values.length; ++i) {
        if (has(i)) {
            values[i].write(out);
        }
    }
}
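
The reading side would first recover the header (the count, the written bookkeeping value, and the class names) before deserializing the populated values. A minimal sketch, assuming written marks which slots were populated; instantiating the values from their class names is omitted:

public void readFields(DataInput in) throws IOException {
    int count = WritableUtils.readVInt(in);     // <count>
    long written = WritableUtils.readVLong(in); // bookkeeping value written above
    String[] classNames = new String[count];
    for (int i = 0; i < count; ++i) {
        classNames[i] = Text.readString(in);    // <type1>...<typen>
    }
    // <obj1>...<objn>: each populated value's readFields(in) would be called here.
}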

From source file:com.facebook.presto.rcfile.TestRcFileDecoderUtils.java

License:Apache License

private static Slice writeVintOld(SliceOutput output, long value) throws IOException {
    output.reset();
    WritableUtils.writeVLong(output, value);
    Slice vLong = Slices.copyOf(output.slice());

    if (value == (int) value) {
        output.reset();
        WritableUtils.writeVInt(output, (int) value);
        Slice vInt = Slices.copyOf(output.slice());
        assertEquals(vInt, vLong);
    }
    return vLong;
}
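
The assertion above holds because writeVInt simply delegates to writeVLong, so both produce identical bytes for any value that fits in an int. The same check can be sketched with Hadoop's own buffers instead of Presto's SliceOutput:

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;

public class VIntVLongCompatibility {
    public static void main(String[] args) throws IOException {
        int value = 123456;

        DataOutputBuffer asVInt = new DataOutputBuffer();
        WritableUtils.writeVInt(asVInt, value);

        DataOutputBuffer asVLong = new DataOutputBuffer();
        WritableUtils.writeVLong(asVLong, value);

        byte[] a = Arrays.copyOf(asVInt.getData(), asVInt.getLength());
        byte[] b = Arrays.copyOf(asVLong.getData(), asVLong.getLength());
        System.out.println(Arrays.equals(a, b)); // true: the encodings are identical
    }
}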

From source file:com.ibm.bi.dml.runtime.transform.DistinctValue.java

License:Open Source License

@Override
public void write(DataOutput out) throws IOException {
    // write word
    WritableUtils.writeVInt(out, _length);
    out.write(_bytes, 0, _length);
    // write count
    out.writeLong(_count);
}
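
The corresponding readFields mirrors this layout: the vint length prefix, then exactly that many bytes, then the long count. A minimal sketch assuming the same _bytes, _length and _count fields:

@Override
public void readFields(DataInput in) throws IOException {
    // read word: vint length prefix, then the raw bytes
    _length = WritableUtils.readVInt(in);
    _bytes = new byte[_length];
    in.readFully(_bytes, 0, _length);
    // read count
    _count = in.readLong();
}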

From source file:com.marklogic.contentpump.DatabaseDocumentWithMeta.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    if (null != content) {
        WritableUtils.writeVInt(out, content.length);
        out.write(content, 0, content.length);
        out.writeInt(contentType.ordinal());
    } else {
        WritableUtils.writeVInt(out, 0);
    }
    byte[] xml = meta.toXML().getBytes();
    out.writeInt(xml.length);
    out.write(xml);
}

From source file:com.marklogic.mapreduce.DatabaseDocument.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(contentType.ordinal());
    WritableUtils.writeVInt(out, content.length);
    out.write(content, 0, content.length);
}
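
Reading this format back means consuming the fixed-width ordinal first and the vint-prefixed bytes second. A minimal sketch; mapping the ordinal back to a ContentType value is left out because that lookup is specific to the MarkLogic API:

@Override
public void readFields(DataInput in) throws IOException {
    int contentTypeOrdinal = in.readInt();   // enum ordinal written above
    int length = WritableUtils.readVInt(in); // vint length prefix
    content = new byte[length];
    in.readFully(content, 0, length);
}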

From source file:com.moz.fiji.hive.io.EntityIdWritable.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    WritableUtils.writeCompressedByteArray(out, mHBaseRowKey);

    // Write the components
    WritableUtils.writeVInt(out, mComponents.size());
    for (Object component : mComponents) {
        if (component instanceof String) {
            WritableUtils.writeEnum(out, Component.STRING);
            String stringComponent = (String) component;
            WritableUtils.writeString(out, stringComponent);
        } else if (component instanceof Integer) {
            WritableUtils.writeEnum(out, Component.INTEGER);
            Integer intComponent = (Integer) component;
            WritableUtils.writeVInt(out, intComponent);
        } else if (component instanceof Long) {
            WritableUtils.writeEnum(out, Component.LONG);
            Long longComponent = (Long) component;
            WritableUtils.writeVLong(out, longComponent);
        } else if (component instanceof byte[]) {
            Preconditions.checkState(mComponents.size() == 1, "byte[] only valid as sole component.");
            WritableUtils.writeEnum(out, Component.RAW_HBASE_KEY);
            byte[] byteArrayComponent = (byte[]) component;
            WritableUtils.writeCompressedByteArray(out, byteArrayComponent);
        } else if (component == null) {
            WritableUtils.writeEnum(out, Component.NULL);
        } else {
            throw new EntityIdException("Unexpected type for Component " + component.getClass().getName());
        }
    }

    WritableUtils.writeString(out, mShellString);
}

From source file:com.moz.fiji.hive.io.FijiCellWritable.java

License:Apache License

/**
 * Writes data to the output according to the specified schema.
 *
 * @param out DataOutput to serialize the data into.
 * @param data data to be serialized.
 * @param schema Schema to be used for serializing this data.
 * @throws IOException if there was an error writing.
 */
private static void writeData(DataOutput out, Object data, Schema schema) throws IOException {
    switch (schema.getType()) {
    case INT:
        Integer intData = (Integer) data;
        WritableUtils.writeVInt(out, intData);
        break;
    case LONG:
        Long longData = (Long) data;
        WritableUtils.writeVLong(out, longData);
        break;
    case DOUBLE:
        Double doubleData = (Double) data;
        DoubleWritable doubleWritable = new DoubleWritable(doubleData);
        doubleWritable.write(out);
        break;
    case ENUM:
    case STRING:
        String stringData = data.toString();
        WritableUtils.writeString(out, stringData);
        break;
    case FLOAT:
        Float floatData = (Float) data;
        FloatWritable floatWritable = new FloatWritable(floatData);
        floatWritable.write(out);
        break;
    case ARRAY:
        List<Object> listData = (List<Object>) data;
        WritableUtils.writeVInt(out, listData.size());
        for (Object listElement : listData) {
            writeData(out, listElement, schema.getElementType());
        }
        break;
    case RECORD:
        IndexedRecord recordData = (IndexedRecord) data;
        WritableUtils.writeVInt(out, schema.getFields().size());
        for (Schema.Field field : schema.getFields()) {
            WritableUtils.writeString(out, field.name());
            writeData(out, recordData.get(field.pos()), field.schema());
        }
        break;
    case MAP:
        Map<String, Object> mapData = (Map<String, Object>) data;
        WritableUtils.writeVInt(out, mapData.size());
        for (Map.Entry<String, Object> entry : mapData.entrySet()) {
            WritableUtils.writeString(out, entry.getKey());
            writeData(out, entry.getValue(), schema.getValueType());
        }
        break;
    case UNION:
        final Integer tag = GenericData.get().resolveUnion(schema, data);
        WritableUtils.writeVInt(out, tag);
        Schema unionSubSchema = schema.getTypes().get(tag);
        writeData(out, data, unionSubSchema);
        break;
    case BYTES:
        byte[] bytesData = (byte[]) data;
        WritableUtils.writeCompressedByteArray(out, bytesData);
        break;
    case BOOLEAN:
        Boolean booleanData = (Boolean) data;
        BooleanWritable booleanWritable = new BooleanWritable(booleanData);
        booleanWritable.write(out);
        break;
    case NULL:
        // Don't need to write anything for null.
        break;
    case FIXED:
    default:
        throw new UnsupportedOperationException("Unsupported type: " + schema.getType());
    }
}
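
A symmetric readData would switch on the same schema type and call the matching WritableUtils read method. The scalar cases are sketched below; the remaining branches (ARRAY, RECORD, MAP, UNION, and so on) would mirror the write logic above, and the method itself is hypothetical:

private static Object readData(DataInput in, Schema schema) throws IOException {
    switch (schema.getType()) {
    case INT:
        return WritableUtils.readVInt(in);
    case LONG:
        return WritableUtils.readVLong(in);
    case ENUM:
    case STRING:
        return WritableUtils.readString(in);
    case BYTES:
        return WritableUtils.readCompressedByteArray(in);
    default:
        throw new UnsupportedOperationException("Only scalar cases shown in this sketch.");
    }
}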

From source file:com.moz.fiji.hive.io.FijiRowDataWritable.java

License:Apache License

/**
 * Helper method for the {@link org.apache.hadoop.io.Writable} interface for writing
 * FijiRowDataWritable objects.  If paged data is supplied for a column, it replaces the
 * stored data for that column (relevant for paging through results).
 *
 * @param out DataOutput for the Hadoop Writable to write to.
 * @param pageData map of columns to paged data to be substituted (or an empty map if there
 *                 are no pages to substitute).
 * @throws IOException if there was an issue.
 */
protected void writeWithPages(DataOutput out,
        Map<FijiColumnName, NavigableMap<Long, FijiCellWritable>> pageData) throws IOException {

    // Write the EntityId
    mEntityId.write(out);

    // Count the total number of columns to write.
    Set<FijiColumnName> columnNames = Sets.newHashSet();
    for (FijiColumnName columnName : mWritableData.keySet()) {
        if (!mFijiQualifierPagers.containsKey(columnName.getFamily())) {
            columnNames.add(columnName);
        }
    }
    columnNames.addAll(pageData.keySet());
    WritableUtils.writeVInt(out, columnNames.size());

    // Write the unpaged data.
    for (Entry<FijiColumnName, NavigableMap<Long, FijiCellWritable>> entry : mWritableData.entrySet()) {
        FijiColumnName fijiColumnName = entry.getKey();
        if (!pageData.containsKey(fijiColumnName)
                && !mFijiQualifierPagers.containsKey(fijiColumnName.getFamily())) {
            // Only write if it's not part of the paged data.
            writeColumn(out, fijiColumnName, entry.getValue());
        }
    }

    // Write paged data if any.
    for (Entry<FijiColumnName, NavigableMap<Long, FijiCellWritable>> entry : pageData.entrySet()) {
        writeColumn(out, entry.getKey(), entry.getValue());
    }

    WritableUtils.writeVInt(out, mSchemas.size());
    for (Map.Entry<FijiColumnName, Schema> entry : mSchemas.entrySet()) {
        WritableUtils.writeString(out, entry.getKey().getName());
        WritableUtils.writeString(out, entry.getValue().toString());
    }
}

From source file:com.moz.fiji.hive.io.FijiRowDataWritable.java

License:Apache License

/**
 * Helper function to write a column and its associated data.
 *
 * @param out DataOutput for the Hadoop Writable to write to.
 * @param fijiColumnName the column to write.
 * @param data the timestamped cell data to write.
 * @throws IOException if there was an issue.
 */
private void writeColumn(DataOutput out, FijiColumnName fijiColumnName,
        NavigableMap<Long, FijiCellWritable> data) throws IOException {
    WritableUtils.writeString(out, fijiColumnName.getName());
    WritableUtils.writeVInt(out, data.size()); // number in the timeseries
    for (Map.Entry<Long, FijiCellWritable> cellEntry : data.entrySet()) {
        WritableUtils.writeVLong(out, cellEntry.getKey());
        cellEntry.getValue().write(out);
    }
}
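
The read side of this helper follows the same layout: the column name, the vint cell count, and then each timestamp/cell pair. A minimal sketch, assuming FijiCellWritable has a no-argument constructor (an assumption not shown in the snippets above):

private NavigableMap<Long, FijiCellWritable> readColumn(DataInput in) throws IOException {
    String columnName = WritableUtils.readString(in); // column name (unused here)
    int cellCount = WritableUtils.readVInt(in);       // number in the timeseries
    NavigableMap<Long, FijiCellWritable> cells = new TreeMap<Long, FijiCellWritable>();
    for (int i = 0; i < cellCount; i++) {
        long timestamp = WritableUtils.readVLong(in);
        FijiCellWritable cell = new FijiCellWritable(); // assumes a no-arg constructor
        cell.readFields(in);
        cells.put(timestamp, cell);
    }
    return cells;
}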