Example usage for org.apache.hadoop.io WritableUtils readVInt

List of usage examples for org.apache.hadoop.io WritableUtils readVInt

Introduction

This page collects example usages of org.apache.hadoop.io.WritableUtils.readVInt drawn from open-source projects.

Prototype

public static int readVInt(DataInput stream) throws IOException 

Document

Reads a zero-compressed encoded integer from the input stream and returns it.
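
As a minimal illustration of the API (not taken from any of the projects below), readVInt pairs with WritableUtils.writeVInt: small values are encoded in a single byte and larger values grow as needed. A self-contained round trip through an in-memory stream might look like the following sketch; the class and variable names are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class VIntRoundTrip {
    public static void main(String[] args) throws IOException {
        // Write a value with zero-compressed (variable-length) encoding.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        WritableUtils.writeVInt(out, 42); // small values are encoded in a single byte
        out.close();

        // Read it back with readVInt.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        int value = WritableUtils.readVInt(in); // returns 42
        System.out.println(value);
    }
}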

Usage

From source file:com.ibm.bi.dml.runtime.transform.DistinctValue.java

License:Open Source License

@Override
public void readFields(DataInput in) throws IOException {
    // read word 
    int newLength = WritableUtils.readVInt(in);
    _bytes = new byte[newLength];
    in.readFully(_bytes, 0, newLength);
    _length = newLength;
    if (_length != _bytes.length)
        System.out.println("ERROR in DistinctValue.readFields()");
    // read count
    _count = in.readLong();
}
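
Because Hadoop's Writable contract requires write(DataOutput) to mirror readFields(DataInput), the write side that pairs with the method above would follow the same layout. The actual DistinctValue.write() is not shown on this page; the sketch below is only inferred from the fields read above.

@Override
public void write(DataOutput out) throws IOException {
    // write word: variable-length length prefix followed by the raw bytes
    WritableUtils.writeVInt(out, _length);
    out.write(_bytes, 0, _length);
    // write count
    out.writeLong(_count);
}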

From source file:com.marklogic.contentpump.DatabaseDocumentWithMeta.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    int contentLen = WritableUtils.readVInt(in);
    if (0 != contentLen) {
        content = new byte[contentLen];
        in.readFully(content, 0, contentLen);
        int ordinal = in.readInt();
        contentType = ContentType.valueOf(ordinal);
    }
    int len = in.readInt();
    byte[] xml = new byte[len];
    in.readFully(xml, 0, len);
    StringReader reader = new StringReader(new String(xml));
    meta = DocumentMetadata.fromXML(reader);
}

From source file:com.marklogic.mapreduce.DatabaseDocument.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    int ordinal = in.readInt();
    contentType = ContentType.valueOf(ordinal);
    int length = WritableUtils.readVInt(in);
    content = new byte[length];
    in.readFully(content, 0, length);
}

From source file:com.moz.fiji.hive.io.EntityIdWritable.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    byte[] bytes = WritableUtils.readCompressedByteArray(in);
    mHBaseRowKey = bytes;

    // Read the components
    int numComponents = WritableUtils.readVInt(in);
    List<Object> components = Lists.newArrayList();
    for (int c = 0; c < numComponents; c++) {
        Component componentType = WritableUtils.readEnum(in, Component.class);
        switch (componentType) {
        case STRING:
            String stringComponent = WritableUtils.readString(in);
            components.add(stringComponent);
            break;
        case INTEGER:
            Integer intComponent = WritableUtils.readVInt(in);
            components.add(intComponent);
            break;
        case LONG:
            Long longComponent = WritableUtils.readVLong(in);
            components.add(longComponent);
            break;
        case RAW_HBASE_KEY:
            byte[] byteArrayComponent = WritableUtils.readCompressedByteArray(in);
            components.add(byteArrayComponent);
            break;
        case NULL:
            break;
        default:
            throw new EntityIdException("Unexpected type for Component " + componentType);
        }
    }
    mComponents = components;

    String shellString = WritableUtils.readString(in);
    mShellString = shellString;
}

From source file:com.moz.fiji.hive.io.FijiCellWritable.java

License:Apache License

/**
 * Reads and converts data according to the specified schema.
 *
 * @param in DataInput to deserialize this object from.
 * @param schema Schema to be used for deserializing this data.
 * @return the data read and converted according to the schema.
 * @throws IOException if there was an error reading.
 */
private static Object readData(DataInput in, Schema schema) throws IOException {
    switch (schema.getType()) {
    case INT:
        Integer intData = WritableUtils.readVInt(in);
        return intData;
    case LONG:
        Long longData = WritableUtils.readVLong(in);
        return longData;
    case DOUBLE:
        DoubleWritable doubleWritable = (DoubleWritable) WritableFactories.newInstance(DoubleWritable.class);
        doubleWritable.readFields(in);
        return doubleWritable.get();
    case ENUM:
    case STRING:
        String stringData = WritableUtils.readString(in);
        return stringData;
    case FLOAT:
        FloatWritable floatWritable = (FloatWritable) WritableFactories.newInstance(FloatWritable.class);
        floatWritable.readFields(in);
        return floatWritable.get();
    case ARRAY:
        List<Object> listData = Lists.newArrayList();
        Integer numElements = WritableUtils.readVInt(in);
        for (int c = 0; c < numElements; c++) {
            Object listElement = readData(in, schema.getElementType());
            listData.add(listElement);
        }
        return listData;
    case RECORD:
        GenericRecord recordData = new GenericData.Record(schema);
        Integer numFields = WritableUtils.readVInt(in);
        for (int c = 0; c < numFields; c++) {
            String fieldName = WritableUtils.readString(in);
            Object fieldData = readData(in, schema.getField(fieldName).schema());
            recordData.put(fieldName, fieldData);
        }
        return recordData;
    case MAP:
        Map<String, Object> mapData = Maps.newHashMap();
        Integer numEntries = WritableUtils.readVInt(in);
        for (int c = 0; c < numEntries; c++) {
            String key = WritableUtils.readString(in);
            Object value = readData(in, schema.getValueType());
            mapData.put(key, value);
        }
        return mapData;
    case UNION:
        Integer tag = WritableUtils.readVInt(in);
        Schema unionSubSchema = schema.getTypes().get(tag);
        Object unionData = readData(in, unionSubSchema);
        return unionData;
    case BYTES:
        byte[] bytesData = WritableUtils.readCompressedByteArray(in);
        return bytesData;
    case BOOLEAN:
        BooleanWritable booleanWritable = (BooleanWritable) WritableFactories
                .newInstance(BooleanWritable.class);
        booleanWritable.readFields(in);
        return booleanWritable.get();
    case NULL:
        return null;
    default:
        throw new UnsupportedOperationException("Unsupported type: " + schema.getType());
    }
}

From source file:com.moz.fiji.hive.io.FijiRowDataWritable.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    EntityIdWritable entityIdWritable = (EntityIdWritable) WritableFactories
            .newInstance(EntityIdWritable.class);
    entityIdWritable.readFields(in);
    mEntityId = entityIdWritable;

    int numDecodedData = WritableUtils.readVInt(in);

    // We need to dirty the decoded data so that these objects can be reused.
    mDecodedData = null;

    mWritableData = Maps.newHashMap();
    for (int c = 0; c < numDecodedData; c++) {
        String columnText = WritableUtils.readString(in);
        FijiColumnName column = new FijiColumnName(columnText);

        NavigableMap<Long, FijiCellWritable> data = Maps.newTreeMap();
        int numCells = WritableUtils.readVInt(in);
        for (int d = 0; d < numCells; d++) {
            long ts = WritableUtils.readVLong(in);
            FijiCellWritable cellWritable = (FijiCellWritable) WritableFactories
                    .newInstance(FijiCellWritable.class);
            cellWritable.readFields(in);
            data.put(ts, cellWritable);
        }

        mWritableData.put(column, data);
    }

    mSchemas = Maps.newHashMap();
    int numSchemas = WritableUtils.readVInt(in);
    for (int c = 0; c < numSchemas; c++) {
        String columnText = WritableUtils.readString(in);
        FijiColumnName column = new FijiColumnName(columnText);
        String schemaString = WritableUtils.readString(in);
        Schema schema = new Schema.Parser().parse(schemaString);
        mSchemas.put(column, schema);
    }
}

From source file:com.newland.bi.bigdata.hdfs.Configuration.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    clear();
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; ++i) {
        String key = org.apache.hadoop.io.Text.readString(in);
        String value = org.apache.hadoop.io.Text.readString(in);
        set(key, value);
        String sources[] = WritableUtils.readCompressedStringArray(in);
        updatingResource.put(key, sources);
    }
}

From source file:com.ning.metrics.action.hdfs.data.RowSmile.java

License:Apache License

/**
 * Replace the current row content with a specified DataInput
 *
 * @param in DataInput to read
 * @throws java.io.IOException generic serialization error
 */
@Override
public void readFields(DataInput in) throws IOException {
    schema.readFields(in);
    int numberOfItems = WritableUtils.readVInt(in);
    int smilePayloadSize = WritableUtils.readVInt(in);

    int itemsRead = 0;

    byte[] smilePayload = new byte[smilePayloadSize];
    in.readFully(smilePayload);

    JsonParser jp = objectMapper.getJsonFactory().createJsonParser(smilePayload);
    while (jp.nextToken() != null && itemsRead < numberOfItems) {
        objectMapper.readValue(jp, JsonNodeComparable.class);
        itemsRead++;
    }
    jp.close();
}

From source file:com.ning.metrics.action.hdfs.data.RowText.java

License:Apache License

/**
 * Replace the current row content with a specified DataInput
 *
 * @param in DataInput to read
 * @throws java.io.IOException generic serialization error
 */
@Override
public void readFields(DataInput in) throws IOException {
    schema.readFields(in);
    int size = WritableUtils.readVInt(in);

    data = new ArrayList<String>(size);
    for (int i = 0; i < size; i++) {
        int length = in.readInt();
        byte[] bytes = new byte[length];

        in.readFully(bytes);
        data.add(new String(bytes));
    }
}

From source file:com.ning.metrics.action.hdfs.data.RowThrift.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    schema.readFields(in);
    int size = WritableUtils.readVInt(in);

    data = new ArrayList<DataItem>(size);
    for (int i = 0; i < size; i++) {
        data.add(new DataItemDeserializer().fromHadoop(in));
    }
}