Example usage for org.apache.hadoop.io WritableUtils readCompressedByteArray

Introduction

On this page you can find example usage of org.apache.hadoop.io WritableUtils readCompressedByteArray, drawn from several open-source projects.

Prototype

public static byte[] readCompressedByteArray(DataInput in) throws IOException 
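
This method is the counterpart of WritableUtils.writeCompressedByteArray: it reads a length-prefixed, compressed byte array from the DataInput and returns the decompressed bytes. Below is a minimal round-trip sketch; the class name and sample data are illustrative and not taken from any of the examples on this page.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.hadoop.io.WritableUtils;

public class CompressedByteArrayRoundTrip {
    public static void main(String[] args) throws IOException {
        // Illustrative round-trip example (not from any project on this page).
        byte[] original = "some row key bytes".getBytes(StandardCharsets.UTF_8);

        // Write side: compress the array and serialize it with a length prefix.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(baos)) {
            WritableUtils.writeCompressedByteArray(out, original);
        }

        // Read side: deserialize and decompress the array back into a byte[].
        byte[] copy;
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
            copy = WritableUtils.readCompressedByteArray(in);
        }

        System.out.println(Arrays.equals(original, copy)); // prints: true
    }
}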

Usage

From source file: com.moz.fiji.hive.io.EntityIdWritable.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    byte[] bytes = WritableUtils.readCompressedByteArray(in);
    mHBaseRowKey = bytes;

    // Read the components
    int numComponents = WritableUtils.readVInt(in);
    List<Object> components = Lists.newArrayList();
    for (int c = 0; c < numComponents; c++) {
        Component componentType = WritableUtils.readEnum(in, Component.class);
        switch (componentType) {
        case STRING:
            String stringComponent = WritableUtils.readString(in);
            components.add(stringComponent);
            break;
        case INTEGER:
            Integer intComponent = WritableUtils.readVInt(in);
            components.add(intComponent);
            break;
        case LONG:
            Long longComponent = WritableUtils.readVLong(in);
            components.add(longComponent);
            break;
        case RAW_HBASE_KEY:
            byte[] byteArrayComponent = WritableUtils.readCompressedByteArray(in);
            components.add(byteArrayComponent);
            break;
        case NULL:
            break;
        default:
            throw new EntityIdException("Unexpected type for Component " + componentType);
        }
    }
    mComponents = components;

    String shellString = WritableUtils.readString(in);
    mShellString = shellString;
}
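
The write side of this class is not shown on this page. The following is a hedged sketch of what a matching write() could look like, mirroring the readFields() above; the field and enum names are taken from the snippet, and the actual source may differ.

@Override
public void write(DataOutput out) throws IOException {
    // Hedged sketch, not from the source file: mirrors readFields() above.
    WritableUtils.writeCompressedByteArray(out, mHBaseRowKey);

    // Write the components, tagging each entry with its Component type.
    WritableUtils.writeVInt(out, mComponents.size());
    for (Object component : mComponents) {
        if (component instanceof String) {
            WritableUtils.writeEnum(out, Component.STRING);
            WritableUtils.writeString(out, (String) component);
        } else if (component instanceof Integer) {
            WritableUtils.writeEnum(out, Component.INTEGER);
            WritableUtils.writeVInt(out, (Integer) component);
        } else if (component instanceof Long) {
            WritableUtils.writeEnum(out, Component.LONG);
            WritableUtils.writeVLong(out, (Long) component);
        } else if (component instanceof byte[]) {
            WritableUtils.writeEnum(out, Component.RAW_HBASE_KEY);
            WritableUtils.writeCompressedByteArray(out, (byte[]) component);
        } else {
            WritableUtils.writeEnum(out, Component.NULL);
        }
    }

    WritableUtils.writeString(out, mShellString);
}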

From source file: com.moz.fiji.hive.io.FijiCellWritable.java

License: Apache License

/**
 * Reads and converts data according to the specified schema.
 *
 * @param in DataInput to deserialize this object from.
 * @param schema Schema to be used for deserializing this data.
 * @return the data read and converted according to the schema.
 * @throws IOException if there was an error reading.
 */
private static Object readData(DataInput in, Schema schema) throws IOException {
    switch (schema.getType()) {
    case INT:
        Integer intData = WritableUtils.readVInt(in);
        return intData;
    case LONG:
        Long longData = WritableUtils.readVLong(in);
        return longData;
    case DOUBLE:
        DoubleWritable doubleWritable = (DoubleWritable) WritableFactories.newInstance(DoubleWritable.class);
        doubleWritable.readFields(in);
        return doubleWritable.get();
    case ENUM:
    case STRING:
        String stringData = WritableUtils.readString(in);
        return stringData;
    case FLOAT:
        FloatWritable floatWritable = (FloatWritable) WritableFactories.newInstance(FloatWritable.class);
        floatWritable.readFields(in);
        return floatWritable.get();
    case ARRAY:
        List<Object> listData = Lists.newArrayList();
        Integer numElements = WritableUtils.readVInt(in);
        for (int c = 0; c < numElements; c++) {
            Object listElement = readData(in, schema.getElementType());
            listData.add(listElement);
        }
        return listData;
    case RECORD:
        GenericRecord recordData = new GenericData.Record(schema);
        Integer numFields = WritableUtils.readVInt(in);
        for (int c = 0; c < numFields; c++) {
            String fieldName = WritableUtils.readString(in);
            Object fieldData = readData(in, schema.getField(fieldName).schema());
            recordData.put(fieldName, fieldData);
        }
        return recordData;
    case MAP:
        Map<String, Object> mapData = Maps.newHashMap();
        Integer numEntries = WritableUtils.readVInt(in);
        for (int c = 0; c < numEntries; c++) {
            String key = WritableUtils.readString(in);
            Object value = readData(in, schema.getValueType());
            mapData.put(key, value);
        }
        return mapData;
    case UNION:
        Integer tag = WritableUtils.readVInt(in);
        Schema unionSubSchema = schema.getTypes().get(tag);
        Object unionData = readData(in, unionSubSchema);
        return unionData;
    case BYTES:
        byte[] bytesData = WritableUtils.readCompressedByteArray(in);
        return bytesData;
    case BOOLEAN:
        BooleanWritable booleanWritable = (BooleanWritable) WritableFactories
                .newInstance(BooleanWritable.class);
        booleanWritable.readFields(in);
        return booleanWritable.get();
    case NULL:
        return null;
    default:
        throw new UnsupportedOperationException("Unsupported type: " + schema.getType());
    }
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.AttributeValueIOUtils.java

License: Apache License

public static AttributeValue read(Types type, DataInput in) throws IOException {
    AttributeValue value = new AttributeValue();
    switch (type) {
    case STRING:
        value.withS(Text.readString(in));
        break;
    case NUMBER:
        value.withN(Text.readString(in));
        break;
    case BINARY:
        byte[] bytes = WritableUtils.readCompressedByteArray(in);
        ByteBuffer buf = ByteBuffer.wrap(bytes);
        value.withB(buf);
        break;
    case STRING_SET:
    case NUMBER_SET:
    case BINARY_SET: {
        // handle sets
        int size = in.readInt();
        List<AttributeValue> values = new ArrayList<AttributeValue>(size);
        for (int i = 0; i < size; i++) {
            switch (type) {
            case STRING_SET:
                values.add(read(Types.STRING, in));
                break;
            case NUMBER_SET:
                values.add(read(Types.NUMBER, in));
                break;
            case BINARY_SET:
                values.add(read(Types.BINARY, in));
                break;
            default:
                throw new IOException("Nested sets of sets are not permitted");
            }
        }
        break;
    }
    }

    return value;
}

From source file: org.apache.accumulo.core.client.security.tokens.PasswordToken.java

License: Apache License

@Override
public void readFields(DataInput arg0) throws IOException {
    password = WritableUtils.readCompressedByteArray(arg0);
}
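
For symmetry, here is a hedged sketch of the matching write(); it is not shown on this page and the actual Accumulo source may differ.

@Override
public void write(DataOutput arg0) throws IOException {
    // Hedged sketch: serialize the password with the writer that matches readCompressedByteArray.
    WritableUtils.writeCompressedByteArray(arg0, password);
}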

From source file: org.apache.phoenix.filter.ColumnProjectionFilter.java

License: Apache License

@Override
public void readFields(DataInput input) throws IOException {
    this.emptyCFName = WritableUtils.readCompressedByteArray(input);
    int familyMapSize = WritableUtils.readVInt(input);
    assert familyMapSize > 0;
    columnsTracker = new TreeMap<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>>();
    while (familyMapSize > 0) {
        byte[] cf = WritableUtils.readCompressedByteArray(input);
        int qualifiersSize = WritableUtils.readVInt(input);
        NavigableSet<ImmutableBytesPtr> qualifiers = null;
        if (qualifiersSize > 0) {
            qualifiers = new TreeSet<ImmutableBytesPtr>();
            while (qualifiersSize > 0) {
                qualifiers.add(new ImmutableBytesPtr(WritableUtils.readCompressedByteArray(input)));
                qualifiersSize--;
            }
        }
        columnsTracker.put(new ImmutableBytesPtr(cf), qualifiers);
        familyMapSize--;
    }
    int conditionOnlyCfsSize = WritableUtils.readVInt(input);
    usesEncodedColumnNames = conditionOnlyCfsSize > 0;
    emptyKVQualifier = EncodedColumnsUtil.getEmptyKeyValueInfo(usesEncodedColumnNames).getFirst();
    conditionOnlyCfsSize = Math.abs(conditionOnlyCfsSize) - 1; // restore to the actual value.
    this.conditionOnlyCfs = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
    while (conditionOnlyCfsSize > 0) {
        this.conditionOnlyCfs.add(WritableUtils.readCompressedByteArray(input));
        conditionOnlyCfsSize--;
    }
}

From source file: org.apache.phoenix.filter.EncodedQualifiersColumnProjectionFilter.java

License: Apache License

@Override
public void readFields(DataInput input) throws IOException {
    this.emptyCFName = WritableUtils.readCompressedByteArray(input);
    int bitsetLongArraySize = WritableUtils.readVInt(input);
    long[] bitsetLongArray = new long[bitsetLongArraySize];
    for (int i = 0; i < bitsetLongArraySize; i++) {
        bitsetLongArray[i] = WritableUtils.readVLong(input);
    }
    this.trackedColumns = BitSet.valueOf(bitsetLongArray);
    this.encodingScheme = QualifierEncodingScheme.values()[WritableUtils.readVInt(input)];
    int conditionOnlyCfsSize = WritableUtils.readVInt(input);
    this.conditionOnlyCfs = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
    while (conditionOnlyCfsSize > 0) {
        this.conditionOnlyCfs.add(WritableUtils.readCompressedByteArray(input));
        conditionOnlyCfsSize--;
    }
}

From source file: org.apache.phoenix.filter.RowKeyComparisonFilter.java

License: Apache License

@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    this.essentialCF = WritableUtils.readCompressedByteArray(input);
}

From source file: org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable.java

License: Apache License

@Override
public void readFields(final DataInput input) throws IOException {
    final ByteArrayInputStream bais = new ByteArrayInputStream(WritableUtils.readCompressedByteArray(input));
    this.t = KryoShimServiceLoader.readClassAndObject(bais);
}

From source file: org.apache.tinkerpop.gremlin.hadoop.structure.io.VertexWritable.java

License: Apache License

@Override
public void readFields(final DataInput input) throws IOException {
    this.vertex = null;
    final ByteArrayInputStream bais = new ByteArrayInputStream(WritableUtils.readCompressedByteArray(input));
    this.vertex = ((StarGraph) KryoShimServiceLoader.readClassAndObject(bais)).getStarVertex(); // read the star graph;
}