Example usage for org.apache.hadoop.io WritableUtils writeCompressedByteArray

List of usage examples for org.apache.hadoop.io WritableUtils writeCompressedByteArray

Introduction

On this page you can find an example usage for org.apache.hadoop.io WritableUtils writeCompressedByteArray.

Prototype

public static int writeCompressedByteArray(DataOutput out, byte[] bytes) throws IOException 

Source Link

Usage

From source file:com.moz.fiji.hive.io.EntityIdWritable.java

License:Apache License

/**
 * Serializes this entity id: the raw HBase row key, then the typed component
 * list, then the shell string representation.
 *
 * @param out DataOutput to serialize this object into.
 * @throws IOException if there was an error writing.
 */
@Override
public void write(DataOutput out) throws IOException {
    WritableUtils.writeCompressedByteArray(out, mHBaseRowKey);

    // Components are written as a count followed by (type-tag, value) pairs.
    WritableUtils.writeVInt(out, mComponents.size());
    for (Object value : mComponents) {
        if (value == null) {
            WritableUtils.writeEnum(out, Component.NULL);
        } else if (value instanceof String) {
            WritableUtils.writeEnum(out, Component.STRING);
            WritableUtils.writeString(out, (String) value);
        } else if (value instanceof Integer) {
            WritableUtils.writeEnum(out, Component.INTEGER);
            WritableUtils.writeVInt(out, (Integer) value);
        } else if (value instanceof Long) {
            WritableUtils.writeEnum(out, Component.LONG);
            WritableUtils.writeVLong(out, (Long) value);
        } else if (value instanceof byte[]) {
            // A raw HBase key is only legal when it is the entire entity id.
            Preconditions.checkState(mComponents.size() == 1, "byte[] only valid as sole component.");
            WritableUtils.writeEnum(out, Component.RAW_HBASE_KEY);
            WritableUtils.writeCompressedByteArray(out, (byte[]) value);
        } else {
            throw new EntityIdException("Unexpected type for Component " + value.getClass().getName());
        }
    }

    WritableUtils.writeString(out, mShellString);
}

From source file:com.moz.fiji.hive.io.FijiCellWritable.java

License:Apache License

/**
 * Reads and converts data according to the specified schema.
 *
 * @param out DataOutput to serialize this object into.
 * @param data data to be serialized.
 * @param schema Schema to be used for serializing this data.
 * @throws IOException if there was an error writing.
 */
private static void writeData(DataOutput out, Object data, Schema schema) throws IOException {
    switch (schema.getType()) {
    case INT:
        WritableUtils.writeVInt(out, (Integer) data);
        break;
    case LONG:
        WritableUtils.writeVLong(out, (Long) data);
        break;
    case DOUBLE:
        new DoubleWritable((Double) data).write(out);
        break;
    case ENUM:
    case STRING:
        // Enums and strings share a representation: their string form.
        WritableUtils.writeString(out, data.toString());
        break;
    case FLOAT:
        new FloatWritable((Float) data).write(out);
        break;
    case ARRAY:
        // Element count, then each element recursively.
        List<Object> elements = (List<Object>) data;
        WritableUtils.writeVInt(out, elements.size());
        for (Object element : elements) {
            writeData(out, element, schema.getElementType());
        }
        break;
    case RECORD:
        // Field count, then (field name, field value) pairs recursively.
        IndexedRecord record = (IndexedRecord) data;
        WritableUtils.writeVInt(out, schema.getFields().size());
        for (Schema.Field field : schema.getFields()) {
            WritableUtils.writeString(out, field.name());
            writeData(out, record.get(field.pos()), field.schema());
        }
        break;
    case MAP:
        // Entry count, then (key, value) pairs recursively.
        Map<String, Object> map = (Map<String, Object>) data;
        WritableUtils.writeVInt(out, map.size());
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            WritableUtils.writeString(out, entry.getKey());
            writeData(out, entry.getValue(), schema.getValueType());
        }
        break;
    case UNION:
        // The branch tag picks the sub-schema, then the value follows.
        final Integer branch = GenericData.get().resolveUnion(schema, data);
        WritableUtils.writeVInt(out, branch);
        writeData(out, data, schema.getTypes().get(branch));
        break;
    case BYTES:
        WritableUtils.writeCompressedByteArray(out, (byte[]) data);
        break;
    case BOOLEAN:
        new BooleanWritable((Boolean) data).write(out);
        break;
    case NULL:
        // Don't need to write anything for null.
        break;
    case FIXED:
    default:
        throw new UnsupportedOperationException("Unsupported type: " + schema.getType());
    }
}

From source file:com.willetinc.hadoop.mapreduce.dynamodb.AttributeValueIOUtils.java

License:Apache License

/**
 * Serializes a DynamoDB AttributeValue of the given type to a DataOutput.
 *
 * <p>Scalar strings and numbers are written as Text strings, binary values as
 * compressed byte arrays, and set types as an int count followed by elements.
 *
 * @param type the attribute type tag selecting which field of {@code value} to write.
 * @param value the attribute value to serialize.
 * @param out destination for the serialized bytes.
 * @throws IOException if there was an error writing.
 */
public static void write(Types type, AttributeValue value, DataOutput out) throws IOException {
    switch (type) {
    case STRING:
        Text.writeString(out, value.getS());
        break;
    case NUMBER:
        Text.writeString(out, value.getN());
        break;
    case BINARY: {
        WritableUtils.writeCompressedByteArray(out, value.getB().array());
        break;
    }
    case STRING_SET: {
        List<String> values = value.getSS();
        out.writeInt(values.size());
        for (String s : values) {
            Text.writeString(out, s);
        }
        break;
    }
    case NUMBER_SET: {
        List<String> values = value.getNS();
        out.writeInt(values.size());
        for (String s : values) {
            Text.writeString(out, s);
        }
        break;
    }
    case BINARY_SET: {
        List<ByteBuffer> values = value.getBS();
        out.writeInt(values.size());
        for (ByteBuffer buf : values) {
            WritableUtils.writeCompressedByteArray(out, buf.array());
        }
        // Was missing: harmless while this is the last case, but a silent
        // fall-through trap if another case is ever appended below.
        break;
    }
    // NOTE(review): no default branch — an unrecognized type writes nothing,
    // which would desynchronize a reader; confirm Types has no other constants.
    }
}

From source file:org.apache.accumulo.core.client.security.tokens.PasswordToken.java

License:Apache License

/**
 * Serializes this token by writing the password bytes as a compressed byte array.
 *
 * @param arg0 DataOutput to serialize this object into.
 * @throws IOException if there was an error writing.
 */
@Override
public void write(DataOutput arg0) throws IOException {
    WritableUtils.writeCompressedByteArray(arg0, password);
}

From source file:org.apache.phoenix.filter.ColumnProjectionFilter.java

License:Apache License

/**
 * Serializes this filter: the empty column family name, the tracked
 * (family, qualifiers) map, and the condition-only column families.
 *
 * @param output DataOutput to serialize this object into.
 * @throws IOException if there was an error writing.
 */
@Override
public void write(DataOutput output) throws IOException {
    WritableUtils.writeCompressedByteArray(output, this.emptyCFName);

    // Family count, then per family: name, qualifier count, qualifier names.
    WritableUtils.writeVInt(output, this.columnsTracker.size());
    for (Entry<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> entry : this.columnsTracker.entrySet()) {
        WritableUtils.writeCompressedByteArray(output, entry.getKey().copyBytes());
        NavigableSet<ImmutableBytesPtr> qualifiers = entry.getValue();
        int qualifierCount = (qualifiers == null) ? 0 : qualifiers.size();
        WritableUtils.writeVInt(output, qualifierCount);
        if (qualifierCount > 0) {
            for (ImmutableBytesPtr qualifier : qualifiers) {
                WritableUtils.writeCompressedByteArray(output, qualifier.copyBytes());
            }
        }
    }

    // Encode usesEncodedColumnNames in conditionOnlyCfs size: the sign of the
    // vint carries the flag, so the size is offset by one to keep it nonzero.
    WritableUtils.writeVInt(output, (this.conditionOnlyCfs.size() + 1) * (usesEncodedColumnNames ? 1 : -1));
    for (byte[] family : this.conditionOnlyCfs) {
        WritableUtils.writeCompressedByteArray(output, family);
    }
}

From source file:org.apache.phoenix.filter.EncodedQualifiersColumnProjectionFilter.java

License:Apache License

/**
 * Serializes this filter: the empty column family name, the tracked-column
 * bit set as a vlong word array, the encoding scheme ordinal, and the
 * condition-only column families.
 *
 * @param output DataOutput to serialize this object into.
 * @throws IOException if there was an error writing.
 */
@Override
public void write(DataOutput output) throws IOException {
    WritableUtils.writeCompressedByteArray(output, this.emptyCFName);
    long[] longArrayOfBitSet = trackedColumns.toLongArray();
    WritableUtils.writeVInt(output, longArrayOfBitSet.length);
    // Iterate with the primitive type: the original `for (Long l : ...)`
    // autoboxed every word of the bit set for no benefit.
    for (long word : longArrayOfBitSet) {
        WritableUtils.writeVLong(output, word);
    }
    WritableUtils.writeVInt(output, encodingScheme.ordinal());
    WritableUtils.writeVInt(output, this.conditionOnlyCfs.size());
    for (byte[] f : this.conditionOnlyCfs) {
        WritableUtils.writeCompressedByteArray(output, f);
    }
}

From source file:org.apache.phoenix.filter.RowKeyComparisonFilter.java

License:Apache License

/**
 * Serializes this filter: the superclass state first, then the essential
 * column family name as a compressed byte array.
 *
 * @param output DataOutput to serialize this object into.
 * @throws IOException if there was an error writing.
 */
@Override
public void write(DataOutput output) throws IOException {
    super.write(output);
    WritableUtils.writeCompressedByteArray(output, this.essentialCF);
}

From source file:org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable.java

License:Apache License

/**
 * Serializes the wrapped object by Kryo-encoding it (class plus state) and
 * writing the resulting bytes as a compressed byte array.
 *
 * @param output DataOutput to serialize this object into.
 * @throws IOException if there was an error writing.
 */
@Override
public void write(final DataOutput output) throws IOException {
    // Idiomatic array declaration (was C-style "final byte serialized[]").
    final byte[] serialized = KryoShimServiceLoader.writeClassAndObjectToBytes(this.t);
    WritableUtils.writeCompressedByteArray(output, serialized);
}

From source file:org.apache.tinkerpop.gremlin.hadoop.structure.io.VertexWritable.java

License:Apache License

/**
 * Serializes the wrapped vertex's graph by Kryo-encoding it (class plus
 * state) and writing the resulting bytes as a compressed byte array.
 *
 * @param output DataOutput to serialize this object into.
 * @throws IOException if there was an error writing.
 */
@Override
public void write(final DataOutput output) throws IOException {
    // Idiomatic array declaration (was C-style "final byte serialized[]").
    final byte[] serialized = KryoShimServiceLoader.writeClassAndObjectToBytes(this.vertex.graph());
    WritableUtils.writeCompressedByteArray(output, serialized);
}