Example usage for org.apache.hadoop.io ArrayWritable ArrayWritable

List of usage examples for org.apache.hadoop.io ArrayWritable ArrayWritable

Introduction

On this page you can find example usage for org.apache.hadoop.io ArrayWritable ArrayWritable.

Prototype

public ArrayWritable(Class<? extends Writable> valueClass, Writable[] values) 

Source Link

Usage

From source file:com.dasasian.chok.mapfile.TextArrayWritable.java

License:Apache License

/**
 * Wraps the given {@link Text} values in an {@link ArrayWritable} typed to
 * {@code Text} and stores it in this object's backing {@code array} field.
 *
 * @param texts the text values to store
 */
public TextArrayWritable(List<Text> texts) {
    Writable[] values = texts.toArray(new Writable[texts.size()]);
    array = new ArrayWritable(Text.class, values);
}

From source file:com.mozilla.hadoop.hbase.mapreduce.MultiScanTableMapReduceUtil.java

License:Apache License

/**
 * Converts an array of Scan objects into a base64 string
 * @param scans/*from  w  w w.ja  va2s  .c o m*/
 * @return
 * @throws IOException
 */
public static String convertScanArrayToString(final Scan[] scans) throws IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final DataOutputStream dos = new DataOutputStream(baos);

    ArrayWritable aw = new ArrayWritable(Scan.class, scans);
    aw.write(dos);

    return Base64.encodeBytes(baos.toByteArray());
}

From source file:com.stratio.es.utils.UtilESTest.java

License:Apache License

/**
 * Builds the nested {@code LinkedMapWritable} test fixture: a root document
 * containing a "metadata" map and a "cantos" array of two canto maps.
 *
 * @return the assembled JSON-like document
 */
private LinkedMapWritable createJsonTest() {
    LinkedMapWritable metadata = new LinkedMapWritable();
    metadata.put(new Text("author"), new Text(AUTHOR));
    metadata.put(new Text("title"), new Text(TITLE));
    metadata.put(new Text("source"), new Text(SOURCE));

    LinkedMapWritable firstCanto = new LinkedMapWritable();
    firstCanto.put(new Text("canto"), new Text(CANTO_I));
    firstCanto.put(new Text("text"), new Text(TEXT_I));

    LinkedMapWritable secondCanto = new LinkedMapWritable();
    secondCanto.put(new Text("canto"), new Text(CANTO_II));
    secondCanto.put(new Text("text"), new Text(TEXT_II));

    ArrayWritable cantos = new ArrayWritable(LinkedMapWritable.class,
            new LinkedMapWritable[] { firstCanto, secondCanto });

    // Insertion order matters: LinkedMapWritable preserves put order.
    LinkedMapWritable document = new LinkedMapWritable();
    document.put(new Text("metadata"), metadata);
    document.put(new Text("cantos"), cantos);

    return document;
}

From source file:com.uber.hoodie.hadoop.realtime.AbstractRealtimeRecordReader.java

License:Apache License

/**
 * Recursively converts a projected Avro value read from a delta record into
 * the Hadoop {@link Writable} hierarchy, using nested {@link ArrayWritable}
 * instances for records, arrays and map entries.
 *
 * @param value  the Avro datum to convert; may be {@code null}
 * @param schema the Avro schema describing {@code value}
 * @return the equivalent Writable, or {@code null} for null input, a NULL
 *         schema, or an unrecognized schema type
 */
public static Writable avroToArrayWritable(Object value, Schema schema) {

    if (value == null) {
        return null;
    }

    switch (schema.getType()) {
    case STRING:
        return new Text(value.toString());
    case BYTES:
        // NOTE(review): Avro commonly decodes BYTES as ByteBuffer rather than
        // byte[] — confirm callers always pass byte[] here, or this cast throws.
        return new BytesWritable((byte[]) value);
    case INT:
        return new IntWritable((Integer) value);
    case LONG:
        return new LongWritable((Long) value);
    case FLOAT:
        return new FloatWritable((Float) value);
    case DOUBLE:
        return new DoubleWritable((Double) value);
    case BOOLEAN:
        return new BooleanWritable((Boolean) value);
    case NULL:
        return null;
    case RECORD:
        // Each record field is converted in schema declaration order and packed
        // positionally into one ArrayWritable.
        GenericRecord record = (GenericRecord) value;
        Writable[] recordValues = new Writable[schema.getFields().size()];
        int recordValueIndex = 0;
        for (Schema.Field field : schema.getFields()) {
            recordValues[recordValueIndex++] = avroToArrayWritable(record.get(field.name()), field.schema());
        }
        return new ArrayWritable(Writable.class, recordValues);
    case ENUM:
        // Enum symbols are represented by their string name.
        return new Text(value.toString());
    case ARRAY:
        GenericArray arrayValue = (GenericArray) value;
        Writable[] arrayValues = new Writable[arrayValue.size()];
        int arrayValueIndex = 0;
        for (Object obj : arrayValue) {
            arrayValues[arrayValueIndex++] = avroToArrayWritable(obj, schema.getElementType());
        }
        // Hive 1.x will fail here, it requires values2 to be wrapped into another ArrayWritable
        return new ArrayWritable(Writable.class, arrayValues);
    case MAP:
        // Each map entry becomes a 2-element [key, value] ArrayWritable; keys
        // are stringified via toString(). Entry order follows the map's
        // iteration order, which may be nondeterministic for hash maps.
        Map mapValue = (Map) value;
        Writable[] mapValues = new Writable[mapValue.size()];
        int mapValueIndex = 0;
        for (Object entry : mapValue.entrySet()) {
            Map.Entry mapEntry = (Map.Entry) entry;
            Writable[] nestedMapValues = new Writable[2];
            nestedMapValues[0] = new Text(mapEntry.getKey().toString());
            nestedMapValues[1] = avroToArrayWritable(mapEntry.getValue(), schema.getValueType());
            mapValues[mapValueIndex++] = new ArrayWritable(Writable.class, nestedMapValues);
        }
        // Hive 1.x will fail here, it requires values3 to be wrapped into another ArrayWritable
        return new ArrayWritable(Writable.class, mapValues);
    case UNION:
        // Only the common nullable-union shape [null, T] / [T, null] is
        // supported: unwrap to the non-null branch and recurse.
        List<Schema> types = schema.getTypes();
        if (types.size() != 2) {
            throw new IllegalArgumentException("Only support union with 2 fields");
        }
        Schema s1 = types.get(0);
        Schema s2 = types.get(1);
        if (s1.getType() == Schema.Type.NULL) {
            return avroToArrayWritable(value, s2);
        } else if (s2.getType() == Schema.Type.NULL) {
            return avroToArrayWritable(value, s1);
        } else {
            throw new IllegalArgumentException("Only support union with null");
        }
    case FIXED:
        return new BytesWritable(((GenericFixed) value).bytes());
    default:
        // Unknown schema types are silently dropped.
        return null;
    }
}

From source file:com.uber.hoodie.hadoop.SafeParquetRecordReaderWrapper.java

License:Apache License

/**
 * Returns a brand-new {@link ArrayWritable} on every call. The underlying
 * ParquetReader reuses a single ArrayWritable across reads, which corrupts
 * buffered values under concurrent fetch/read — so instead of delegating we
 * build a fresh instance from the reader's value class and field count.
 */
@Override
public ArrayWritable createValue() {
    // Fresh, empty buffer sized from the parquet reader's value metadata.
    return new ArrayWritable(valueClass, new Writable[numValueFields]);
}

From source file:de.kp.core.arules.hadoop.BitSetArrayWritable.java

License:Open Source License

/**
 * No-arg constructor (required for Writable deserialization): initializes the
 * backing ArrayWritable with an empty {@code BitSetWritable} array.
 */
public BitSetArrayWritable() {
    this.bitsetArrayWritable = new ArrayWritable(BitSetWritable.class, new Writable[0]);
}

From source file:de.kp.core.arules.hadoop.BitSetArrayWritable.java

License:Open Source License

/**
 * Wraps each {@link BitSet} in a {@code BitSetWritable} and stores them in the
 * backing ArrayWritable, preserving input order.
 *
 * @param bitSets the bit sets to wrap
 */
public BitSetArrayWritable(BitSet[] bitSets) {
    Writable[] wrapped = new Writable[bitSets.length];
    for (int i = 0; i < bitSets.length; i++) {
        wrapped[i] = new BitSetWritable(bitSets[i]);
    }
    this.bitsetArrayWritable = new ArrayWritable(BitSetWritable.class, wrapped);
}

From source file:de.kp.core.arules.hadoop.IntArrayWritable.java

License:Open Source License

/**
 * No-arg constructor (required for Writable deserialization): initializes the
 * backing ArrayWritable with an empty {@code IntWritable} array.
 */
public IntArrayWritable() {
    this.intArrayWritable = new ArrayWritable(IntWritable.class, new Writable[0]);
}

From source file:de.kp.core.arules.hadoop.IntArrayWritable.java

License:Open Source License

public IntArrayWritable(int[] intArray) {

    ArrayList<IntWritable> items = new ArrayList<IntWritable>();
    for (int i = 0; i < intArray.length; i++) {
        items.add(new IntWritable(intArray[i]));
    }// w w  w.j  a  v a2 s  . c om

    this.intArrayWritable = new ArrayWritable(IntWritable.class, items.toArray(new Writable[items.size()]));

}

From source file:de.kp.core.arules.hadoop.TransactionListWritable.java

License:Open Source License

/**
 * No-arg constructor (required for Writable deserialization): initializes the
 * backing ArrayWritable with an empty {@code TransactionWritable} array.
 */
public TransactionListWritable() {
    this.transactionListWritable = new ArrayWritable(TransactionWritable.class, new Writable[0]);
}