Example usage for org.apache.hadoop.io ByteWritable ByteWritable

List of usage examples for org.apache.hadoop.io ByteWritable ByteWritable

Introduction

On this page you can find an example usage for org.apache.hadoop.io ByteWritable ByteWritable.

Prototype

public ByteWritable(byte value) 

Source Link

Usage

From source file:org.archive.bacon.io.SequenceFileStorage.java

License:Apache License

/**
 * Convert the Pig tupleValue to the corresponding Hadoop object.
 */// w w  w . j a v  a2s  . c o m
/**
 * Convert a Pig tuple value into the matching Hadoop {@link Writable}.
 *
 * @param tupleValue   the Pig-side value to translate
 * @param nullWritable pre-instantiated writable to hand back for Pig nulls
 * @return the Hadoop writable wrapping {@code tupleValue}
 * @throws IOException if the value's Pig type has no writable mapping
 *                     (bags, maps, tuples, errors, unknowns)
 */
public Writable getWritable(Object tupleValue, Writable nullWritable) throws IOException {
    byte pigType = DataType.findType(tupleValue);

    // Simple scalar types: unwrap the boxed value and wrap it in the
    // corresponding Hadoop writable.
    if (pigType == DataType.BOOLEAN) {
        return new BooleanWritable((boolean) tupleValue);
    }
    if (pigType == DataType.BYTE) {
        return new ByteWritable((byte) tupleValue);
    }
    if (pigType == DataType.CHARARRAY) {
        return new Text((String) tupleValue);
    }
    if (pigType == DataType.INTEGER) {
        return new IntWritable((int) tupleValue);
    }
    if (pigType == DataType.LONG) {
        return new LongWritable((long) tupleValue);
    }
    if (pigType == DataType.DOUBLE) {
        return new DoubleWritable((double) tupleValue);
    }
    if (pigType == DataType.FLOAT) {
        return new FloatWritable((float) tupleValue);
    }
    if (pigType == DataType.BYTEARRAY) {
        return new BytesWritable((byte[]) tupleValue);
    }

    // A Pig null passes through as the caller-supplied writable instance.
    if (pigType == DataType.NULL) {
        return nullWritable;
    }

    // Complex types (BAG, ERROR, MAP, TUPLE, UNKNOWN) are unsupported.
    throw new IOException("Cannot write values of type: " + DataType.findTypeName(tupleValue));
}

From source file:org.archive.hadoop.pig.SequenceFileStorage.java

License:Apache License

/**
 * Convert the Pig tupleValue to the corresponding Hadoop object.
 *//* w w w  . j av  a2 s.co  m*/
/**
 * Translate a Pig tuple value to its Hadoop {@link Writable} counterpart.
 *
 * @param tupleValue   the Pig value to convert
 * @param nullWritable already-built writable returned when the value is a Pig null
 * @return the equivalent Hadoop writable
 * @throws IOException if no mapping exists for the value's Pig type
 */
public Writable getWritable(Object tupleValue, Writable nullWritable) throws IOException {
    byte pigType = DataType.findType(tupleValue);
    switch (pigType) {

    // Scalars: cast to the boxed wrapper and let auto-unboxing feed the
    // primitive-argument writable constructors.
    case DataType.BOOLEAN:
        return new BooleanWritable((Boolean) tupleValue);
    case DataType.BYTE:
        return new ByteWritable((Byte) tupleValue);
    case DataType.CHARARRAY:
        return new Text((String) tupleValue);
    case DataType.INTEGER:
        return new IntWritable((Integer) tupleValue);
    case DataType.LONG:
        return new LongWritable((Long) tupleValue);
    case DataType.DOUBLE:
        return new DoubleWritable((Double) tupleValue);
    case DataType.FLOAT:
        return new FloatWritable((Float) tupleValue);
    case DataType.BYTEARRAY:
        return new BytesWritable((byte[]) tupleValue);

    // Pig nulls: reuse the caller-provided writable instance.
    case DataType.NULL:
        return nullWritable;

    // Complex or unrecognized types have no writable representation here.
    case DataType.BAG:
    case DataType.ERROR:
    case DataType.MAP:
    case DataType.TUPLE:
    case DataType.UNKNOWN:
    default:
        throw new IOException("Cannot write values of type: " + DataType.findTypeName(tupleValue));
    }
}

From source file:org.elasticsearch.hadoop.mr.WritableValueReader.java

License:Apache License

@Override
protected Object processByte(Byte value) {
    // Box the byte into Hadoop's ByteWritable container type.
    ByteWritable wrapped = new ByteWritable(value);
    return wrapped;
}

From source file:org.elasticsearch.hadoop.serialization.WritableTypeToJsonTest.java

License:Apache License

@Test
public void testByte() {
    // Push the largest byte value through the writable-to-JSON conversion.
    ByteWritable input = new ByteWritable(Byte.MAX_VALUE);
    writableTypeToJson(input);
}

From source file:org.elasticsearch.hadoop.util.WritableUtils.java

License:Apache License

/**
 * Recursively convert an arbitrary Java object into a Hadoop {@link Writable}.
 *
 * <p>Already-writable objects pass through untouched; {@code null} maps to
 * {@link NullWritable}; common scalars, byte arrays, lists, sets, and maps get
 * their writable equivalents. The instanceof checks are ordered so the sorted
 * variants (SortedSet, SortedMap) are matched before their plain supertypes.
 * Anything unrecognized falls back to a UTF-8 byte-array of its toString().
 *
 * @param object the value to convert; may be {@code null}
 * @return a writable representation of {@code object}
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Writable toWritable(Object object) {
    if (object instanceof Writable) {
        return (Writable) object;
    }
    if (object == null) {
        return NullWritable.get();
    }

    // Scalars — note longs/ints use the variable-length writables.
    if (object instanceof String) {
        return new Text((String) object);
    }
    if (object instanceof Long) {
        return new VLongWritable((Long) object);
    }
    if (object instanceof Integer) {
        return new VIntWritable((Integer) object);
    }
    if (object instanceof Byte) {
        return new ByteWritable((Byte) object);
    }
    if (object instanceof Short) {
        return WritableCompatUtil.availableShortWritable((Short) object);
    }
    if (object instanceof Double) {
        return new DoubleWritable((Double) object);
    }
    if (object instanceof Float) {
        return new FloatWritable((Float) object);
    }
    if (object instanceof Boolean) {
        return new BooleanWritable((Boolean) object);
    }
    if (object instanceof byte[]) {
        return new BytesWritable((byte[]) object);
    }

    // Lists become ArrayWritable; the element class is taken from the
    // converted first element. An empty list is typed as NullWritable.
    if (object instanceof List) {
        List<Object> values = (List<Object>) object;
        if (values.isEmpty()) {
            return new ArrayWritable(NullWritable.class, new Writable[0]);
        }
        Writable[] converted = new Writable[values.size()];
        int i = 0;
        for (Object value : values) {
            converted[i++] = toWritable(value);
        }
        return new ArrayWritable(toWritable(values.get(0)).getClass(), converted);
    }

    // Sets are encoded as maps whose values are all NullWritable.
    if (object instanceof SortedSet) {
        SortedMapWritable sortedMap = new SortedMapWritable();
        for (Object element : (SortedSet<Object>) object) {
            sortedMap.put((WritableComparable) toWritable(element), NullWritable.get());
        }
        return sortedMap;
    }
    if (object instanceof Set) {
        MapWritable asMap = new MapWritable();
        for (Object element : (Set<Object>) object) {
            asMap.put(toWritable(element), NullWritable.get());
        }
        return asMap;
    }

    // Maps convert entry-wise, preserving sortedness when present.
    if (object instanceof SortedMap) {
        SortedMapWritable sortedMap = new SortedMapWritable();
        for (Map.Entry<?, ?> entry : ((Map<Object, Object>) object).entrySet()) {
            sortedMap.put((WritableComparable) toWritable(entry.getKey()), toWritable(entry.getValue()));
        }
        return sortedMap;
    }
    if (object instanceof Map) {
        MapWritable converted = new MapWritable();
        for (Map.Entry<?, ?> entry : ((Map<Object, Object>) object).entrySet()) {
            converted.put(toWritable(entry.getKey()), toWritable(entry.getValue()));
        }
        return converted;
    }

    // fall-back to bytearray
    return new BytesWritable(object.toString().getBytes(StringUtils.UTF_8));
}

From source file:org.huahinframework.core.io.RecordTest.java

License:Apache License

@Test
public void testGroupingByteWritable() {
    // A grouping entry stored as ByteWritable should come back identical,
    // a missing key should yield null, and a mistyped getter should throw.
    Record record = new Record();
    ByteWritable stored = new ByteWritable((byte) 10);
    record.addGrouping("Object", stored);

    assertEquals(record.getGroupingByteWritable("Object"), stored);
    assertEquals(record.getGroupingByteWritable("Object2"), null);

    try {
        record.getGroupingInteger("Object");
        fail("fail ClassCastException");
    } catch (Exception e) {
        assertTrue(e instanceof ClassCastException);
    }
}

From source file:org.huahinframework.core.io.RecordTest.java

License:Apache License

@Test
public void testValueByteWritable() {
    // A value stored as ByteWritable should round-trip unchanged, an
    // unknown key should return null, and a mistyped getter should throw.
    Record record = new Record();
    ByteWritable stored = new ByteWritable((byte) 10);
    record.addValue("Object", stored);

    assertEquals(record.getValueByteWritable("Object"), stored);
    assertEquals(record.getValueByteWritable("Object2"), null);

    try {
        record.getValueIntWritable("Object");
        fail("fail ClassCastException");
    } catch (Exception e) {
        assertTrue(e instanceof ClassCastException);
    }
}

From source file:org.huahinframework.core.util.ObjectUtil.java

License:Apache License

/**
 * Convert a Java primitive wrapper (or container of them) into a typed
 * {@link HadoopObject} pairing a type constant with the matching writable.
 *
 * @param object the value to convert; {@code null} yields a NULL-typed
 *               {@link NullWritable}
 * @return the HadoopObject wrapping the converted value
 * @throws ClassCastException for empty maps or unconvertible types
 */
public static HadoopObject primitive2Hadoop(Object object) {
    if (object == null) {
        return new HadoopObject(NULL, NullWritable.get());
    }

    // Scalar wrappers map one-to-one onto Hadoop writables.
    if (object instanceof Byte) {
        return new HadoopObject(BYTE, new ByteWritable((Byte) object));
    }
    if (object instanceof Integer) {
        return new HadoopObject(INTEGER, new IntWritable((Integer) object));
    }
    if (object instanceof Long) {
        return new HadoopObject(LONG, new LongWritable((Long) object));
    }
    if (object instanceof Double) {
        return new HadoopObject(DOUBLE, new DoubleWritable((Double) object));
    }
    if (object instanceof Float) {
        return new HadoopObject(FLOAT, new FloatWritable((Float) object));
    }
    if (object instanceof Boolean) {
        return new HadoopObject(BOOLEAN, new BooleanWritable((Boolean) object));
    }
    if (object instanceof String) {
        return new HadoopObject(STRING, new Text((String) object));
    }

    // Arrays and collections share the array conversion path.
    if (object.getClass().isArray()) {
        return arrayPrimitive2Hadoop(object);
    }
    if (object instanceof Collection<?>) {
        return arrayPrimitive2Hadoop(((Collection<?>) object).toArray());
    }

    // Maps convert entry-wise; an empty map has no derivable key/value types.
    if (object instanceof Map<?, ?>) {
        Map<?, ?> source = (Map<?, ?>) object;
        if (source.size() == 0) {
            throw new ClassCastException("object not found");
        }
        MapWritable mapWritable = new MapWritable();
        for (Entry<?, ?> entry : source.entrySet()) {
            mapWritable.put(primitive2Hadoop(entry.getKey()).getObject(),
                    primitive2Hadoop(entry.getValue()).getObject());
        }
        return new HadoopObject(MAP, mapWritable);
    }

    throw new ClassCastException("cast object not found");
}

From source file:org.huahinframework.core.util.ObjectUtilTest.java

License:Apache License

@Test
public void testPrimitive2HadoopIOByte() {
    // A byte primitive should convert to a BYTE-typed ByteWritable.
    byte original = 123;
    HadoopObject converted = ObjectUtil.primitive2Hadoop(original);
    assertEquals(ObjectUtil.BYTE, converted.getType());
    assertEquals(new ByteWritable(original), converted.getObject());
}

From source file:org.huahinframework.core.util.ObjectUtilTest.java

License:Apache License

@Test
public void testHadoopIO2PrimitiveByte() {
    // A ByteWritable should convert back to a BYTE-typed primitive wrapper.
    byte original = 123;
    PrimitiveObject converted = ObjectUtil.hadoop2Primitive(new ByteWritable(original));
    assertEquals(ObjectUtil.BYTE, converted.getType());
    assertEquals(original, converted.getObject());
}