List of usage examples for the org.apache.hadoop.io.ByteWritable constructor
public ByteWritable(byte value)
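Before the collected project snippets, here is a minimal, self-contained sketch of the constructor in use (the class name ByteWritableExample and the in-memory stream setup are ours for illustration, not from any project below): it constructs a ByteWritable from a byte, reads the value back with get(), and round-trips it through the standard Writable write/readFields contract.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.ByteWritable;

public class ByteWritableExample {
    public static void main(String[] args) throws IOException {
        // Construct with a single byte value; get() reads it back.
        ByteWritable original = new ByteWritable((byte) 42);
        System.out.println("value = " + original.get());

        // Round-trip through the Writable serialization contract.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        ByteWritable copy = new ByteWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        // ByteWritable implements WritableComparable, so equals() and
        // compareTo() operate on the wrapped byte.
        System.out.println("round-trip equal: " + original.equals(copy));
    }
}

The same construct-then-serialize pattern underlies most of the examples that follow, whether the ByteWritable is used as a key, a Kryo-serialized value, or a type-conversion target.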
From source file:co.nubetech.hiho.dedup.TestHashUtility.java
License:Apache License
@Test
public void testMD5HashForByteWritableKey() throws IOException {
    ByteWritable key = new ByteWritable((byte) 123);
    MD5Hash md5HashKey1 = HashUtility.getMD5Hash(key);
    MD5Hash md5HashKey2 = HashUtility.getMD5Hash(key);
    assertEquals(md5HashKey1, md5HashKey2);
}
From source file:com.axiomine.largecollections.kryo.serializers.ByteWritableSerializer.java
License:Apache License
public ByteWritable read(Kryo kryo, Input input, Class<ByteWritable> type) {
    return new ByteWritable(input.readByte());
}
From source file:com.csiro.hadoop.WritableTest.java
public static void main(String[] args) {
    System.out.println("*** Primitive Writable ***");
    BooleanWritable bool1 = new BooleanWritable(true);
    ByteWritable byte1 = new ByteWritable((byte) 3);
    System.out.printf("Boolean:%s Byte:%d\n", bool1, byte1.get());

    IntWritable int1 = new IntWritable(5);
    IntWritable int2 = new IntWritable(17);
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());
    int1.set(int2.get());
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());
    Integer int3 = Integer.valueOf(23);
    int1.set(int3); // auto-unboxed to int
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());

    System.out.println("*** Array Writable ***");
    ArrayWritable a = new ArrayWritable(IntWritable.class);
    a.set(new IntWritable[] { new IntWritable(1), new IntWritable(3), new IntWritable(5) });
    IntWritable[] values = (IntWritable[]) a.get();
    for (IntWritable i : values) {
        System.out.println(i);
    }

    IntArrayWritable ia = new IntArrayWritable();
    ia.set(new IntWritable[] { new IntWritable(1), new IntWritable(3), new IntWritable(5) });
    IntWritable[] ivalues = (IntWritable[]) ia.get();
    // Compiles, since set() takes Writable[], but the elements no longer
    // match the declared IntWritable value class; serializing this
    // instance would fail.
    ia.set(new LongWritable[] { new LongWritable(10001) });

    System.out.println("*** Map Writables ***");
    MapWritable m = new MapWritable();
    IntWritable key1 = new IntWritable(5);
    NullWritable value1 = NullWritable.get();
    m.put(key1, value1);
    System.out.println(m.containsKey(key1));
    System.out.println(m.get(key1));
    m.put(new LongWritable(100000000), key1);
    Set<Writable> keys = m.keySet();
    for (Writable k : keys) {
        System.out.println(k.getClass());
    }
}
From source file:com.dasasian.chok.util.WritableType.java
License:Apache License
/**
 * Converts a Java wrapper object (String, Integer, Float, etc.) to the
 * corresponding Hadoop {@link WritableComparable}.
 *
 * @param object the object to convert
 * @return the writable comparable
 */
public WritableComparable convertComparable(Object object) {
    switch (this) {
    case TEXT:
        return new Text((String) object);
    case BYTE:
        return new ByteWritable((Byte) object);
    case INT:
        return new IntWritable((Integer) object);
    case LONG:
        return new LongWritable((Long) object);
    case FLOAT:
        return new FloatWritable((Float) object);
    case DOUBLE:
        return new DoubleWritable((Double) object);
    }
    throw getUnhandledTypeException();
}
From source file:com.ibm.bi.dml.runtime.transform.GTFMTDReducer.java
License:Open Source License
@SuppressWarnings("unchecked") private long generateOffsetsFile(ArrayList<OffsetCount> list) throws IllegalArgumentException, IOException { Collections.sort(list);// w w w . j av a2s.c o m @SuppressWarnings("deprecation") SequenceFile.Writer writer = new SequenceFile.Writer(FileSystem.get(_rJob), _rJob, new Path(_agents.getOffsetFile() + "/part-00000"), ByteWritable.class, OffsetCount.class); long lineOffset = 0; for (OffsetCount oc : list) { long count = oc.count; oc.count = lineOffset; writer.append(new ByteWritable((byte) 0), oc); lineOffset += count; } writer.close(); list.clear(); return lineOffset; }
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestJsonNodeWritableUtils.java
License:Apache License
@Test
public void test_transform() {
    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());
    new JsonNodeWritableUtils(); //coverage!

    assertEquals(NullNode.instance, JsonNodeWritableUtils.transform("banana", JsonNodeFactory.instance));
    assertEquals(null, JsonNodeWritableUtils.transform(null, JsonNodeFactory.instance));
    assertEquals(NullNode.instance,
            JsonNodeWritableUtils.transform(NullWritable.get(), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(true, JsonNode.class),
            JsonNodeWritableUtils.transform(new BooleanWritable(true), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue("test", JsonNode.class),
            JsonNodeWritableUtils.transform(new Text("test"), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(new byte[] { (byte) 0xFF }, JsonNode.class),
            JsonNodeWritableUtils.transform(new ByteWritable((byte) 0xFF), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4, JsonNode.class),
            JsonNodeWritableUtils.transform(new IntWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4, JsonNode.class),
            JsonNodeWritableUtils.transform(new VIntWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4L, JsonNode.class),
            JsonNodeWritableUtils.transform(new LongWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4L, JsonNode.class),
            JsonNodeWritableUtils.transform(new VLongWritable(4), JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(new byte[] { (byte) 0xFF, (byte) 0xFE }, JsonNode.class),
            JsonNodeWritableUtils.transform(new BytesWritable(new byte[] { (byte) 0xFF, (byte) 0xFE }),
                    JsonNodeFactory.instance));
    assertEquals(mapper.convertValue(4.0, JsonNode.class),
            JsonNodeWritableUtils.transform(new DoubleWritable(4), JsonNodeFactory.instance));
    // (had real trouble creating a float node!)
    assertEquals(JsonNodeFactory.instance.numberNode(Float.valueOf((float) 4.0)),
            JsonNodeWritableUtils.transform(new FloatWritable(4), JsonNodeFactory.instance));

    // will test object writable and array writable below
}
From source file:edu.uci.ics.hivesterix.serde.lazy.LazyByte.java
License:Apache License
public LazyByte(LazyByte copy) {
    super(copy);
    data = new ByteWritable(copy.data.get());
}
From source file:eu.stratosphere.hadoopcompatibility.datatypes.DefaultStratosphereTypeConverter.java
License:Apache License
@SuppressWarnings("unchecked") private <T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) { if (hadoopType == LongWritable.class) { return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue()); }//from w w w .j a v a 2 s. c o m if (hadoopType == org.apache.hadoop.io.Text.class) { return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue()); } if (hadoopType == org.apache.hadoop.io.IntWritable.class) { return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue()); } if (hadoopType == org.apache.hadoop.io.FloatWritable.class) { return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue()); } if (hadoopType == org.apache.hadoop.io.DoubleWritable.class) { return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue()); } if (hadoopType == org.apache.hadoop.io.BooleanWritable.class) { return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue()); } if (hadoopType == org.apache.hadoop.io.ByteWritable.class) { return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue()); } throw new RuntimeException("Unable to convert Stratosphere type (" + stratosphereType.getClass().getCanonicalName() + ") to Hadoop."); }
From source file:hivemall.utils.hadoop.WritableUtils.java
License:Open Source License
public static Writable toWritable(Object object) {
    if (object == null) {
        return null; // return NullWritable.get();
    }
    if (object instanceof Writable) {
        return (Writable) object;
    }
    if (object instanceof String) {
        return new Text((String) object);
    }
    if (object instanceof Long) {
        return new VLongWritable((Long) object);
    }
    if (object instanceof Integer) {
        return new VIntWritable((Integer) object);
    }
    if (object instanceof Byte) {
        return new ByteWritable((Byte) object);
    }
    if (object instanceof Double) {
        return new DoubleWritable((Double) object);
    }
    if (object instanceof Float) {
        return new FloatWritable((Float) object);
    }
    if (object instanceof Boolean) {
        return new BooleanWritable((Boolean) object);
    }
    if (object instanceof byte[]) {
        return new BytesWritable((byte[]) object);
    }
    // Fallback: encode the toString() form (platform default charset).
    return new BytesWritable(object.toString().getBytes());
}
From source file:hydrograph.engine.cascading.scheme.hive.parquet.ParquetWritableUtils.java
License:Apache License
private static Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    switch (inspector.getPrimitiveCategory()) {
    case VOID:
        return null;
    case BOOLEAN:
        return new BooleanWritable(
                ((BooleanObjectInspector) inspector).get(new BooleanWritable((boolean) obj)));
    case BYTE:
        return new ByteWritable(((ByteObjectInspector) inspector).get(new ByteWritable((byte) obj)));
    case DOUBLE:
        return new DoubleWritable(((DoubleObjectInspector) inspector).get(new DoubleWritable((double) obj)));
    case FLOAT:
        return new FloatWritable(((FloatObjectInspector) inspector).get(new FloatWritable((float) obj)));
    case INT:
        return new IntWritable(((IntObjectInspector) inspector).get(new IntWritable((int) obj)));
    case LONG:
        return new LongWritable(((LongObjectInspector) inspector).get(new LongWritable((long) obj)));
    case SHORT:
        return new ShortWritable(((ShortObjectInspector) inspector).get(new ShortWritable((short) obj)));
    case STRING:
        String v;
        if (obj instanceof Long) {
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
            Date date = new Date((long) obj);
            v = df.format(date);
        } else if (obj instanceof BigDecimal) {
            BigDecimal bigDecimalObj = (BigDecimal) obj;
            v = bigDecimalObj.toString();
        } else {
            v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(obj);
        }
        try {
            return new BytesWritable(v.getBytes("UTF-8"));
        } catch (UnsupportedEncodingException e) {
            throw new SerDeException("Failed to encode string in UTF-8", e);
        }
    case DECIMAL:
        HiveDecimal hd;
        if (obj instanceof Double) {
            hd = HiveDecimal.create(new BigDecimal((Double) obj));
        } else if (obj instanceof BigDecimal) {
            hd = HiveDecimal.create((BigDecimal) obj);
        } else {
            // For any other valid number, toString() yields a correct
            // representation from which a BigDecimal can be created.
            hd = HiveDecimal.create(new BigDecimal(obj.toString()));
        }
        return new HiveDecimalWritable(hd);
    case TIMESTAMP:
        return new TimestampWritable(((TimestampObjectInspector) inspector)
                .getPrimitiveJavaObject(new TimestampWritable(new Timestamp((long) obj))));
    case DATE:
        return new DateWritable(((DateObjectInspector) inspector)
                .getPrimitiveJavaObject(new DateWritable(new Date((long) obj))));
    case CHAR:
        String strippedValue = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(obj)
                .getStrippedValue();
        return new BytesWritable(Binary.fromString(strippedValue).getBytes());
    case VARCHAR:
        String value = ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(obj).getValue();
        return new BytesWritable(Binary.fromString(value).getBytes());
    default:
        throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
    }
}