List of usage examples for the org.apache.hadoop.io.MapWritable constructor MapWritable()
public MapWritable()
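Before the project-specific examples below, here is a minimal standalone sketch of the no-arg constructor. The key and value types (Text, IntWritable) and the class name MapWritableSketch are chosen purely for illustration:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableSketch {
    public static void main(String[] args) {
        // The no-arg constructor creates an empty, mutable map of Writable keys to Writable values.
        MapWritable map = new MapWritable();
        map.put(new Text("count"), new IntWritable(42));

        // MapWritable implements java.util.Map<Writable, Writable>, so the usual Map API applies.
        Writable v = map.get(new Text("count"));
        System.out.println(v); // prints 42
    }
}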
From source file: org.huahinframework.core.io.RecordTest.java
License: Apache License

@Test
public void testValueTextMapWritable() {
    Record record = new Record();

    MapWritable o = new MapWritable();
    o.put(new Text("String1"), new Text("String1"));
    o.put(new Text("String2"), new Text("String2"));
    o.put(new Text("String3"), new Text("String3"));
    record.addValue("Object", o);

    assertEquals(record.getValueMapWritable("Object"), o);
    assertEquals(record.getValueMapWritable("Object").size(), 3);
    assertEquals(record.getValueMapWritable("Object").get(new Text("String1")), new Text("String1"));
    assertEquals(record.getValueMapWritable("Object").get(new Text("String2")), new Text("String2"));
    assertEquals(record.getValueMapWritable("Object").get(new Text("String3")), new Text("String3"));
    assertEquals(record.getValueList("Object2"), null);

    try {
        record.getValueInteger("Object");
        fail("fail ClassCastException");
    } catch (Exception e) {
        assertTrue(e instanceof ClassCastException);
    }
}
From source file: org.huahinframework.core.io.RecordTest.java
License: Apache License

@Test
public void testValueTextIntWritableMapWritable() {
    Record record = new Record();

    MapWritable o = new MapWritable();
    o.put(new Text("String1"), new IntWritable(1));
    o.put(new Text("String2"), new IntWritable(2));
    o.put(new Text("String3"), new IntWritable(3));
    record.addValue("Object", o);

    assertEquals(record.getValueMapWritable("Object"), o);
    assertEquals(record.getValueMapWritable("Object").size(), 3);
    assertEquals(record.getValueMapWritable("Object").get(new Text("String1")), new IntWritable(1));
    assertEquals(record.getValueMapWritable("Object").get(new Text("String2")), new IntWritable(2));
    assertEquals(record.getValueMapWritable("Object").get(new Text("String3")), new IntWritable(3));
    assertEquals(record.getValueList("Object2"), null);

    try {
        record.getValueInteger("Object");
        fail("fail ClassCastException");
    } catch (Exception e) {
        assertTrue(e instanceof ClassCastException);
    }
}
From source file: org.huahinframework.core.util.ObjectUtil.java
License: Apache License

/**
 * Converts a Java primitive object into a HadoopObject.
 *
 * @param object Java primitive object
 * @return HadoopObject
 */
public static HadoopObject primitive2Hadoop(Object object) {
    if (object == null) {
        return new HadoopObject(NULL, NullWritable.get());
    }

    if (object instanceof Byte) {
        return new HadoopObject(BYTE, new ByteWritable((Byte) object));
    } else if (object instanceof Integer) {
        return new HadoopObject(INTEGER, new IntWritable((Integer) object));
    } else if (object instanceof Long) {
        return new HadoopObject(LONG, new LongWritable((Long) object));
    } else if (object instanceof Double) {
        return new HadoopObject(DOUBLE, new DoubleWritable((Double) object));
    } else if (object instanceof Float) {
        return new HadoopObject(FLOAT, new FloatWritable((Float) object));
    } else if (object instanceof Boolean) {
        return new HadoopObject(BOOLEAN, new BooleanWritable((Boolean) object));
    } else if (object instanceof String) {
        return new HadoopObject(STRING, new Text((String) object));
    } else if (object.getClass().isArray()) {
        return arrayPrimitive2Hadoop(object);
    } else if (object instanceof Collection<?>) {
        Collection<?> collection = (Collection<?>) object;
        return arrayPrimitive2Hadoop(collection.toArray());
    } else if (object instanceof Map<?, ?>) {
        Map<?, ?> map = (Map<?, ?>) object;
        if (map.size() == 0) {
            throw new ClassCastException("object not found");
        }

        MapWritable mapWritable = new MapWritable();
        for (Entry<?, ?> entry : map.entrySet()) {
            mapWritable.put(primitive2Hadoop(entry.getKey()).getObject(),
                    primitive2Hadoop(entry.getValue()).getObject());
        }
        return new HadoopObject(MAP, mapWritable);
    }

    throw new ClassCastException("cast object not found");
}
From source file: org.huahinframework.core.util.ObjectUtilTest.java
License: Apache License

@Test
public void testPrimitive2HadoopIOMap() {
    Map<String, Integer> o = new HashMap<String, Integer>();
    MapWritable m = new MapWritable();
    o.put("0", 0);
    m.put(new Text("0"), new IntWritable(0));
    o.put("1", 1);
    m.put(new Text("1"), new IntWritable(1));

    HadoopObject ho = ObjectUtil.primitive2Hadoop(o);
    assertEquals(ObjectUtil.MAP, ho.getType());
    assertEquals(MapWritable.class, ho.getObject().getClass());

    MapWritable mw = (MapWritable) ho.getObject();
    if (mw.size() != m.size()) {
        fail("map not equals size: " + mw.size() + " != " + m.size());
    }

    for (Entry<Writable, Writable> entry : m.entrySet()) {
        if (mw.get(entry.getKey()) == null) {
            fail("map key not found");
        }
        assertEquals(mw.get(entry.getKey()), entry.getValue());
    }
}
From source file: org.huahinframework.core.util.ObjectUtilTest.java
License: Apache License

@SuppressWarnings("unchecked")
@Test
public void testHadoopIO2PrimitiveMap() {
    Map<String, Integer> o = new HashMap<String, Integer>();
    MapWritable mw = new MapWritable();
    o.put("0", 0);
    mw.put(new Text("0"), new IntWritable(0));
    o.put("1", 1);
    mw.put(new Text("1"), new IntWritable(1));

    PrimitiveObject no = ObjectUtil.hadoop2Primitive(mw);
    assertEquals(ObjectUtil.MAP, no.getType());
    assertEquals(ObjectUtil.STRING, no.getMapKeyType());
    assertEquals(ObjectUtil.INTEGER, no.getMapValueType());
    if (!(no.getObject() instanceof Map<?, ?>)) {
        fail("object not map");
    }

    Map<String, Integer> m = (Map<String, Integer>) no.getObject();
    if (mw.size() != o.size()) {
        fail("map not equals size: " + mw.size() + " != " + o.size());
    }

    for (Entry<String, Integer> entry : o.entrySet()) {
        if (m.get(entry.getKey()) == null) {
            fail("map key not found");
        }
        assertEquals(m.get(entry.getKey()), entry.getValue());
    }
}
From source file: org.rad.qa.map.QuoteAnalyzerMapper.java
License: Open Source License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    String line = value.toString();
    StringTokenizer tokens = new StringTokenizer(line, ",");
    MapWritable list = new MapWritable();

    String ticker = tokens.nextToken();
    tokens.nextToken(); // skip the date
    list.put(QuoteAnalyzerConstants.OPEN, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.HIGH, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.LOW, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.CLOSE, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.VOLUME, convert(tokens.nextToken()));

    context.write(new Text(ticker), list);
}
From source file: org.schedoscope.export.redis.outputformat.RedisHashWritable.java
License: Apache License

/**
 * Default constructor, initializes the internal writables.
 */
public RedisHashWritable() {
    key = new Text();
    value = new MapWritable();
}
From source file: org.schedoscope.export.redis.outputformat.RedisHashWritable.java
License: Apache License

private MapWritable toMapWritable(Map<String, String> value) {
    MapWritable mr = new MapWritable();
    for (Entry<String, String> e : value.entrySet()) {
        mr.put(new Text(e.getKey()), new Text(String.valueOf(e.getValue())));
    }
    return mr;
}
From source file: org.schedoscope.export.redis.RedisExportMapper.java
License: Apache License

@SuppressWarnings("unchecked")
@Override
protected void map(WritableComparable<?> key, HCatRecord value, Context context)
        throws IOException, InterruptedException {

    Text redisKey = new Text(keyPrefix + value.getString(keyName, schema));
    RedisWritable redisValue = null;
    boolean write = false;

    HCatFieldSchema fieldSchema = schema.get(valueName);

    switch (fieldSchema.getCategory()) {
    case MAP:
        Map<String, String> valMap = (Map<String, String>) value.getMap(valueName, schema);
        if (valMap != null) {
            redisValue = new RedisHashWritable(redisKey.toString(), valMap);
            write = true;
        }
        break;
    case ARRAY:
        List<String> valArray = (List<String>) value.getList(valueName, schema);
        if (valArray != null) {
            redisValue = new RedisListWritable(redisKey.toString(), valArray);
            write = true;
        }
        break;
    case PRIMITIVE:
        Object obj = value.get(valueName, schema);
        if (obj != null) {
            String valStr = obj.toString();
            valStr = HCatUtils.getHashValueIfInList(valueName, valStr, anonFields, salt);
            redisValue = new RedisStringWritable(redisKey.toString(), valStr);
            write = true;
        }
        break;
    case STRUCT:
        List<String> valStruct = (List<String>) value.getStruct(valueName, schema);
        HCatSchema structSchema = fieldSchema.getStructSubSchema();
        if (valStruct != null) {
            MapWritable structValue = new MapWritable();
            for (int i = 0; i < structSchema.size(); i++) {
                if (valStruct.get(i) != null) {
                    structValue.put(new Text(structSchema.get(i).getName()), new Text(valStruct.get(i)));
                    write = true;
                }
            }
            redisValue = new RedisHashWritable(redisKey, structValue);
        }
        break;
    default:
        break;
    }

    if (write) {
        context.write(redisKey, redisValue);
        context.getCounter(StatCounter.SUCCESS).increment(1);
    } else {
        context.getCounter(StatCounter.FAILED).increment(1);
    }
}
From source file: org.schedoscope.export.redis.RedisFullTableExportMapper.java
License: Apache License

@Override
protected void map(WritableComparable<?> key, HCatRecord value, Context context)
        throws IOException, InterruptedException {

    Text redisKey = new Text(keyPrefix + value.getString(keyName, schema));
    MapWritable redisValue = new MapWritable();
    boolean write = false;

    for (String f : schema.getFieldNames()) {
        Object obj = value.get(f, schema);
        if (obj != null) {
            String jsonString;
            if (schema.get(f).isComplex()) {
                jsonString = serializer.getFieldAsJson(value, f);
            } else {
                jsonString = obj.toString();
                jsonString = HCatUtils.getHashValueIfInList(f, jsonString, anonFields, salt);
            }
            redisValue.put(new Text(f), new Text(jsonString));
            write = true;
        }
    }

    if (write) {
        context.getCounter(StatCounter.SUCCESS).increment(1);
        context.write(redisKey, new RedisHashWritable(redisKey, redisValue));
    } else {
        context.getCounter(StatCounter.FAILED).increment(1);
    }
}