Example usage for org.apache.hadoop.io MapWritable put

Introduction

On this page you can find usage examples for org.apache.hadoop.io.MapWritable#put.

Prototype

@Override
public Writable put(Writable key, Writable value)
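
For quick orientation, here is a minimal, self-contained sketch (not taken from any of the sources below) that populates a MapWritable with put and round-trips it through Hadoop's Writable serialization; everything used is standard org.apache.hadoop.io API:

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class MapWritablePutExample {
    public static void main(String[] args) throws Exception {
        MapWritable map = new MapWritable();

        // put returns the previous value bound to the key, or null
        map.put(new Text("count"), new IntWritable(42));
        map.put(new Text("label"), new Text("example"));

        // round-trip through Hadoop serialization
        DataOutputBuffer out = new DataOutputBuffer();
        map.write(out);

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());

        MapWritable copy = new MapWritable();
        copy.readFields(in);

        System.out.println(copy.get(new Text("count"))); // prints 42
    }
}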

Usage

From source file:org.huahinframework.core.util.ObjectUtilTest.java

License:Apache License

@SuppressWarnings("unchecked")
@Test
public void testHadoopIO2PrimitiveMap() {
    Map<String, Integer> o = new HashMap<String, Integer>();
    MapWritable mw = new MapWritable();

    o.put("0", 0);
    mw.put(new Text("0"), new IntWritable(0));

    o.put("1", 1);
    mw.put(new Text("1"), new IntWritable(1));

    PrimitiveObject no = ObjectUtil.hadoop2Primitive(mw);
    assertEquals(ObjectUtil.MAP, no.getType());
    assertEquals(ObjectUtil.STRING, no.getMapKeyType());
    assertEquals(ObjectUtil.INTEGER, no.getMapValueType());
    if (!(no.getObject() instanceof Map<?, ?>)) {
        fail("object not map");
    }

    Map<String, Integer> m = (Map<String, Integer>) no.getObject();
    if (mw.size() != o.size()) {
        fail("map not equals size: " + mw.size() + " != " + o.size());
    }

    for (Entry<String, Integer> entry : o.entrySet()) {
        if (m.get(entry.getKey()) == null) {
            fail("map key not found");
        }

        assertEquals(entry.getValue(), m.get(entry.getKey()));
    }
}

From source file:org.hxx.hadoop.GeneratorHbase.java

License:Apache License

private static void createDatum(CrawlDatum datum, Result r) {
    NavigableMap<byte[], byte[]> map = r.getFamilyMap(Bytes.toBytes("cf1"));
    org.apache.hadoop.io.MapWritable metaData = new org.apache.hadoop.io.MapWritable();

    for (byte[] key : map.keySet()) {
        byte[] value = map.get(key);
        String skey = Bytes.toString(key);
        if ("url".equals(skey)) {
            // nothing
        } else if ("Score".equals(skey)) {
            if (value != null)
                datum.setScore(Bytes.toFloat(value));
        } else if ("Status".equals(skey)) {
            if (value != null)
                datum.setStatus(value[0]);
        } else if ("Fetchtime".equals(skey)) {
            if (value != null)
                datum.setFetchTime(Bytes.toLong(value));
        } else if ("Retries".equals(skey)) {
            if (value != null)
                datum.setRetriesSinceFetch(value[0]);
        } else if ("FetchInterval".equals(skey)) {
            if (value != null)
                datum.setFetchInterval(Bytes.toInt(value));
        } else if ("Modifiedtime".equals(skey)) {
            if (value != null)
                datum.setModifiedTime(Bytes.toLong(value));
        } else if ("Signature".equals(skey)) {
            if (value != null)
                datum.setSignature(value);
        } else if (Nutch.GENERATE_TIME_KEY.equals(skey)) {// mfang,2014/10/13
            if (value != null) {
                metaData.put(new Text(key), new LongWritable(Bytes.toLong(value)));
            }
        } else
            metaData.put(new Text(key), new Text(value));
    }
    metaData.put(new Text("urlid"), new Text(r.getRow()));
    datum.setMetaData(metaData);
}
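
Here the known CrawlDatum fields are set directly from their HBase columns, while any unrecognized column qualifier is carried along in the MapWritable metadata, keyed by its qualifier as a Text.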

From source file:org.rad.qa.map.QuoteAnalyzerMapper.java

License:Open Source License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    String line = value.toString();
    StringTokenizer tokens = new StringTokenizer(line, ",");
    MapWritable list = new MapWritable();

    String ticker = tokens.nextToken();
    tokens.nextToken(); // skip the date
    list.put(QuoteAnalyzerConstants.OPEN, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.HIGH, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.LOW, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.CLOSE, convert(tokens.nextToken()));
    list.put(QuoteAnalyzerConstants.VOLUME, convert(tokens.nextToken()));

    context.write(new Text(ticker), list);
}
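
The convert(...) helper is not shown in this excerpt; presumably it parses each CSV token into a DoubleWritable, matching the casts in the reducer below. A hypothetical implementation could be as simple as:

private DoubleWritable convert(String token) {
    // hypothetical helper: parse a CSV field into a DoubleWritable
    return new DoubleWritable(Double.parseDouble(token.trim()));
}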

From source file:org.rad.qa.reduce.QuoteAnalyzerReducer.java

License:Open Source License

@Override
public void reduce(Text key, Iterable<MapWritable> values, Context context)
        throws IOException, InterruptedException {
    Iterator<MapWritable> writables = values.iterator();
    MapWritable mw;
    double open = 0.0, close = 0.0, low = 0.0, high = 0.0, volume = 0.0;
    int size = 0;

    while (writables.hasNext()) {
        size++;
        mw = writables.next();
        open += ((DoubleWritable) mw.get(QuoteAnalyzerConstants.OPEN)).get();
        high += ((DoubleWritable) mw.get(QuoteAnalyzerConstants.HIGH)).get();
        low += ((DoubleWritable) mw.get(QuoteAnalyzerConstants.LOW)).get();
        close += ((DoubleWritable) mw.get(QuoteAnalyzerConstants.CLOSE)).get();
        volume += ((DoubleWritable) mw.get(QuoteAnalyzerConstants.VOLUME)).get();
    }

    MapWritable result = new QuoteAnalyzerOutput(key.toString());
    result.put(QuoteAnalyzerConstants.OPEN, new DoubleWritable(open / size));
    result.put(QuoteAnalyzerConstants.HIGH, new DoubleWritable(high / size));
    result.put(QuoteAnalyzerConstants.LOW, new DoubleWritable(low / size));
    result.put(QuoteAnalyzerConstants.CLOSE, new DoubleWritable(close / size));
    result.put(QuoteAnalyzerConstants.VOLUME, new DoubleWritable(volume / size));

    context.write(key, result);
}

From source file:org.schedoscope.export.redis.outputformat.RedisHashWritable.java

License:Apache License

private MapWritable toMapWritable(Map<String, String> value) {

    MapWritable mr = new MapWritable();
    for (Entry<String, String> e : value.entrySet()) {
        mr.put(new Text(e.getKey()), new Text(String.valueOf(e.getValue())));
    }
    return mr;
}

From source file:org.schedoscope.export.redis.RedisExportMapper.java

License:Apache License

@SuppressWarnings("unchecked")
@Override
protected void map(WritableComparable<?> key, HCatRecord value, Context context)
        throws IOException, InterruptedException {

    Text redisKey = new Text(keyPrefix + value.getString(keyName, schema));
    RedisWritable redisValue = null;
    boolean write = false;

    HCatFieldSchema fieldSchema = schema.get(valueName);

    switch (fieldSchema.getCategory()) {
    case MAP:
        Map<String, String> valMap = (Map<String, String>) value.getMap(valueName, schema);
        if (valMap != null) {
            redisValue = new RedisHashWritable(redisKey.toString(), valMap);
            write = true;
        }
        break;
    case ARRAY:
        List<String> valArray = (List<String>) value.getList(valueName, schema);
        if (valArray != null) {
            redisValue = new RedisListWritable(redisKey.toString(), valArray);
            write = true;
        }
        break;
    case PRIMITIVE:
        Object obj = value.get(valueName, schema);
        if (obj != null) {
            String valStr = obj.toString();
            valStr = HCatUtils.getHashValueIfInList(valueName, valStr, anonFields, salt);
            redisValue = new RedisStringWritable(redisKey.toString(), valStr);
            write = true;
        }
        break;
    case STRUCT:
        List<String> valStruct = (List<String>) value.getStruct(valueName, schema);
        HCatSchema structSchema = fieldSchema.getStructSubSchema();
        if (valStruct != null) {
            MapWritable structValue = new MapWritable();

            for (int i = 0; i < structSchema.size(); i++) {
                if (valStruct.get(i) != null) {
                    structValue.put(new Text(structSchema.get(i).getName()), new Text(valStruct.get(i)));
                    write = true;
                }
            }
            redisValue = new RedisHashWritable(redisKey, structValue);
        }
        break;
    default:
        break;
    }

    if (write) {
        context.write(redisKey, redisValue);
        context.getCounter(StatCounter.SUCCESS).increment(1);
    } else {
        context.getCounter(StatCounter.FAILED).increment(1);
    }
}
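
This mapper selects a RedisWritable implementation based on the HCatalog field category; only the MAP and STRUCT branches build the Redis value through a MapWritable (wrapped in a RedisHashWritable).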

From source file:org.schedoscope.export.redis.RedisFullTableExportMapper.java

License:Apache License

@Override
protected void map(WritableComparable<?> key, HCatRecord value, Context context)
        throws IOException, InterruptedException {

    Text redisKey = new Text(keyPrefix + value.getString(keyName, schema));

    MapWritable redisValue = new MapWritable();
    boolean write = false;

    for (String f : schema.getFieldNames()) {

        Object obj = value.get(f, schema);
        if (obj != null) {
            String jsonString;

            if (schema.get(f).isComplex()) {
                jsonString = serializer.getFieldAsJson(value, f);
            } else {
                jsonString = obj.toString();
                jsonString = HCatUtils.getHashValueIfInList(f, jsonString, anonFields, salt);
            }
            redisValue.put(new Text(f), new Text(jsonString));
            write = true;
        }
    }

    if (write) {
        context.getCounter(StatCounter.SUCCESS).increment(1);
        context.write(redisKey, new RedisHashWritable(redisKey, redisValue));
    } else {
        context.getCounter(StatCounter.FAILED).increment(1);
    }
}

From source file:org.vilcek.hive.kv.KVHiveRecordReader.java

License:Apache License

@Override
public boolean next(LongWritable k, MapWritable v) throws IOException {
    boolean ret = iter.hasNext();
    if (ret) {
        current = iter.next();
        k.set(cnt);
        v.clear();
        List<String> majorKeysList = current.getKey().getMajorPath();
        List<String> minorKeysList = current.getKey().getMinorPath();
        for (int i = 0; i < majorKeyLabelsArray.length; i++) {
            try {
                String key = majorKeyLabelsArray[i];
                String value = majorKeysList.get(i);
                if (!value.equals(SERIALIZED_NULL)) {
                    v.put(new Text(key), new Text(value));
                }
            } catch (ArrayIndexOutOfBoundsException e) {
                // the major key path may have fewer components than labels; skip the missing ones
            }
        }
        byte[] value = current.getValue().getValue();
        // decode the bytes before comparing; toString() on a byte[] yields an identity string, not the contents
        if (!new String(value).equals(SERIALIZED_NULL)) {
            if (Format.AVRO == current.getValue().getFormat() && binding != null) {
                try {
                    JsonRecord object = binding.toObject(current.getValue());
                    JsonNode jsonNode = object.getJsonNode();
                    value = jsonNode.toString().getBytes("UTF8");
                } catch (Throwable ignored) {
                    // fall back to the raw bytes if Avro decoding fails
                }
            }
            if (minorKeysList.isEmpty()) {
                v.put(new Text("value"), new Text(value));
            } else {
                for (int j = 0; j < minorKeysList.size(); j++) {
                    String key = minorKeysList.get(j);
                    v.put(new Text(key), new Text(value));
                }
            }
        }
        cnt++;
        return ret;
    } else {
        return false;
    }
}

From source file:org.vroyer.hive.solr.SolrReader.java

License:Open Source License

public void setValueHolder(MapWritable valueHolder, Object[] values) {
    for (int i = 0; i < values.length; i++) {
        ObjectInspector oi = rowOI.get(i);

        Writable writableValue;
        if (values[i] == null) {
            writableValue = NullWritable.get();
        } else {
            log.debug("SOLR oi=" + oi + " type=" + values[i].getClass().getName() + " value =" + values[i]);
            if ((oi != null) && (oi instanceof JavaTimestampObjectInspector)) {
                if (values[i] instanceof Date) {
                    values[i] = new Timestamp(((Date) values[i]).getTime());
                } else if (values[i] instanceof String) {
                    // facet date syntax : 2004-01-01T00:00:00Z
                    try {
                        values[i] = new Timestamp(dateFormat.parse((String) values[i]).getTime());
                    } catch (ParseException e) {
                        log.error("Cannot parse timestamp:" + values[i]);
                    }
                }
            }
            writableValue = (Writable) ObjectInspectorUtils.copyToStandardObject(values[i], oi,
                    ObjectInspectorCopyOption.WRITABLE);
            log.debug("value=" + writableValue + " type=" + writableValue.getClass().getName());
        }
        valueHolder.put(new Text(hiveColNames.get(i)), writableValue);
    }
}
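
Note that a null column value is stored as NullWritable.get() rather than being skipped, so every Hive column name always appears as a key in the resulting MapWritable.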

From source file:org.wonderbee.elasticsearch.hive.ElasticSearchSerDe.java

License:Apache License

@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    StructObjectInspector outputRowOI = (StructObjectInspector) objInspector;
    List<? extends StructField> outputFieldRefs = outputRowOI.getAllStructFieldRefs();
    MapWritable record = new MapWritable();

    String isJson = props.getProperty(ES_IS_JSON);
    if ("true".equalsIgnoreCase(isJson)) {
        throw new SerDeException("Json mode not yet supported");
    }
    // Handle delimited records (ie. isJson == false)

    for (int c = 0; c < numColumns; c++) {
        try {
            Object field = outputRowOI.getStructFieldData(obj, outputFieldRefs.get(c));
            ObjectInspector fieldOI = outputFieldRefs.get(c).getFieldObjectInspector();

            PrimitiveObjectInspector fieldStringOI = (PrimitiveObjectInspector) fieldOI;
            String columnName = columnNames.get(c);
            record.put(new Text(columnName), (Writable) fieldStringOI.getPrimitiveWritableObject(field));
        } catch (NullPointerException e) {
            // a null field value is simply left out of the record
            //LOG.info("Increment null field counter.");
        }

    }

    return record;
}