Example usage for org.apache.hadoop.io.MapWritable: the MapWritable() constructor

Introduction

On this page you can find example usages of the org.apache.hadoop.io.MapWritable default constructor, MapWritable().

Prototype

public MapWritable() 

Document

Default constructor.
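
Before the project examples below, here is a minimal, self-contained sketch of the constructor in use. The key and value types (Text, IntWritable) are arbitrary choices for illustration; MapWritable accepts any Writable keys and values.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class MapWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        // Create an empty map with the default constructor.
        MapWritable map = new MapWritable();
        map.put(new Text("count"), new IntWritable(42));

        // Serialize the map to bytes.
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        map.write(new DataOutputStream(bytesOut));

        // Deserialize into a fresh instance, again created with the default constructor.
        MapWritable copy = new MapWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytesOut.toByteArray())));

        System.out.println(copy.get(new Text("count"))); // prints 42
    }
}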

Usage

From source file:de.kp.core.spade.hadoop.IDListWritable.java

License:Open Source License

public IDListWritable() {
    mapWritable = new MapWritable();
}

From source file:de.kp.core.spade.hadoop.IDListWritable.java

License:Open Source License

public IDListWritable(IDList idList) {

    mapWritable = new MapWritable();

    Map<Integer, BitSet> seqItemsetEntries = ((IDListBitmap) idList).getSeqItemsetEntries();
    for (Map.Entry<Integer, BitSet> entry : seqItemsetEntries.entrySet()) {

        Integer k = entry.getKey();
        BitSet v = entry.getValue();

        mapWritable.put(new IntWritable(k), new BitSetWritable(v));

    }

}

From source file:edu.ub.ahstfg.io.document.ParsedDocument.java

License:Open Source License

/**
 * Unparameterized constructor.
 */
public ParsedDocument() {
    url = new Text();
    terms = new MapWritable();
    keywords = new MapWritable();
}

From source file:edu.ub.ahstfg.io.index.ArrayIndex.java

License:Open Source License

@Override
public void readFields(DataInput input) throws IOException {
    ArrayWritable wTerms = new ArrayWritable(Text.class);
    wTerms.readFields(input);
    terms = WritableConverter.arrayWritable2LinkedListString(wTerms);

    MapWritable wTermFreq = new MapWritable();
    wTermFreq.readFields(input);
    termFreq = WritableConverter.mapWritable2HashMapStringLinkedListShort(wTermFreq);

    ArrayWritable wKeywords = new ArrayWritable(Text.class);
    wKeywords.readFields(input);
    keywords = WritableConverter.arrayWritable2LinkedListString(wKeywords);

    MapWritable wKeywordFreq = new MapWritable();
    wKeywordFreq.readFields(input);
    keywordFreq = WritableConverter.mapWritable2HashMapStringLinkedListShort(wKeywordFreq);
}

From source file:edu.ub.ahstfg.io.WritableConverter.java

License:Open Source License

/**
 * Converts a HashMap<String, LinkedList<Short>> to a MapWritable.
 * @param input HashMap to convert.
 * @return Converted MapWritable.
 */
public static MapWritable hashMapStringLinkedListShort2MapWritable(HashMap<String, LinkedList<Short>> input) {
    MapWritable ret = new MapWritable();
    LinkedList<Short> arl;
    for (String s : input.keySet()) {
        arl = input.get(s);
        ret.put(new Text(s), LinkedListShort2ArrayWritable(arl));
    }
    return ret;
}
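
A usage sketch for the converter above, assuming the WritableConverter class (and its LinkedListShort2ArrayWritable helper) is on the classpath; the ConverterDemo class and its sample data are illustrative, not part of the original source.

import java.util.HashMap;
import java.util.LinkedList;

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

import edu.ub.ahstfg.io.WritableConverter;

public class ConverterDemo {
    public static void main(String[] args) {
        // Build a tiny frequency table to convert.
        HashMap<String, LinkedList<Short>> freq = new HashMap<String, LinkedList<Short>>();
        LinkedList<Short> counts = new LinkedList<Short>();
        counts.add((short) 3);
        counts.add((short) 7);
        freq.put("hadoop", counts);

        // Each String key becomes a Text key; each LinkedList<Short> becomes
        // an ArrayWritable value (via LinkedListShort2ArrayWritable).
        MapWritable converted = WritableConverter.hashMapStringLinkedListShort2MapWritable(freq);
        System.out.println(converted.containsKey(new Text("hadoop"))); // true
    }
}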

From source file:edu.umd.cloud9.io.benchmark.BenchmarkHashMapWritable.java

License:Apache License

/**
 * Runs this benchmark.
 */
public static void main(String[] args) throws Exception {
    long startTime = System.currentTimeMillis();
    int numTrials = 100000;

    Random rand = new Random();

    ByteArrayOutputStream[] storageHashMapWritable = new ByteArrayOutputStream[numTrials];
    for (int i = 0; i < numTrials; i++) {
        HashMapWritable<IntWritable, IntWritable> map = new HashMapWritable<IntWritable, IntWritable>();

        int size = rand.nextInt(50) + 50;

        for (int j = 0; j < size; j++) {
            map.put(new IntWritable(rand.nextInt(10000)), new IntWritable(rand.nextInt(10)));
        }

        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        DataOutputStream dataOut = new DataOutputStream(bytesOut);

        map.write(dataOut);
        storageHashMapWritable[i] = bytesOut;
    }

    System.out.println("Generating and serializing " + numTrials + " random HashMapWritables: "
            + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

    startTime = System.currentTimeMillis();

    ByteArrayOutputStream[] storageMapWritable = new ByteArrayOutputStream[numTrials];
    for (int i = 0; i < numTrials; i++) {
        MapWritable map = new MapWritable();

        int size = rand.nextInt(50) + 50;

        for (int j = 0; j < size; j++) {
            map.put(new IntWritable(rand.nextInt(10000)), new IntWritable(rand.nextInt(10)));
        }

        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        DataOutputStream dataOut = new DataOutputStream(bytesOut);

        map.write(dataOut);
        storageMapWritable[i] = bytesOut;
    }

    System.out.println("Generating and serializing " + numTrials + " random MapWritables: "
            + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

    float cntA = 0.0f;
    float cntB = 0.0f;
    for (int i = 0; i < numTrials; i++) {
        cntA += storageHashMapWritable[i].size();
        cntB += storageMapWritable[i].size();
    }

    System.out.println("Average size of each HashMapWritable: " + cntA / numTrials);
    System.out.println("Average size of each MapWritable: " + cntB / numTrials);

    startTime = System.currentTimeMillis();

    for (int i = 0; i < numTrials; i++) {
        HashMapWritable<IntWritable, IntWritable> map = new HashMapWritable<IntWritable, IntWritable>();

        map.readFields(new DataInputStream(new ByteArrayInputStream(storageHashMapWritable[i].toByteArray())));
    }

    System.out.println("Deserializing " + numTrials + " random MapWritables: "
            + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

    startTime = System.currentTimeMillis();

    for (int i = 0; i < numTrials; i++) {
        MapWritable map = new MapWritable();

        map.readFields(new DataInputStream(new ByteArrayInputStream(storageMapWritable[i].toByteArray())));
    }

    System.out.println("Deserializing " + numTrials + " random MapWritables: "
            + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

}
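
Note that this benchmark compares Cloud9's HashMapWritable with Hadoop's MapWritable on three axes: the time to generate and serialize 100,000 random maps of 50 to 99 entries each, the average serialized size per map, and the time to deserialize them all back.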

From source file:ezbake.amino.dataloader.WarehausDataLoader.java

License:Apache License

/**
 * Process a whole row from the WholeRowIterator.
 *
 * @param key   The Key returned from the WholeRowIterator
 * @param value The Value returned from the WholeRowIterator
 * @return MapWritable with all of the bucketed values
 */
@Override
protected MapWritable processWholeRow(Key key, Value value) throws IOException {
    final MapWritable outputMap = new MapWritable();

    for (Value v : WholeRowIterator.decodeRow(key, value).values()) {
        if (v == null) {
            logger.warn("Value was NULL for key: " + key.toString());
            continue;
        }
        processor.process(v.get(), outputMap);
    }
    return outputMap;
}

From source file:full_MapReduce.AttributeInfoReducer.java

License:Open Source License

public void reduce(Text key, Iterable<AttributeCounterWritable> values, Context context)
        throws IOException, InterruptedException {
    MapWritable res = new MapWritable();
    Text value;
    Text classification;
    IntWritable count;

    for (AttributeCounterWritable cur_attribute_counter : values) {
        value = cur_attribute_counter.getValue();
        classification = cur_attribute_counter.getClassification();
        count = cur_attribute_counter.getCount();

        if (!res.containsKey(value)) {
            res.put(new Text(value), new MapWritable());
        }
        MapWritable cur_map = (MapWritable) res.get(value);

        if (!cur_map.containsKey(classification)) {
            cur_map.put(new Text(classification), new IntWritable(0));
        }
        ((IntWritable) cur_map.get(classification))
                .set(((IntWritable) cur_map.get(classification)).get() + count.get());
    }

    context.write(key, res);
}
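
The reducer above nests one MapWritable per attribute value inside the result map. Since MapWritable implements Map<Writable, Writable>, the emitted structure can be unpacked with plain casts; here is a minimal traversal sketch (AttributeCountDump and dumpAttributeCounts are hypothetical names, not part of the original source).

import java.util.Map;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class AttributeCountDump {
    // Prints "value / classification = count" for the nested
    // MapWritable built by the reducer above.
    public static void dumpAttributeCounts(MapWritable res) {
        for (Map.Entry<Writable, Writable> valueEntry : res.entrySet()) {
            Text attributeValue = (Text) valueEntry.getKey();
            MapWritable perClass = (MapWritable) valueEntry.getValue();
            for (Map.Entry<Writable, Writable> classEntry : perClass.entrySet()) {
                Text classification = (Text) classEntry.getKey();
                int count = ((IntWritable) classEntry.getValue()).get();
                System.out.println(attributeValue + " / " + classification + " = " + count);
            }
        }
    }
}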

From source file:gaffer.accumulostore.key.core.AbstractCoreKeyAccumuloElementConverter.java

License:Apache License

@Override
public Value getValueFromProperties(final Properties properties, final String group)
        throws AccumuloElementConversionException {
    final MapWritable map = new MapWritable();
    for (final Map.Entry<String, Object> entry : properties.entrySet()) {
        final String propertyName = entry.getKey();
        final StorePropertyDefinition propertyDefinition = storeSchema.getElement(group)
                .getProperty(propertyName);
        if (propertyDefinition != null) {
            if (StorePositions.VALUE.isEqual(propertyDefinition.getPosition())) {
                try {
                    map.put(new Text(propertyName),
                            new BytesWritable(propertyDefinition.getSerialiser().serialise(entry.getValue())));
                } catch (final SerialisationException e) {
                    throw new AccumuloElementConversionException("Failed to serialise property " + propertyName,
                            e);
                }
            }
        }
    }
    if (map.isEmpty()) {
        return new Value();
    }
    return new Value(WritableUtils.toByteArray(map));
}

From source file:gaffer.accumulostore.key.core.AbstractCoreKeyAccumuloElementConverter.java

License:Apache License

@Override
public Properties getPropertiesFromValue(final String group, final Value value)
        throws AccumuloElementConversionException {
    final Properties properties = new Properties();
    if (value == null || value.getSize() == 0) {
        return properties;
    }
    final MapWritable map = new MapWritable();
    try (final InputStream inStream = new ByteArrayInputStream(value.get());
            final DataInputStream dataStream = new DataInputStream(inStream)) {
        map.readFields(dataStream);
    } catch (final IOException e) {
        throw new AccumuloElementConversionException("Failed to read map writable from value", e);
    }
    final StoreElementDefinition elementDefinition = storeSchema.getElement(group);
    if (null == elementDefinition) {
        throw new AccumuloElementConversionException("No StoreElementDefinition found for group " + group
                + " is this group in your Store Schema or do your table iterators need updating?");
    }
    for (final Writable writeableKey : map.keySet()) {
        final String propertyName = writeableKey.toString();
        final BytesWritable propertyValueBytes = (BytesWritable) map.get(writeableKey);
        try {
            properties.put(propertyName, elementDefinition.getProperty(propertyName).getSerialiser()
                    .deserialise(propertyValueBytes.getBytes()));
        } catch (final SerialisationException e) {
            throw new AccumuloElementConversionException("Failed to deserialise property " + propertyName, e);
        }
    }
    return properties;
}