Example usage for org.apache.hadoop.io MapWritable containsKey


Introduction

This page collects example usages of org.apache.hadoop.io.MapWritable.containsKey, taken from the source files listed below.

Prototype

@Override
public boolean containsKey(Object key)
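
A minimal sketch (not taken from the source files below; class and key values are chosen for illustration): containsKey delegates to the backing map, so lookups use the value-based equals/hashCode of the key Writable, and a freshly constructed key with the same value matches an existing entry.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class ContainsKeyExample {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();
        map.put(new IntWritable(1), new Text("one"));

        // An equal (not identical) key instance is found.
        System.out.println(map.containsKey(new IntWritable(1))); // true
        // A key that was never inserted is not.
        System.out.println(map.containsKey(new IntWritable(2))); // false
    }
}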


Usage

From source file:com.csiro.hadoop.WritableTest.java

public static void main(String[] args) {
    System.out.println("*** Primitive Writable ***");

    BooleanWritable bool1 = new BooleanWritable(true);
    ByteWritable byte1 = new ByteWritable((byte) 3);
    System.out.printf("Boolean:%s Byte:%d\n", bool1, byte1.get());

    IntWritable int1 = new IntWritable(5);
    IntWritable int2 = new IntWritable(17);
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());

    int1.set(int2.get());
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());

    Integer int3 = new Integer(23);
    int1.set(int3);
    System.out.printf("I1:%d I2:%d\n", int1.get(), int2.get());

    System.out.println("*** Array Writable ***");

    ArrayWritable a = new ArrayWritable(IntWritable.class);
    a.set(new IntWritable[] { new IntWritable(1), new IntWritable(3), new IntWritable(5) });

    IntWritable[] values = (IntWritable[]) a.get();
    for (IntWritable i : values) {
        System.out.println(i);
    }

    IntArrayWritable ia = new IntArrayWritable();
    ia.set(new IntWritable[] { new IntWritable(1), new IntWritable(3), new IntWritable(5) });

    IntWritable[] ivalues = (IntWritable[]) ia.get();

    ia.set((new LongWritable[] { new LongWritable(10001) }));

    System.out.println("*** Map Writables ***");

    MapWritable m = new MapWritable();
    IntWritable key1 = new IntWritable(5);
    NullWritable value1 = NullWritable.get();

    m.put(key1, value1);
    System.out.println(m.containsKey(key1));
    System.out.println(m.get(key1));
    m.put(new LongWritable(100000000), key1);
    Set<Writable> keys = m.keySet();

    for (Writable k : keys)
        System.out.println(k.getClass());

}

From source file:crunch.MaxTemperature.java

License:Apache License

@Test
public void setWritableEmulation() throws IOException {
    MapWritable src = new MapWritable();
    src.put(new IntWritable(1), NullWritable.get());
    src.put(new IntWritable(2), NullWritable.get());

    MapWritable dest = new MapWritable();
    WritableUtils.cloneInto(dest, src);
    assertThat(dest.containsKey(new IntWritable(1)), is(true));
}

From source file:full_MapReduce.AttributeInfoReducer.java

License:Open Source License

public void reduce(Text key, Iterable<AttributeCounterWritable> values, Context context)
        throws IOException, InterruptedException {
    MapWritable res = new MapWritable();
    Text value;
    Text classification;
    IntWritable count;

    for (AttributeCounterWritable cur_attribute_counter : values) {
        value = cur_attribute_counter.getValue();
        classification = cur_attribute_counter.getClassification();
        count = cur_attribute_counter.getCount();

        if (!res.containsKey(value)) {
            res.put(new Text(value), new MapWritable());
        }
        MapWritable cur_map = (MapWritable) res.get(value);

        if (!cur_map.containsKey(classification)) {
            cur_map.put(new Text(classification), new IntWritable(0));
        }
        ((IntWritable) cur_map.get(classification))
                .set(((IntWritable) cur_map.get(classification)).get() + count.get());
    }

    context.write(key, res);
}

From source file:io.aos.hdfs.MapWritableTest.java

License:Apache License

@Test
public void setWritableEmulation() throws IOException {
    MapWritable src = new MapWritable();
    src.put(new IntWritable(1), NullWritable.get());
    src.put(new IntWritable(2), NullWritable.get());

    MapWritable dest = new MapWritable();
    WritableUtils.cloneInto(dest, src);
    assertThat(dest.containsKey(new IntWritable(1)), is(true));
}

From source file:org.apache.hama.ml.recommendation.cf.OnlineTrainBSP.java

License:Apache License

private void sendRequiredFeatures(BSPPeer<Text, VectorWritable, Text, VectorWritable, MapWritable> peer)
        throws IOException, SyncException, InterruptedException {

    MapWritable msg = null;
    int senderId = 0;

    while ((msg = peer.getCurrentMessage()) != null) {
        senderId = ((IntWritable) msg.get(OnlineCF.Settings.MSG_SENDER_ID)).get();
        MapWritable resp = new MapWritable();
        if (msg.containsKey(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)) {
            // send item feature
            String itemId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)).toString().substring(1);
            resp.put(OnlineCF.Settings.MSG_INP_ITEM_FEATURES, new Text(itemId));
            resp.put(OnlineCF.Settings.MSG_VALUE, inpItemsFeatures.get(itemId));
        } else if (msg.containsKey(OnlineCF.Settings.MSG_INP_USER_FEATURES)) {
            // send user feature
            String userId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_USER_FEATURES)).toString().substring(1);
            resp.put(OnlineCF.Settings.MSG_INP_USER_FEATURES, new Text(userId));
            resp.put(OnlineCF.Settings.MSG_VALUE, inpUsersFeatures.get(userId));
        }
        peer.send(peer.getPeerName(senderId), resp);
    }
}

From source file:org.apache.hama.ml.recommendation.cf.OnlineTrainBSP.java

License:Apache License

private void collectFeatures(BSPPeer<Text, VectorWritable, Text, VectorWritable, MapWritable> peer)
        throws IOException {
    // remove all features,
    // since we will get necessary features via messages
    inpItemsFeatures = new HashMap<String, VectorWritable>();
    inpUsersFeatures = new HashMap<String, VectorWritable>();

    MapWritable msg = null;
    int userFeatureSize = 0;
    int itemFeatureSize = 0;
    while ((msg = peer.getCurrentMessage()) != null) {
        if (msg.containsKey(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)) {
            // send item feature
            String itemId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)).toString();
            inpItemsFeatures.put(itemId, (VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE));
            itemFeatureSize = ((VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE)).getVector().getLength();
        } else if (msg.containsKey(OnlineCF.Settings.MSG_INP_USER_FEATURES)) {
            // send user feature
            String userId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_USER_FEATURES)).toString();
            inpUsersFeatures.put(userId, (VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE));
            userFeatureSize = ((VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE)).getVector().getLength();
        }
    }
    if (inpItemsFeatures.size() > 0) {
        itemFeatureMatrix = new DenseDoubleMatrix(MATRIX_RANK, itemFeatureSize, rnd);
    }
    if (inpUsersFeatures.size() > 0) {
        userFeatureMatrix = new DenseDoubleMatrix(MATRIX_RANK, userFeatureSize, rnd);
    }
}

From source file:org.apache.mahout.classifier.sequencelearning.hmm.hadoop.BaumWelchCombiner.java

License:Apache License

@Override
protected void reduce(Text key, Iterable<MapWritable> stripes, Context context)
        throws IOException, InterruptedException {

    MapWritable sumOfStripes = new MapWritable();

    if (scaling.equals("logscaling")) {
        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                double val = ((DoubleWritable) e.getValue()).get();
                if (!sumOfStripes.containsKey(e.getKey())) {
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                } else {
                    double sumSripesVal = ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                    if (sumSripesVal > Double.NEGATIVE_INFINITY) {
                        val = val + Math.log(1 + Math.exp(sumSripesVal - val));
                    }
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                }
            }
        }
    } else if (scaling.equals("rescaling")) {
        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                if (key.charAt(0) == (int) 'I') {

                    double val = ((DoubleWritable) e.getValue()).get();
                    if (!sumOfStripes.containsKey(e.getKey())) {
                        sumOfStripes.put((IntWritable) e.getKey(), (DoubleWritable) e.getValue());
                    } else {
                        val += ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                        sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                    }
                } else {
                    double[] pr = BaumWelchUtils.toDoublePair(((BytesWritable) e.getValue()).getBytes());
                    double num = pr[0];
                    double denom = pr[1];
                    if (!sumOfStripes.containsKey(e.getKey())) {
                        sumOfStripes.put((IntWritable) e.getKey(), (BytesWritable) e.getValue());
                    } else {
                        double[] pr1 = BaumWelchUtils
                                .toDoublePair(((BytesWritable) sumOfStripes.get(e.getKey())).getBytes());
                        num += pr1[0];
                        denom += pr1[1];
                        byte[] doublePair1 = BaumWelchUtils.doublePairToByteArray(num, denom);
                        sumOfStripes.put((IntWritable) e.getKey(), new BytesWritable(doublePair1));
                    }
                }
            }
        }
    } else {
        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                double val = ((DoubleWritable) e.getValue()).get();
                if (!sumOfStripes.containsKey(e.getKey())) {
                    sumOfStripes.put((IntWritable) e.getKey(), (DoubleWritable) e.getValue());
                } else {
                    val += ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                }
            }
        }
    }
    context.write(key, sumOfStripes);
}

From source file:org.apache.mahout.classifier.sequencelearning.hmm.hadoop.BaumWelchReducer.java

License:Apache License

@Override
protected void reduce(Text key, Iterable<MapWritable> stripes, Context context)
        throws IOException, InterruptedException {

    MapWritable sumOfStripes = new MapWritable();

    // Finish the Expectation Step by aggregating all posterior probabilities for one key
    if (scaling.equals("logscaling")) {
        double totalValSum = Double.NEGATIVE_INFINITY;
        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                double val = ((DoubleWritable) e.getValue()).get();
                double max = totalValSum > val ? totalValSum : val;
                totalValSum = max + Math.log(Math.exp(totalValSum - max) + Math.exp(val - max));
                if (!sumOfStripes.containsKey(e.getKey())) {
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                } else {
                    double sumSripesVal = ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                    if (sumSripesVal > Double.NEGATIVE_INFINITY) {
                        val = val + Math.log(1 + Math.exp(sumSripesVal - val));
                    }
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                }
            }
        }

        //normalize the aggregate
        for (Map.Entry e : sumOfStripes.entrySet()) {
            double val = ((DoubleWritable) e.getValue()).get();
            if (totalValSum > Double.NEGATIVE_INFINITY) {
                val = val - totalValSum;
            }
            sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(Math.exp(val)));
        }
    } else if (scaling.equals("rescaling")) {
        double totalValSum = 0.0;

        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                if (key.charAt(0) == (int) 'I') {
                    double val = ((DoubleWritable) e.getValue()).get();
                    totalValSum += val;
                    if (!sumOfStripes.containsKey(e.getKey())) {
                        sumOfStripes.put((IntWritable) e.getKey(), (DoubleWritable) e.getValue());
                    } else {
                        val += ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                        sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                    }
                } else {
                    double[] pr = BaumWelchUtils.toDoublePair(((BytesWritable) e.getValue()).getBytes());
                    double num = pr[0];
                    double denom = pr[1];
                    if (!sumOfStripes.containsKey(e.getKey())) {
                        sumOfStripes.put((IntWritable) e.getKey(), (BytesWritable) e.getValue());
                    } else {
                        double[] pr1 = BaumWelchUtils
                                .toDoublePair(((BytesWritable) sumOfStripes.get(e.getKey())).getBytes());
                        num += pr1[0];
                        denom += pr1[1];
                        byte[] doublePair1 = BaumWelchUtils.doublePairToByteArray(num, denom);
                        sumOfStripes.put((IntWritable) e.getKey(), new BytesWritable(doublePair1));
                    }
                }
            }
        }

        if (key.charAt(0) == (int) 'I') {
            //normalize the aggregate
            for (Map.Entry e : sumOfStripes.entrySet()) {
                double val = ((DoubleWritable) e.getValue()).get();
                if (totalValSum > 0) {
                    val /= totalValSum;
                }
                sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
            }

        } else {
            // compute the probabilities
            for (Map.Entry e : sumOfStripes.entrySet()) {
                double[] pr1 = BaumWelchUtils
                        .toDoublePair(((BytesWritable) sumOfStripes.get(e.getKey())).getBytes());
                sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(pr1[0] / pr1[1]));
            }
        }
    } else {
        double totalValSum = 0.0;

        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                int state = ((IntWritable) e.getKey()).get();
                double val = ((DoubleWritable) e.getValue()).get();
                totalValSum += val;
                if (!sumOfStripes.containsKey(e.getKey())) {
                    sumOfStripes.put((IntWritable) e.getKey(), (DoubleWritable) e.getValue());
                } else {
                    val += ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                }
            }
        }

        //normalize the aggregate
        for (Map.Entry e : sumOfStripes.entrySet()) {
            double val = ((DoubleWritable) e.getValue()).get();
            if (totalValSum > 0) {
                val /= totalValSum;
            }
            sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
        }
    }

    //Write the distribution parameter vector to HDFS for the next iteration
    context.write(key, sumOfStripes);

}

From source file:org.apache.pirk.query.wideskies.QueryUtils.java

License:Apache License

/**
 * Method to convert the given data element given by the MapWritable data element into the extracted BigInteger partitions based upon the given queryType
 */
public static List<BigInteger> partitionDataElement(MapWritable dataMap, QuerySchema qSchema,
        DataSchema dSchema, boolean embedSelector) throws PIRException {
    List<BigInteger> parts = new ArrayList<>();

    logger.debug("queryType = " + qSchema.getSchemaName());

    // Add the embedded selector to the parts
    if (embedSelector) {
        String selectorFieldName = qSchema.getSelectorName();
        String type = dSchema.getElementType(selectorFieldName);
        String selector = getSelectorByQueryType(dataMap, qSchema, dSchema);

        parts.addAll(embeddedSelectorToPartitions(selector, type,
                dSchema.getPartitionerForElement(selectorFieldName)));

        logger.debug("Added embedded selector for selector = " + selector + " parts.size() = " + parts.size());
    }

    // Add all appropriate data fields
    List<String> dataFieldsToExtract = qSchema.getElementNames();
    for (String fieldName : dataFieldsToExtract) {
        Object dataElement = null;
        if (dataMap.containsKey(dSchema.getTextName(fieldName))) {
            dataElement = dataMap.get(dSchema.getTextName(fieldName));
        }

        if (dSchema.isArrayElement(fieldName)) {
            List<String> elementArray = null;
            if (dataElement == null) {
                elementArray = Collections.singletonList("");
            } else if (dataElement instanceof WritableArrayWritable) {
                elementArray = Arrays.asList(((WritableArrayWritable) dataElement).toStrings());
            } else if (dataElement instanceof ArrayWritable) {
                elementArray = Arrays.asList(((ArrayWritable) dataElement).toStrings());
            }

            parts.addAll(dSchema.getPartitionerForElement(fieldName).arrayToPartitions(elementArray,
                    dSchema.getElementType(fieldName)));
        } else {
            if (dataElement == null) {
                dataElement = "";
            } else if (dataElement instanceof Text) {
                dataElement = dataElement.toString();
            }
            parts.addAll(dSchema.getPartitionerForElement(fieldName).toPartitions(dataElement,
                    dSchema.getElementType(fieldName)));
        }
    }
    logger.debug("parts.size() = " + parts.size());

    return parts;
}