Example usage for org.apache.hadoop.io MapWritable get

Introduction

This page lists example usages of org.apache.hadoop.io.MapWritable#get, collected from open-source projects.

Prototype

@Override
public Writable get(Object key)
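
Before the project examples below, a minimal self-contained sketch of the contract: keys and values must both be Writable, lookups use the key's equals/hashCode, and get returns null for an absent key. The snippet is illustrative only and is not taken from any of the projects listed under Usage.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableGetExample {
    public static void main(String[] args) {
        MapWritable map = new MapWritable();
        map.put(new Text("count"), new IntWritable(42));

        // get(Object) returns the stored Writable, or null if the key is absent
        Writable value = map.get(new Text("count"));
        System.out.println(((IntWritable) value).get()); // prints 42
        System.out.println(map.get(new Text("missing"))); // prints null
    }
}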

Usage

From source file:org.apache.beam.sdk.io.hadoop.inputformat.integration.tests.HIFIOElasticIT.java

License:Apache License

private String addFieldValuesToRow(String row, MapWritable mapw, String columnName) {
    Object valueObj = mapw.get(new Text(columnName));
    row += valueObj.toString() + "|";
    return row;
}
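
Note that MapWritable.get returns null when the column is missing from the document, so the helper above fails with a NullPointerException on valueObj.toString(). A null-guarded variant, as a sketch rather than part of the original test, might read:

private String addFieldValuesToRow(String row, MapWritable mapw, String columnName) {
    Writable valueObj = mapw.get(new Text(columnName));
    // treat a missing field as an empty cell instead of failing on null
    return row + (valueObj == null ? "" : valueObj.toString()) + "|";
}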

From source file:org.apache.hama.graph.AggregationRunner.java

License:Apache License

/**
 * Receives aggregated values from a master task.
 *
 * @return always true if no aggregators are defined; false if the
 *         aggregators indicate that no messages have been seen.
 */
public boolean receiveAggregatedValues(MapWritable updatedValues, long iteration)
        throws IOException, SyncException, InterruptedException {
    // map is the first value that is in the queue
    for (int i = 0; i < aggregators.length; i++) {
        globalAggregatorResult[i] = updatedValues.get(aggregatorValueFlag[i]);
        globalAggregatorIncrement[i] = (IntWritable) updatedValues.get(aggregatorIncrementFlag[i]);
    }
    IntWritable count = (IntWritable) updatedValues.get(GraphJobRunner.FLAG_MESSAGE_COUNTS);
    if (count != null && count.get() == Integer.MIN_VALUE) {
        return false;
    }
    return true;
}
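
The lookup keys here (aggregatorValueFlag[i], aggregatorIncrementFlag[i], GraphJobRunner.FLAG_MESSAGE_COUNTS) are well-known Writable constants shared by sender and receiver. A sketch of the master-side counterpart under that convention; the timesAggregated array is a hypothetical placeholder, not part of the Hama source shown here:

// hypothetical master-side counterpart: the same Writable constants serve as
// MapWritable keys, so receiveAggregatedValues can look each value up again
MapWritable updatedValues = new MapWritable();
for (int i = 0; i < aggregators.length; i++) {
    updatedValues.put(aggregatorValueFlag[i], aggregators[i].getValue());
    updatedValues.put(aggregatorIncrementFlag[i], new IntWritable(timesAggregated[i]));
}
// Integer.MIN_VALUE is the sentinel the receiver checks for "no messages seen"
updatedValues.put(GraphJobRunner.FLAG_MESSAGE_COUNTS, new IntWritable(Integer.MIN_VALUE));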

From source file:org.apache.hama.ml.recommendation.cf.OnlineTrainBSP.java

License:Apache License

private DoubleMatrix normalizeMatrix(BSPPeer<Text, VectorWritable, Text, VectorWritable, MapWritable> peer,
        DoubleMatrix featureMatrix, IntWritable msgFeatureMatrix, boolean broadcast)
        throws IOException, SyncException, InterruptedException {
    // send to master peer
    MapWritable msg = new MapWritable();
    MatrixWritable mtx = new MatrixWritable(featureMatrix);
    msg.put(msgFeatureMatrix, mtx);
    String master = peer.getPeerName(peer.getNumPeers() / 2);
    peer.send(master, msg);
    peer.sync();

    // normalize
    DoubleMatrix res = null;
    if (peer.getPeerName().equals(master)) {
        res = new DenseDoubleMatrix(featureMatrix.getRowCount(), featureMatrix.getColumnCount(), 0);
        int incomingMsgCount = 0;
        while ((msg = peer.getCurrentMessage()) != null) {
            MatrixWritable tmp = (MatrixWritable) msg.get(msgFeatureMatrix);
            // Hama's matrix operations return new instances, so capture the results
            res = res.add(tmp.getMatrix());
            incomingMsgCount++;
        }
        res = res.divide(incomingMsgCount);
    }

    if (broadcast) {
        if (peer.getPeerName().equals(master)) {
            // broadcast to all
            msg = new MapWritable();
            msg.put(msgFeatureMatrix, new MatrixWritable(res));
            // send to all
            for (String peerName : peer.getAllPeerNames()) {
                peer.send(peerName, msg);
            }
        }
        peer.sync();
        // receive the normalized matrix from the master; capture it in res so
        // every peer returns it (assigning to the featureMatrix parameter
        // would be invisible to the caller)
        msg = peer.getCurrentMessage();
        res = ((MatrixWritable) msg.get(msgFeatureMatrix)).getMatrix();
    }
    return res;
}

From source file:org.apache.hama.ml.recommendation.cf.OnlineTrainBSP.java

License:Apache License

private void getNormalizedItemFactorizedValues(
        BSPPeer<Text, VectorWritable, Text, VectorWritable, MapWritable> peer,
        HashMap<Text, DoubleVector> normalizedValues, HashMap<Text, LinkedList<IntWritable>> senderList)
        throws IOException {

    HashMap<Text, Integer> normalizedValueCount = new HashMap<Text, Integer>();
    Text itemId = null;
    VectorWritable value = null;
    IntWritable senderId = null;
    MapWritable msg = new MapWritable();

    while ((msg = peer.getCurrentMessage()) != null) {
        itemId = (Text) msg.get(OnlineCF.Settings.MSG_ITEM_MATRIX);
        value = (VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE);
        senderId = (IntWritable) msg.get(OnlineCF.Settings.MSG_SENDER_ID);

        if (!normalizedValues.containsKey(itemId)) {
            DenseDoubleVector tmp = new DenseDoubleVector(MATRIX_RANK, 0.0);
            normalizedValues.put(itemId, tmp);
            normalizedValueCount.put(itemId, 0);
            senderList.put(itemId, new LinkedList<IntWritable>());
        }

        normalizedValues.put(itemId, normalizedValues.get(itemId).add(value.getVector()));
        normalizedValueCount.put(itemId, normalizedValueCount.get(itemId) + 1);
        senderList.get(itemId).add(senderId);
    }

    // normalize
    for (Map.Entry<Text, DoubleVector> e : normalizedValues.entrySet()) {
        double count = normalizedValueCount.get(e.getKey());
        e.setValue(e.getValue().multiply(1.0 / count));
    }
}

From source file:org.apache.hama.ml.recommendation.cf.OnlineTrainBSP.java

License:Apache License

private void receiveSyncedItemFactorizedValues(
        BSPPeer<Text, VectorWritable, Text, VectorWritable, MapWritable> peer) throws IOException {

    MapWritable msg = new MapWritable();
    Text itemId = null;
    // drain all pending messages
    while ((msg = peer.getCurrentMessage()) != null) {
        itemId = (Text) msg.get(OnlineCF.Settings.MSG_ITEM_MATRIX);
        itemsMatrix.put(itemId.toString(), (VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE));
    }
}
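
The keys read here (MSG_ITEM_MATRIX, MSG_VALUE) imply a matching sender elsewhere in the class; a sketch of that counterpart, with itemId, vector, and peerName as hypothetical placeholders:

// hypothetical sender-side counterpart using the same OnlineCF.Settings keys
MapWritable msg = new MapWritable();
msg.put(OnlineCF.Settings.MSG_ITEM_MATRIX, new Text(itemId));
msg.put(OnlineCF.Settings.MSG_VALUE, new VectorWritable(vector));
peer.send(peerName, msg);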

From source file:org.apache.hama.ml.recommendation.cf.OnlineTrainBSP.java

License:Apache License

private void sendRequiredFeatures(BSPPeer<Text, VectorWritable, Text, VectorWritable, MapWritable> peer)
        throws IOException, SyncException, InterruptedException {

    MapWritable msg = null;
    int senderId = 0;

    while ((msg = peer.getCurrentMessage()) != null) {
        senderId = ((IntWritable) msg.get(OnlineCF.Settings.MSG_SENDER_ID)).get();
        MapWritable resp = new MapWritable();
        if (msg.containsKey(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)) {
            // send item feature
            String itemId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)).toString().substring(1);
            resp.put(OnlineCF.Settings.MSG_INP_ITEM_FEATURES, new Text(itemId));
            resp.put(OnlineCF.Settings.MSG_VALUE, inpItemsFeatures.get(itemId));
        } else if (msg.containsKey(OnlineCF.Settings.MSG_INP_USER_FEATURES)) {
            // send user feature
            String userId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_USER_FEATURES)).toString().substring(1);
            resp.put(OnlineCF.Settings.MSG_INP_USER_FEATURES, new Text(userId));
            resp.put(OnlineCF.Settings.MSG_VALUE, inpUsersFeatures.get(userId));
        }
        peer.send(peer.getPeerName(senderId), resp);
    }
}

From source file:org.apache.hama.ml.recommendation.cf.OnlineTrainBSP.java

License:Apache License

private void collectFeatures(BSPPeer<Text, VectorWritable, Text, VectorWritable, MapWritable> peer)
        throws IOException {
    // drop all cached features; the ones we need arrive via messages
    inpItemsFeatures = new HashMap<String, VectorWritable>();
    inpUsersFeatures = new HashMap<String, VectorWritable>();

    MapWritable msg = null;
    int userFeatureSize = 0;
    int itemFeatureSize = 0;
    while ((msg = peer.getCurrentMessage()) != null) {
        if (msg.containsKey(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)) {
            // store the received item feature
            String itemId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_ITEM_FEATURES)).toString();
            inpItemsFeatures.put(itemId, (VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE));
            itemFeatureSize = ((VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE)).getVector().getLength();
        } else if (msg.containsKey(OnlineCF.Settings.MSG_INP_USER_FEATURES)) {
            // store the received user feature
            String userId = ((Text) msg.get(OnlineCF.Settings.MSG_INP_USER_FEATURES)).toString();
            inpUsersFeatures.put(userId, (VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE));
            userFeatureSize = ((VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE)).getVector().getLength();
        }
    }
    if (inpItemsFeatures.size() > 0) {
        itemFeatureMatrix = new DenseDoubleMatrix(MATRIX_RANK, itemFeatureSize, rnd);
    }
    if (inpUsersFeatures.size() > 0) {
        userFeatureMatrix = new DenseDoubleMatrix(MATRIX_RANK, userFeatureSize, rnd);
    }
}

From source file:org.apache.hive.storage.jdbc.JdbcSerDe.java

License:Apache License

@Override
public Object deserialize(Writable blob) throws SerDeException {
    LOGGER.debug("Deserializing from SerDe");
    if (!(blob instanceof MapWritable)) {
        throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName());
    }

    if ((row == null) || (columnNames == null)) {
        throw new SerDeException("JDBC SerDe hasn't been initialized properly");
    }

    row.clear();
    MapWritable input = (MapWritable) blob;
    Text columnKey = new Text();

    for (int i = 0; i < numColumns; i++) {
        columnKey.set(columnNames.get(i));
        Writable value = input.get(columnKey);
        row.add(value instanceof NullWritable ? null : ((ObjectWritable) value).get());
    }

    return row;
}
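
To exercise this deserializer, a caller hands it a MapWritable keyed by column name, with ObjectWritable-wrapped values and NullWritable for SQL NULLs, matching the branches above. A sketch assuming an already-initialized JdbcSerDe instance; the column names are hypothetical:

MapWritable blob = new MapWritable();
blob.put(new Text("id"), new ObjectWritable(Integer.valueOf(1)));
blob.put(new Text("name"), new ObjectWritable("alice"));
blob.put(new Text("nickname"), NullWritable.get()); // SQL NULL
// yields a row of {1, "alice", null}, assuming matching column names
Object row = serde.deserialize(blob);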

From source file:org.apache.mahout.classifier.sequencelearning.baumwelchmapreduce.BaumWelchMapper.java

License:Apache License

@Override
public void map(LongWritable seqID, IntArrayWritable seq, Context context)
        throws IOException, InterruptedException {

    MapWritable initialDistributionStripe = new MapWritable();
    MapWritable transitionDistributionStripe = new MapWritable();
    MapWritable emissionDistributionStripe = new MapWritable();

    int[] sequence = new int[seq.get().length];

    int n = 0;
    for (Writable val : seq.get()) {
        sequence[n] = ((IntWritable) val).get();
        n++;
    }

    for (int k = 0; k < sequence.length; k++) {
        log.info("Sequence Array {}", Integer.toString(sequence[k]));
    }

    Matrix alphaFactors = HmmAlgorithms.forwardAlgorithm(Model, sequence, false);
    for (int i = 0; i < alphaFactors.numRows(); i++) {
        for (int j = 0; j < alphaFactors.numCols(); j++) {
            log.info("Alpha Factors Matrix entry ({}, {}) = {}", new Object[] { i, j, alphaFactors.get(i, j) });
        }
    }

    Matrix betaFactors = HmmAlgorithms.backwardAlgorithm(Model, sequence, false);
    for (int i = 0; i < betaFactors.numRows(); i++) {
        for (int j = 0; j < betaFactors.numCols(); j++) {
            log.info("Beta Factors Matrix entry ({}, {}) = {}", new Object[] { i, j, betaFactors.get(i, j) });
        }
    }

    // Initial Distribution
    for (int q = 0; q < nrOfHiddenStates; q++) {
        double alpha_1_q = alphaFactors.get(1, q);
        double beta_1_q = betaFactors.get(1, q);
        initialDistributionStripe.put(new IntWritable(q), new DoubleWritable(alpha_1_q * beta_1_q));
    }

    // Emission Distribution
    double[][] emissionMatrix = new double[nrOfHiddenStates][nrOfEmittedStates]; // zero-initialized by Java

    for (int t = 0; t < sequence.length; t++) {
        for (int q = 0; q < nrOfHiddenStates; q++) {
            double alpha_t_q = alphaFactors.get(t, q);
            double beta_t_q = betaFactors.get(t, q);
            emissionMatrix[q][sequence[t]] += alpha_t_q * beta_t_q;
        }
    }
    for (int q = 0; q < nrOfHiddenStates; q++) {
        MapWritable innerEmissionMap = new MapWritable();
        for (int xt = 0; xt < sequence.length; xt++) {
            innerEmissionMap.put(new IntWritable(sequence[xt]),
                    new DoubleWritable(emissionMatrix[q][sequence[xt]]));
        }
        emissionDistributionStripe.put(new IntWritable(q), innerEmissionMap);
    }

    // Transition Distribution
    double[][] transitionMatrix = new double[nrOfHiddenStates][nrOfHiddenStates]; // zero-initialized by Java

    for (int t = 0; t < sequence.length - 1; t++) {
        for (int q = 0; q < nrOfHiddenStates; q++) {
            for (int r = 0; r < nrOfHiddenStates; r++) {
                double alpha_t_q = alphaFactors.get(t, q);
                double A_q_r = Model.getTransitionMatrix().get(q, r);
                double B_r_xtplus1 = Model.getEmissionMatrix().get(r, sequence[t + 1]);
                double beta_tplus1_r = betaFactors.get(t + 1, r);
                double transitionProb = alpha_t_q * A_q_r * B_r_xtplus1 * beta_tplus1_r;
                log.info("Putting into Inner Map of Transition Distribution. Key = {}, Value = {}", q,
                        transitionProb);
                transitionMatrix[q][r] += transitionProb;
            }
        }
    }
    for (int q = 0; q < nrOfHiddenStates; q++) {
        MapWritable innerTransitionMap = new MapWritable();
        for (int r = 0; r < nrOfHiddenStates; r++) {
            innerTransitionMap.put(new IntWritable(r), new DoubleWritable(transitionMatrix[q][r]));
        }
        transitionDistributionStripe.put(new IntWritable(q), innerTransitionMap);
    }

    context.write(new Text("INITIAL"), initialDistributionStripe);
    log.info("Context Writing from Mapper the Initial Distribution Stripe. Size = {}  Entries = {}",
            Integer.toString(initialDistributionStripe.size()),
            Integer.toString(initialDistributionStripe.entrySet().size()));
    for (int q = 0; q < nrOfHiddenStates; q++) {
        context.write(new Text("EMIT_" + Integer.toString(q)),
                (MapWritable) emissionDistributionStripe.get(new IntWritable(q)));
        log.info("Context Writing from Mapper the Emission Distribution Stripe. State = {}  Entries = {}",
                Integer.toString(q), Integer.toString(
                        ((MapWritable) emissionDistributionStripe.get(new IntWritable(q))).size()));
        for (MapWritable.Entry<Writable, Writable> entry : ((MapWritable) emissionDistributionStripe
                .get(new IntWritable(q))).entrySet()) {
            log.info("Emission Distribution Stripe Details. Key = {}  Value = {} ",
                    Integer.toString(((IntWritable) entry.getKey()).get()),
                    Double.toString(((DoubleWritable) entry.getValue()).get()));
        }
        context.write(new Text("TRANSIT_" + Integer.toString(q)),
                (MapWritable) transitionDistributionStripe.get(new IntWritable(q)));
        log.info("Context Writing from Mapper the Transition Distribution Stripe. State = {}  Entries = {}",
                Integer.toString(q), Integer.toString(
                        ((MapWritable) transitionDistributionStripe.get(new IntWritable(q))).size()));
        for (MapWritable.Entry<Writable, Writable> entry : ((MapWritable) transitionDistributionStripe
                .get(new IntWritable(q))).entrySet()) {
            log.info("Transition Distribution Stripe Details. Key = {}  Value = {} ",
                    Integer.toString(((IntWritable) entry.getKey()).get()),
                    Double.toString(((DoubleWritable) entry.getValue()).get()));
        }
    }
}

From source file:org.apache.mahout.classifier.sequencelearning.hmm.hadoop.BaumWelchCombiner.java

License:Apache License

@Override
protected void reduce(Text key, Iterable<MapWritable> stripes, Context context)
        throws IOException, InterruptedException {

    MapWritable sumOfStripes = new MapWritable();

    if (scaling.equals("logscaling")) {
        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                double val = ((DoubleWritable) e.getValue()).get();
                if (!sumOfStripes.containsKey(e.getKey())) {
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                } else {
                    double sumStripesVal = ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                    if (sumStripesVal > Double.NEGATIVE_INFINITY) {
                        val = val + Math.log(1 + Math.exp(sumStripesVal - val));
                    }
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                }
            }
        }
    } else if (scaling.equals("rescaling")) {
        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                if (key.charAt(0) == 'I') {

                    double val = ((DoubleWritable) e.getValue()).get();
                    if (!sumOfStripes.containsKey(e.getKey())) {
                        sumOfStripes.put((IntWritable) e.getKey(), (DoubleWritable) e.getValue());
                    } else {
                        val += ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                        sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                    }
                } else {
                    double[] pr = BaumWelchUtils.toDoublePair(((BytesWritable) e.getValue()).getBytes());
                    double num = pr[0];
                    double denom = pr[1];
                    if (!sumOfStripes.containsKey(e.getKey())) {
                        sumOfStripes.put((IntWritable) e.getKey(), (BytesWritable) e.getValue());
                    } else {
                        double[] pr1 = BaumWelchUtils
                                .toDoublePair(((BytesWritable) sumOfStripes.get(e.getKey())).getBytes());
                        num += pr1[0];
                        denom += pr1[1];
                        byte[] doublePair1 = BaumWelchUtils.doublePairToByteArray(num, denom);
                        sumOfStripes.put((IntWritable) e.getKey(), new BytesWritable(doublePair1));
                    }
                }
            }
        }
    } else {
        for (MapWritable stripe : stripes) {
            for (Map.Entry e : stripe.entrySet()) {
                double val = ((DoubleWritable) e.getValue()).get();
                if (!sumOfStripes.containsKey(e.getKey())) {
                    sumOfStripes.put((IntWritable) e.getKey(), (DoubleWritable) e.getValue());
                } else {
                    val += ((DoubleWritable) sumOfStripes.get(e.getKey())).get();
                    sumOfStripes.put((IntWritable) e.getKey(), new DoubleWritable(val));
                }
            }
        }
    }
    context.write(key, sumOfStripes);
}
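
The final branch is the plain stripes pattern: element-wise addition of DoubleWritable values across MapWritable stripes. Isolated as a standalone sketch, not part of the Mahout class:

// element-wise sum of two DoubleWritable stripes, mirroring the final branch
static MapWritable sumStripes(MapWritable a, MapWritable b) {
    MapWritable sum = new MapWritable();
    sum.putAll(a);
    for (Map.Entry<Writable, Writable> e : b.entrySet()) {
        DoubleWritable prev = (DoubleWritable) sum.get(e.getKey());
        double val = ((DoubleWritable) e.getValue()).get() + (prev == null ? 0.0 : prev.get());
        sum.put(e.getKey(), new DoubleWritable(val));
    }
    return sum;
}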