Example usage for org.apache.mahout.math Vector times

Introduction

On this page you can find example usage for org.apache.mahout.math Vector times.

Prototype

Vector times(Vector x);

Document

Return a new vector containing the element-wise product of the recipient and the argument. Mahout's Vector interface also defines a scalar overload, Vector times(double x), which multiplies every element by x; several of the examples below use that form.
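
A minimal standalone sketch (not from the catalog below; the class name is illustrative) showing both forms:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class TimesDemo {
    public static void main(String[] args) {
        Vector a = new DenseVector(new double[] { 1.0, 2.0, 3.0 });
        Vector b = new DenseVector(new double[] { 4.0, 5.0, 6.0 });

        // element-wise (Hadamard) product: (4.0, 10.0, 18.0)
        System.out.println(a.times(b));

        // the scalar overload scales every element instead: (2.0, 4.0, 6.0)
        System.out.println(a.times(2.0));
    }
}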

Usage

From source file:DisplayClustering.java

License:Apache License

/**
 * Identical to plotRectangle(), but with the option of setting the color of
 * the rectangle's stroke.
 *
 * NOTE: This should probably be refactored with plotRectangle() since most of
 * the code here is direct copy/paste from that method.
 *
 * @param g2
 *          A Graphics2D context.
 * @param v
 *          A vector for the rectangle's center.
 * @param dv
 *          A vector for the rectangle's dimensions.
 * @param color
 *          The color of the rectangle's stroke.
 */
protected static void plotClusteredRectangle(Graphics2D g2, Vector v, Vector dv, Color color) {
    double[] flip = { 1, -1 };
    Vector v2 = v.times(new DenseVector(flip));
    v2 = v2.minus(dv.divide(2));
    int h = SIZE / 2;
    double x = v2.get(0) + h;
    double y = v2.get(1) + h;

    g2.setStroke(new BasicStroke(1));
    g2.setColor(color);
    g2.draw(new Rectangle2D.Double(x * DS, y * DS, dv.get(0) * DS, dv.get(1) * DS));
}
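
The v.times(new DenseVector(flip)) idiom above is worth spelling out: the element-wise product with { 1, -1 } negates the y component, mapping mathematical coordinates (y grows upward) onto AWT screen coordinates (y grows downward). The two plot methods that follow use the same trick. A minimal sketch, with illustrative values:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class FlipDemo {
    public static void main(String[] args) {
        Vector center = new DenseVector(new double[] { 3.0, 4.0 });
        // negate y only: result is (3.0, -4.0)
        Vector flipped = center.times(new DenseVector(new double[] { 1, -1 }));
        System.out.println(flipped);
    }
}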

From source file:DisplayClustering.java

License:Apache License

/**
 * Draw a rectangle on the graphics context
 *
 * @param g2
 *          a Graphics2D context
 * @param v
 *          a Vector of rectangle center
 * @param dv
 *          a Vector of rectangle dimensions
 */
protected static void plotRectangle(Graphics2D g2, Vector v, Vector dv) {
    double[] flip = { 1, -1 };
    Vector v2 = v.times(new DenseVector(flip));
    v2 = v2.minus(dv.divide(2));
    int h = SIZE / 2;
    double x = v2.get(0) + h;
    double y = v2.get(1) + h;
    g2.draw(new Rectangle2D.Double(x * DS, y * DS, dv.get(0) * DS, dv.get(1) * DS));
}

From source file:DisplayClustering.java

License:Apache License

/**
 * Draw an ellipse on the graphics context
 *
 * @param g2
 *          a Graphics2D context
 * @param v
 *          a Vector of ellipse center
 * @param dv
 *          a Vector of ellipse dimensions
 */
protected static void plotEllipse(Graphics2D g2, Vector v, Vector dv) {
    double[] flip = { 1, -1 };
    Vector v2 = v.times(new DenseVector(flip));
    v2 = v2.minus(dv.divide(2));
    int h = SIZE / 2;
    double x = v2.get(0) + h;
    double y = v2.get(1) + h;
    g2.draw(new Ellipse2D.Double(x * DS, y * DS, dv.get(0) * DS, dv.get(1) * DS));
}

From source file:at.illecker.hama.rootbeer.examples.matrixmultiplication.compositeinput.cpu.MatrixMultiplicationBSPCpu.java

License:Apache License

@Override
public void bsp(BSPPeer<IntWritable, TupleWritable, IntWritable, VectorWritable, MatrixRowMessage> peer)
        throws IOException, SyncException, InterruptedException {

    IntWritable key = new IntWritable();
    TupleWritable value = new TupleWritable();
    while (peer.readNext(key, value)) {

        // Logging
        if (isDebuggingEnabled) {
            for (int i = 0; i < value.size(); i++) {
                Vector vector = ((VectorWritable) value.get(i)).get();
                logger.writeChars("bsp,input,key=" + key + ",value=" + vector.toString() + "\n");
            }
        }

        Vector firstVector = ((VectorWritable) value.get(0)).get();
        Vector secondVector = ((VectorWritable) value.get(1)).get();

        // outCardinality is resulting column size n
        // (l x m) * (m x n) = (l x n)
        boolean firstIsOutFrag = secondVector.size() == outCardinality;

        // outFrag is the row fragment of the matrix that has the resulting
        // column cardinality (matrix B)
        Vector outFrag = firstIsOutFrag ? secondVector : firstVector;

        // multiplier is the matching column of matrix A (a row of the
        // transposed matrix A), which supplies the resulting row indices
        Vector multiplier = firstIsOutFrag ? firstVector : secondVector;

        if (isDebuggingEnabled) {
            logger.writeChars("bsp,firstIsOutFrag=" + firstIsOutFrag + "\n");
            logger.writeChars("bsp,outFrag=" + outFrag + "\n");
            logger.writeChars("bsp,multiplier=" + multiplier + "\n");
        }

        for (Vector.Element e : multiplier.nonZeroes()) {

            VectorWritable outVector = new VectorWritable();
            // Scalar Multiplication (Vector x Element)
            outVector.set(outFrag.times(e.get()));

            peer.send(masterTask, new MatrixRowMessage(e.index(), outVector));

            if (isDebuggingEnabled) {
                logger.writeChars("bsp,send,key=" + e.index() + ",value=" + outVector.get().toString() + "\n");
            }
        }
        if (isDebuggingEnabled) {
            logger.flush();
        }
    }
    peer.sync();
}
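
The send loop above expresses matrix multiplication as a sum of rank-1 outer products: for every nonzero element e of the multiplier vector (a column of A), outFrag.times(e.get()) is one scaled copy of a row of B, keyed by the result row it contributes to. A minimal local sketch of the same idea, with illustrative names and values:

import org.apache.mahout.math.DenseMatrix;
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.Vector;

public class OuterProductDemo {
    public static void main(String[] args) {
        Vector aColumn = new DenseVector(new double[] { 1.0, 2.0 }); // one column of A
        Vector bRow = new DenseVector(new double[] { 3.0, 4.0, 5.0 }); // matching row of B

        // accumulate the rank-1 outer product aColumn * bRow' row by row,
        // just as the BSP task emits outFrag.times(e.get()) per nonzero
        Matrix partial = new DenseMatrix(aColumn.size(), bRow.size());
        for (Vector.Element e : aColumn.nonZeroes()) {
            partial.assignRow(e.index(), bRow.times(e.get()));
        }
        System.out.println(partial);
    }
}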

From source file:ca.uwaterloo.cpami.mahout.matrix.utils.GramSchmidt.java

License:Apache License

public static void orthonormalizeColumns(Matrix mx) {

    // NOTE: despite the method name, this implementation iterates over
    // viewRow(...), so it actually orthonormalizes the rows of mx
    //int n = mx.numCols();
    int n = mx.numRows();

    for (int c = 0; c < n; c++) {
        System.out.println("col: " + c);
        Vector col = mx.viewRow(c);
        for (int c1 = 0; c1 < c; c1++) {
            Vector viewC1 = mx.viewRow(c1);
            col.assign(col.minus(viewC1.times(viewC1.dot(col))));

        }
        final double norm2 = col.norm(2);
        if (norm2 == 0) {
            // a zero norm means this row is linearly dependent on the
            // previous rows; the division below would then produce NaNs
            System.out.println("zero");
        }
        col.assign(new DoubleFunction() {
            @Override
            public double apply(double x) {
                return x / norm2;
            }
        });
    }
}
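
Each inner-loop iteration above is one Gram-Schmidt projection step: viewC1.times(viewC1.dot(col)) is the projection of col onto the already-normalized vector viewC1, and subtracting it leaves the component orthogonal to viewC1. A standalone sketch of a single step, with illustrative values:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class ProjectionDemo {
    public static void main(String[] args) {
        Vector u = new DenseVector(new double[] { 1.0, 0.0 }); // unit length
        Vector v = new DenseVector(new double[] { 3.0, 4.0 });

        // subtract from v its projection onto u: result is (0.0, 4.0)
        Vector orthogonal = v.minus(u.times(u.dot(v)));
        System.out.println(orthogonal);
    }
}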

From source file:cn.edu.bjtu.cit.recommender.Recommender.java

License:Apache License

@SuppressWarnings("unchecked")
public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println();
        System.err.println("Usage: " + this.getClass().getName()
                + " [generic options] input output [profiling] [estimation] [clustersize]");
        System.err.println();
        printUsage();
        GenericOptionsParser.printGenericCommandUsage(System.err);

        return 1;
    }
    OptionParser parser = new OptionParser(args);

    Pipeline pipeline = new MRPipeline(Recommender.class, getConf());

    if (parser.hasOption(CLUSTER_SIZE)) {
        pipeline.getConfiguration().setInt(ClusterOracle.CLUSTER_SIZE,
                Integer.parseInt(parser.getOption(CLUSTER_SIZE).getValue()));
    }

    if (parser.hasOption(PROFILING)) {
        pipeline.getConfiguration().setBoolean(Profiler.IS_PROFILE, true);
        this.profileFilePath = parser.getOption(PROFILING).getValue();

    }

    if (parser.hasOption(ESTIMATION)) {
        estFile = parser.getOption(ESTIMATION).getValue();
        est = new Estimator(estFile, clusterSize);
    }

    if (parser.hasOption(OPT_REDUCE)) {
        pipeline.getConfiguration().setBoolean(OPT_REDUCE, true);
    }

    if (parser.hasOption(OPT_MSCR)) {
        pipeline.getConfiguration().setBoolean(OPT_MSCR, true);
    }

    if (parser.hasOption(ACTIVE_THRESHOLD)) {
        threshold = Integer.parseInt(parser.getOption("at").getValue());
    }

    if (parser.hasOption(TOP)) {
        top = Integer.parseInt(parser.getOption("top").getValue());
    }

    profiler = new Profiler(pipeline);
    /*
     * input node
     */
    PCollection<String> lines = pipeline.readTextFile(args[0]);

    if (profiler.isProfiling() && lines.getSize() > 10 * 1024 * 1024) {
        lines = lines.sample(0.1);
    }

    /*
     * S0 + GBK
     */
    PGroupedTable<Long, Long> userWithPrefs = lines.parallelDo(new MapFn<String, Pair<Long, Long>>() {

        @Override
        public Pair<Long, Long> map(String input) {
            String[] split = input.split(Estimator.DELM);
            long userID = Long.parseLong(split[0]);
            long itemID = Long.parseLong(split[1]);
            return Pair.of(userID, itemID);
        }

        @Override
        public float scaleFactor() {
            return est.getScaleFactor("S0").sizeFactor;
        }

        @Override
        public float scaleFactorByRecord() {
            return est.getScaleFactor("S0").recsFactor;
        }
    }, Writables.tableOf(Writables.longs(), Writables.longs())).groupByKey(est.getClusterSize());

    /*
     * S1
     */
    PTable<Long, Vector> userVector = userWithPrefs
            .parallelDo(new MapFn<Pair<Long, Iterable<Long>>, Pair<Long, Vector>>() {
                @Override
                public Pair<Long, Vector> map(Pair<Long, Iterable<Long>> input) {
                    Vector userVector = new RandomAccessSparseVector(Integer.MAX_VALUE, 100);
                    for (long itemPref : input.second()) {
                        userVector.set((int) itemPref, 1.0f);
                    }
                    return Pair.of(input.first(), userVector);
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S1").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S1").recsFactor;
                }
            }, Writables.tableOf(Writables.longs(), Writables.vectors()));

    userVector = profiler.profile("S0-S1", pipeline, userVector, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S2
     */
    PTable<Long, Vector> filteredUserVector = userVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Long, Vector>>() {

                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Long, Vector>> emitter) {
                    if (input.second().getNumNondefaultElements() > threshold) {
                        emitter.emit(input);
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S2").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S2").recsFactor;
                }

            }, Writables.tableOf(Writables.longs(), Writables.vectors()));

    filteredUserVector = profiler.profile("S2", pipeline, filteredUserVector, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S3 + GBK
     */
    PGroupedTable<Integer, Integer> coOccurencePairs = filteredUserVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Integer, Integer>>() {
                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Integer, Integer>> emitter) {
                    Iterator<Vector.Element> it = input.second().iterateNonZero();
                    while (it.hasNext()) {
                        int index1 = it.next().index();
                        Iterator<Vector.Element> it2 = input.second().iterateNonZero();
                        while (it2.hasNext()) {
                            int index2 = it2.next().index();
                            emitter.emit(Pair.of(index1, index2));
                        }
                    }
                }

                @Override
                public float scaleFactor() {
                    float size = est.getScaleFactor("S3").sizeFactor;
                    return size;
                }

                @Override
                public float scaleFactorByRecord() {
                    float recs = est.getScaleFactor("S3").recsFactor;
                    return recs;
                }
            }, Writables.tableOf(Writables.ints(), Writables.ints())).groupByKey(est.getClusterSize());

    /*
     * S4
     */
    PTable<Integer, Vector> coOccurenceVector = coOccurencePairs
            .parallelDo(new MapFn<Pair<Integer, Iterable<Integer>>, Pair<Integer, Vector>>() {
                @Override
                public Pair<Integer, Vector> map(Pair<Integer, Iterable<Integer>> input) {
                    Vector cooccurrenceRow = new RandomAccessSparseVector(Integer.MAX_VALUE, 100);
                    for (int itemIndex2 : input.second()) {
                        cooccurrenceRow.set(itemIndex2, cooccurrenceRow.get(itemIndex2) + 1.0);
                    }
                    return Pair.of(input.first(), cooccurrenceRow);
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S4").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S4").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), Writables.vectors()));

    coOccurenceVector = profiler.profile("S3-S4", pipeline, coOccurenceVector, ProfileConverter.int_vector(),
            Writables.tableOf(Writables.ints(), Writables.vectors()));

    /*
     * S5 Wrapping co-occurrence columns
     */
    PTable<Integer, VectorOrPref> wrappedCooccurrence = coOccurenceVector
            .parallelDo(new MapFn<Pair<Integer, Vector>, Pair<Integer, VectorOrPref>>() {

                @Override
                public Pair<Integer, VectorOrPref> map(Pair<Integer, Vector> input) {
                    return Pair.of(input.first(), new VectorOrPref(input.second()));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S5").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S5").recsFactor;
                }

            }, Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    wrappedCooccurrence = profiler.profile("S5", pipeline, wrappedCooccurrence, ProfileConverter.int_vopv(),
            Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    /*
     * S6 Splitting user vectors
     */
    PTable<Integer, VectorOrPref> userVectorSplit = filteredUserVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Integer, VectorOrPref>>() {

                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Integer, VectorOrPref>> emitter) {
                    long userID = input.first();
                    Vector userVector = input.second();
                    Iterator<Vector.Element> it = userVector.iterateNonZero();
                    while (it.hasNext()) {
                        Vector.Element e = it.next();
                        int itemIndex = e.index();
                        float preferenceValue = (float) e.get();
                        emitter.emit(Pair.of(itemIndex, new VectorOrPref(userID, preferenceValue)));
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S6").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S6").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    userVectorSplit = profiler.profile("S6", pipeline, userVectorSplit, ProfileConverter.int_vopp(),
            Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    /*
     * S7 Combine VectorOrPrefs
     */
    PTable<Integer, VectorAndPrefs> combinedVectorOrPref = wrappedCooccurrence.union(userVectorSplit)
            .groupByKey(est.getClusterSize())
            .parallelDo(new DoFn<Pair<Integer, Iterable<VectorOrPref>>, Pair<Integer, VectorAndPrefs>>() {

                @Override
                public void process(Pair<Integer, Iterable<VectorOrPref>> input,
                        Emitter<Pair<Integer, VectorAndPrefs>> emitter) {
                    Vector vector = null;
                    List<Long> userIDs = Lists.newArrayList();
                    List<Float> values = Lists.newArrayList();
                    for (VectorOrPref vop : input.second()) {
                        if (vector == null) {
                            vector = vop.getVector();
                        }
                        long userID = vop.getUserID();
                        if (userID != Long.MIN_VALUE) {
                            userIDs.add(vop.getUserID());
                        }
                        float value = vop.getValue();
                        if (!Float.isNaN(value)) {
                            values.add(vop.getValue());
                        }
                    }
                    emitter.emit(Pair.of(input.first(), new VectorAndPrefs(vector, userIDs, values)));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S7").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S7").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), VectorAndPrefs.vectorAndPrefs()));

    combinedVectorOrPref = profiler.profile("S5+S6-S7", pipeline, combinedVectorOrPref,
            ProfileConverter.int_vap(), Writables.tableOf(Writables.ints(), VectorAndPrefs.vectorAndPrefs()));
    /*
     * S8 Computing partial recommendation vectors
     */
    PTable<Long, Vector> partialMultiply = combinedVectorOrPref
            .parallelDo(new DoFn<Pair<Integer, VectorAndPrefs>, Pair<Long, Vector>>() {
                @Override
                public void process(Pair<Integer, VectorAndPrefs> input, Emitter<Pair<Long, Vector>> emitter) {
                    Vector cooccurrenceColumn = input.second().getVector();
                    List<Long> userIDs = input.second().getUserIDs();
                    List<Float> prefValues = input.second().getValues();
                    for (int i = 0; i < userIDs.size(); i++) {
                        long userID = userIDs.get(i);
                        if (userID != Long.MIN_VALUE) {
                            float prefValue = prefValues.get(i);
                            Vector partialProduct = cooccurrenceColumn.times(prefValue);
                            emitter.emit(Pair.of(userID, partialProduct));
                        }
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S8").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S8").recsFactor;
                }

            }, Writables.tableOf(Writables.longs(), Writables.vectors())).groupByKey(est.getClusterSize())
            .combineValues(new CombineFn<Long, Vector>() {

                @Override
                public void process(Pair<Long, Iterable<Vector>> input, Emitter<Pair<Long, Vector>> emitter) {
                    Vector partial = null;
                    for (Vector vector : input.second()) {
                        partial = partial == null ? vector : partial.plus(vector);
                    }
                    emitter.emit(Pair.of(input.first(), partial));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("combine").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("combine").recsFactor;
                }
            });

    partialMultiply = profiler.profile("S8-combine", pipeline, partialMultiply, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S9 Producing recommendations from vectors
     */
    PTable<Long, RecommendedItems> recommendedItems = partialMultiply
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Long, RecommendedItems>>() {

                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Long, RecommendedItems>> emitter) {
                    Queue<RecommendedItem> topItems = new PriorityQueue<RecommendedItem>(11,
                            Collections.reverseOrder(BY_PREFERENCE_VALUE));
                    Iterator<Vector.Element> recommendationVectorIterator = input.second().iterateNonZero();
                    while (recommendationVectorIterator.hasNext()) {
                        Vector.Element element = recommendationVectorIterator.next();
                        int index = element.index();
                        float value = (float) element.get();
                        if (topItems.size() < top) {
                            topItems.add(new GenericRecommendedItem(index, value));
                        } else if (value > topItems.peek().getValue()) {
                            topItems.add(new GenericRecommendedItem(index, value));
                            topItems.poll();
                        }
                    }
                    List<RecommendedItem> recommendations = new ArrayList<RecommendedItem>(topItems.size());
                    recommendations.addAll(topItems);
                    Collections.sort(recommendations, BY_PREFERENCE_VALUE);
                    emitter.emit(Pair.of(input.first(), new RecommendedItems(recommendations)));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S9").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S9").recsFactor;
                }

            }, Writables.tableOf(Writables.longs(), RecommendedItems.recommendedItems()));

    recommendedItems = profiler.profile("S9", pipeline, recommendedItems, ProfileConverter.long_ri(),
            Writables.tableOf(Writables.longs(), RecommendedItems.recommendedItems()));

    /*
     * Profiling
     */
    if (profiler.isProfiling()) {
        profiler.writeResultToFile(profileFilePath);
        profiler.cleanup(pipeline.getConfiguration());
        return 0;
    }
    /*
     * asText
     */
    pipeline.writeTextFile(recommendedItems, args[1]);
    PipelineResult result = pipeline.done();
    return result.succeeded() ? 0 : 1;
}
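
The times call in stage S8 is the core of this item-based recommender: each item's co-occurrence column, scaled by the user's preference value for that item, is a partial recommendation vector, and the combiner then sums those partials per user with plus(). A minimal sketch of one partial product, with illustrative values:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class PartialMultiplyDemo {
    public static void main(String[] args) {
        // co-occurrence column for one item, and a user's preference for it
        Vector cooccurrenceColumn = new DenseVector(new double[] { 0.0, 2.0, 1.0 });
        float prefValue = 3.0f;

        // one partial recommendation vector: (0.0, 6.0, 3.0)
        Vector partialProduct = cooccurrenceColumn.times(prefValue);
        System.out.println(partialProduct);
    }
}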

From source file:com.cloudera.science.ml.kmeans.core.MiniBatchUpdateStrategy.java

License:Open Source License

@Override
public <V extends Vector> Centers update(List<Weighted<V>> points, Centers centers) {
    int[] perCenterStepCounts = new int[centers.size()];
    WeightedSampler<V> sampler = new WeightedSampler<V>(points, random);
    for (int iter = 0; iter < numIterations; iter++) {
        // Compute the closest center for each mini-batch sample
        List<List<V>> centerAssignments = Lists.newArrayList();
        for (int i = 0; i < centers.size(); i++) {
            centerAssignments.add(Lists.<V>newArrayList());
        }
        for (int i = 0; i < miniBatchSize; i++) {
            V sample = sampler.sample();
            int closestId = centers.indexOfClosest(sample);
            centerAssignments.get(closestId).add(sample);
        }
        // Apply the mini-batch
        List<Vector> nextCenters = Lists.newArrayList();
        for (int i = 0; i < centerAssignments.size(); i++) {
            Vector currentCenter = centers.get(i);
            for (int j = 0; j < centerAssignments.get(i).size(); j++) {
                double eta = 1.0 / (++perCenterStepCounts[i] + 1.0);
                currentCenter = currentCenter.times(1.0 - eta);
                currentCenter = currentCenter.plus(centerAssignments.get(i).get(j).times(eta));
            }
            nextCenters.add(currentCenter);
        }
        centers = new Centers(nextCenters);
    }
    return centers;
}
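
The two times calls above implement the standard mini-batch k-means update c <- (1 - eta) * c + eta * x, where the per-center learning rate eta shrinks as the center absorbs more samples. A standalone sketch of that update rule, with illustrative values:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class MiniBatchUpdateDemo {
    public static void main(String[] args) {
        Vector center = new DenseVector(new double[] { 0.0, 0.0 });
        Vector[] samples = { new DenseVector(new double[] { 2.0, 2.0 }),
                new DenseVector(new double[] { 4.0, 0.0 }) };

        int stepCount = 0;
        for (Vector sample : samples) {
            // eta = 1/2 on the first step, 1/3 on the second, ...
            double eta = 1.0 / (++stepCount + 1.0);
            center = center.times(1.0 - eta).plus(sample.times(eta));
        }
        System.out.println(center); // (2.0, 0.666...)
    }
}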

From source file:com.ydy.cf.solver.impl.AlternatingLeastSquaresImplicitSolver.java

License:Apache License

/** Y' (Cu - I) Y + lambda * I */
private Matrix YtransponseCuMinusIYPlusLambdaI(Vector userRatings) {
    Preconditions.checkArgument(userRatings.isSequentialAccess(), "need sequential access to ratings!");

    /* (Cu -I) Y */
    OpenIntObjectHashMap<Vector> CuMinusIY = new OpenIntObjectHashMap<Vector>();
    Iterator<Vector.Element> ratings = userRatings.iterateNonZero();
    while (ratings.hasNext()) {
        Vector.Element e = ratings.next();
        Vector curYRow = Y.viewRow(e.index());
        CuMinusIY.put(e.index(), curYRow.times(confidence(e.get()) - 1));
    }

    Matrix YtransponseCuMinusIY = new DenseMatrix(numFeatures, numFeatures);

    /* Y' (Cu -I) Y by outer products */
    ratings = userRatings.iterateNonZero();
    while (ratings.hasNext()) {
        Vector.Element e = ratings.next();
        for (Vector.Element feature : Y.viewRow(e.index())) {
            Vector partial = CuMinusIY.get(e.index()).times(feature.get());
            YtransponseCuMinusIY.viewRow(feature.index()).assign(partial, Functions.PLUS);
        }
    }

    /* Y' (Cu - I) Y + lambda * I : add lambda on the diagonal */
    for (int feature = 0; feature < numFeatures; feature++) {
        YtransponseCuMinusIY.setQuick(feature, feature,
                YtransponseCuMinusIY.getQuick(feature, feature) + lambda);
    }

    return YtransponseCuMinusIY;
}
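
The confidence(...) helper is not part of this excerpt. A common choice for implicit-feedback ALS (Hu, Koren, Volinsky) is c(r) = 1 + alpha * r, in which case curYRow.times(confidence(e.get()) - 1) reduces to scaling the row by alpha * r. A hypothetical sketch under that assumption; ALPHA and the class name are illustrative:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class ConfidenceDemo {
    // assumed form of confidence(...), not shown in the excerpt above
    static final double ALPHA = 40.0;

    static double confidence(double rating) {
        return 1.0 + ALPHA * rating;
    }

    public static void main(String[] args) {
        Vector yRow = new DenseVector(new double[] { 0.5, -0.25 });
        double rating = 2.0;

        // the weighted row that enters Y' (Cu - I) Y: yRow scaled by alpha * rating
        System.out.println(yRow.times(confidence(rating) - 1.0));
    }
}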

From source file:com.ydy.cf.solver.impl.AlternatingLeastSquaresImplicitSolver.java

License:Apache License

/** Y' Cu p(u) */
private Matrix YtransponseCuPu(Vector userRatings) {
    Preconditions.checkArgument(userRatings.isSequentialAccess(), "need sequential access to ratings!");

    Vector YtransponseCuPu = new DenseVector(numFeatures);

    Iterator<Vector.Element> ratings = userRatings.iterateNonZero();
    while (ratings.hasNext()) {
        Vector.Element e = ratings.next();
        Vector curYRow = Y.viewRow(e.index());
        YtransponseCuPu.assign(curYRow.times(confidence(e.get())), Functions.PLUS);
    }

    return columnVectorAsMatrix(YtransponseCuPu);
}

From source file:edu.indiana.d2i.htrc.kmeans.KMeansClusterer.java

License:Apache License

/**
 * Iterates over all clusters and identifies the one closest to the given point. The distance
 * measure used is configured at creation time.
 * 
 * @param point
 *          a point to find a cluster for.
 * @param clusters
 *          a List<Cluster> to test.
 */
public void emitPointToNearestCluster(Vector point, Iterable<Cluster> clusters,
        Mapper<?, ?, Text, ClusterObservations>.Context context) throws IOException, InterruptedException {
    Cluster nearestCluster = null;
    double nearestDistance = Double.MAX_VALUE;
    for (Cluster cluster : clusters) {
        Vector clusterCenter = cluster.getCenter();
        double distance = this.measure.distance(clusterCenter.getLengthSquared(), clusterCenter, point);
        if (log.isDebugEnabled()) {
            log.debug("{} Cluster: {}", distance, cluster.getId());
        }
        if (distance < nearestDistance || nearestCluster == null) {
            nearestCluster = cluster;
            nearestDistance = distance;
        }
    }
    context.write(new Text(nearestCluster.getIdentifier()),
            new ClusterObservations(1, point, point.times(point)));
}