Example usage for weka.core.matrix Matrix times

List of usage examples for weka.core.matrix Matrix times

Introduction

On this page you can find example usages of the weka.core.matrix Matrix.times method.

Prototype

public Matrix times(Matrix B) 

Source Link

Document

Linear algebraic matrix multiplication, A * B

Usage

From source file:adams.data.instancesanalysis.pls.PLS1.java

License:Open Source License

/**
 * Generates predictions for every instance in the supplied dataset using
 * the stored PLS1 matrices (m_W, m_P, m_b_hat).
 *
 * @param data   the input data
 * @return      the predicted data
 */
protected Instances predict(Instances data) {
    Instances output = new Instances(getOutputFormat());

    for (int index = 0; index < data.numInstances(); index++) {
        // wrap the current instance in its own single-row dataset
        Instances single = new Instances(data, 0);
        single.add((Instance) data.instance(index).copy());

        Matrix xVec = MatrixHelper.getX(single);
        Matrix xStore = new Matrix(1, getNumComponents());
        Matrix scores = new Matrix(1, getNumComponents());

        for (int comp = 0; comp < getNumComponents(); comp++) {
            MatrixHelper.setVector(xVec, xStore, comp);
            // 1. step: tj = xj * wj
            Matrix tVec = xVec.times(MatrixHelper.getVector(m_W, comp));
            MatrixHelper.setVector(tVec, scores, comp);
            // 2. step: xj+1 = xj - tj*pj^T (tj is a 1x1 matrix)
            xVec = xVec.minus(MatrixHelper.getVector(m_P, comp).transpose().times(tVec.get(0, 0)));
        }

        Instances predicted;
        switch (m_PredictionType) {
        case ALL:
            // regression estimate: y = T * b_hat
            predicted = MatrixHelper.toInstances(getOutputFormat(), scores, scores.times(m_b_hat));
            break;
        case NONE:
        case EXCEPT_CLASS:
            // keep the original class value
            predicted = MatrixHelper.toInstances(getOutputFormat(), scores, MatrixHelper.getY(single));
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        output.add(predicted.instance(0));
    }

    return output;
}

From source file:adams.data.instancesanalysis.pls.PLS1.java

License:Open Source License

/**
 * Transforms the data with the PLS1 algorithm. On the first call the model
 * matrices W (weights), P (loadings), b_hat (inner regression coefficients)
 * and the combined regression vector r_hat are estimated and stored; every
 * call (including the first) then predicts via {@link #predict(Instances)}.
 *
 * @param data   the data to use
 * @param params additional parameters (unused in this implementation)
 * @return      the transformed/predicted instances
 * @throws Exception if the transformation fails
 */
protected Instances doTransform(Instances data, Map<String, Object> params) throws Exception {
    Matrix X, X_trans;
    Matrix y;
    Matrix W, w;
    Matrix T, t, t_trans;
    Matrix P, p, p_trans;
    double b;
    Matrix b_hat;
    int j;
    Matrix tmp;
    Instances result;

    // initialization (only performed once; afterwards the stored matrices are reused)
    if (!isInitialized()) {
        // split up data into predictors X and response y
        X = MatrixHelper.getX(data);
        y = MatrixHelper.getY(data);
        // NOTE(review): X_trans is computed once from the ORIGINAL X, while X
        // itself is deflated inside the loop below; classic PLS1 derives wj
        // from the deflated Xj' for each component — confirm against the
        // reference implementation.
        X_trans = X.transpose();

        // init model matrices: one column per component
        W = new Matrix(data.numAttributes() - 1, getNumComponents());
        P = new Matrix(data.numAttributes() - 1, getNumComponents());
        T = new Matrix(data.numInstances(), getNumComponents());
        b_hat = new Matrix(getNumComponents(), 1);

        for (j = 0; j < getNumComponents(); j++) {
            // 1. step: wj = X'y, normalized, stored as column j of W
            w = X_trans.times(y);
            MatrixHelper.normalizeVector(w);
            MatrixHelper.setVector(w, W, j);

            // 2. step: score vector tj = X*wj, stored as column j of T
            t = X.times(w);
            t_trans = t.transpose();
            MatrixHelper.setVector(t, T, j);

            // 3. step: inner regression coefficient ^bj = (tj'y)/(tj'tj)
            b = t_trans.times(y).get(0, 0) / t_trans.times(t).get(0, 0);
            b_hat.set(j, 0, b);

            // 4. step: loading pj = X'tj/(tj'tj), stored as column j of P
            p = X_trans.times(t).times(1 / t_trans.times(t).get(0, 0));
            p_trans = p.transpose();
            MatrixHelper.setVector(p, P, j);

            // 5. step: deflate X and y for the next component
            X = X.minus(t.times(p_trans));
            y = y.minus(t.times(b));
        }

        // W*(P^T*W)^-1
        tmp = W.times(((P.transpose()).times(W)).inverse());

        // factor = W*(P^T*W)^-1 * b_hat
        m_r_hat = tmp.times(b_hat);

        // save matrices for later prediction calls
        m_P = P;
        m_W = W;
        m_b_hat = b_hat;

        result = predict(data);
    }
    // prediction only (model already initialized)
    else {
        result = predict(data);
    }

    return result;
}

From source file:adams.data.instancesanalysis.pls.SIMPLS.java

License:Open Source License

/**
 * Transforms the data with the SIMPLS algorithm (de Jong, 1993). On the
 * first call the weight matrix m_W and regression matrix m_B are estimated
 * and stored; afterwards the stored matrices are applied directly.
 *
 * @param data   the data to use
 * @param params additional parameters (unused in this implementation)
 * @return      the transformed instances
 * @throws Exception if the transformation fails
 */
protected Instances doTransform(Instances data, Map<String, Object> params) throws Exception {
    Matrix A, A_trans;
    Matrix M;
    Matrix X, X_trans;
    Matrix X_new;
    Matrix Y, y;
    Matrix C, c;
    Matrix Q, q;
    Matrix W, w;
    Matrix P, p, p_trans;
    Matrix v, v_trans;
    Matrix T;
    Instances result;
    int h;

    if (!isInitialized()) {
        // init: A = X'Y (cross-product), M = X'X (covariance), C = identity
        X = MatrixHelper.getX(data);
        X_trans = X.transpose();
        Y = MatrixHelper.getY(data);
        A = X_trans.times(Y);
        M = X_trans.times(X);
        C = Matrix.identity(data.numAttributes() - 1, data.numAttributes() - 1);
        W = new Matrix(data.numAttributes() - 1, getNumComponents());
        P = new Matrix(data.numAttributes() - 1, getNumComponents());
        Q = new Matrix(1, getNumComponents());

        for (h = 0; h < getNumComponents(); h++) {
            // 1. qh as dominant EigenVector of Ah'*Ah
            A_trans = A.transpose();
            q = MatrixHelper.getDominantEigenVector(A_trans.times(A));

            // 2. wh=Ah*qh, ch=wh'*Mh*wh, wh=wh/sqrt(ch), store wh in W as column
            w = A.times(q);
            c = w.transpose().times(M).times(w);
            w = w.times(1.0 / StrictMath.sqrt(c.get(0, 0)));
            MatrixHelper.setVector(w, W, h);

            // 3. ph=Mh*wh, store ph in P as column
            p = M.times(w);
            p_trans = p.transpose();
            MatrixHelper.setVector(p, P, h);

            // 4. qh=Ah'*wh, store qh in Q as column
            q = A_trans.times(w);
            MatrixHelper.setVector(q, Q, h);

            // 5. vh=Ch*ph, vh=vh/||vh||
            v = C.times(p);
            MatrixHelper.normalizeVector(v);
            v_trans = v.transpose();

            // 6. deflate: Ch+1=Ch-vh*vh', Mh+1=Mh-ph*ph'
            C = C.minus(v.times(v_trans));
            M = M.minus(p.times(p_trans));

            // 7. Ah+1=Ch*Ah (using the already-deflated Ch+1)
            A = C.times(A);
        }

        // finish: optionally zero small coefficients, then store model
        if (getNumCoefficients() > 0)
            slim(W);
        m_W = W;
        T = X.times(m_W);  // scores for the training data
        X_new = T;
        m_B = W.times(Q.transpose());

        switch (m_PredictionType) {
        case ALL:
            // predicted response: y = T*P'*B
            y = T.times(P.transpose()).times(m_B);
            break;
        case NONE:
        case EXCEPT_CLASS:
            // keep the original class values
            y = MatrixHelper.getY(data);
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        result = MatrixHelper.toInstances(getOutputFormat(), X_new, y);
    } else {
        // model already initialized: just project X and predict
        X = MatrixHelper.getX(data);
        X_new = X.times(m_W);

        switch (m_PredictionType) {
        case ALL:
            y = X.times(m_B);
            break;
        case NONE:
        case EXCEPT_CLASS:
            y = MatrixHelper.getY(data);
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        result = MatrixHelper.toInstances(getOutputFormat(), X_new, y);
    }

    return result;
}

From source file:cyber009.ann.ANN.java

/**
 * Estimates the weights with the ordinary-least-squares normal equations:
 * w = (X'X)^-1 X' y, and writes the result into {@code v.WEIGHT}.
 *
 * <p>Requires {@code X'X} to be invertible (design matrix of full column
 * rank); otherwise {@code Matrix.inverse()} fails.</p>
 */
public void weightFindMatrix() {
    Matrix X = new Matrix(v.X);
    Matrix Y = new Matrix(v.D, 1);

    // copy the target values into a column vector
    for (int d = 0; d < v.D; d++) {
        Y.set(d, 0, v.TARGET[d]);
    }

    // normal equations: W = (X'X)^-1 X' Y
    // (the original zero-initialized a v.N x 1 matrix W that was immediately
    // overwritten by this product — dead code, removed)
    Matrix Xt = X.transpose();
    Matrix W = Xt.times(X).inverse().times(Xt).times(Y);

    // Copy solution back. The original loop ran "n <= v.N", reading one row
    // past the end of the v.N-row solution vector and throwing
    // ArrayIndexOutOfBoundsException; the bound is now exclusive, matching
    // the v.N x 1 shape the original code itself declared for W.
    for (int n = 0; n < v.N; n++) {
        v.WEIGHT[n] = W.get(n, 0);
    }
}

From source file:meka.classifiers.multilabel.PLST.java

License:Open Source License

/**
 * Transforms the predictions of the internal classifier back to the original labels.
 *
 * @param y The predictions that should be transformed back. The array consists only of
 * the predictions as they are returned from the internal classifier.
 * @return The transformed predictions./*from w  w w.ja  v a 2s. co  m*/
 */
@Override
public double[] transformPredictionsBack(double[] y) {
    // y consists of predictions and maxindex, we need only predictions
    double[] predictions = new double[y.length / 2];

    for (int i = 0; i < predictions.length; i++) {
        predictions[i] = y[predictions.length + i];
    }

    double[][] dataArray = new double[1][predictions.length];

    dataArray[0] = predictions;

    Matrix yMat = new Matrix(dataArray);

    Matrix multiplied = yMat.times(this.m_v.transpose()).plus(m_Shift);

    double[] res = new double[multiplied.getColumnDimension()];

    // change back from -1/1 coding to 0/1
    for (int i = 0; i < res.length; i++) {
        res[i] = multiplied.getArray()[0][i] < 0.0 ? 0.0 : 1.0;
    }

    return res;
}

From source file:mulan.transformations.ColumnSubsetSelection.java

License:Open Source License

/**
 * Applies column-subset selection (CSSP) to the label space: samples kappa
 * label columns with probabilities derived from the top singular vectors of
 * the label indicator matrix, removes the unsampled label columns and
 * computes the projection matrix (pseudo-inverse of the reduced label
 * matrix times the full label matrix) needed to map predictions back.
 *
 * @param data  the multi-label data to reduce
 * @param kappa the number of label columns to keep (must be &lt; numLabels)
 * @param seed  seed for the column-sampling RNG
 * @return the reduced dataset, or {@code null} if anything failed
 */
public MultiLabelInstances transform(MultiLabelInstances data, int kappa, long seed) {
    try {

        if (kappa >= data.getNumLabels()) {
            throw new MulanRuntimeException(
                    "Dimensionality reduction parameter should not exceed or be equal to the total count of labels!");
        }

        // integer indices of physical label assignments
        int[] labelIndices = data.getLabelIndices();
        int[] indices = new int[labelIndices.length];

        System.arraycopy(labelIndices, 0, indices, 0, labelIndices.length);

        // load the label indicator matrix into a Matrix object
        double[][] datmatrix = new double[data.getDataSet().numInstances()][labelIndices.length];
        Matrix mat = new Matrix(datmatrix);

        for (int i = 0; i < data.getDataSet().numInstances(); i++) {
            Instance instance = data.getDataSet().instance(i);
            for (int j = 0; j < labelIndices.length; j++) {
                // parse the label value from its string representation
                mat.set(i, j, Double.parseDouble(instance.toString(labelIndices[j])));
            }
        }

        // make private copy of the label matrix
        this.Y = mat;

        // compute singular value decomposition of the label indicator matrix
        SingularValueDecomposition svd = new SingularValueDecomposition(mat);

        assert (svd.getV().getRowDimension() == svd.getV().getColumnDimension());

        Matrix rVec = svd.getV();
        Matrix Vk = new Matrix(new double[svd.getV().getRowDimension()][kappa]);

        // snippet (2): build Vk from the first kappa singular vectors.
        // NOTE(review): column i of Vk is filled from ROW i of V; the top-k
        // right singular vectors are the COLUMNS of V, so this may need to be
        // rVec.get(j, i) — confirm against the CSSP paper / reference code.
        for (int i = 0; i < kappa; i++) {
            for (int j = 0; j < svd.getV().getColumnDimension(); j++) {
                Vk.set(j, i, rVec.get(i, j));
            }
        }

        // compute column selection probabilities: row norms of Vk
        double[] selectionProbabilities = new double[Vk.getRowDimension()];
        double[] selectionProbabilitiesCDF = new double[Vk.getRowDimension()];

        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] = 0.0;
            for (int j = 0; j < kappa; j++) {
                selectionProbabilities[i] += Math.pow(Vk.get(i, j), 2);
            }
            selectionProbabilities[i] = Math.sqrt(selectionProbabilities[i]);
        }

        // normalize probabilities so they sum to 1
        double psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
        }

        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] /= psum;
        }

        // build the cumulative distribution for roulette-wheel sampling
        psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
            selectionProbabilitiesCDF[i] = psum;
        }

        // set of sampled (virtual) column indices
        sampledIndiceSet = new java.util.HashSet();

        // run column-sampling loop
        int sampling_count = 0;

        Random generator = new Random(seed);
        while (sampledIndiceSet.size() < kappa) // ...loop until knapsack gets filled...
        {
            // pick a random number
            double roulette = generator.nextDouble();

            // seek closest match according to sampling probabilities
            int closest_match = -1;

            // iterate label cols
            for (int i = 0; i < Vk.getRowDimension(); i++) {
                if (roulette < selectionProbabilitiesCDF[i]) // ...spot a possible match...
                {
                    // ...if so, select and quit scope...
                    closest_match = i; // BEWARE! "i" is an index over the label enumeration, not an ordering index!
                    break;
                }
            }

            // if we stepped on the flag, something serious is going on!
            assert (closest_match != -1);

            // see if column was selected; if not, add it
            if (!sampledIndiceSet.contains((Object) closest_match)) {
                sampledIndiceSet.add((Object) closest_match);
            }

            sampling_count += 1;
        }

        System.out.println("Sampling loop completed in " + sampling_count + " runs.");

        // compute indices-to-remove array
        indicesToRemove = new int[labelIndices.length - sampledIndiceSet.size()];

        // compute all **PHYSICAL** (not VIRTUAL) indices of label columns for CSSP to remove
        int idx = 0;
        for (int i = 0; i < labelIndices.length; i++) {
            if (!sampledIndiceSet.contains((Object) i)) {
                indicesToRemove[idx] = indices[i];
                idx += 1;
            }
        }

        // apply CSSP: select columns to remove
        int[] selectedIndicesObj = indicesToRemove.clone();
        selectedIndicesInt = new int[selectedIndicesObj.length];
        for (int i = 0; i < selectedIndicesObj.length; i++) {
            selectedIndicesInt[i] = (int) selectedIndicesObj[i];
        }

        // allocate the column-reduced label indicator matrix
        double[][] datmatrix2 = new double[data.getDataSet().numInstances()][labelIndices.length
                - selectedIndicesInt.length];
        Matrix matC = new Matrix(datmatrix2);

        // compute the physical indices to keep (all labels not selected for removal)
        java.util.LinkedList<Integer> indicesToKeep = new java.util.LinkedList();
        for (int i = 0; i < labelIndices.length; i++) {
            boolean keep = true;

            // see if this col has to be removed
            for (int k = 0; k < selectedIndicesInt.length; k++) {
                if (selectedIndicesInt[k] == labelIndices[i]) {
                    keep = false;
                    break;
                }
            }

            // add if we actually should keep this...
            if (keep) {
                indicesToKeep.add(labelIndices[i]);
            }
        }

        assert (indicesToKeep.size() == matC.getColumnDimension());

        // fill matC with the kept label columns
        for (int i = 0; i < matC.getRowDimension(); i++) {
            // get data instance
            Instance instance = data.getDataSet().instance(i);

            // replicate data from ALL columns that WOULD not be removed by CSSP
            for (int j = 0; j < matC.getColumnDimension(); j++) {
                // get label index
                int corrIdx = (int) indicesToKeep.get(j);

                // update matC
                matC.set(i, j, Double.parseDouble(instance.toString(corrIdx)));
            }
        }

        // compute the Moore-Penrose pseudo-inverse of matC.
        // An SVD-based alternative (more robust for rank-deficient matC) is
        // kept below for reference:
        /*
         SingularValueDecomposition decomp = Vk.svd();
                
         Matrix S = decomp.getS();
         Matrix Scross = new Matrix(selectedIndicesInt.length,selectedIndicesInt.length);
         for(int i = 0; i < selectedIndicesInt.length; i++) {
         for(int j = 0; j < selectedIndicesInt.length; j++) {
         if(i == j) {
         if(S.get(i, j) == 0) {
         Scross.set(i, j, 0.0);
         } else {
         Scross.set(i, j, 1 / S.get(i, j));
         }
         } else {
         Scross.set(i, j, 0.0);
         }
         }
         }
                
         this.Yc = decomp.getV().times(Scross).times(decomp.getU().transpose());
         */

        // traditional pseudo-inverse: left inverse for tall matrices,
        // right inverse for wide ones
        if (matC.getRowDimension() >= matC.getColumnDimension()) {
            this.Yc = ((matC.transpose().times(matC)).inverse()).times(matC.transpose());
        } else {
            this.Yc = matC.transpose().times((matC.times(matC.transpose()).inverse()));
        }

        this.ProjectionMatrix = Yc.times(Y); // compute projection matrix

        // add sampled indices to Remove object
        remove = new Remove();
        remove.setAttributeIndicesArray(selectedIndicesInt);
        remove.setInvertSelection(false);
        remove.setInputFormat(data.getDataSet());

        // apply remove filter on the labels
        transformed = Filter.useFilter(data.getDataSet(), remove);

        this.sampledIndicesObj = indicesToKeep.toArray();

        return data.reintegrateModifiedDataSet(transformed);

    } catch (Exception ex) {
        // NOTE(review): all exceptions are swallowed and null is returned;
        // callers get no diagnostic — consider logging or rethrowing.
        return null;
    }
}

From source file:org.knime.knip.suise.node.boundarymodel.BoundaryModel.java

License:Open Source License

/**
 * Computes the L-MDL score log2(det(I + d/(eps^2*n) * V*V')) * (d + n)/2
 * from the contour-data sample vectors.
 *
 * @param epsilon the quantization accuracy used in the scaling factor
 * @return the L-MDL value
 */
private double calcLMDL(double epsilon) {

    // create sample matrix: one column per contour vector
    Matrix V = new Matrix(m_contourData.numFeatures(), m_contourData.numVectors());

    for (int i = 0; i < m_contourData.numVectors(); i++) {
        double[] vec = m_contourData.getVector(i);
        for (int j = 0; j < vec.length; j++) {
            V.set(j, i, vec[j]);
        }
    }

    // estimate of the (scaled) scatter matrix: d/(eps^2 * n) * V*V'.
    // BUG FIX: Matrix.times(double) returns a NEW matrix and does not modify
    // the receiver; the original code discarded its result, so the epsilon
    // scaling (and hence the epsilon parameter) had no effect.
    Matrix W = V.times(V.transpose())
            .times(m_contourData.numFeatures() / (epsilon * epsilon * m_contourData.numVectors()));

    // W = I + scaled scatter matrix
    W = Matrix.identity(m_contourData.numFeatures(), m_contourData.numFeatures()).plus(W);

    return Utils.log2(W.det()) * (m_contourData.numFeatures() + m_contourData.numVectors()) / 2;

}

From source file:org.knime.knip.suise.node.boundarymodel.contourdata.ContourCluster.java

License:Open Source License

/**
 * Computes the L-MDL score log2(det(I + d/(eps^2*n) * V*V')) * (d + n)/2
 * over the cluster's sample vectors, with a fixed accuracy epsilon = 5.
 *
 * @return the L-MDL value
 */
private double calcLMDL() {

    // create sample matrix: one column per sample vector
    Matrix V = new Matrix(m_cdata.numFeatures(), m_samples.size());

    for (int i = 0; i < m_samples.size(); i++) {
        double[] vec = m_cdata.get(m_samples.get(i)[0], m_samples.get(i)[1]);
        for (int j = 0; j < vec.length; j++) {
            V.set(j, i, vec[j]);
        }
    }

    double epsilon = 5;

    // estimate of the (scaled) scatter matrix: d/(eps^2 * n) * V*V'.
    // BUG FIX: Matrix.times(double) returns a NEW matrix and does not modify
    // the receiver; the original code discarded its result, so the epsilon
    // scaling never took effect.
    Matrix W = V.times(V.transpose())
            .times(m_cdata.numFeatures() / (epsilon * epsilon * m_samples.size()));

    // W = I + scaled scatter matrix
    W = Matrix.identity(m_cdata.numFeatures(), m_cdata.numFeatures()).plus(W);

    return Utils.log2(W.det()) * (m_cdata.numFeatures() + m_samples.size()) / 2;

}

From source file:org.mitre.ccv.CompleteCompositionVectorMain.java

License:Open Source License

/**
 * Performs Affinity Propagation Clustering
 * /*from  w  w w.j a  v  a  2s .  c  o  m*/
 * @param dm
 * @param filename
 */
public AffinityPropagation cluster(DistanceMatrix dm, int type) {
    double[][] mVals = dm.getClonedDistances();
    Matrix m = new Matrix(mVals);
    if (distCalc == 1 || distCalc == 3) {
        m = m.times(-1.0);
    } else if (distCalc == 2) {
        Matrix o = new Matrix(m.getRowDimension(), m.getColumnDimension(), 1.0);
        m = o.minus(m);
    }

    int total = m.getColumnDimension();
    double values[] = new double[(total * total - total) / 2];
    int count = 0;
    for (int i = 1; i < total; i++) {
        for (int j = i + 1; j < total - 1; j++) {
            values[count] = m.get(i, j);
            count++;
        }

    }

    double preference = getPreference(values, type);

    AffinityPropagation ap = new AffinityPropagation(m, 5000, 300, 0.9, preference);

    return ap;
}

From source file:org.mitre.clustering.AffinityPropagation.java

License:Open Source License

/**
 * Initializes Affinity Propagation on the given similarity matrix and
 * immediately runs the message-passing iterations.
 *
 * @param sims   the similarity matrix S
 * @param max    maximum number of iterations
 * @param cons   convergence iteration count
 * @param lambda damping factor
 * @param p      shared preference value placed on the diagonal of S
 */
public AffinityPropagation(Matrix sims, int max, int cons, double lambda, double p) {

    s = sims;
    maxits = max;
    convits = cons;
    lam = lambda;

    count = s.getColumnDimension();

    /*
     * Add noise to get rid of degeneracies.
     * NOTE(review): Matrix.times(Matrix) is a matrix PRODUCT, so t.times(rand)
     * is not element-wise noise; the Frey/Dueck reference implementation uses
     * element-wise scaling (arrayTimes in JAMA terms) — confirm intent.
     * Also Double.MIN_VALUE (~4.9e-324) makes s.times(Double.MIN_VALUE)
     * underflow to essentially zero for typical similarities.
     */
    Matrix rand = Matrix.random(count, count);
    Matrix small = new Matrix(count, count, .0000001);

    Matrix t = s.times(Double.MIN_VALUE).plus(small);
    t = t.times(rand);
    s = s.plus(t);

    /*
     * Put preferences on diagonal of S
     */
    for (int i = 0; i < count; i++) {
        s.set(i, i, p);
    }

    // NOTE(review): calling the (potentially overridable) run() from a
    // constructor is unsafe if this class is ever subclassed.
    try {
        run();
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}