Example usage for weka.core.matrix Matrix transpose

List of usage examples for weka.core.matrix Matrix transpose

Introduction

In this page you can find the example usage for weka.core.matrix Matrix transpose.

Prototype

public Matrix transpose() 

Source Link

Document

Matrix transpose.

Usage

From source file:adams.data.instancesanalysis.pls.PLS1.java

License:Open Source License

/**
 * Transforms the data, initializes if necessary.
 *
 * @param data   the data to use/* w w  w  . jav a2  s .  c om*/
 */
protected Instances doTransform(Instances data, Map<String, Object> params) throws Exception {
    Matrix X, X_trans;
    Matrix y;
    Matrix W, w;
    Matrix T, t, t_trans;
    Matrix P, p, p_trans;
    double b;
    Matrix b_hat;
    int j;
    Matrix tmp;
    Instances result;

    // initialization
    if (!isInitialized()) {
        // split up data
        X = MatrixHelper.getX(data);
        y = MatrixHelper.getY(data);
        X_trans = X.transpose();

        // init
        W = new Matrix(data.numAttributes() - 1, getNumComponents());
        P = new Matrix(data.numAttributes() - 1, getNumComponents());
        T = new Matrix(data.numInstances(), getNumComponents());
        b_hat = new Matrix(getNumComponents(), 1);

        for (j = 0; j < getNumComponents(); j++) {
            // 1. step: wj
            w = X_trans.times(y);
            MatrixHelper.normalizeVector(w);
            MatrixHelper.setVector(w, W, j);

            // 2. step: tj
            t = X.times(w);
            t_trans = t.transpose();
            MatrixHelper.setVector(t, T, j);

            // 3. step: ^bj
            b = t_trans.times(y).get(0, 0) / t_trans.times(t).get(0, 0);
            b_hat.set(j, 0, b);

            // 4. step: pj
            p = X_trans.times(t).times(1 / t_trans.times(t).get(0, 0));
            p_trans = p.transpose();
            MatrixHelper.setVector(p, P, j);

            // 5. step: Xj+1
            X = X.minus(t.times(p_trans));
            y = y.minus(t.times(b));
        }

        // W*(P^T*W)^-1
        tmp = W.times(((P.transpose()).times(W)).inverse());

        // factor = W*(P^T*W)^-1 * b_hat
        m_r_hat = tmp.times(b_hat);

        // save matrices
        m_P = P;
        m_W = W;
        m_b_hat = b_hat;

        result = predict(data);
    }
    // prediction
    else {
        result = predict(data);
    }

    return result;
}

From source file:adams.data.instancesanalysis.pls.SIMPLS.java

License:Open Source License

/**
 * Transforms the data, initializes if necessary.
 *
 * @param data   the data to use/*from   ww w  . j ava2  s . co  m*/
 */
protected Instances doTransform(Instances data, Map<String, Object> params) throws Exception {
    Matrix A, A_trans;
    Matrix M;
    Matrix X, X_trans;
    Matrix X_new;
    Matrix Y, y;
    Matrix C, c;
    Matrix Q, q;
    Matrix W, w;
    Matrix P, p, p_trans;
    Matrix v, v_trans;
    Matrix T;
    Instances result;
    int h;

    if (!isInitialized()) {
        // init
        X = MatrixHelper.getX(data);
        X_trans = X.transpose();
        Y = MatrixHelper.getY(data);
        A = X_trans.times(Y);
        M = X_trans.times(X);
        C = Matrix.identity(data.numAttributes() - 1, data.numAttributes() - 1);
        W = new Matrix(data.numAttributes() - 1, getNumComponents());
        P = new Matrix(data.numAttributes() - 1, getNumComponents());
        Q = new Matrix(1, getNumComponents());

        for (h = 0; h < getNumComponents(); h++) {
            // 1. qh as dominant EigenVector of Ah'*Ah
            A_trans = A.transpose();
            q = MatrixHelper.getDominantEigenVector(A_trans.times(A));

            // 2. wh=Ah*qh, ch=wh'*Mh*wh, wh=wh/sqrt(ch), store wh in W as column
            w = A.times(q);
            c = w.transpose().times(M).times(w);
            w = w.times(1.0 / StrictMath.sqrt(c.get(0, 0)));
            MatrixHelper.setVector(w, W, h);

            // 3. ph=Mh*wh, store ph in P as column
            p = M.times(w);
            p_trans = p.transpose();
            MatrixHelper.setVector(p, P, h);

            // 4. qh=Ah'*wh, store qh in Q as column
            q = A_trans.times(w);
            MatrixHelper.setVector(q, Q, h);

            // 5. vh=Ch*ph, vh=vh/||vh||
            v = C.times(p);
            MatrixHelper.normalizeVector(v);
            v_trans = v.transpose();

            // 6. Ch+1=Ch-vh*vh', Mh+1=Mh-ph*ph'
            C = C.minus(v.times(v_trans));
            M = M.minus(p.times(p_trans));

            // 7. Ah+1=ChAh (actually Ch+1)
            A = C.times(A);
        }

        // finish
        if (getNumCoefficients() > 0)
            slim(W);
        m_W = W;
        T = X.times(m_W);
        X_new = T;
        m_B = W.times(Q.transpose());

        switch (m_PredictionType) {
        case ALL:
            y = T.times(P.transpose()).times(m_B);
            break;
        case NONE:
        case EXCEPT_CLASS:
            y = MatrixHelper.getY(data);
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        result = MatrixHelper.toInstances(getOutputFormat(), X_new, y);
    } else {
        X = MatrixHelper.getX(data);
        X_new = X.times(m_W);

        switch (m_PredictionType) {
        case ALL:
            y = X.times(m_B);
            break;
        case NONE:
        case EXCEPT_CLASS:
            y = MatrixHelper.getY(data);
            break;
        default:
            throw new IllegalStateException("Unhandled prediction type: " + m_PredictionType);
        }

        result = MatrixHelper.toInstances(getOutputFormat(), X_new, y);
    }

    return result;
}

From source file:cyber009.ann.ANN.java

/**
 * Solves for the weight vector via the normal equation
 * W = (X^T X)^-1 X^T Y (ordinary least squares), then copies the result
 * into {@code v.WEIGHT}.
 */
public void weightFindMatrix() {
    Matrix X = new Matrix(v.X);
    Matrix Y = new Matrix(v.D, 1);

    // response column vector from the targets
    for (int d = 0; d < v.D; d++) {
        Y.set(d, 0, v.TARGET[d]);
    }

    // normal equation: (X^T X)^-1 X^T Y
    // (the original pre-allocated and zero-filled a v.N x 1 matrix W that was
    //  immediately overwritten by this result — dead code, removed)
    Matrix W = X.transpose().times(X).inverse().times(X.transpose()).times(Y);

    // NOTE(review): the inclusive bound (n <= v.N) reads v.N + 1 entries from
    // W; this only works if X has v.N + 1 columns (e.g. a bias column) —
    // confirm, otherwise the last iteration throws an out-of-bounds error.
    for (int n = 0; n <= v.N; n++) {
        v.WEIGHT[n] = W.get(n, 0);
    }
}

From source file:mulan.classifier.meta.MLCSSP.java

License:Open Source License

@Override
protected MultiLabelOutput makePredictionInternal(Instance instance) {
    try {
        // predict on the column-subset-transformed instance
        Instance transformed = css.transformInstance(instance);
        MultiLabelOutput out = baseLearner.makePrediction(transformed);

        double[] confidences = out.getConfidences();

        // response column vector (kappa x 1)
        Matrix conf = new Matrix(kappa, 1);
        for (int i = 0; i < kappa; i++) {
            conf.set(i, 0, confidences[i]);
        }

        // project classifier response back to the full label space (1 x numLabels)
        Matrix projectedResponse = conf.transpose().times(css.getProjectionMatrix());

        int numLabels = projectedResponse.getColumnDimension();
        boolean[] projected_bipartition = new boolean[numLabels];
        double[] projected_confidences = new double[numLabels];

        for (int i = 0; i < numLabels; i++) {
            projected_confidences[i] = projectedResponse.get(0, i);
            // "? true : false" in the original was redundant
            projected_bipartition[i] = Math.ceil(projected_confidences[i]) == 1;
        }

        return new MultiLabelOutput(projected_bipartition, projected_confidences);

    } catch (Exception ex) {
        // the original's three catch blocks (InvalidDataException,
        // ModelInitializationException, Exception) all logged identically,
        // so a single Exception handler preserves behavior
        Logger.getLogger(MLCSSP.class.getName()).log(Level.SEVERE, null, ex);
    }

    // prediction failed; caller must handle the null result
    return null;
}

From source file:mulan.classifier.neural.ThresholdFunction.java

License:Open Source License

/**
 * Build a threshold function based on input data.
 * The threshold function is built for a particular model: per-example
 * separating thresholds are derived, then fitted by least squares against
 * the model outputs (plus a bias column).
 *
 * @param idealLabels the ideal output for each input pattern, which a model should output.
 *                  First index is expected to be number of examples and second is the label index.
 * @param modelOutLabels the real output of a model for each input pattern.
 *                    First index is expected to be number of examples and second is the label index.
 * @throws IllegalArgumentException if dimensions of input arrays do not match
 */
public void build(final double[][] idealLabels, final double[][] modelOutLabels) {

    if (idealLabels == null || modelOutLabels == null) {
        // message typo fixed: "Non of" -> "None of"
        throw new IllegalArgumentException("None of the input parameters can be null.");
    }

    int numExamples = idealLabels.length;
    int numLabels = idealLabels[0].length;

    if (modelOutLabels.length != numExamples || modelOutLabels[0].length != numLabels) {
        // grammar fixed: "does not agree" -> "do not agree"
        throw new IllegalArgumentException("Matrix dimensions of input parameters do not agree.");
    }

    double[] thresholds = new double[numExamples];
    double[] isLabelModelOuts = new double[numLabels];
    double[] isNotLabelModelOuts = new double[numLabels];
    for (int example = 0; example < numExamples; example++) {
        // smallest model output among labels that should be "on" and
        // largest among labels that should be "off"
        Arrays.fill(isLabelModelOuts, Double.MAX_VALUE);
        Arrays.fill(isNotLabelModelOuts, -Double.MAX_VALUE);
        for (int label = 0; label < numLabels; label++) {
            if (idealLabels[example][label] == 1) {
                isLabelModelOuts[label] = modelOutLabels[example][label];
            } else {
                isNotLabelModelOuts[label] = modelOutLabels[example][label];
            }
        }
        double isLabelMin = isLabelModelOuts[Utils.minIndex(isLabelModelOuts)];
        double isNotLabelMax = isNotLabelModelOuts[Utils.maxIndex(isNotLabelModelOuts)];

        if (isLabelMin != isNotLabelMax) {
            // the two extremes differ, so a separating threshold exists
            if (isLabelMin == Double.MAX_VALUE) {
                // marginal case: no label is "on" — threshold just above the largest output
                thresholds[example] = isNotLabelMax + 0.1;
            } else if (isNotLabelMax == -Double.MAX_VALUE) {
                // marginal case: all labels are "on" — threshold just below the smallest output
                thresholds[example] = isLabelMin - 0.1;
            } else {
                // center of the separating segment
                thresholds[example] = (isLabelMin + isNotLabelMax) / 2;
            }
        } else {
            // extremes coincide (no clean separation): use the common value
            // (the original comment "when minimum is unique" described the wrong branch)
            thresholds[example] = isLabelMin;
        }
    }

    // least-squares fit: [modelOutLabels | 1] * weights = thresholds
    // (last column of modelMatrix stays at 1.0 and acts as the bias term)
    Matrix modelMatrix = new Matrix(numExamples, numLabels + 1, 1.0);
    modelMatrix.setMatrix(0, numExamples - 1, 0, numLabels - 1, new Matrix(modelOutLabels));
    Matrix weights = modelMatrix.solve(new Matrix(thresholds, thresholds.length));
    double[][] weightsArray = weights.transpose().getArray();

    parameters = Arrays.copyOf(weightsArray[0], weightsArray[0].length);
}

From source file:mulan.transformations.ColumnSubsetSelection.java

License:Open Source License

/**
 * Applies column subset selection (CSSP): samples kappa label columns with
 * probabilities proportional to their leverage scores from the SVD of the
 * label indicator matrix, removes the unselected label columns via a Remove
 * filter, and stores the projection matrix that maps reduced predictions
 * back to the full label space.
 *
 * @param data  the multi-label data set to transform
 * @param kappa the reduced number of label columns; must be strictly less
 *              than the total number of labels
 * @param seed  seed for the sampling random generator
 * @return the transformed data set, or null if any exception occurred
 */
public MultiLabelInstances transform(MultiLabelInstances data, int kappa, long seed) {
    try {

        if (kappa >= data.getNumLabels()) {
            throw new MulanRuntimeException(
                    "Dimensionality reduction parameter should not exceed or be equal to the total count of labels!");
        }

        // integer indices of physical label assignments
        int[] labelIndices = data.getLabelIndices();
        int[] indices = new int[labelIndices.length];

        System.arraycopy(labelIndices, 0, indices, 0, labelIndices.length);

        // load label indicator matrix in a Matrix object
        double[][] datmatrix = new double[data.getDataSet().numInstances()][labelIndices.length];
        Matrix mat = new Matrix(datmatrix);

        for (int i = 0; i < data.getDataSet().numInstances(); i++) {
            Instance instance = data.getDataSet().instance(i);
            for (int j = 0; j < labelIndices.length; j++) {
                mat.set(i, j, Double.parseDouble(instance.toString(labelIndices[j])));
                //DEBUG: System.out.print("" + Double.parseDouble(instance.toString(labelIndices[j])) + ",");
            }
        }

        // make private copy of the label matrix
        this.Y = mat;

        // compute eigenvalue analysis of label indicator matrix
        SingularValueDecomposition svd = new SingularValueDecomposition(mat);

        //DEBUG: System.out.println("rows = " + svd.getV().getRowDimension() + ", cols = " + svd.getV().getColumnDimension());

        // V is square (labels x labels) for this decomposition
        assert (svd.getV().getRowDimension() == svd.getV().getColumnDimension());

        Matrix rVec = svd.getV();
        Matrix Vk = new Matrix(new double[svd.getV().getRowDimension()][kappa]);

        // snippet (2): copy the leading kappa singular directions into Vk
        // NOTE(review): this reads rVec.get(i, j) — i.e. the ROWS of V — into
        // the columns of Vk; the right singular vectors are the COLUMNS of V,
        // so this may be an unintended transposition. Confirm the intended
        // orientation against the CSSP reference.
        for (int i = 0; i < kappa; i++) {
            for (int j = 0; j < svd.getV().getColumnDimension(); j++) {
                Vk.set(j, i, rVec.get(i, j));
            }
        }

        // compute column selection probabilities (leverage scores:
        // euclidean norm of each row of Vk)
        double[] selectionProbabilities = new double[Vk.getRowDimension()];
        double[] selectionProbabilitiesCDF = new double[Vk.getRowDimension()];

        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] = 0.0;
            for (int j = 0; j < kappa; j++) {
                selectionProbabilities[i] += Math.pow(Vk.get(i, j), 2);
            }
            selectionProbabilities[i] = Math.sqrt(selectionProbabilities[i]);
        }

        // normalize probabilities so they sum to one
        double psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
            //System.out.println("psum = " + psum);
        }
        //System.out.println("psum = " + psum);
        //assert (psum != 0 && psum == 1.0); // must be non-zero and unitary

        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] /= psum;
        }

        // build the cumulative distribution used for roulette sampling
        psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
            selectionProbabilitiesCDF[i] = psum;
        }

        // add selected columns on a linked list
        sampledIndiceSet = new java.util.HashSet();

        // run column-sampling loop
        int sampling_count = 0;

        Random generator = new Random(seed);
        while (sampledIndiceSet.size() < kappa) // ...loop until knapsack gets filled...
        {
            // pick a random number

            //DEBUG:
            //double roulette = generator.nextDouble() * 0.5;
            double roulette = generator.nextDouble();

            // seek closest match according to sampling probabilities
            int closest_match = -1;

            // iterate label cols: inverse-CDF lookup of the roulette value
            for (int i = 0; i < Vk.getRowDimension(); i++) {
                if (roulette < selectionProbabilitiesCDF[i]) // ...spot a possible match...
                {
                    // ...if so, select and quit scope...
                    closest_match = i; // BEWARE! "i" is an index over the label enumeration, not an ordering index!
                    break;
                }
            }

            // if we stepped on the flag, something serious is going on!
            assert (closest_match != -1);

            // see if column was selected; if not, add it
            if (!sampledIndiceSet.contains((Object) closest_match)) {
                sampledIndiceSet.add((Object) closest_match);
                //System.out.println("DEBUG(CSSP): Added column " + closest_match + " to the sampled column set!");
            }

            sampling_count += 1;
        }

        System.out.println("Sampling loop completed in " + sampling_count + " runs.");

        // compute indices-to-remove array
        indicesToRemove = new int[labelIndices.length - sampledIndiceSet.size()];

        // compute all **PHYSICAL** (not VIRTUAL) indices of label columns for CSSP to remove
        int idx = 0;
        for (int i = 0; i < labelIndices.length; i++) {
            if (!sampledIndiceSet.contains((Object) i)) {
                indicesToRemove[idx] = indices[i];
                idx += 1;
            }
        }

        // apply CSSP: select columns to remove
        // NOTE(review): cloning an int[] and then element-wise casting each
        // int to int is a no-op copy — kept as-is to preserve behavior.
        int[] selectedIndicesObj = indicesToRemove.clone();
        selectedIndicesInt = new int[selectedIndicesObj.length];
        for (int i = 0; i < selectedIndicesObj.length; i++) {
            selectedIndicesInt[i] = (int) selectedIndicesObj[i];
        }

        // compute Moore-Penrose pseudo-inverse matrix of the column-reduced label indicator matrix
        double[][] datmatrix2 = new double[data.getDataSet().numInstances()][labelIndices.length
                - selectedIndicesInt.length];
        Matrix matC = new Matrix(datmatrix2);

        //DEBUG:
        //System.out.println("Selecting only " + matC.getColumnDimension() + " columns; removing " + selectedIndicesInt.length + " columns out of an original total of " + data.getLabelIndices().length + " labels!");

        // compute indices to keep
        java.util.LinkedList<Integer> indicesToKeep = new java.util.LinkedList();
        for (int i = 0; i < labelIndices.length; i++) {
            boolean keep = true;

            // see if this col has to be removed
            for (int k = 0; k < selectedIndicesInt.length; k++) {
                if (selectedIndicesInt[k] == labelIndices[i]) {
                    keep = false;
                    break;
                }
            }

            // add if we actually should keep this...
            if (keep) {
                indicesToKeep.add(labelIndices[i]);
            }
        }

        assert (indicesToKeep.size() == matC.getColumnDimension());

        for (int i = 0; i < matC.getRowDimension(); i++) {
            // get data instance
            Instance instance = data.getDataSet().instance(i);

            // replicate data from ALL columns that WOULD not be removed by CSSP           
            for (int j = 0; j < matC.getColumnDimension(); j++) {
                // get label indice
                int corrIdx = (int) indicesToKeep.get(j);

                // update matC
                matC.set(i, j, Double.parseDouble(instance.toString(corrIdx)));
            }
        }

        //DEBUG: System.out.println("matC rows = " + matC.getRowDimension() + ", cols = " + matC.getColumnDimension() + "\n data original label cols # = " + data.getLabelIndices().length);

        // make private copy of projection matrices

        // Moore-Penrose pseudo-inverse of the label matrix matC
        // see http://robotics.caltech.edu/~jwb/courses/ME115/handouts/pseudo.pdf for an SVD-based workaround for MP-inverse

        // Moore-Penrose pseudoinverse computation based on Singular Value Decomposition (SVD)
        /*
         SingularValueDecomposition decomp = Vk.svd();
                
         Matrix S = decomp.getS();
         Matrix Scross = new Matrix(selectedIndicesInt.length,selectedIndicesInt.length);
         for(int i = 0; i < selectedIndicesInt.length; i++) {
         for(int j = 0; j < selectedIndicesInt.length; j++) {
         if(i == j) {
         if(S.get(i, j) == 0) {
         Scross.set(i, j, 0.0);
         } else {
         Scross.set(i, j, 1 / S.get(i, j));
         }
         } else {
         Scross.set(i, j, 0.0);
         }
         }
         }
                
         this.Yc = decomp.getV().times(Scross).times(decomp.getU().transpose());
         */

        // traditional normal-equations form of the Moore-Penrose pseudoinverse;
        // the branch keeps the inverted Gram matrix small (min dimension squared)
        if (matC.getRowDimension() >= matC.getColumnDimension()) {
            this.Yc = ((matC.transpose().times(matC)).inverse()).times(matC.transpose());
        } else {
            this.Yc = matC.transpose().times((matC.times(matC.transpose()).inverse()));
        }

        //System.out.println("Yc rows: " + Yc.getRowDimension() + "\nYc cols: " + Yc.getColumnDimension() + "\n Y rows: " + Y.getRowDimension() + "\nY cols: " + Y.getColumnDimension());

        this.ProjectionMatrix = Yc.times(Y); // compute projection matrix

        // add sampled indices to Remove object
        remove = new Remove();
        remove.setAttributeIndicesArray(selectedIndicesInt);
        remove.setInvertSelection(false);
        remove.setInputFormat(data.getDataSet());

        // apply remove filter on the labels
        transformed = Filter.useFilter(data.getDataSet(), remove);

        this.sampledIndicesObj = indicesToKeep.toArray();

        return data.reintegrateModifiedDataSet(transformed);

    } catch (Exception ex) {
        // NOTE(review): exception is silently swallowed and null is returned;
        // callers must handle null. Consider at least logging the failure.
        //Logger.getLogger(BinaryRelevanceTransformation.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}

From source file:org.knime.knip.suise.node.boundarymodel.BoundaryModel.java

License:Open Source License

/**
 * Computes the model description-length term from the sample covariance
 * estimate: log2(det(I + (F / (eps^2 * N)) * V*V^T)) * (F + N) / 2,
 * where F is the feature count and N the number of contour vectors.
 *
 * @param epsilon the quantization/noise scale
 * @return the description-length value
 */
private double calcLMDL(double epsilon) {

    // create sample matrix: one column per contour vector (features x vectors)
    Matrix V = new Matrix(m_contourData.numFeatures(), m_contourData.numVectors());

    for (int i = 0; i < m_contourData.numVectors(); i++) {
        double[] vec = m_contourData.getVector(i);
        for (int j = 0; j < vec.length; j++) {
            V.set(j, i, vec[j]);
        }
    }

    // estimate of the covariance matrix (scatter matrix V*V^T)
    Matrix W = V.times(V.transpose());

    // BUG FIX: Matrix.times returns a NEW matrix and does not modify the
    // receiver; the original statement discarded the scaled result, so the
    // epsilon scaling silently never took effect.
    W = W.times(m_contourData.numFeatures() / (epsilon * epsilon * m_contourData.numVectors()));

    W = Matrix.identity(m_contourData.numFeatures(), m_contourData.numFeatures()).plus(W);

    return Utils.log2(W.det()) * (m_contourData.numFeatures() + m_contourData.numVectors()) / 2;

}

From source file:org.knime.knip.suise.node.boundarymodel.contourdata.ContourCluster.java

License:Open Source License

/**
 * Computes the model description-length term for the clustered samples:
 * log2(det(I + (F / (eps^2 * N)) * V*V^T)) * (F + N) / 2, where F is the
 * feature count and N the number of samples (epsilon fixed at 5).
 *
 * @return the description-length value
 */
private double calcLMDL() {

    // create sample matrix: one column per sample (features x samples)
    Matrix V = new Matrix(m_cdata.numFeatures(), m_samples.size());

    for (int i = 0; i < m_samples.size(); i++) {
        double[] vec = m_cdata.get(m_samples.get(i)[0], m_samples.get(i)[1]);
        for (int j = 0; j < vec.length; j++) {
            V.set(j, i, vec[j]);
        }
    }

    double epsilon = 5;

    // estimate of the covariance matrix (scatter matrix V*V^T)
    Matrix W = V.times(V.transpose());

    // BUG FIX: Matrix.times returns a NEW matrix and does not modify the
    // receiver; the original statement discarded the scaled result, so the
    // epsilon scaling silently never took effect.
    W = W.times(m_cdata.numFeatures() / (epsilon * epsilon * m_samples.size()));

    W = Matrix.identity(m_cdata.numFeatures(), m_cdata.numFeatures()).plus(W);

    return Utils.log2(W.det()) * (m_cdata.numFeatures() + m_samples.size()) / 2;

}