Example usage for org.apache.commons.math3.linear RealMatrix getRowDimension

List of usage examples for org.apache.commons.math3.linear RealMatrix getRowDimension

Introduction

On this page you can find example usage for org.apache.commons.math3.linear RealMatrix getRowDimension.

Prototype

int getRowDimension();

Document

Returns the number of rows in the matrix.
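
Before the examples from real projects below, here is a minimal illustrative sketch (not taken from any of those source files; the class name and matrix values are arbitrary). getRowDimension() is typically paired with getColumnDimension() to bound loops over the entries of a matrix:

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;

public class GetRowDimensionExample {
    public static void main(String[] args) {
        // a 3x2 matrix with arbitrary values
        RealMatrix m = new Array2DRowRealMatrix(
                new double[][] { { 1.0, 2.0 }, { 3.0, 4.0 }, { 5.0, 6.0 } });

        // getRowDimension() returns 3, getColumnDimension() returns 2
        for (int i = 0; i < m.getRowDimension(); i++) {
            for (int j = 0; j < m.getColumnDimension(); j++) {
                System.out.print(m.getEntry(i, j) + " ");
            }
            System.out.println();
        }
    }
}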

Usage

From source file:com.itemanalysis.psychometrics.factoranalysis.FactorAnalysisTest.java

@Test
public void eigenValues() {
    RealMatrix R = new Array2DRowRealMatrix(readHarman74Data());

    int nFactors = 2;
    int nVariables = 24;
    double[] x = new double[nVariables];
    for (int i = 0; i < nVariables; i++) {
        x[i] = .5;
    }

    for (int i = 0; i < nVariables; i++) {
        R.setEntry(i, i, 1.0 - x[i]);
    }

    EigenDecomposition eigen = new EigenDecomposition(R);
    RealMatrix eigenVectors = eigen.getV().getSubMatrix(0, nVariables - 1, 0, nFactors - 1);

    double[] ev = new double[nFactors];
    for (int i = 0; i < nFactors; i++) {
        ev[i] = Math.sqrt(eigen.getRealEigenvalue(i));
    }
    DiagonalMatrix evMatrix = new DiagonalMatrix(ev); // use the Apache DiagonalMatrix when upgrading to version 3.2
    RealMatrix LAMBDA = eigenVectors.multiply(evMatrix);
    RealMatrix SIGMA = (LAMBDA.multiply(LAMBDA.transpose()));
    RealMatrix RESID = R.subtract(SIGMA);

    double sum = 0.0;
    double squared = 0.0;
    for (int i = 0; i < RESID.getRowDimension(); i++) {
        for (int j = 0; j < RESID.getColumnDimension(); j++) {
            if (i != j) {
                sum += Math.pow(RESID.getEntry(i, j), 2);
            }
        }
    }

    System.out.println(sum);

    // RealMatrix SIGMA = (LAMBDA.multiply(LAMBDA.transpose()));
    //
    // System.out.println("SIGMA");
    // for(int i=0;i<SIGMA.getRowDimension();i++){
    // for(int j=0;j<SIGMA.getColumnDimension();j++){
    // System.out.print(SIGMA.getEntry(i, j) + " ");
    // }
    // System.out.println();
    // }
}

From source file:edu.cudenver.bios.matrix.GramSchmidtOrthonormalization.java

/**
 * Perform Gram Schmidt Orthonormalization on the specified 
 * matrix. The matrix A (mxn) is decomposed into two matrices 
 * Q (mxn), R (nxn) such that
 * <ul>
 * <li>A = QR
 * <li>Q'Q = Identity
 * <li>R is upper triangular
 * </ul> 
 * The resulting Q, R matrices can be retrieved with the getQ()
 * and getR() functions.
 * 
 * @param matrix
 */
public GramSchmidtOrthonormalization(RealMatrix matrix) {
    if (matrix == null)
        throw new IllegalArgumentException("Null matrix");

    // create the Q, R matrices
    int m = matrix.getRowDimension();
    int n = matrix.getColumnDimension();
    Q = MatrixUtils.createRealMatrix(m, n);
    R = MatrixUtils.createRealMatrix(n, n);

    // perform Gram Schmidt process using the following algorithm
    // let w<n> be the resulting orthonormal column vectors
    // let v<n> be the columns of the incoming matrix
    // w1 = (1/norm(v1))*v1
    // ...
    // wj = 1/norm(vj - projectionVj-1Vj)*[vj - projectionVj-1Vj]
    // where projectionVj-1Vj = (w1 * vj) * w1 + (w2 * vj) * w2 + ... + (wj-1 * vj) * wj-1
    //
    for (int i = 0; i < n; i++) {
        RealMatrix v = matrix.getColumnMatrix(i);
        for (int j = 0; j < i; j++) {
            RealMatrix Qj = Q.getColumnMatrix(j);
            double value = Qj.transpose().multiply(v).getEntry(0, 0);
            R.setEntry(j, i, value);
            v = v.subtract(Qj.scalarMultiply(value));
        }
        double norm = v.getFrobeniusNorm();
        R.setEntry(i, i, norm);
        Q.setColumnMatrix(i, v.scalarMultiply(1 / norm));
    }
}

From source file:eagle.security.userprofile.impl.UserProfileAnomalyEigenEvaluator.java

private RealMatrix normalizeData(RealMatrix matrix, UserProfileEigenModel model) {
    RealMatrix normalizedData = new Array2DRowRealMatrix(matrix.getRowDimension(), matrix.getColumnDimension());
    if (LOG.isDebugEnabled())
        LOG.debug("model statistics size: " + model.statistics().length);
    for (int i = 0; i < matrix.getRowDimension(); i++) {
        for (int j = 0; j < matrix.getColumnDimension(); j++) {
            double value = (matrix.getEntry(i, j) - model.statistics()[j].getMean())
                    / model.statistics()[j].getStddev();
            normalizedData.setEntry(i, j, value);
        }
    }
    return normalizedData;
}

From source file:edu.cudenver.bios.power.glmm.GLMMTestHotellingLawley.java

/**
 * Compute a Hotelling-Lawley Trace statistic
 *
 * @param H hypothesis sum of squares matrix
 * @param E error sum of squares matrix
 * @return F statistic
 */
private double getHotellingLawleyTrace(RealMatrix H, RealMatrix E) throws IllegalArgumentException {
    if (!H.isSquare() || !E.isSquare() || H.getColumnDimension() != E.getRowDimension())
        throw new IllegalArgumentException(
                "Failed to compute Hotelling-Lawley Trace: hypothesis and error matrices must be square and same dimensions");

    RealMatrix inverseE = new LUDecomposition(E).getSolver().getInverse();
    RealMatrix HinverseE = H.multiply(inverseE);

    return HinverseE.getTrace();
}

From source file:com.cloudera.oryx.common.math.RRQRDecomposition.java

/**
 * Return the effective numerical matrix rank.
 * <p>The effective numerical rank is the number of non-negligible
 * singular values.</p>
 * <p>This implementation looks at Frobenius norms of the sequence of
 * bottom right submatrices.  When a large fall in norm is seen,
 * the rank is returned. The drop is computed as:</p>
 * {@code (thisNorm/lastNorm) * rNorm < dropThreshold }
 * <p>
 * where thisNorm is the Frobenius norm of the current submatrix,
 * lastNorm is the Frobenius norm of the previous submatrix,
 * rNorm is the Frobenius norm of the complete matrix
 * </p>
 *
 * @param dropThreshold threshold triggering rank computation
 * @return effective numerical matrix rank
 */
public int getRank(final double dropThreshold) {
    RealMatrix r = getR();
    int rows = r.getRowDimension();
    int columns = r.getColumnDimension();
    int rank = 1;
    double lastNorm = r.getFrobeniusNorm();
    double rNorm = lastNorm;
    while (rank < Math.min(rows, columns)) {
        double thisNorm = r.getSubMatrix(rank, rows - 1, rank, columns - 1).getFrobeniusNorm();
        if (thisNorm == 0 || (thisNorm / lastNorm) * rNorm < dropThreshold) {
            break;
        }
        lastNorm = thisNorm;
        rank++;
    }
    return rank;
}

From source file:com.itemanalysis.psychometrics.cfa.TauEquivalentModel.java

public void setGradient(RealMatrix factorLoadingFirstDerivative, RealMatrix errorVarianceFirstDerivative,
        double[] gradient) {
    double sum = 0.0;
    for (int i = 0; i < factorLoadingFirstDerivative.getRowDimension(); i++) {
        sum += factorLoadingFirstDerivative.getEntry(i, 0);
    }
    gradient[0] = sum;

    for (int i = 0; i < errorVariance.length; i++) {
        gradient[i + 1] = errorVarianceFirstDerivative.getEntry(i, i);
    }

}

From source file:edu.cudenver.bios.power.test.general.TestDataAnalysis.java

private void checkFit(Test test, ModelFit fit) {
    System.out.println("Test: " + test + " Ndf: " + number.format(fit.numeratorDF) + " Ddf: "
            + number.format(fit.denominatorDF) + " F-crit: " + number.format(fit.Fvalue) + " p-value: "
            + number.format(fit.Pvalue));
    RealMatrix beta = fit.beta;
    System.out.println("Parameter estimates:");
    for (int r = 0; r < beta.getRowDimension(); r++) {
        for (int c = 0; c < beta.getColumnDimension(); c++) {
            System.out.println("Beta[" + r + "," + c + "]=" + beta.getEntry(r, c));
        }
    }
}

From source file:com.itemanalysis.psychometrics.factoranalysis.GPArotationTest.java

private void printMatrix(RealMatrix x, String title) {
    System.out.println("PRINTING MATRIX: " + title);
    for (int i = 0; i < x.getRowDimension(); i++) {
        for (int j = 0; j < x.getColumnDimension(); j++) {
            System.out.print(x.getEntry(i, j) + "  ");
        }
        System.out.println();
    }
}

From source file:com.itemanalysis.psychometrics.cfa.ParallelModel.java

public void setGradient(RealMatrix factorLoadingFirstDerivative, RealMatrix errorVarianceFirstDerivative,
        double[] gradient) {
    double sumL = 0.0, sumE = 0.0;

    for (int i = 0; i < factorLoadingFirstDerivative.getRowDimension(); i++) {
        sumL += factorLoadingFirstDerivative.getEntry(i, 0);
    }
    gradient[0] = sumL;

    for (int i = 0; i < errorVariance.length; i++) {
        sumE += errorVarianceFirstDerivative.getEntry(i, i);
    }
    gradient[1] = sumE;
}

From source file:com.mothsoft.alexis.engine.numeric.CorrelationCalculatorTest.java

@Test
public void testCommonsMathCorrelationIdentityMatrix() {
    final PearsonsCorrelation correlation = new PearsonsCorrelation();

    final double[][] xy = new double[4][2];
    xy[0][0] = 0.0;
    xy[1][0] = 1.0;
    xy[2][0] = 2.3;
    xy[3][0] = 3.4;

    xy[0][1] = 0.0;
    xy[1][1] = 1.0;
    xy[2][1] = 2.3;
    xy[3][1] = 3.4;

    final RealMatrix matrix = correlation.computeCorrelationMatrix(xy);
    assertEquals(2, matrix.getColumnDimension());
    assertEquals(2, matrix.getRowDimension());
    assertEquals(1.0, matrix.getEntry(0, 1), 0.000001);
    assertEquals(1.0, matrix.getEntry(1, 0), 0.000001);
}