Example usage for org.apache.commons.math3.linear RealMatrix getColumnDimension

List of usage examples for org.apache.commons.math3.linear RealMatrix getColumnDimension

Introduction

On this page you can find example usage for org.apache.commons.math3.linear RealMatrix getColumnDimension.

Prototype

int getColumnDimension();

Document

Returns the number of columns in the matrix.
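
A minimal, self-contained sketch of the call (matrix values are arbitrary and for illustration only):

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;

public class ColumnDimensionExample {
    public static void main(String[] args) {
        // A 2 x 3 matrix: two rows, three columns
        RealMatrix m = new Array2DRowRealMatrix(new double[][] {
                { 1.0, 2.0, 3.0 },
                { 4.0, 5.0, 6.0 }
        });
        System.out.println(m.getColumnDimension()); // prints 3
        System.out.println(m.getRowDimension());    // prints 2
    }
}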

Usage

From source file:com.github.thorbenlindhauer.factor.GaussianFactorTest.java

@Test
public void testInitializationFromMomentForm() {
    Scope scope = newScope(new ContinuousVariable("A"), new ContinuousVariable("B"),
            new ContinuousVariable("C"));

    RealMatrix covarianceMatrix = new Array2DRowRealMatrix(
            new double[][] { { 1.0d, 2.0d, 3.0d }, { 4.0d, 5.0d, 6.0d }, { 7.0d, 8.0d, 10.0d } });

    RealVector meanVector = new ArrayRealVector(new double[] { 1.0d, 4.0d, 7.0d });

    // when
    GaussianFactor factor = CanonicalGaussianFactor.fromMomentForm(scope, meanVector, covarianceMatrix);

    // then
    RealMatrix returnedCovarianceMatrix = factor.getCovarianceMatrix();
    assertThat(returnedCovarianceMatrix.getColumnDimension()).isEqualTo(3);
    assertThat(returnedCovarianceMatrix.getRowDimension()).isEqualTo(3);

    double[] row = returnedCovarianceMatrix.getRowVector(0).toArray();
    assertThat(row[0]).isEqualTo(1.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(row[1]).isEqualTo(2.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(row[2]).isEqualTo(3.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);

    row = returnedCovarianceMatrix.getRowVector(1).toArray();
    assertThat(row[0]).isEqualTo(4.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(row[1]).isEqualTo(5.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(row[2]).isEqualTo(6.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);

    row = returnedCovarianceMatrix.getRowVector(2).toArray();
    assertThat(row[0]).isEqualTo(7.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(row[1]).isEqualTo(8.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(row[2]).isEqualTo(10.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);

    double[] returnedMeanVector = factor.getMeanVector().toArray();
    assertThat(returnedMeanVector[0]).isEqualTo(1.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(returnedMeanVector[1]).isEqualTo(4.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
    assertThat(returnedMeanVector[2]).isEqualTo(7.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);
}

From source file:edu.cudenver.bios.matrix.test.TestMatrixOrthonormalization.java

/**
 * Verify that the Q'Q = I for the Q matrix produced by the
 * orthonormalization
 */
public void testQQisIdentity() {
    RealMatrix Q = norm.getQ();

    // verify that Q'Q = identity
    RealMatrix shouldBeIdentityMatrix = Q.transpose().multiply(Q);
    // make sure the matrix is square
    if (!shouldBeIdentityMatrix.isSquare()) {
        fail();
    }
    // make sure the diagonal elements are one (within tolerance), and off diagonals
    // are zero (within tolerance)
    for (int r = 0; r < shouldBeIdentityMatrix.getRowDimension(); r++) {
        for (int c = 0; c < shouldBeIdentityMatrix.getColumnDimension(); c++) {
            double shouldBeValue = (r == c) ? 1 : 0;
            if (Precision.compareTo(shouldBeIdentityMatrix.getEntry(r, c), shouldBeValue, TOLERANCE) != 0)
                fail();
        }
    }
    assertTrue(true);
}
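
The same check can be phrased against an explicit identity matrix of matching size. A minimal sketch using only commons-math; Q, TOLERANCE, assertTrue and the MatrixUtils import are assumed from the surrounding test class:

    // Compare Q'Q entrywise against an identity of the same dimension.
    RealMatrix shouldBeIdentity = Q.transpose().multiply(Q);
    RealMatrix identity = MatrixUtils.createRealIdentityMatrix(shouldBeIdentity.getColumnDimension());
    double maxAbsDiff = 0.0;
    for (int r = 0; r < shouldBeIdentity.getRowDimension(); r++) {
        for (int c = 0; c < shouldBeIdentity.getColumnDimension(); c++) {
            maxAbsDiff = Math.max(maxAbsDiff,
                    Math.abs(shouldBeIdentity.getEntry(r, c) - identity.getEntry(r, c)));
        }
    }
    assertTrue(maxAbsDiff < TOLERANCE);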

From source file:edu.cudenver.bios.power.test.paper.TestConditionalOrthogonalPolynomial3Factor.java

/**
 * Write a matrix in LaTeX
 * @param section
 * @param name
 * @param matrix
 */
private void appendMatrix(String name, RealMatrix matrix) {
    matrixAltStringBuffer.append("\\begin{eqnarray*}\n");
    // add name label
    matrixAltStringBuffer.append("\\underset{\\left(" + matrix.getRowDimension() + "\\times"
            + matrix.getColumnDimension() + "\\right)}{" + name + "} & = & \\begin{bmatrix}");
    for (int r = 0; r < matrix.getRowDimension(); r++) {
        boolean first = true;
        for (int c = 0; c < matrix.getColumnDimension(); c++) {
            if (!first) {
                matrixAltStringBuffer.append(" & ");
            }
            matrixAltStringBuffer.append(ShortNumber.format(matrix.getEntry(r, c)));
            if (first) {
                first = false;
            }
        }
        matrixAltStringBuffer.append("\\protect\\\\\n");
    }
    matrixAltStringBuffer.append("\\end{bmatrix}\n");
    matrixAltStringBuffer.append("\\end{eqnarray*}\n");
    if (matrix.getColumnDimension() > 10) {
        matrixAltStringBuffer.append("\\normalsize\n");
    }
}
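
For a hypothetical 2x2 matrix named X with entries 1 to 4, the generated markup would look roughly as follows (illustrative; the exact number formatting depends on ShortNumber.format):

\begin{eqnarray*}
\underset{\left(2\times2\right)}{X} & = & \begin{bmatrix}1 & 2\protect\\
3 & 4\protect\\
\end{bmatrix}
\end{eqnarray*}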

From source file:com.clust4j.algo.preprocess.PCA.java

@Override
public PCA fit(RealMatrix X) {
    synchronized (fitLock) {
        this.centerer = new MeanCenterer().fit(X);
        this.m = X.getRowDimension();
        this.n = X.getColumnDimension();

        // ensure n_components not too large
        if (this.n_components > n)
            this.n_components = n;

        final RealMatrix data = this.centerer.transform(X);
        SingularValueDecomposition svd = new SingularValueDecomposition(data);
        RealMatrix U = svd.getU(), S = svd.getS(), V = svd.getV().transpose();

        // flip Eigenvectors' sign to enforce deterministic output
        EntryPair<RealMatrix, RealMatrix> uv_sign_swap = eigenSignFlip(U, V);

        U = uv_sign_swap.getKey();
        V = uv_sign_swap.getValue();
        RealMatrix components_ = V;

        // get variance explained by singular value
        final double[] s = MatUtils.diagFromSquare(S.getData());
        this.variabilities = new double[s.length];
        for (int i = 0; i < s.length; i++) {
            variabilities[i] = (s[i] * s[i]) / (double) m;
            total_var += variabilities[i];
        }

        // get variability ratio
        this.variability_ratio = new double[s.length];
        for (int i = 0; i < s.length; i++) {
            variability_ratio[i] = variabilities[i] / total_var;
        }

        // post-process number of components if in var_mode
        double[] ratio_cumsum = VecUtils.cumsum(variability_ratio);
        if (this.var_mode) {
            for (int i = 0; i < ratio_cumsum.length; i++) {
                if (ratio_cumsum[i] >= this.variability) {
                    this.n_components = i + 1;
                    break;
                }

                // if it never hits the if block, the n_components is
                // equal to the number of columns in its entirety
            }
        }

        // get noise variance
        if (n_components < FastMath.min(n, m)) {
            this.noise_variance = VecUtils.mean(VecUtils.slice(variabilities, n_components, s.length));
        } else {
            this.noise_variance = 0.0;
        }

        // Set the components and other sliced variables
        this.components = new Array2DRowRealMatrix(MatUtils.slice(components_.getData(), 0, n_components),
                false);
        this.variabilities = VecUtils.slice(variabilities, 0, n_components);
        this.variability_ratio = VecUtils.slice(variability_ratio, 0, n_components);

        if (retain) {
            this.U = new Array2DRowRealMatrix(MatUtils.slice(U.getData(), 0, n_components), false);
            this.S = new Array2DRowRealMatrix(MatUtils.slice(S.getData(), 0, n_components), false);
        }

        return this;
    }
}
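
The variance bookkeeping above reduces to squaring the singular values, dividing by the row count and normalizing by the total. A standalone sketch using only commons-math (the input is assumed to be already mean-centered; this is not the project's code):

private static double[] explainedVarianceRatio(RealMatrix centered) {
    SingularValueDecomposition svd = new SingularValueDecomposition(centered);
    double[] s = svd.getSingularValues();
    int rows = centered.getRowDimension();

    // Variance explained by each component: sigma_i^2 / m
    double[] variance = new double[s.length];
    double total = 0.0;
    for (int i = 0; i < s.length; i++) {
        variance[i] = (s[i] * s[i]) / rows;
        total += variance[i];
    }

    // Ratio of each component's variance to the total
    double[] ratio = new double[s.length];
    for (int i = 0; i < s.length; i++) {
        ratio[i] = variance[i] / total;
    }
    return ratio;
}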

From source file:com.itemanalysis.psychometrics.factoranalysis.GPArotation.java

/**
 * For debugging
 *
 * @param x a matrix to print
 * @param title title for output
 */
private void printMatrix(RealMatrix x, String title) {
    System.out.println("PRINTING MATRIX: " + title);
    for (int i = 0; i < x.getRowDimension(); i++) {
        for (int j = 0; j < x.getColumnDimension(); j++) {
            System.out.print(x.getEntry(i, j) + "  ");
        }
        System.out.println();
    }
}
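
An illustrative call (matrix values chosen arbitrarily) and the resulting console output:

RealMatrix x = new Array2DRowRealMatrix(new double[][] { { 1.0, 2.0 }, { 3.0, 4.0 } });
printMatrix(x, "loadings");
// PRINTING MATRIX: loadings
// 1.0  2.0
// 3.0  4.0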

From source file:edu.dfci.cccb.mev.hcl.domain.simple.SimpleTwoDimensionalHclBuilder.java

private Node cluster(final Dataset dataset, Dimension dimension, Metric metric, Linkage linkage)
        throws DatasetException {
    final Type dimensionType = dimension.type();
    final RealMatrix original = toRealMatrix(dataset);
    final int size = dimensionType == ROW ? original.getRowDimension() : original.getColumnDimension();
    final int other = dimensionType == COLUMN ? original.getRowDimension() : original.getColumnDimension();
    Iterator<Integer> enumerator = new Iterator<Integer>() {

        private int counter = -1;

        @Override
        public boolean hasNext() {
            return true;
        }

        @Override
        public Integer next() {
            counter--;
            if (counter > 0)
                counter = -1;
            return counter;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };
    final double[][] distances = new double[size][size];

    log.debug("Populating node hash");
    final Map<Integer, Node> genehash = new HashMap<Integer, Node>() {
        private static final long serialVersionUID = 1L;

        {
            for (int index = size; --index >= 0; put(index,
                    nodeBuilder().leaf(dataset.dimension(dimensionType).keys().get(index))))
                ;
        }
    };
    TreeMap<Double, int[]> sorted = new TreeMap<>();

    log.debug("Populating distance matrix");
    for (int i = 0; i < size; i++) {
        for (int j = i + 1; j < size; j++) {
            double distance = metric.distance(new AbstractList<Double>() {

                private int i;

                @Override
                public Double get(int index) {
                    return dimensionType == ROW ? original.getEntry(i, index) : original.getEntry(index, i);
                }

                @Override
                public int size() {
                    return other;
                }

                private List<Double> initializeProjection(int i) {
                    this.i = i;
                    return this;
                }
            }.initializeProjection(i), new AbstractList<Double>() {

                private int j;

                @Override
                public Double get(int index) {
                    return dimensionType == ROW ? original.getEntry(j, index) : original.getEntry(index, j);
                }

                @Override
                public int size() {
                    return other;
                }

                private List<Double> initializeProjection(int j) {
                    this.j = j;
                    return this;
                }
            }.initializeProjection(j));

            distances[i][j] = distance;
            distances[j][i] = distance;
            int[] genePair = { i, j };
            // Enter the distance calculated and the genes measured into a
            // treemap. Will be automatically sorted.
            sorted.put(distance, genePair);
        }
    }

    log.debug("Aggregating");
    while (true) {
        // Get the first key of the TreeMap. Will be the shortest distance de
        // facto.
        final double minkey = (Double) sorted.firstKey();
        int[] minValues = (int[]) sorted.firstEntry().getValue();

        final int value1 = minValues[0], value2 = minValues[1];
        // find

        Node cluster = nodeBuilder().branch(minkey, genehash.get(value1), genehash.get(value2));
        int id = enumerator.next();

        genehash.put(id, cluster);
        genehash.remove(value1);
        genehash.remove(value2);

        if (genehash.size() <= 1)
            break;

        // Iterate over all the current clusters to remeasure distance with the
        // previously clustered group.
        for (Entry<Integer, Node> e : genehash.entrySet()) {
            Node c = e.getValue();
            // Skip measuring the new cluster with itself.
            if (c == cluster)
                continue;

            List<Double> aggregation = new ArrayList<>();
            // Get genes from each cluster. Distance is measured from each element
            // to every element.
            for (int current : traverse(dimension.keys(), c))
                for (int created : traverse(dimension.keys(), cluster))
                    aggregation.add(distances[current][created]);

            int[] valuePair = { e.getKey(), id };
            sorted.put(linkage.aggregate(aggregation), valuePair);
        }

        // Get the shortest distance.
        // Check to make sure shortest distance does not include a gene pair
        // that
        // has already had its elements clustered.
        boolean minimized = false;
        while (!minimized) {
            double mk = sorted.firstKey();
            minValues = sorted.firstEntry().getValue();
            // If the gene pair is not present in the current gene set, remove
            // this distance.
            if (!genehash.containsKey(minValues[0]) || !genehash.containsKey(minValues[1]))
                sorted.remove(mk);
            else
                minimized = true;
        }
    }

    Node result = genehash.entrySet().iterator().next().getValue();
    log.debug("Clustered " + result);
    return result;
}

From source file:eagle.security.userprofile.impl.UserProfileAnomalyKDEEvaluator.java

@Override
public List<MLCallbackResult> detect(final String user, final String algorithm,
        UserActivityAggModel userActivity, UserProfileKDEModel aModel) {
    List<MLCallbackResult> mlPredictionOutputList = new ArrayList<MLCallbackResult>();
    RealMatrix inputData = userActivity.matrix();

    double[] probabilityEstimation = new double[inputData.getRowDimension()];
    for (int i = 0; i < probabilityEstimation.length; i++)
        probabilityEstimation[i] = 1.0;

    boolean[][] anomalyFeature = new boolean[inputData.getRowDimension()][inputData.getColumnDimension()];

    for (int i = 0; i < anomalyFeature.length; i++) {
        for (int j = 0; j < anomalyFeature[i].length; j++) {
            anomalyFeature[i][j] = false;
        }
    }

    if (aModel == null) {
        LOG.info("No model available for this uer, returning");
        return null;
    }

    Map<String, String> context = new HashMap<String, String>() {
        {
            put(UserProfileConstants.USER_TAG, user);
            put(UserProfileConstants.ALGORITHM_TAG, algorithm);
        }
    };

    for (int i = 0; i < inputData.getRowDimension(); i++) {
        List<String> cmds = JavaConversions.seqAsJavaList(userActivity.cmdTypes());
        if (inputData.getColumnDimension() != cmds.size()) {
            LOG.error("Test data is not with same dimension as training, aborting...");
            return null;
        } else {

            UserCommandStatistics[] listStats = aModel.statistics();

            for (int j = 0; j < inputData.getColumnDimension(); j++) {
                //                    LOG.info("mean for j=" + j + " is:" + listStats[j].getMean());
                //                    LOG.info("stddev for j=" + j + " is:" + listStats[j].getStddev());
                if (listStats[j].isLowVariant()) {
                    //                        LOG.info(listStats[j].getCommandName() + " is low variant for user: " + user);
                    if (inputData.getEntry(i, j) > listStats[j].getMean()) {
                        probabilityEstimation[i] *= Double.NEGATIVE_INFINITY;
                        anomalyFeature[i][j] = true;
                    }
                } else {
                    double stddev = listStats[j].getStddev();
                    //LOG.info("stddev: " + stddev);
                    double mean = listStats[j].getMean();
                    //LOG.info("mean: " + mean);
                    double sqrt2PI = Math.sqrt(2.0 * Math.PI);
                    //LOG.info("sqrt2PI: " + sqrt2PI);
                    double denominatorFirstPart = sqrt2PI * stddev;
                    //LOG.info("denominatorFirstPart: " + denominatorFirstPart);
                    double squareMeanNormal = Math.pow((inputData.getEntry(i, j) - mean), 2);
                    //LOG.info("squareMeanNormal: " + squareMeanNormal);
                    double twoPowStandardDev = Math.pow(stddev, 2);
                    //LOG.info("twoPowStandardDev: " + twoPowStandardDev);
                    double twoTimesTwoPowStandardDev = 2.0 * twoPowStandardDev;
                    //LOG.info("twoTimesTwoPowStandardDev: " + twoTimesTwoPowStandardDev);

                    double tempVal = ((1.00 / denominatorFirstPart)
                            * (Math.exp(-(squareMeanNormal / twoTimesTwoPowStandardDev))));
                    probabilityEstimation[i] *= tempVal;
                    //LOG.info("probabilityEstimation: " + probabilityEstimation[i]);
                    if ((inputData.getEntry(i, j) - mean) > 2 * stddev)
                        anomalyFeature[i][j] = true;
                }
            }

        }
    }

    for (int i = 0; i < probabilityEstimation.length; i++) {
        MLCallbackResult callBackResult = new MLCallbackResult();
        callBackResult.setContext(context);
        //LOG.info("probability estimation for data @" + i + " is: " + probabilityEstimation[i]);
        if (probabilityEstimation[i] < aModel.maxProbabilityEstimate()) {
            callBackResult.setAnomaly(true);
            for (int col = 0; col < anomalyFeature[i].length; col++) {
                //LOG.info("feature anomaly? " + (featureVals[col] == true));
                if (anomalyFeature[i][col] == true) {
                    callBackResult.setFeature(aModel.statistics()[col].getCommandName());
                }
            }
        } else {
            callBackResult.setAnomaly(false);
        }

        callBackResult.setTimestamp(userActivity.timestamp());
        List<String> datapoints = new ArrayList<String>();
        double[] rowVals = userActivity.matrix().getRow(i);
        for (double rowVal : rowVals)
            datapoints.add(rowVal + "");
        callBackResult.setDatapoints(datapoints);
        callBackResult.setId(user);
        callBackResult.setAlgorithm(UserProfileConstants.KDE_ALGORITHM);
        mlPredictionOutputList.add(callBackResult);
    }
    return mlPredictionOutputList;
}
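
The per-feature factor computed in the inner loop is the univariate normal density. commons-math also provides it directly, so an equivalent computation would be (mean, stddev and x here are illustrative values, not taken from the model):

// Same value as (1 / (sqrt(2 * PI) * stddev)) * exp(-((x - mean)^2) / (2 * stddev^2))
double mean = 2.0, stddev = 0.5, x = 2.3;
double density = new org.apache.commons.math3.distribution.NormalDistribution(mean, stddev).density(x);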

From source file:eagle.security.userprofile.model.eigen.UserProfileEigenModeler.java

private void computeCovarianceAndSVD(RealMatrix inputMat, int containsLowVariantCol) {

    int finalMatrixRow = 0;
    int finalMatrixCol = 0;

    LOG.info("containsLowVariantCol size: " + containsLowVariantCol);
    int colDimension = (inputMat.getColumnDimension() - containsLowVariantCol);
    try {
        finalMatrixWithoutLowVariantCmds = new Array2DRowRealMatrix(inputMat.getRowDimension(), colDimension);
    } catch (NotStrictlyPositiveException e) {
        LOG.error(String.format("Failed to build matrix [rowDimension:%s, columnDimension: %s]",
                inputMat.getRowDimension(), colDimension), e);
        throw e;
    }

    for (int i = 0; i < inputMat.getRowDimension(); i++) {
        for (int j = 0; j < inputMat.getColumnDimension(); j++) {
            if (!statistics[j].isLowVariant()) {
                finalMatrixWithoutLowVariantCmds.setEntry(finalMatrixRow, finalMatrixCol,
                        inputMat.getEntry(i, j));
                finalMatrixCol++;
            }
        }
        finalMatrixCol = 0;
        finalMatrixRow++;
    }

    Covariance cov;
    try {
        cov = new Covariance(finalMatrixWithoutLowVariantCmds.getData());
    } catch (Exception ex) {
        throw new IllegalArgumentException(String.format("Failed to create covariance from matrix [ %s x %s ]",
                finalMatrixWithoutLowVariantCmds.getRowDimension(),
                finalMatrixWithoutLowVariantCmds.getColumnDimension()), ex);
    }
    covarianceMatrix = cov.getCovarianceMatrix();
    SingularValueDecomposition svd = new SingularValueDecomposition(covarianceMatrix);
    diagonalMatrix = svd.getS();
    uMatrix = svd.getU();
    vMatrix = svd.getV();
}
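
Stripped of the low-variance filtering, the covariance-then-SVD pipeline looks like this on a toy matrix (a sketch with arbitrary data, assuming the usual commons-math imports for Covariance and SingularValueDecomposition):

double[][] data = { { 1.0, 2.0 }, { 2.0, 1.0 }, { 3.0, 4.0 } };
RealMatrix covariance = new Covariance(data).getCovarianceMatrix();
SingularValueDecomposition svd = new SingularValueDecomposition(covariance);
// The covariance matrix is square: both dimensions equal the number of input columns.
System.out.println(covariance.getRowDimension() + " x " + covariance.getColumnDimension()); // 2 x 2
RealMatrix s = svd.getS(); // diagonal matrix of singular values
RealMatrix u = svd.getU();
RealMatrix v = svd.getV();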

From source file:com.itemanalysis.psychometrics.factoranalysis.GPArotation.java

/**
 * Conducts orthogonal rotation of factor loadings.
 *
 * @param A matrix of orthogonal factor loadings
 * @return a matrix of rotated factor loadings.
 * @throws ConvergenceException
 */
private RotationResults GPForth(RealMatrix A, boolean normalize, int maxIter, double eps)
        throws ConvergenceException {
    int ncol = A.getColumnDimension();

    if (normalize) {
        //elementwise division by normalizing weights
        final RealMatrix W = getNormalizingWeights(A, true);
        A.walkInRowOrder(new DefaultRealMatrixChangingVisitor() {
            @Override
            public double visit(int row, int column, double value) {
                return value / W.getEntry(row, column);
            }
        });
    }

    RealMatrix Tmat = new IdentityMatrix(ncol);
    double alpha = 1;
    RealMatrix L = A.multiply(Tmat);

    gpFunction.computeValues(L);

    double f = gpFunction.getValue();
    RealMatrix VgQ = gpFunction.getGradient();
    RealMatrix G = A.transpose().multiply(VgQ);
    double VgQtF = gpFunction.getValue();
    RealMatrix VgQt = gpFunction.getGradient();
    RealMatrix Tmatt = null;

    int iter = 0;
    double s = eps + 0.5;
    double s2 = 0;
    int innerIter = 11;

    while (iter < maxIter) {
        RealMatrix M = Tmat.transpose().multiply(G);
        RealMatrix S = (M.add(M.transpose()));
        S = S.scalarMultiply(0.5);
        RealMatrix Gp = G.subtract(Tmat.multiply(S));
        s = Math.sqrt((Gp.transpose().multiply(Gp)).getTrace());
        s2 = Math.pow(s, 2);

        if (s < eps)
            break;
        alpha *= 2.0;

        for (int j = 0; j < innerIter; j++) {
            Gp = Gp.scalarMultiply(alpha);
            RealMatrix X = (Tmat.subtract(Gp));
            SingularValueDecomposition SVD = new SingularValueDecomposition(X);

            Tmatt = SVD.getU().multiply(SVD.getV().transpose());
            L = A.multiply(Tmatt);
            gpFunction.computeValues(L);
            VgQt = gpFunction.getGradient();
            VgQtF = gpFunction.getValue();

            if (VgQtF < f - 0.5 * s2 * alpha) {
                break;
            }
            alpha /= 2.0;
        }

        Tmat = Tmatt;
        f = VgQtF;
        G = A.transpose().multiply(VgQt);
        iter++;
    }

    boolean convergence = s < eps;
    if (!convergence) {
        throw new ConvergenceException();
    }

    if (normalize) {
        //elementwise multiplication by normalizing weights
        final RealMatrix W = getNormalizingWeights(A, true);
        A.walkInRowOrder(new DefaultRealMatrixChangingVisitor() {
            @Override
            public double visit(int row, int column, double value) {
                return value * W.getEntry(row, column);
            }
        });
    }

    RealMatrix Phi = Tmat.transpose().multiply(Tmat);
    RotationResults result = new RotationResults(gpFunction.getValue(), L, Phi, Tmat, rotationMethod);
    return result;

}
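
Restating the projection step inside the loop in math (this is exactly what the code above computes, written in LaTeX for reference):

M = T^{\top} G, \qquad S = \tfrac{1}{2}\,(M + M^{\top}), \qquad
G_p = G - T S, \qquad s = \sqrt{\operatorname{tr}\!\left(G_p^{\top} G_p\right)}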

From source file:com.github.thorbenlindhauer.factor.GaussianFactorTest.java

@Test
public void testFactorMarginalCase2() {
    Scope variables = newScope(new ContinuousVariable("A"));

    GaussianFactor aMarginal = abcFactor.marginal(variables);

    // then
    Collection<Variable> newVariables = aMarginal.getVariables().getVariables();
    assertThat(newVariables).hasSize(1);
    assertThat(newVariables).contains(new ContinuousVariable("A"));

    // precision matrix: K_xx - K_xy * K_yy^(-1) * K_yx
    RealMatrix precisionMatrix = aMarginal.getPrecisionMatrix();
    assertThat(precisionMatrix.isSquare()).isTrue();
    assertThat(precisionMatrix.getColumnDimension()).isEqualTo(1);

    double precision = precisionMatrix.getRowVector(0).toArray()[0];
    assertThat(precision).isEqualTo(-(14.0d / 3.0d), TestConstants.DOUBLE_VALUE_TOLERANCE);

    // scaled mean vector: h_x - K_xy * K_yy^(-1) * h_y
    RealVector scaledMeanVector = aMarginal.getScaledMeanVector();
    assertThat(scaledMeanVector.getDimension()).isEqualTo(1);

    double meanValue = scaledMeanVector.toArray()[0];
    assertThat(meanValue).isEqualTo(4.0d / 3.0d, TestConstants.DOUBLE_VALUE_TOLERANCE);

    // normalization constant: g + 0.5 * (log( det( 2 * PI * K_yy^(-1))) + h_y * K_yy^(-1) * h_y)
    assertThat(aMarginal.getNormalizationConstant()).isEqualTo(9.324590408d,
            TestConstants.DOUBLE_VALUE_TOLERANCE);
}