Example usage for org.apache.commons.math3.linear Array2DRowRealMatrix Array2DRowRealMatrix

Introduction

On this page you can find example usage for the org.apache.commons.math3.linear.Array2DRowRealMatrix constructor. Note that several of the examples below also exercise the related Array2DRowRealMatrix(double[][] d) overload.

Prototype

public Array2DRowRealMatrix(final double[] v) 

Document

Create a new (column) RealMatrix using v as the data for the unique column of the created matrix.
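
For example, a minimal sketch (the class name is illustrative) confirming that passing a double[] produces an n x 1 column matrix:

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;

public class ColumnMatrixExample {
    public static void main(String[] args) {
        // a double[] of length 3 becomes a 3 x 1 column matrix
        RealMatrix column = new Array2DRowRealMatrix(new double[] { 1.0, 2.0, 3.0 });
        System.out.println(column.getRowDimension());    // 3
        System.out.println(column.getColumnDimension()); // 1
    }
}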

Usage

From source file:net.semanticmetadata.lire.filter.LsaFilter.java

/**
 * @param results the result set to filter
 * @param query   the query document
 * @return the filtered results or null if an error occurs.
 */
public ImageSearchHits filter(ImageSearchHits results, Document query) {
    // create a double[items][histogram]
    tempFeature = null;
    LinkedList<double[]> features = new LinkedList<double[]>();
    try {
        tempFeature = (LireFeature) featureClass.newInstance();
    } catch (Exception e) {
        logger.severe("Could not create feature " + featureClass.getName() + " (" + e.getMessage() + ").");
        return null;
    }
    // get all features from the result set, skipping documents that lack the respective field.
    for (int i = 0; i < results.length(); i++) {
        Document d = results.doc(i);
        if (d.getField(fieldName) != null) {
            tempFeature.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes,
                    d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length);
            features.add(tempFeature.getDoubleHistogram());
        }
    }
    // now go for the query
    if (query.getField(fieldName) != null) {
        tempFeature.setByteArrayRepresentation(query.getField(fieldName).binaryValue().bytes,
                query.getField(fieldName).binaryValue().offset, query.getField(fieldName).binaryValue().length);
    } else {
        logger.severe("Query document is missing the given feature " + featureClass.getName() + ".");
        return null;
    }
    double[][] matrixData = new double[features.size() + 1][tempFeature.getDoubleHistogram().length];
    System.arraycopy(tempFeature.getDoubleHistogram(), 0, matrixData[0], 0,
            tempFeature.getDoubleHistogram().length);
    int count = 1;
    for (Iterator<double[]> iterator = features.iterator(); iterator.hasNext();) {
        double[] next = iterator.next();
        System.arraycopy(next, 0, matrixData[count], 0, next.length);
        count++;
    }
    for (int i = 0; i < matrixData.length; i++) {
        double[] doubles = matrixData[i];
        for (int j = 0; j < doubles.length; j++) {
            if (Double.isNaN(doubles[j]))
                System.err.println("Value is NaN");
        }
    }
    // build the matrix and reduce its rank via singular value decomposition
    Array2DRowRealMatrix m = new Array2DRowRealMatrix(matrixData);
    long ms = System.currentTimeMillis();
    SingularValueDecomposition svd = new SingularValueDecomposition(m);
    ms = System.currentTimeMillis() - ms;
    double[] singularValues = svd.getSingularValues();
    RealMatrix s = svd.getS();
    // if no number of dimensions is given, reduce to a tenth.
    if (numberOfDimensions < 1)
        numberOfDimensions = singularValues.length / 10;
    for (int i = numberOfDimensions; i < singularValues.length; i++) {
        s.setEntry(i, i, 0);
    }
    RealMatrix mNew = svd.getU().multiply(s).multiply(svd.getVT());
    double[][] data = mNew.getData();

    // create the new result set
    TreeSet<SimpleResult> result = new TreeSet<SimpleResult>();
    double maxDistance = 0;
    double[] queryData = data[0];
    for (int i = 1; i < data.length; i++) {
        double[] doubles = data[i];
        double distance = MetricsUtils.distL1(doubles, queryData);
        result.add(new SimpleResult((float) distance, results.doc(i - 1), i - 1));
        maxDistance = Math.max(maxDistance, distance);
    }
    ImageSearchHits hits;
    hits = new SimpleImageSearchHits(result, (float) maxDistance);
    return hits;
}
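
The heart of this filter is the rank-k truncation of the SVD. A condensed sketch of just that step; the class and helper name are illustrative, not part of LIRE:

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.SingularValueDecomposition;

public class RankReduction {
    // Zero out all but the k largest singular values, then reassemble the matrix.
    public static double[][] reduceRank(double[][] matrixData, int k) {
        RealMatrix m = new Array2DRowRealMatrix(matrixData);
        SingularValueDecomposition svd = new SingularValueDecomposition(m);
        RealMatrix s = svd.getS();
        for (int i = k; i < svd.getSingularValues().length; i++) {
            s.setEntry(i, i, 0);
        }
        // U * S_truncated * V^T projects each row onto the top-k latent dimensions
        return svd.getU().multiply(s).multiply(svd.getVT()).getData();
    }
}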

From source file:edu.ucdenver.bios.power.ConditionalMultivariateWithConfidenceLimitsTest.java

private GLMMPowerParameters buildInputs(GLMMTestFactory.Test test) {
    // build the inputs
    GLMMPowerParameters params = new GLMMPowerParameters();

    // add tests
    params.addTest(test);

    // add alpha values - bonferroni corrected for 6 comparisons
    params.addAlpha(0.05 / 6);

    // add beta scale values
    params.addBetaScale(1);

    // build theta null matrix
    double[][] theta0 = { { 0, 0, 0 } };
    params.setTheta(new Array2DRowRealMatrix(theta0));

    // build sigma matrix
    double[][] sigma = { { 0.08380, 0.05020, 0.03560, 0.05330 }, { 0.05020, 0.05370, 0.03250, 0.03330 },
            { 0.03560, 0.03250, 0.04410, 0.03860 }, { 0.05330, 0.03330, 0.03860, 0.07220 } };
    params.setSigmaError(new Array2DRowRealMatrix(sigma));
    // add sigma scale values
    params.addSigmaScale(1);

    // build design matrix
    params.setDesignEssence(org.apache.commons.math3.linear.MatrixUtils.createRealIdentityMatrix(10));
    // add sample size multipliers
    for (int sampleSize = 2; sampleSize <= 10; sampleSize++)
        params.addSampleSize(sampleSize);

    // build beta matrix
    params.setBeta(new FixedRandomMatrix(beta, null, false));

    // build between subject contrast
    double[][] between = { { 1, 1, 1, 1, 1, -1, -1, -1, -1, -1 } };
    params.setBetweenSubjectContrast(new FixedRandomMatrix(between, null, true));

    double[] regions = { 1, 2, 3, 4 };
    String name = "region";
    ArrayList<Factor> factorList = new ArrayList<Factor>();
    Factor regionFactor = new Factor(name, regions);
    factorList.add(regionFactor);
    params.setWithinSubjectContrast(OrthogonalPolynomials.withinSubjectContrast(factorList)
            .getMainEffectContrast(regionFactor).getContrastMatrix());

    // parameters for confidence limits
    params.setConfidenceIntervalType(ConfidenceIntervalType.BETA_KNOWN_SIGMA_ESTIMATED);
    params.setSampleSizeForEstimates(21);
    params.setDesignMatrixRankForEstimates(1);
    // 2 sided CI
    params.setAlphaLowerConfidenceLimit(0.025);
    params.setAlphaUpperConfidenceLimit(0.025);

    return params;
}

From source file:com.joptimizer.algebra.CholeskySparseFactorizationTest.java

public void testSimple2() throws Exception {
    log.debug("testSimple2");
    double[][] A = new double[][] { { 4, 0, 0, 1 }, { 0, 4, 0, -1 }, { 0, 0, 6, 1 }, { 1, -1, 1, 6 } };

    CholeskySparseFactorization cs = new CholeskySparseFactorization(new SparseDoubleMatrix2D(A));
    cs.factorize();
    DoubleMatrix2D L = cs.getL();
    DoubleMatrix2D LT = cs.getLT();
    log.debug("L : " + ArrayUtils.toString(L.toArray()));
    log.debug("LT: " + ArrayUtils.toString(LT.toArray()));

    //check the norm ||A.x-b||
    double[] b = new double[] { 1, 2, 3, 4 };
    double[] x = cs.solve(F1.make(b)).toArray();
    double norm = new Array2DRowRealMatrix(A).operate(new ArrayRealVector(x)).subtract(new ArrayRealVector(b))
            .getNorm();
    log.debug("norm: " + norm);
    assertEquals(0., norm, 1.e-15);

    //check the scaled residual
    double residual = Utils.calculateScaledResidual(A, x, b);
    log.debug("residual: " + residual);
    assertEquals(0., residual, Utils.getDoubleMachineEpsilon());
}
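
For dense matrices, the same ||A.x - b|| check can be written entirely with Commons Math types. A sketch under that assumption (CholeskySparseFactorization above comes from JOptimizer; CholeskyDecomposition below is the Commons Math dense counterpart):

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.CholeskyDecomposition;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class CholeskyResidualExample {
    public static void main(String[] args) {
        // symmetric positive-definite matrix, as Cholesky factorization requires
        double[][] A = { { 4, 0, 0, 1 }, { 0, 4, 0, -1 }, { 0, 0, 6, 1 }, { 1, -1, 1, 6 } };
        RealMatrix m = new Array2DRowRealMatrix(A);
        RealVector b = new ArrayRealVector(new double[] { 1, 2, 3, 4 });
        RealVector x = new CholeskyDecomposition(m).getSolver().solve(b);
        // the residual norm ||A.x - b|| should be near machine precision
        System.out.println(m.operate(x).subtract(b).getNorm());
    }
}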

From source file:net.semanticmetadata.lire.filters.LsaFilter.java

/**
 * @param results the result set to filter
 * @param reader  the index reader used to resolve documents
 * @param query   the query document
 * @return the filtered results or null if an error occurs.
 */
public ImageSearchHits filter(ImageSearchHits results, IndexReader reader, Document query) {
    // create a double[items][histogram]
    tempFeature = null;
    LinkedList<double[]> features = new LinkedList<double[]>();
    try {
        tempFeature = (LireFeature) featureClass.newInstance();
    } catch (Exception e) {
        logger.severe("Could not create feature " + featureClass.getName() + " (" + e.getMessage() + ").");
        return null;
    }
    // get all features from the result set, skipping documents that lack the respective field.
    for (int i = 0; i < results.length(); i++) {
        Document d = null;
        try {
            d = reader.document(results.documentID(i));
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (d != null && d.getField(fieldName) != null) {
            tempFeature.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes,
                    d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length);
            features.add(tempFeature.getFeatureVector());
        }
    }
    // now go for the query
    if (query.getField(fieldName) != null) {
        tempFeature.setByteArrayRepresentation(query.getField(fieldName).binaryValue().bytes,
                query.getField(fieldName).binaryValue().offset, query.getField(fieldName).binaryValue().length);
    } else {
        logger.severe("Query document is missing the given feature " + featureClass.getName() + ".");
        return null;
    }
    double[][] matrixData = new double[features.size() + 1][tempFeature.getFeatureVector().length];
    System.arraycopy(tempFeature.getFeatureVector(), 0, matrixData[0], 0,
            tempFeature.getFeatureVector().length);
    int count = 1;
    for (Iterator<double[]> iterator = features.iterator(); iterator.hasNext();) {
        double[] next = iterator.next();
        System.arraycopy(next, 0, matrixData[count], 0, next.length);
        count++;
    }
    for (int i = 0; i < matrixData.length; i++) {
        double[] doubles = matrixData[i];
        for (int j = 0; j < doubles.length; j++) {
            if (Double.isNaN(doubles[j]))
                System.err.println("Value is NaN");
        }
    }
    // build the matrix and reduce its rank via singular value decomposition
    Array2DRowRealMatrix m = new Array2DRowRealMatrix(matrixData);
    long ms = System.currentTimeMillis();
    SingularValueDecomposition svd = new SingularValueDecomposition(m);
    ms = System.currentTimeMillis() - ms;
    double[] singularValues = svd.getSingularValues();
    RealMatrix s = svd.getS();
    // if no number of dimensions is given, reduce to a tenth.
    if (numberOfDimensions < 1)
        numberOfDimensions = singularValues.length / 10;
    for (int i = numberOfDimensions; i < singularValues.length; i++) {
        s.setEntry(i, i, 0);
    }
    RealMatrix mNew = svd.getU().multiply(s).multiply(svd.getVT());
    double[][] data = mNew.getData();

    // create the new result set
    TreeSet<SimpleResult> result = new TreeSet<SimpleResult>();
    double maxDistance = 0;
    double[] queryData = data[0];
    for (int i = 1; i < data.length; i++) {
        double[] doubles = data[i];
        double distance = MetricsUtils.distL1(doubles, queryData);
        result.add(new SimpleResult((float) distance, results.documentID(i - 1)));
        maxDistance = Math.max(maxDistance, distance);
    }
    ImageSearchHits hits;
    hits = new SimpleImageSearchHits(result, (float) maxDistance);
    return hits;
}

From source file:com.clust4j.algo.MeanShiftTests.java

@Test
public void testMeanShiftMFE1() {
    boolean a = false;
    final Array2DRowRealMatrix mat = new Array2DRowRealMatrix(MatUtils.randomGaussian(50, 2));
    MeanShift ms = new MeanShift(mat, 0.5);
    try {
        ms.getLabels();
    } catch (ModelNotFitException m) {
        a = true;
    } finally {
        assertTrue(a);
    }
}

From source file:com.clust4j.algo.pipeline.PipelineTest.java

@Test
public void testSupervisedA() {
    final double[][] data = new double[][] { new double[] { 0.005, 0.182751, 0.1284 },
            new double[] { 3.65816, 0.29518, 2.123316 }, new double[] { 4.1234, 0.27395, 1.8900002 } };

    final Array2DRowRealMatrix mat = new Array2DRowRealMatrix(data);
    final NearestCentroidParameters planner = new NearestCentroidParameters().setVerbose(true);

    // Build the pipeline
    final SupervisedPipeline<NearestCentroid> pipe = new SupervisedPipeline<NearestCentroid>(planner,
            new PreProcessor[] { new StandardScaler(),
                    new MeanImputation(new MeanImputation.MeanImputationPlanner().setVerbose(true)) // Will create a warning
            });
    final NearestCentroid nc = pipe.fit(mat, new int[] { 0, 1, 1 });

    assertTrue(nc.getLabels()[0] == 0 && nc.getLabels()[1] == 1);
    assertTrue(nc.getLabels()[1] == nc.getLabels()[2]);
    System.out.println();

}

From source file:com.github.thorbenlindhauer.factor.GaussianFactorTest.java

@Test
public void testInitializationFromConditionalLinearGaussian() {
    GaussianFactor factor = factorBuilder.scope("A", "B").conditional().conditioningScope("B").parameters(
            new ArrayRealVector(new double[] { 4.0d }), // mean of A
            new Array2DRowRealMatrix(new double[] { 2.0d }), // variance for A
            new Array2DRowRealMatrix(new double[] { 5.0d })); // weight of B

    // P(A = 10 | B = 1.5)
    assertThat(factor.getValueForAssignment(new double[] { 10.0d, 1.5d })).isEqualTo(0.160733d,
            TestConstants.DOUBLE_VALUE_TOLERANCE);

}
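
The asserted value can be checked by hand: the conditional distribution is N(A; 4 + 5*B, 2), so at A = 10 and B = 1.5 the density is N(10; 11.5, 2), roughly 0.160733. A sketch of the same check using Commons Math's NormalDistribution (which takes a standard deviation, not a variance):

import org.apache.commons.math3.distribution.NormalDistribution;

public class ConditionalDensityCheck {
    public static void main(String[] args) {
        // conditional mean = 4.0 + 5.0 * 1.5 = 11.5; variance = 2.0, so sd = sqrt(2)
        NormalDistribution conditional = new NormalDistribution(11.5, Math.sqrt(2.0));
        System.out.println(conditional.density(10.0)); // ~0.160733
    }
}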

From source file:edu.cudenver.bios.matrix.MatrixUtils.java

/**
 * Calculate the Kronecker product of two matrices
 *
 * @param matrix1 first matrix
 * @param matrix2 second matrix
 * @return Kronecker product of matrix 1 and matrix 2
 */
public static RealMatrix getKroneckerProduct(RealMatrix matrix1, RealMatrix matrix2) {
    if (matrix1 == null || matrix2 == null)
        throw new IllegalArgumentException("null input matrix");

    int m1Rows = matrix1.getRowDimension();
    int m1Cols = matrix1.getColumnDimension();
    int m2Rows = matrix2.getRowDimension();
    int m2Cols = matrix2.getColumnDimension();

    double[][] productData = new double[m1Rows * m2Rows][m1Cols * m2Cols];
    RealMatrix productMatrix = new Array2DRowRealMatrix(productData);
    for (int col = 0; col < m1Cols; col++) {
        for (int row = 0; row < m1Rows; row++) {
            productMatrix.setSubMatrix((matrix2.scalarMultiply(matrix1.getEntry(row, col))).getData(),
                    row * m2Rows, col * m2Cols);
        }
    }

    return productMatrix;
}
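
A brief usage sketch of this helper (MatrixUtils here refers to the edu.cudenver.bios.matrix class above, not Commons Math's own MatrixUtils):

import edu.cudenver.bios.matrix.MatrixUtils;
import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;

public class KroneckerExample {
    public static void main(String[] args) {
        RealMatrix a = new Array2DRowRealMatrix(new double[][] { { 1, 2 }, { 3, 4 } });
        RealMatrix i2 = new Array2DRowRealMatrix(new double[][] { { 1, 0 }, { 0, 1 } });
        // each entry of a scales a full copy of i2, giving a 4 x 4 block matrix
        RealMatrix product = MatrixUtils.getKroneckerProduct(a, i2);
        System.out.println(product.getRowDimension() + " x " + product.getColumnDimension()); // 4 x 4
    }
}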

From source file:com.clust4j.algo.KMeansTests.java

/** Now scale = false and multiclass */
@Test
public void KMeansTest3() {
    final double[][] data = new double[][] { new double[] { 0.005, 0.182751, 0.1284 },
            new double[] { 3.65816, 0.29518, 2.123316 }, new double[] { 4.1234, 0.0001, 1.8900002 },
            new double[] { 100, 200, 100 } };

    final Array2DRowRealMatrix mat = new Array2DRowRealMatrix(data);
    KMeans km = new KMeans(mat, new KMeansParameters(3)).fit();

    assertTrue(km.getK() == 3);
    assertTrue(km.getLabels()[1] == km.getLabels()[2]);
    assertTrue(km.getLabels()[0] != km.getLabels()[3]);
    assertTrue(km.didConverge());
}

From source file:edu.cudenver.bios.power.test.general.TestNonCentralityDistribution.java

/**
 * Builds matrices for a multivariate GLM with a baseline covariate
 */
private GLMMPowerParameters buildValidMultivariateRandomInputs() {
    GLMMPowerParameters params = new GLMMPowerParameters();

    // add power methods
    //for(PowerMethod method: PowerMethod.values()) params.addPowerMethod(method);
    params.addPowerMethod(PowerMethod.CONDITIONAL_POWER);
    params.addPowerMethod(PowerMethod.QUANTILE_POWER);
    params.addQuantile(0.25);
    params.addQuantile(0.5);
    params.addQuantile(0.75);

    // add tests - only HL and UNIREP are valid for the random case
    params.addTest(Test.HOTELLING_LAWLEY_TRACE);
    //params.addTest(GLMMPowerParameters.Test.UNIREP);

    // add alpha values
    for (double alpha : ALPHA_LIST)
        params.addAlpha(alpha);

    int P = 3;
    int Q = 3;
    // create design matrix
    params.setDesignEssence(org.apache.commons.math3.linear.MatrixUtils.createRealIdentityMatrix(Q));
    // add sample size multipliers
    for (int sampleSize : SAMPLE_SIZE_LIST)
        params.addSampleSize(sampleSize);

    // build sigma G matrix
    double[][] sigmaG = { { 1 } };
    params.setSigmaGaussianRandom(new Array2DRowRealMatrix(sigmaG));

    // build sigma Y matrix
    double rho = 0.4;
    double[][] sigmaY = { { 1, 0 }, { 0, 1 } };
    params.setSigmaOutcome(new Array2DRowRealMatrix(sigmaY));

    // build sigma YG
    double rhoYG = 0.8;
    double[][] sigmaYG = { { 0.9 }, { 0 } };
    params.setSigmaOutcomeGaussianRandom(new Array2DRowRealMatrix(sigmaYG));

    // add sigma scale values
    for (double sigmaScale : SIGMA_SCALE_LIST)
        params.addSigmaScale(sigmaScale);

    // build beta matrix
    double[][] beta = { { 1, 0 }, { 0, 0 }, { 0, 0 } };
    double[][] betaRandom = { { 0.9, 0 } };
    params.setBeta(new FixedRandomMatrix(beta, betaRandom, false));
    // add beta scale values
    for (double betaScale : BETA_SCALE_LIST)
        params.addBetaScale(betaScale);

    // build theta null matrix
    double[][] theta0 = { { 0, 0 }, { 0, 0 } };
    params.setTheta(new Array2DRowRealMatrix(theta0));

    // build between subject contrast
    double[][] between = { { 1, -1, 0 }, { 1, 0, -1 } };
    double[][] betweenRandom = { { 1 }, { 1 } };
    params.setBetweenSubjectContrast(new FixedRandomMatrix(between, betweenRandom, true));

    // build within subject contrast
    double[][] within = { { 1, 0 }, { 0, 1 } };
    params.setWithinSubjectContrast(new Array2DRowRealMatrix(within));

    // set the sigma error matrix to [sigmaY - sigmaYG * sigmaG^-1 * sigmaGY]
    RealMatrix sigmaGY = params.getSigmaOutcomeGaussianRandom().transpose();
    RealMatrix sigmaGInverse = new LUDecomposition(params.getSigmaGaussianRandom()).getSolver().getInverse();
    params.setSigmaError(params.getSigmaOutcome()
            .subtract(params.getSigmaOutcomeGaussianRandom().multiply(sigmaGInverse.multiply(sigmaGY))));

    return params;
}
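
The final assignment above is a Schur complement. A standalone sketch of the same computation, using the matrix values from this test:

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.LUDecomposition;
import org.apache.commons.math3.linear.RealMatrix;

public class SchurComplementExample {
    public static void main(String[] args) {
        RealMatrix sigmaY = new Array2DRowRealMatrix(new double[][] { { 1, 0 }, { 0, 1 } });
        RealMatrix sigmaYG = new Array2DRowRealMatrix(new double[][] { { 0.9 }, { 0 } });
        RealMatrix sigmaG = new Array2DRowRealMatrix(new double[][] { { 1 } });

        // sigmaError = sigmaY - sigmaYG * sigmaG^-1 * sigmaGY
        RealMatrix sigmaGInverse = new LUDecomposition(sigmaG).getSolver().getInverse();
        RealMatrix sigmaError = sigmaY.subtract(sigmaYG.multiply(sigmaGInverse).multiply(sigmaYG.transpose()));
        System.out.println(sigmaError); // expect 0.19 and 1 on the diagonal
    }
}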