Example usage for org.apache.commons.math.linear RealMatrix getSubMatrix

List of usage examples for org.apache.commons.math.linear RealMatrix getSubMatrix

Introduction

On this page you can find example usage of org.apache.commons.math.linear RealMatrix getSubMatrix.

Prototype

RealMatrix getSubMatrix(int startRow, int endRow, int startColumn, int endColumn) throws MatrixIndexException;

Source Link

Document

Gets a submatrix.

Usage

From source file:de.mpicbg.knime.hcs.base.utils.Table2Matrix.java

/**
 * Builds a RealMatrix from the given rows, keeping only rows for which every
 * requested attribute yields a finite numeric value.
 *
 * @param rows   the data rows to convert
 * @param params the attributes (columns) to extract from each row
 * @return a matrix with one row per fully-valid input row
 */
public static RealMatrix extractMatrix(List<DataRow> rows, List<Attribute> params) {
    int columnCount = params.size();
    double[][] values = new double[rows.size()][columnCount];
    int validRows = 0;
    for (DataRow row : rows) {
        int filled = 0;
        for (Attribute readout : params) {
            Double val = readout.getDoubleAttribute(row);
            if ((val == null) || Double.isInfinite(val) || Double.isNaN(val)) {
                break; // discard this row: a value is missing or non-finite
            }
            values[validRows][filled] = val;
            filled += 1;
        }
        // Only advance when the whole row was filled; otherwise the next
        // candidate row overwrites the partially-written slot.
        if (filled == columnCount) {
            validRows += 1;
        }
    }
    // Trim off the unused trailing rows.
    RealMatrix rmatrix = new Array2DRowRealMatrix(values);
    if (validRows > 0) {
        rmatrix = rmatrix.getSubMatrix(0, validRows - 1, 0, columnCount - 1);
    }
    return rmatrix;
}

From source file:juicebox.tools.utils.common.MatrixTools.java

/**
 * Extracts the region of {@code matrix} bounded by the given inclusive indices.
 *
 * @param matrix  the source matrix
 * @param indices {startRow, endRow, startColumn, endColumn}, all inclusive
 * @return region within matrix specified by indices
 */
public static RealMatrix getSubMatrix(RealMatrix matrix, int[] indices) {
    final int startRow = indices[0];
    final int endRow = indices[1];
    final int startColumn = indices[2];
    final int endColumn = indices[3];
    return matrix.getSubMatrix(startRow, endRow, startColumn, endColumn);
}

From source file:net.sf.jtmt.similarity.matrix.AbstractSimilarity.java

/**
 * Builds a document-to-document similarity matrix from a term-document matrix.
 * Entry (i, j) is computeSimilarity(column i, column j).
 *
 * @param termDocumentMatrix matrix with one column per document
 * @return a numDocs x numDocs similarity matrix
 */
public RealMatrix transform(RealMatrix termDocumentMatrix) {
    int numDocs = termDocumentMatrix.getColumnDimension();
    int lastRow = termDocumentMatrix.getRowDimension() - 1;
    // Extract each document's column vector once up front instead of
    // re-extracting the target column for every (i, j) pair — turns
    // O(n^2) submatrix copies into O(n) without changing any result.
    RealMatrix[] docColumns = new RealMatrix[numDocs];
    for (int i = 0; i < numDocs; i++) {
        docColumns[i] = termDocumentMatrix.getSubMatrix(0, lastRow, i, i);
    }
    RealMatrix similarityMatrix = new OpenMapRealMatrix(numDocs, numDocs);
    for (int i = 0; i < numDocs; i++) {
        for (int j = 0; j < numDocs; j++) {
            similarityMatrix.setEntry(i, j, computeSimilarity(docColumns[i], docColumns[j]));
        }
    }
    return similarityMatrix;
}

From source file:net.sf.jtmt.clustering.DocumentCollection.java

/**
 * Instantiates a new document collection.
 *
 * @param tdMatrix the td matrix//  w w w  .  ja va  2 s .c  o m
 * @param docNames the doc names
 */
public DocumentCollection(RealMatrix tdMatrix, String[] docNames) {
    int position = 0;
    this.tdMatrix = tdMatrix;
    this.documentMap = new HashMap<String, RealMatrix>();
    this.documentNames = new ArrayList<String>();
    for (String documentName : docNames) {
        documentMap.put(documentName,
                tdMatrix.getSubMatrix(0, tdMatrix.getRowDimension() - 1, position, position));
        documentNames.add(documentName);
        position++;
    }
}

From source file:juicebox.tools.utils.juicer.apa.APARegionStatistics.java

/**
 * Computes peak-to-corner enrichment statistics for an APA matrix.
 * The peak is the central entry; each corner region is a
 * regionWidth x regionWidth square.
 *
 * NOTE - getSubMatrix indices are inclusive in java (commons-math), but in
 * python the second index is not inclusive.
 *
 * @param data square APA matrix of aggregated contact counts
 */
public APARegionStatistics(RealMatrix data) {
    int max = data.getColumnDimension();
    int midPoint = max / 2;
    double centralVal = data.getEntry(midPoint, midPoint);

    int regionWidth = APA.regionWidth;

    // Peak vs. mean of all other entries.
    peak2mean = centralVal / ((sum(data.getData()) - centralVal) / (data.getColumnDimension() - 1));

    double avgUL = mean(data.getSubMatrix(0, regionWidth - 1, 0, regionWidth - 1).getData());
    peak2UL = centralVal / avgUL;

    double avgUR = mean(data.getSubMatrix(0, regionWidth - 1, max - regionWidth, max - 1).getData());
    peak2UR = centralVal / avgUR;

    // The lower-left corner is needed twice (mean ratio and z-score);
    // extract the submatrix once instead of twice. getData() returns a
    // fresh copy on each call, so the two consumers stay independent.
    RealMatrix lowerLeft = data.getSubMatrix(max - regionWidth, max - 1, 0, regionWidth - 1);
    double avgLL = mean(lowerLeft.getData());
    peak2LL = centralVal / avgLL;

    double avgLR = mean(data.getSubMatrix(max - regionWidth, max - 1, max - regionWidth, max - 1).getData());
    peak2LR = centralVal / avgLR;

    DescriptiveStatistics yStats = statistics(lowerLeft.getData());
    ZscoreLL = (centralVal - yStats.getMean()) / yStats.getStandardDeviation();
}

From source file:lib.regressions.MultipleRegression.java

/**
 * Perform the regression computations/*from  w  ww. j  a  v  a  2  s.co m*/
 */
private void compute() {
    // Set everything to 0.
    for (int i = 0; i < (myNumVar + 1); i++) {
        myCoef[i] = 0.0;
        myStdErr[i] = 0.0;
        myTStat[i] = 0.0;
    }
    myChiSq = 0.0;
    myRSq = 0.0;
    myAdjustedRSq = 0.0;

    // Set coefficients, t-stat, etc. if there has been enough data added.
    if (myCount >= (myNumVar + 1)) {
        RealMatrix dataMatrix = new RealMatrixImpl(mySums.getSumXX());
        RealMatrix xxMatrix = dataMatrix.getSubMatrix(1, myNumVar + 1, 1, myNumVar + 1);
        RealMatrix xyMatrix = dataMatrix.getSubMatrix(1, myNumVar + 1, 0, 0);

        computeOkX(); // Determine which X components to use.
        int[] listX = getListX();
        int[] listY = { 0 };
        int numX = listX.length;
        RealMatrix xxSubMatrix = xxMatrix.getSubMatrix(listX, listX);
        RealMatrix xySubMatrix = xyMatrix.getSubMatrix(listX, listY);

        double sumY = mySums.getSumXX()[0][1];
        double sumYY = mySums.getSumXX()[0][0];

        if (!xxSubMatrix.isSingular()) {
            RealMatrix xxInverse = xxSubMatrix.inverse();
            RealMatrix coefMatrix = xxInverse.multiply(xySubMatrix);
            double[] coef = coefMatrix.getColumn(0);

            // Compute chi-squared
            myChiSq = sumYY - 2 * coefMatrix.transpose().multiply(xySubMatrix).getEntry(0, 0)
                    + +coefMatrix.transpose().multiply(xxSubMatrix).multiply(coefMatrix).getEntry(0, 0);

            // Compute R^2 and adjusted R^2
            int offset = getUseIntercept() ? 1 : 0;
            myRSq = 1.0 - myChiSq / (sumYY - sumY * sumY / myCount);
            myAdjustedRSq = 1 - ((1 - myRSq) * (myCount - 1) + 1 - offset) / (myCount - numX);

            // Compute standard errors and t-stats
            int j = 0;
            for (int i = 0; i < (myNumVar + 1); i++) {
                if (myOkX[i]) {
                    j++;
                    myCoef[i] = coef[j - 1];
                    myStdErr[i] = Math.sqrt(myChiSq * xxInverse.getEntry(j - 1, j - 1) / (myCount - numX));
                    myTStat[i] = myCoef[i] / myStdErr[i];
                }
            }
        }

    }
    myIsComputed = true;
}

From source file:name.mjw.cytospade.fcsFile.java

/**
 * getCompensatedEventList ---
 * <p>
 * Returns the event list compensated by the SPILL matrix.
 * <p>
 *
 * @return array of double arrays containing the events; the uncompensated
 *         events are returned unchanged whenever the SPILL string is
 *         missing, malformed, or cannot be matched to the file's channels.
 */
public double[][] getCompensatedEventList() {
    double[][] events = this.getEventList();
    if (events.length != this.getNumChannels())
        return events; // Unable to extract the underlying events

    // Convert the SPILL string to a compensation matrix
    String compString = this.getSpillString();
    if (compString == null)
        return events; // No compensation, just return the events

    // Split the compensation string into its values
    //
    // The basic structure for SPILL* is:
    // $SPILLOVER/n,string1,string2,...,f1,f2,f3,f4,.../

    String[] compValues = compString.split(",");
    String[] compNames = null;
    String[] compData = null;
    int compDataStart = 0;

    int n = 0;
    try {
        // Try to parse the number of acquisition parameters
        n = Integer.parseInt(compValues[0]);
        if (n <= 0 || n > this.parameters)
            throw new NumberFormatException();
    } catch (NumberFormatException nfe) {
        CyLogger.getLogger().error("Failed to parse parameter count in spill string", nfe);
        return events;
    }

    compNames = Arrays.copyOfRange(compValues, 1, n + 1);

    // Match names in spill string to columns in parameter lists
    compDataStart = Arrays.asList(this.channelShortname).indexOf(compNames[0]);
    if (compDataStart < 0) {
        CyLogger.getLogger().error("Failed to match channel " + compNames[0] + " to parameter in file");
        return events; // Failure match spill string names to channels
    }
    // Guard against reading past the end of the channel list below: without
    // this, channelShortname[compDataStart + i] could throw
    // ArrayIndexOutOfBoundsException for a truncated/misaligned spill string.
    if (compDataStart + n > this.channelShortname.length) {
        CyLogger.getLogger().error("Spill channels extend past the parameter list in file");
        return events;
    }
    for (int i = 0; i < n; i++) {
        if (!compNames[i].equals(this.channelShortname[compDataStart + i])) {
            CyLogger.getLogger().error("Spill channel are not continguous parameters in file");
            return events; // Spill string columns not in order
        }
    }

    // Extract actual compensation data
    compData = Arrays.copyOfRange(compValues, n + 1, compValues.length);
    if (compData.length != (n * n))
        return events; // Malformed spill string: wrong number of coefficients

    /**
     * Populate the compensation matrix --- The values are stored in
     * row-major order, i.e., the elements in the first row appear
     * first.
     */
    double[][] matrix = new double[n][n];

    // Loop through the array of compensation values
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            try {
                matrix[i][j] = Double.parseDouble(compData[i * n + j]);
            } catch (NumberFormatException nfe) {
                // Set default value If a NumberFormatException occurred
                matrix[i][j] = 0.0d;
            }
        }
    }

    // Compute the inverse of the compensation data and then apply
    // to data matrix (which is column major). Specifically compute
    // transpose(inverse(<SPILL MATRIX>)) * data
    RealMatrix comp = (new LUDecompositionImpl(new Array2DRowRealMatrix(matrix))).getSolver().getInverse();
    RealMatrix data = new BlockRealMatrix(events);
    data.setSubMatrix( // Update compensated portion of data matrix
            comp.transpose()
                    .multiply(data.getSubMatrix(compDataStart, compDataStart + n - 1, 0,
                            this.getEventCount() - 1))
                    .getData(),
            compDataStart, 0);
    return data.getData();
}

From source file:gephi.spade.panel.fcsFile.java

/**
 * getCompensatedEventList ---/*from www. j a v a2 s.c o m*/
 * <p>
 * Returns the event list compensated by the SPILL matrix.
 * <p>
 *
 * @return array of double arrays containing the events.
 */
public double[][] getCompensatedEventList() {
    double[][] events = this.getEventList();
    if (events.length != this.getNumChannels())
        return events; // Unable to extract the underlying events

    // Convert the SPILL string to a compensation matrix
    String compString = this.getSpillString();
    if (compString == null)
        return events; // No compensation, just return the events

    // Split the compensation string into its values
    //
    // The basic structure for SPILL* is:
    // $SPILLOVER/n,string1,string2,...,f1,f2,f3,f4,.../

    String[] compValues = compString.split(",");
    String[] compNames = null;
    String[] compData = null;
    int compDataStart = 0;

    int n = 0;
    try {
        // Try to parse the number of acquisition parameters
        n = Integer.parseInt(compValues[0]);
        if (n <= 0 || n > this.parameters)
            throw new NumberFormatException();
    } catch (NumberFormatException nfe) {
        //CyLogger.getLogger().error("Failed to parse parameter count in spill string",nfe);
        return events;
    }

    compNames = Arrays.copyOfRange(compValues, 1, n + 1);

    // Match names in spill string to columns in parameter lists
    compDataStart = Arrays.asList(this.channelShortname).indexOf(compNames[0]);
    if (compDataStart < 0) {
        //CyLogger.getLogger().error("Failed to match channel "+compNames[0]+" to parameter in file");
        return events; // Failure match spill string names to channels
    }
    for (int i = 0; i < n; i++) {
        if (!compNames[i].equals(this.channelShortname[compDataStart + i])) {
            //CyLogger.getLogger().error("Spill channel are not continguous parameters in file");
            return events; // Spill string columns not in order
        }
    }

    // Extract actual compensation data
    compData = Arrays.copyOfRange(compValues, n + 1, compValues.length);
    if (compData.length != (n * n))
        return events;

    /**
     * Populate the compensation matrix --- The values are stored in
     * row-major order, i.e., the elements in the first row appear
     * first.
     */
    double[][] matrix = new double[n][n];

    // Loop through the array of compensation values
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            try {
                matrix[i][j] = Double.parseDouble(compData[i * n + j]);
            } catch (NumberFormatException nfe) {
                // Set default value If a NumberFormatException occurred
                matrix[i][j] = 0.0d;
            }
        }
    }

    // Compute the inverse of the compensation data and then apply
    // to data matrix (which is column major). Specifically compute
    // transpose(inverse(<SPILL MATRIX>)) * data
    RealMatrix comp = (new LUDecompositionImpl(new Array2DRowRealMatrix(matrix))).getSolver().getInverse();
    RealMatrix data = new BlockRealMatrix(events);
    data.setSubMatrix( // Update compensated portion of data matrix
            comp.transpose()
                    .multiply(data.getSubMatrix(compDataStart, compDataStart + n - 1, 0,
                            this.getEventCount() - 1))
                    .getData(),

            compDataStart, 0);
    return data.getData();
}

From source file:org.mitre.math.linear.RealMatrixUtils.java

/**
 * Normalizes a matrix row by row, in place: each row has its mean (via the
 * norm1 helper) subtracted and is then scaled by a spread estimate.
 *
 * NOTE(review): "std" below is norm1 of the squared, centered entries
 * divided by n — there is no sqrt, so this is not the usual standard
 * deviation. It matches the commented-out JAMA port at the bottom, but
 * confirm this is intended. A constant row yields std == 0 and produces
 * infinities — TODO confirm callers never pass one.
 */
public void normalizeMatrix(RealMatrix matrix) {
    //int features = matrix.getRowDimension();
    //int samples = matrix.getColumnDimension();
    int m = matrix.getRowDimension();
    int n = matrix.getColumnDimension();

    // Normalize each row
    for (int i = 0; i < m; i++) {
        // would be easier/quicker if we can get the whole row as an array (or vector)
        // getSubMatrix returns a copy, so the in-place edits below do not
        // touch 'matrix' until the write-back loop at the end.
        RealMatrix subMatrix = matrix.getSubMatrix(i, i, 0, n - 1); // n - 1 for 0 based indexing
        double sum = norm1(subMatrix) / n; // row mean — assumes norm1 sums absolute values; TODO confirm
        // minusEquals (subtractEquals) subMatrix
        for (int j = 0; j < n; j++) {
            subMatrix.addToEntry(0, j, -1.0 * sum);
        }
        double std = norm1(arrayTimes(subMatrix, subMatrix)) / n;
        timesEquals(subMatrix, 1.0 / std);

        //setSubMatrix(i,i, 0, samples -1, m)
        // Copy the normalized row back into the original matrix.
        for (int j = 0; j < n; j++) {
            matrix.setEntry(i, j, subMatrix.getEntry(0, j));
        }
    }

    // Normalize each feature
    //for (int i = 0; i < features; i++) {
    //Matrix m = matrix.getMatrix(i, i, 0, samples - 1);
    //double sum = m.norm1() / samples;
    //double sum = norm1(m) / samples;
    //m.minusEquals(new Matrix(1, samples, sum));
    //double std = m.arrayTimes(m).norm1() / samples;
    //m.times(1.0 / std);
    //matrix.setMatrix(i, i, 0, samples - 1, m);

    //}
}