Usage examples for `org.apache.commons.math3.linear.SingularValueDecomposition#getSingularValues()`
public double[] getSingularValues()
From source file:com.yahoo.egads.utilities.SpectralMethods.java
/**
 * Filters a multivariate series by embedding it into a Hankel matrix, truncating the SVD of
 * that embedding to a reduced rank, and averaging the reconstructed Hankel matrix back into
 * series form (classic SSA-style smoothing).
 *
 * @param data            input series, one row per time step, one column per variable
 * @param windowSize      embedding window length used to build the Hankel matrix
 * @param method          strategy used to pick how many singular components to keep
 * @param methodParameter meaning depends on {@code method}: variance fraction, explicit
 *                        count, gap count, smoothness target, or ratio threshold
 * @return the filtered series produced by {@code averageHankelMatrix}
 */
public static RealMatrix mFilter(RealMatrix data, int windowSize, FilteringMethod method, double methodParameter) {
    int n = data.getRowDimension();
    int m = data.getColumnDimension();
    // Number of windows of length windowSize that fit in an n-row series.
    int k = n - windowSize + 1;
    int i = 0, ind = 0;
    double[] temp;
    double sum = 0;
    RealMatrix hankelMat = SpectralMethods.createHankelMatrix(data, windowSize);
    SingularValueDecomposition svd = new SingularValueDecomposition(hankelMat);
    // Commons-math returns singular values in non-increasing order.
    double[] singularValues = svd.getSingularValues();
    // Each branch below selects 'ind': the index of the LAST singular component kept.
    switch (method) {
    case VARIANCE:
        // Keep enough leading components (beyond the first, which is excluded from the
        // normalization) to retain a 'methodParameter' fraction of the remaining energy.
        temp = new double[singularValues.length - 1];
        for (i = 1; i < singularValues.length; ++i) {
            sum += (singularValues[i] * singularValues[i]);
        }
        for (i = 0; i < temp.length; ++i) {
            temp[i] = (singularValues[i + 1] * singularValues[i + 1]) / sum;
        }
        sum = 0;
        // Walk from the tail; stop where the discarded tail energy would exceed 1 - parameter.
        for (i = temp.length - 1; i >= 0; --i) {
            sum += temp[i];
            if (sum >= 1 - methodParameter) {
                ind = i;
                break;
            }
        }
        break;
    case EXPLICIT:
        // methodParameter is a 1-based component count; clamp into [0, length-1].
        ind = (int) Math.max(Math.min(methodParameter - 1, singularValues.length - 1), 0);
        break;
    case K_GAP:
        // Rank components by the gap to their successor and keep up to the largest index
        // among the top-'methodParameter' gaps, capped at a third of the spectrum.
        final double[] eigenGaps = new double[singularValues.length - 1];
        Integer[] index = new Integer[singularValues.length - 1];
        for (i = 0; i < eigenGaps.length; ++i) {
            eigenGaps[i] = singularValues[i] - singularValues[i + 1];
            index[i] = i;
        }
        // Sort indices ascending by gap size; the largest gaps end up at the back.
        Arrays.sort(index, new Comparator<Integer>() {
            @Override
            public int compare(Integer o1, Integer o2) {
                return Double.compare(eigenGaps[o1], eigenGaps[o2]);
            }
        });
        int maxIndex = 0;
        for (i = index.length - (int) methodParameter; i < index.length; ++i) {
            if (index[i] > maxIndex) {
                maxIndex = index[i];
            }
        }
        ind = Math.min(maxIndex, singularValues.length / 3);
        break;
    case SMOOTHNESS:
        // Binary-search the smallest prefix of the variance spectrum whose zero-padded
        // smoothness score stays below the target.
        double[] variances = new double[singularValues.length];
        for (i = 1; i < singularValues.length; ++i) {
            variances[i] = (singularValues[i] * singularValues[i]);
        }
        variances[0] = variances[1];
        double smoothness = SpectralMethods
                .computeSmoothness(Arrays.copyOfRange(variances, 1, variances.length));
        // Ensure the target is strictly above the full-spectrum smoothness.
        if (methodParameter - smoothness < 0.01) {
            methodParameter += 0.01;
        }
        double invalidS = smoothness;
        int validIndex = 1, invalidIndex = singularValues.length;
        while (true) {
            if (invalidS >= methodParameter) {
                ind = invalidIndex - 1;
                break;
            } else if (invalidIndex - validIndex <= 1) {
                ind = validIndex - 1;
                break;
            }
            int ii = (validIndex + invalidIndex) / 2;
            // Prefix of length ii+1, zero-padded back to full length.
            double[] tempVariances = Arrays.copyOf(Arrays.copyOfRange(variances, 0, ii + 1),
                    singularValues.length);
            double s = SpectralMethods.computeSmoothness(tempVariances);
            if (s >= methodParameter) {
                validIndex = ii;
            } else {
                invalidIndex = ii;
                invalidS = s;
            }
        }
        break;
    case EIGEN_RATIO:
        // Binary-search the last component whose ratio to the largest singular value is
        // still >= methodParameter (spectrum is non-increasing, so the predicate is monotone).
        int startIndex = 0, endIndex = singularValues.length - 1;
        if (singularValues[endIndex] / singularValues[0] >= methodParameter) {
            ind = endIndex;
        } else {
            while (true) {
                int midIndex = (startIndex + endIndex) / 2;
                if (singularValues[midIndex] / singularValues[0] >= methodParameter) {
                    if (singularValues[midIndex + 1] / singularValues[0] < methodParameter) {
                        ind = midIndex;
                        break;
                    } else {
                        startIndex = midIndex;
                    }
                } else {
                    endIndex = midIndex;
                }
            }
        }
        break;
    case GAP_RATIO:
        // Keep through the last gap that is large relative to the top singular value.
        double[] gaps = new double[singularValues.length - 1];
        for (i = 0; i < gaps.length; ++i) {
            gaps[i] = singularValues[i] - singularValues[i + 1];
        }
        ind = 0;
        for (i = gaps.length - 1; i >= 0; --i) {
            if (gaps[i] / singularValues[0] >= methodParameter) {
                ind = i;
                break;
            }
        }
        break;
    default:
        // Unknown method: keep the full spectrum (no truncation).
        ind = singularValues.length - 1;
        break;
    }
    // Defensive clamp so the reconstruction loop below never indexes out of range.
    ind = Math.max(0, Math.min(ind, singularValues.length - 1));
    RealMatrix truncatedHankelMatrix = MatrixUtils.createRealMatrix(k, m * windowSize);
    RealMatrix mU = svd.getU();
    RealMatrix mVT = svd.getVT();
    // Rank-(ind+1) reconstruction: sum of sigma_i * u_i * v_i^T.
    for (i = 0; i <= ind; ++i) {
        truncatedHankelMatrix = truncatedHankelMatrix
                .add(mU.getColumnMatrix(i).multiply(mVT.getRowMatrix(i)).scalarMultiply(singularValues[i]));
    }
    return SpectralMethods.averageHankelMatrix(truncatedHankelMatrix, windowSize);
}
From source file:hivemall.anomaly.SingularSpectrumTransform.java
/** * Singular Value Decomposition (SVD) based naive scoring. *//*from w ww . j av a2s .co m*/ private double computeScoreSVD(@Nonnull final RealMatrix H, @Nonnull final RealMatrix G) { SingularValueDecomposition svdH = new SingularValueDecomposition(H); RealMatrix UT = svdH.getUT(); SingularValueDecomposition svdG = new SingularValueDecomposition(G); RealMatrix Q = svdG.getU(); // find the largest singular value for the r principal components RealMatrix UTQ = UT.getSubMatrix(0, r - 1, 0, window - 1).multiply(Q.getSubMatrix(0, window - 1, 0, r - 1)); SingularValueDecomposition svdUTQ = new SingularValueDecomposition(UTQ); double[] s = svdUTQ.getSingularValues(); return 1.d - s[0]; }
From source file:com.caseystella.analytics.outlier.batch.rpca.RPCA.java
/**
 * Nuclear-norm proximal step of RPCA: soft-thresholds the singular values of (X - S) by
 * lpenalty * mu, rebuilds the low-rank component L from the shrunk spectrum, and returns
 * the penalty contribution of the kept singular values.
 */
private double computeL(double mu) {
    final double threshold = lpenalty * mu;
    final SingularValueDecomposition decomposition = new SingularValueDecomposition(X.subtract(S));
    // Shrink each singular value toward zero by 'threshold'.
    final double[] shrunkValues = softThreshold(decomposition.getSingularValues(), threshold);
    final RealMatrix shrunkDiagonal = MatrixUtils.createRealDiagonalMatrix(shrunkValues);
    // Reassemble the low-rank estimate from the thresholded spectrum.
    L = decomposition.getU().multiply(shrunkDiagonal).multiply(decomposition.getVT());
    return sum(shrunkValues) * threshold;
}
From source file:net.semanticmetadata.lire.filter.LsaFilter.java
/**
 * Re-ranks a result set via latent semantic analysis: stacks the query histogram and all
 * result histograms into a matrix, zeroes all but the leading singular values of its SVD,
 * and re-scores every result by L1 distance to the query row in the rank-reduced space.
 *
 * @param results the hits to re-rank; entries missing {@code fieldName} are skipped when
 *                collecting features (NOTE(review): skipped entries shift row indices, so
 *                the doc/index pairing in the final loop assumes no entry was skipped —
 *                verify against callers)
 * @param query   the query document; must carry the {@code fieldName} feature
 * @return the filtered results or null if error occurs.
 */
public ImageSearchHits filter(ImageSearchHits results, Document query) {
    // create a double[items][histogram]
    tempFeature = null;
    LinkedList<double[]> features = new LinkedList<double[]>();
    try {
        // Reflective factory: featureClass must have a no-arg constructor.
        tempFeature = (LireFeature) featureClass.newInstance();
    } catch (Exception e) {
        logger.severe("Could not create feature " + featureClass.getName() + " (" + e.getMessage() + ").");
        return null;
    }
    // Collect histograms from the result set, skipping docs without the field.
    for (int i = 0; i < results.length(); i++) {
        Document d = results.doc(i);
        if (d.getField(fieldName) != null) {
            tempFeature.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes,
                    d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length);
            features.add(tempFeature.getDoubleHistogram());
        }
    }
    // Decode the query feature; abort if the query lacks the field.
    if (query.getField(fieldName) != null) {
        tempFeature.setByteArrayRepresentation(query.getField(fieldName).binaryValue().bytes,
                query.getField(fieldName).binaryValue().offset, query.getField(fieldName).binaryValue().length);
    } else {
        logger.severe("Query document is missing the given feature " + featureClass.getName() + ".");
        return null;
    }
    // Row 0 is the query; rows 1..n are the results, in collection order.
    double[][] matrixData = new double[features.size() + 1][tempFeature.getDoubleHistogram().length];
    System.arraycopy(tempFeature.getDoubleHistogram(), 0, matrixData[0], 0,
            tempFeature.getDoubleHistogram().length);
    int count = 1;
    for (Iterator<double[]> iterator = features.iterator(); iterator.hasNext();) {
        double[] next = iterator.next();
        System.arraycopy(next, 0, matrixData[count], 0, next.length);
        count++;
    }
    // Diagnostic sweep only: NaNs are reported to stderr but not repaired.
    for (int i = 0; i < matrixData.length; i++) {
        double[] doubles = matrixData[i];
        for (int j = 0; j < doubles.length; j++) {
            if (Double.isNaN(doubles[j]))
                System.err.println("Value is NaN");
            ; // NOTE(review): stray empty statement kept from the original; harmless.
        }
    }
    // Build the matrix and take its SVD (timed, but the timing is never reported).
    Array2DRowRealMatrix m = new Array2DRowRealMatrix(matrixData);
    long ms = System.currentTimeMillis();
    SingularValueDecomposition svd = new SingularValueDecomposition(m);
    ms = System.currentTimeMillis() - ms;
    double[] singularValues = svd.getSingularValues();
    RealMatrix s = svd.getS();
    // if no number of dimensions is given reduce to a tenth.
    if (numberOfDimensions < 1)
        numberOfDimensions = singularValues.length / 10;
    // Zero the trailing singular values to project onto the leading subspace.
    for (int i = numberOfDimensions; i < singularValues.length; i++) {
        s.setEntry(i, i, 0);
    }
    // Low-rank reconstruction U * S' * V^T.
    RealMatrix mNew = svd.getU().multiply(s).multiply(svd.getVT());
    double[][] data = mNew.getData();
    // Re-score each result row against the reconstructed query row (row 0) by L1 distance.
    TreeSet<SimpleResult> result = new TreeSet<SimpleResult>();
    double maxDistance = 0;
    double[] queryData = data[0];
    for (int i = 1; i < data.length; i++) {
        double[] doubles = data[i];
        double distance = MetricsUtils.distL1(doubles, queryData);
        result.add(new SimpleResult((float) distance, results.doc(i - 1), i - 1));
        maxDistance = Math.max(maxDistance, distance);
    }
    ImageSearchHits hits;
    hits = new SimpleImageSearchHits(result, (float) maxDistance);
    return hits;
}
From source file:net.semanticmetadata.lire.filters.LsaFilter.java
/**
 * Re-ranks a result set via latent semantic analysis: stacks the query feature vector and
 * all result feature vectors into a matrix, zeroes all but the leading singular values of
 * its SVD, and re-scores every result by L1 distance to the query row in the reduced space.
 *
 * @param results the hits to re-rank (documents are re-fetched through {@code reader});
 *                entries missing {@code fieldName} are skipped when collecting features
 *                (NOTE(review): skipped entries shift row indices relative to
 *                {@code results}, and an IOException leaves {@code d} null — both would
 *                break the pairing/NPE below; verify against callers)
 * @param reader  index reader used to resolve document IDs to stored documents
 * @param query   the query document; must carry the {@code fieldName} feature
 * @return the filtered results or null if error occurs.
 */
public ImageSearchHits filter(ImageSearchHits results, IndexReader reader, Document query) {
    // create a double[items][histogram]
    tempFeature = null;
    LinkedList<double[]> features = new LinkedList<double[]>();
    try {
        // Reflective factory: featureClass must have a no-arg constructor.
        tempFeature = (LireFeature) featureClass.newInstance();
    } catch (Exception e) {
        logger.severe("Could not create feature " + featureClass.getName() + " (" + e.getMessage() + ").");
        return null;
    }
    // Collect feature vectors from the result set, skipping docs without the field.
    for (int i = 0; i < results.length(); i++) {
        Document d = null;
        try {
            d = reader.document(results.documentID(i));
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (d.getField(fieldName) != null) {
            tempFeature.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes,
                    d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length);
            features.add(tempFeature.getFeatureVector());
        }
    }
    // Decode the query feature; abort if the query lacks the field.
    if (query.getField(fieldName) != null) {
        tempFeature.setByteArrayRepresentation(query.getField(fieldName).binaryValue().bytes,
                query.getField(fieldName).binaryValue().offset, query.getField(fieldName).binaryValue().length);
    } else {
        logger.severe("Query document is missing the given feature " + featureClass.getName() + ".");
        return null;
    }
    // Row 0 is the query; rows 1..n are the results, in collection order.
    double[][] matrixData = new double[features.size() + 1][tempFeature.getFeatureVector().length];
    System.arraycopy(tempFeature.getFeatureVector(), 0, matrixData[0], 0,
            tempFeature.getFeatureVector().length);
    int count = 1;
    for (Iterator<double[]> iterator = features.iterator(); iterator.hasNext();) {
        double[] next = iterator.next();
        System.arraycopy(next, 0, matrixData[count], 0, next.length);
        count++;
    }
    // Diagnostic sweep only: NaNs are reported to stderr but not repaired.
    for (int i = 0; i < matrixData.length; i++) {
        double[] doubles = matrixData[i];
        for (int j = 0; j < doubles.length; j++) {
            if (Double.isNaN(doubles[j]))
                System.err.println("Value is NaN");
            ; // NOTE(review): stray empty statement kept from the original; harmless.
        }
    }
    // Build the matrix and take its SVD (timed, but the timing is never reported).
    Array2DRowRealMatrix m = new Array2DRowRealMatrix(matrixData);
    long ms = System.currentTimeMillis();
    SingularValueDecomposition svd = new SingularValueDecomposition(m);
    ms = System.currentTimeMillis() - ms;
    double[] singularValues = svd.getSingularValues();
    RealMatrix s = svd.getS();
    // if no number of dimensions is given reduce to a tenth.
    if (numberOfDimensions < 1)
        numberOfDimensions = singularValues.length / 10;
    // Zero the trailing singular values to project onto the leading subspace.
    for (int i = numberOfDimensions; i < singularValues.length; i++) {
        s.setEntry(i, i, 0);
    }
    // Low-rank reconstruction U * S' * V^T.
    RealMatrix mNew = svd.getU().multiply(s).multiply(svd.getVT());
    double[][] data = mNew.getData();
    // Re-score each result row against the reconstructed query row (row 0) by L1 distance.
    TreeSet<SimpleResult> result = new TreeSet<SimpleResult>();
    double maxDistance = 0;
    double[] queryData = data[0];
    for (int i = 1; i < data.length; i++) {
        double[] doubles = data[i];
        double distance = MetricsUtils.distL1(doubles, queryData);
        result.add(new SimpleResult((float) distance, results.documentID(i - 1)));
        maxDistance = Math.max(maxDistance, distance);
    }
    ImageSearchHits hits;
    hits = new SimpleImageSearchHits(result, (float) maxDistance);
    return hits;
}
From source file:com.opengamma.strata.math.impl.linearalgebra.SVDecompositionCommonsResult.java
/**
 * Wraps a Commons Math singular value decomposition result, eagerly unwrapping every
 * component into this library's own matrix/vector types.
 *
 * @param svd The result of the SV decomposition, not null
 */
public SVDecompositionCommonsResult(SingularValueDecomposition svd) {
    ArgChecker.notNull(svd, "svd");
    // Scalar diagnostics of the decomposition.
    _condition = svd.getConditionNumber();
    _norm = svd.getNorm();
    _rank = svd.getRank();
    _singularValues = svd.getSingularValues();
    // Matrix factors, converted out of Commons Math representations.
    _u = CommonsMathWrapper.unwrap(svd.getU());
    _uTranspose = CommonsMathWrapper.unwrap(svd.getUT());
    _s = CommonsMathWrapper.unwrap(svd.getS());
    _v = CommonsMathWrapper.unwrap(svd.getV());
    _vTranspose = CommonsMathWrapper.unwrap(svd.getVT());
    // Least-squares solver backed by this decomposition.
    _solver = svd.getSolver();
}
From source file:hivemall.utils.math.MatrixUtilsTest.java
/**
 * Verifies that one round of power iteration (MatrixUtils.power1) recovers the leading
 * singular triple of a small 2x3 matrix, compared against Commons Math's full SVD.
 */
@Test
public void testPower1() {
    final RealMatrix A = new Array2DRowRealMatrix(
            new double[][] { new double[] { 1, 2, 3 }, new double[] { 4, 5, 6 } });
    // Random starting vector for the iteration.
    final double[] x = new double[3];
    x[0] = Math.random();
    x[1] = Math.random();
    x[2] = Math.random();
    // Outputs: leading left singular vector (u), right singular vector (v).
    final double[] u = new double[2];
    final double[] v = new double[3];
    final double s = MatrixUtils.power1(A, x, 2, u, v);
    // Reference decomposition to validate against.
    final SingularValueDecomposition svdA = new SingularValueDecomposition(A);
    Assert.assertArrayEquals(svdA.getU().getColumn(0), u, 0.001d);
    Assert.assertArrayEquals(svdA.getV().getColumn(0), v, 0.001d);
    Assert.assertEquals(svdA.getSingularValues()[0], s, 0.001d);
}
From source file:edu.cmu.tetrad.util.TetradMatrix1.java
/**
 * Returns the principal square root of this matrix computed via SVD: for a symmetric
 * positive semidefinite matrix A = U * S * V^T, sqrt(A) = U * sqrt(S) * V^T.
 *
 * <p>Fixes two defects in the previous version, which set each diagonal entry to
 * {@code 1.0 / s[i]} (the reciprocal — yielding a pseudo-inverse-like factor, not a
 * square root, and dividing by zero for rank-deficient input) and reconstructed with
 * {@code V} instead of {@code V^T}.
 *
 * <p>NOTE(review): the result is only a true matrix square root when this matrix is
 * symmetric positive semidefinite (so that U and V coincide up to sign) — confirm that
 * callers only invoke this on covariance-like matrices.
 *
 * @return a new matrix B such that B * B equals this matrix (for symmetric PSD input)
 */
public TetradMatrix1 sqrt() {
    SingularValueDecomposition svd = new SingularValueDecomposition(getRealMatrix());
    RealMatrix U = svd.getU();
    double[] s = svd.getSingularValues();
    // Take the square root of each singular value; sqrt(0) = 0, so singular
    // (rank-deficient) matrices are handled without division by zero.
    for (int i = 0; i < s.length; i++) s[i] = Math.sqrt(s[i]);
    RealMatrix S = new BlockRealMatrix(s.length, s.length);
    for (int i = 0; i < s.length; i++) S.setEntry(i, i, s[i]);
    // Reconstruct with V transposed: U * sqrt(S) * V^T.
    RealMatrix sqrt = U.multiply(S).multiply(svd.getVT());
    return new TetradMatrix1(sqrt);
}
From source file:org.eclipse.dataset.LinearAlgebra.java
/**
 * Computes the singular values of a dataset.
 *
 * @param a input dataset
 * @return array of singular values, in non-increasing order
 */
public static double[] calcSingularValues(Dataset a) {
    // Delegate entirely to Commons Math after converting the dataset.
    return new SingularValueDecomposition(createRealMatrix(a)).getSingularValues();
}
From source file:org.eclipse.dataset.LinearAlgebra.java
/** * Calculate singular value decomposition A = U S V^T * @param a/*from w w w. j ava 2 s. c o m*/ * @return array of U - orthogonal matrix, s - singular values vector, V - orthogonal matrix */ public static Dataset[] calcSingularValueDecomposition(Dataset a) { SingularValueDecomposition svd = new SingularValueDecomposition(createRealMatrix(a)); return new Dataset[] { createDataset(svd.getU()), new DoubleDataset(svd.getSingularValues()), createDataset(svd.getV()) }; }