Usage examples for `org.apache.commons.math3.linear.ArrayRealVector#mapMultiply(double)`
public RealVector mapMultiply(double d)
From source file:fp.overlapr.algorithmen.StressMajorization.java
/**
 * Solves the linear system A x = b with the Jacobi-preconditioned conjugate
 * gradients (PCG) method, starting from the initial guess {@code werte}.
 *
 * @param A     symmetric positive-definite system matrix (assumed; not checked here — TODO confirm at call sites)
 * @param b     right-hand-side vector
 * @param werte initial guess for the solution vector
 * @return the approximate solution once the residual norm drops to {@code TOL} or below
 * @deprecated retained for reference; see the class for the replacement solver
 */
@Deprecated
private static ArrayRealVector conjugateGradientsMethod(Array2DRowRealMatrix A, ArrayRealVector b,
        ArrayRealVector werte) {
    // Build the Jacobi (diagonal) preconditioner: M^-1 with 1/A[i][i] on the
    // diagonal and zeros elsewhere. NOTE(review): divides by A[i][i] without a
    // zero check — a zero diagonal entry yields Infinity/NaN; confirm inputs.
    Array2DRowRealMatrix preJacobi = new Array2DRowRealMatrix(A.getRowDimension(), A.getColumnDimension());
    preJacobi.walkInRowOrder(new DefaultRealMatrixChangingVisitor() {
        @Override
        public double visit(int row, int column, double value) {
            if (row == column) {
                return 1 / A.getEntry(row, column);
            } else {
                return 0.0;
            }
        }
    });
    // x_k: current iterate, initialized from the caller-supplied guess.
    ArrayRealVector x_k = new ArrayRealVector(werte);
    // r_k: residual b - A x_k.
    ArrayRealVector r_k = b.subtract(A.operate(x_k));
    // h_k: preconditioned residual M^-1 r_k.
    ArrayRealVector h_k = (ArrayRealVector) preJacobi.operate(r_k);
    // Initial search direction d_0 = h_0 (preconditioned residual, not the raw residual).
    ArrayRealVector d_k = h_k;
    // Next-step iterate, residual, direction, preconditioned residual,
    // step size alpha, direction-correction factor beta, and the cached
    // matrix-vector product z.
    ArrayRealVector x_k1;
    ArrayRealVector r_k1;
    ArrayRealVector d_k1;
    ArrayRealVector h_k1;
    double alpha;
    double beta;
    ArrayRealVector z;
    do {
        // Cache A d_k so the matrix-vector product is computed only once per iteration.
        z = (ArrayRealVector) A.operate(d_k);
        // Line search: step from x_k along d_k to the minimum of the quadratic
        // form, and update the residual accordingly.
        alpha = r_k.dotProduct(h_k) / d_k.dotProduct(z);
        x_k1 = x_k.add(d_k.mapMultiply(alpha));
        r_k1 = r_k.subtract(z.mapMultiply(alpha));
        h_k1 = (ArrayRealVector) preJacobi.operate(r_k1);
        // Correct the search direction (Fletcher–Reeves-style beta using the
        // preconditioned residuals).
        beta = r_k1.dotProduct(h_k1) / r_k.dotProduct(h_k);
        d_k1 = h_k1.add(d_k.mapMultiply(beta));
        // Shift the "next" values into the current slots for the following iteration.
        x_k = x_k1;
        r_k = r_k1;
        d_k = d_k1;
        h_k = h_k1;
    } while (r_k1.getNorm() > TOL);
    return x_k1;
}
From source file:automenta.vivisect.dimensionalize.HyperassociativeMap.java
/**
 * Copies the coordinates of node {@code n} into {@code v} and returns a new
 * vector scaled down by {@code 1 / scale}.
 */
private ArrayRealVector getThePosition(V n, ArrayRealVector v) {
    // Fill v's backing array in place with n's raw coordinates.
    getPosition(n, v.getDataRef());
    // Scale into normalized space; mapMultiply allocates a fresh vector.
    final double inverseScale = 1.0 / scale;
    return (ArrayRealVector) v.mapMultiply(inverseScale);
}
From source file:hivemall.anomaly.SDAR2D.java
/**
 * Performs one online update of the SDAR (sequentially discounting AR) model
 * and returns the one-step prediction \hat{x}.
 *
 * @param x series of input vectors in LIFO order (x[0] is the newest, x_t)
 * @param k AR window size
 * @return x_hat, the predicted value of x_t
 * @link https://en.wikipedia.org/wiki/Matrix_multiplication#Outer_product
 */
@Nonnull
public RealVector update(@Nonnull final ArrayRealVector[] x, final int k) {
    // NOTE(review): the check is x.length >= 1 but the message says "greater
    // than 1" — message and condition disagree; confirm which is intended.
    Preconditions.checkArgument(x.length >= 1, "x.length MUST be greater than 1: " + x.length);
    Preconditions.checkArgument(k >= 0, "k MUST be greater than or equals to 0: ", k);
    Preconditions.checkArgument(k < _C.length,
        "k MUST be less than |C| but " + "k=" + k + ", |C|=" + _C.length);
    final ArrayRealVector x_t = x[0];
    final int dims = x_t.getDimension();
    // First call: seed the mean with x_t, zero the covariance, and return a
    // zero prediction — no history to predict from yet.
    if (_initialized == false) {
        this._mu = x_t.copy();
        this._sigma = new BlockRealMatrix(dims, dims);
        assert (_sigma.isSquare());
        this._initialized = true;
        return new ArrayRealVector(dims);
    }
    Preconditions.checkArgument(k >= 1, "k MUST be greater than 0: ", k);
    // Keep the old parameters so the caller can compute the Hellinger distance
    // between the previous and updated distributions.
    this._muOld = _mu.copy();
    this._sigmaOld = _sigma.copy();
    // Update the mean vector with discounting factor _r:
    // \hat{\mu} := (1-r) \hat{\mu} + r x_t
    this._mu = _mu.mapMultiply(1.d - _r).add(x_t.mapMultiply(_r));
    // Residuals (x_{t-j} - \hat{\mu}) for j = 0..k.
    final RealVector[] xResidual = new RealVector[k + 1];
    for (int j = 0; j <= k; j++) {
        xResidual[j] = x[j].subtract(_mu);
    }
    // Update the lag-covariance matrices:
    // C_j := (1-r) C_j + r (x_t - \hat{\mu}) (x_{t-j} - \hat{\mu})'
    final RealMatrix[] C = this._C;
    final RealVector rxResidual0 = xResidual[0].mapMultiply(_r); // r (x_t - \hat{\mu})
    for (int j = 0; j <= k; j++) {
        RealMatrix Cj = C[j];
        if (Cj == null) {
            // First observation of lag j: no previous estimate to discount.
            C[j] = rxResidual0.outerProduct(x[j].subtract(_mu));
        } else {
            C[j] = Cj.scalarMultiply(1.d - _r).add(rxResidual0.outerProduct(x[j].subtract(_mu)));
        }
    }
    // Solve the block Yule–Walker equations for the AR coefficient matrices A_i:
    // C_j = \sum_{i=1}^{k} A_i C_{j-i}  for j = 1..k, with C_{-i} = C_i'.
    // In block form:
    //   /C_1\   /A_1\   /C_0     |C_1'    |C_2'    | . . . |C_{k-1}' \
    //   |---|   |---|   |--------+--------+--------+       +---------|
    //   |C_2|   |A_2|   |C_1     |C_0     |C_1'    |   .             |
    //   |---|   |---|   |--------+--------+--------+     .           |
    //   |C_3| = |A_3| * |C_2     |C_1     |C_0     |       .         |
    //   | . |   | . |   |--------+--------+--------+                 |
    //   | . |   | . |   |   .                          .             |
    //   |---|   |---|   |--------+                          +--------|
    //   \C_k/   \A_k/   \C_{k-1} | . . .                    |C_0     /
    // NOTE(review): MatrixUtils here is the project's own helper (toeplitz /
    // combinedMatrices / solve), not the Commons Math class of the same name.
    RealMatrix[][] rhs = MatrixUtils.toeplitz(C, k);
    RealMatrix[] lhs = Arrays.copyOfRange(C, 1, k + 1);
    RealMatrix R = MatrixUtils.combinedMatrices(rhs, dims);
    RealMatrix L = MatrixUtils.combinedMatrices(lhs);
    RealMatrix A = MatrixUtils.solve(L, R, false);
    // Predict: \hat{x} = \hat{\mu} + \sum_{i=1}^k A_i (x_{t-i} - \hat{\mu})
    RealVector x_hat = _mu.copy();
    for (int i = 0; i < k; i++) {
        int offset = i * dims;
        RealMatrix Ai = A.getSubMatrix(offset, offset + dims - 1, 0, dims - 1);
        x_hat = x_hat.add(Ai.operate(xResidual[i + 1]));
    }
    // Update the model covariance with the prediction residual:
    // \Sigma := (1-r) \Sigma + r (x - \hat{x}) (x - \hat{x})'
    RealVector xEstimateResidual = x_t.subtract(x_hat);
    this._sigma = _sigma.scalarMultiply(1.d - _r)
            .add(xEstimateResidual.mapMultiply(_r).outerProduct(xEstimateResidual));
    return x_hat;
}