Example usage for org.apache.commons.math3.linear RealMatrix getRow

Introduction

This page collects example usages of org.apache.commons.math3.linear.RealMatrix#getRow, taken from the source files listed below.

Prototype

double[] getRow(int row) throws OutOfRangeException;

Document

Gets the entries of the given row as a double[] array. An OutOfRangeException is thrown if the row index is not valid.
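
Before the project examples, here is a minimal, self-contained sketch of the call (assuming Commons Math 3 is on the classpath; the matrix values and the class name GetRowExample are illustrative only):

import org.apache.commons.math3.exception.OutOfRangeException;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;

public class GetRowExample {
    public static void main(String[] args) {
        // Build a small 2x3 matrix and read back its first row.
        RealMatrix m = MatrixUtils.createRealMatrix(new double[][] {
                { 1.0, 2.0, 3.0 },
                { 4.0, 5.0, 6.0 }
        });

        double[] firstRow = m.getRow(0); // row 0 as a new array: {1.0, 2.0, 3.0}
        System.out.println(java.util.Arrays.toString(firstRow));

        // An invalid row index triggers an OutOfRangeException.
        try {
            m.getRow(2);
        } catch (OutOfRangeException ex) {
            System.out.println("row index out of range: " + ex.getMessage());
        }
    }
}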

Usage

From source file: feature.lowLevel.audio.FeatureExtractor.java

/**
 * @param waveSegment audio data
 * @param opt Feature Extraction Options
 * @param fftWindowSize size of window for FFT calculation
 * @return RealMatrixExt array containing one RealMatrixExt for each feature set extracted
 */
public RealMatrixExt[] extractFeatureSets(short[] waveSegment, FeatureExtractionOptions opt,
        int fftWindowSize) {
    int segmentSize = waveSegment.length;
    int fi = 0; // feature set index
    RealMatrixExt[] featureSets = new RealMatrixExt[opt.getNumberOfFeatureSets()];

    /* convert to double precision and */
    /* scale wave data to adjust according to hearing threshold */

    @SuppressWarnings("unused")
    double divisor;

    if (sampleSizeInBits == 8) {
        divisor = 256;
    } else {
        divisor = 32768;
    }

    double[] wav = new double[segmentSize];

    for (int i = 0; i < segmentSize; i++) {
        wav[i] = waveSegment[i];
    }

    /*
     * compute spectrogram for each wave segment (with 50 % overlap in FFT computation)
     */

    double[][] spec = Spectrogram.computeSpectrogram(wav, fftWindowSize, fftWindowSize / 2, Window.HAMMING);

    int nFrames = spec.length;

    /* group to bark bands */

    BarkScale barkScale = new BarkScale(sampleRate, fftWindowSize);
    int nBands = barkScale.getNumberOfBarkBands();

    double[][] specBark = new double[nFrames][nBands];

    for (int t = 0; t < nFrames; t++) {
        specBark[t] = barkScale.apply(spec[t]);
    }

    /* decibel */
    for (int t = 0; t < nFrames; t++) {
        for (int b = 0; b < nBands; b++) {
            if (specBark[t][b] < 1) {
                specBark[t][b] = 0; // in Matlab code this is 1
            } else {
                specBark[t][b] = 10.0 * Math.log10(specBark[t][b]); // convert to dB
            }
        }
    }

    /* Sone */

    for (int t = 0; t < nFrames; t++) {
        for (int b = 0; b < nBands; b++) {
            if (specBark[t][b] >= 40) {
                specBark[t][b] = Math.pow(2, ((specBark[t][b] - 40) / 10));
            } else {
                specBark[t][b] = Math.pow((specBark[t][b] / 40), 2.642);
            }
        }
    }

    /* transpose matrix (for efficient row processing) */
    RealMatrix specBarkT = new RealMatrixExt(specBark).transpose();

    /* compute SSD features */

    if (opt.hasSSD()) {
        featureSets[fi++] = computeStatisticalSpectrumDescriptor(specBarkT, opt.getBandLimit());
    }

    if (!(opt.hasRP() || opt.hasRH()))
        return featureSets;

    /* FFT */

    double[][] specModAmp = new double[nBands][];

    FFT FFTreal = new FFT();

    for (int b = 0; b < nBands; b++) {
        // compute FFT magnitude of each band
        specModAmp[b] = FFTreal.computeMagnitude(specBarkT.getRow(b));
    }

    /* Fluctuation Strength Curve */

    // resolution of modulation frequency axis (0.17 Hz)
    double modulationFreqResolution = 1 / ((double) segmentSize / (double) sampleRate);

    for (int b = 0; b < nBands; b++) {
        specModAmp[b][0] = 0; // omit DC component (would cause deviations while blurring)
        for (int t = 1; t <= nFrames; t++) // skip DC component (avoid div/0)
        {
            double modFreq = modulationFreqResolution * t;
            double fluctWeight = 1 / (modFreq / 4 + 4 / modFreq);
            specModAmp[b][t] = specModAmp[b][t] * fluctWeight;
        }
    }

    /* Blurring */
    int bandLimit = opt.getBandLimit();
    int modAmplLimit = opt.getModAmpLimit();
    double[][] result = new double[bandLimit][modAmplLimit];

    if (nBands < bandLimit) {
        // requested number of bands is greater than the actual number of bands
        // fill remainder with 0
        for (int b = nBands; b < bandLimit; b++) {
            Arrays.fill(result[b], 0);
        }
        bandLimit = nBands;
    }

    for (int b = 0; b < bandLimit; b++) {
        for (int t = 1; t <= modAmplLimit; t++) {
            // skip DC component: start with 1
            result[b][t - 1] = specModAmp[b][t];
        }
    }

    RealMatrixExt rhythmPattern = new RealMatrixExt(result);
    rhythmPattern.setType(RealMatrixExt.TYPE_RP);

    if (opt.hasRP()) {
        featureSets[fi++] = rhythmPattern;
    }

    /* compute RH features */

    if (opt.hasRH()) {
        featureSets[fi++] = computeRhythmHistogram(rhythmPattern);
    }

    return featureSets;
}

From source file: com.datumbox.framework.machinelearning.featureselection.continuous.PCA.java

@Override
protected void filterFeatures(Dataset newdata) {
    ModelParameters modelParameters = knowledgeBase.getModelParameters();

    //convert data into matrix
    Map<Object, Integer> feature2ColumnId = modelParameters.getFeature2ColumnId();
    MatrixDataset matrixDataset = MatrixDataset.parseDataset(newdata, feature2ColumnId);
    RealMatrix X = matrixDataset.getX();
    /*
    //subtracting means
    double[] meanValues = modelParameters.getMean();
    int n = newdata.size();
    int cols = feature2ColumnId.size();
    for(int row=0;row<n;++row) {
        for(int columnId=0;columnId<cols;++columnId) {
            X.addToEntry(row, columnId, -meanValues[columnId]); //inplace subtraction!!!
        }
    }
    */
    RealMatrix components = new BlockRealMatrix(modelParameters.getComponents());

    //multiplying the data with components
    X = X.multiply(components);

    Dataset transformedDataset = new Dataset();

    for (Record r : newdata) {
        Integer recordId = r.getId();

        Record newR = new Record();
        int componentId = 0;
        for (double value : X.getRow(recordId)) {
            newR.getX().put(componentId, value);
            ++componentId;
        }
        newR.setY(r.getY());

        transformedDataset.add(newR);
    }

    newdata.clear();
    newdata.merge(transformedDataset);
}

From source file: lirmm.inria.fr.math.BigSparseRealMatrixTest.java

@Test
public void testGetRow() {
    RealMatrix m = new BigSparseRealMatrix(subTestData);
    checkArrays(subRow0[0], m.getRow(0));
    checkArrays(subRow3[0], m.getRow(3));
    try {
        m.getRow(-1);
        Assert.fail("Expecting OutOfRangeException");
    } catch (OutOfRangeException ex) {
        // expected
    }
    try {
        m.getRow(4);
        Assert.fail("Expecting OutOfRangeException");
    } catch (OutOfRangeException ex) {
        // expected
    }
}

From source file: lirmm.inria.fr.math.BigSparseRealMatrixTest.java

@Test
public void testGetVectors() {
    RealMatrix m = new BigSparseRealMatrix(testData);
    TestUtils.assertEquals("get row", m.getRow(0), testDataRow1, entryTolerance);
    TestUtils.assertEquals("get col", m.getColumn(2), testDataCol3, entryTolerance);
    try {
        m.getRow(10);
        Assert.fail("expecting OutOfRangeException");
    } catch (OutOfRangeException ex) {
        // ignored
    }
    try {
        m.getColumn(-1);
        Assert.fail("expecting OutOfRangeException");
    } catch (OutOfRangeException ex) {
        // ignored
    }
}

From source file: lirmm.inria.fr.math.BigSparseRealMatrixTest.java

@Test
public void testSetRow() {
    RealMatrix m = new BigSparseRealMatrix(subTestData);
    Assert.assertTrue(subRow3[0][0] != m.getRow(0)[0]);
    m.setRow(0, subRow3[0]);
    checkArrays(subRow3[0], m.getRow(0));
    try {
        m.setRow(-1, subRow3[0]);
        Assert.fail("Expecting OutOfRangeException");
    } catch (OutOfRangeException ex) {
        // expected
    }
    try {
        m.setRow(0, new double[5]);
        Assert.fail("Expecting MatrixDimensionMismatchException");
    } catch (MatrixDimensionMismatchException ex) {
        // expected
    }
}

From source file: eagle.security.userprofile.impl.UserProfileAnomalyEigenEvaluator.java

@Override
public List<MLCallbackResult> detect(final String user, final String algorithm,
        UserActivityAggModel userActivity, UserProfileEigenModel aModel) {
    RealMatrix inputData = userActivity.matrix();
    LOG.warn("EigenBasedAnomalyDetection predictAnomaly called with dimension: " + inputData.getRowDimension()
            + "x" + inputData.getColumnDimension());

    if (aModel == null) {
        LOG.warn("nothing to do as the input model does not have required values, returning from evaluating this algorithm..");
        return null;
    }

    List<MLCallbackResult> mlCallbackResults = new ArrayList<MLCallbackResult>();
    RealMatrix normalizedMat = normalizeData(inputData, aModel);

    UserCommandStatistics[] listStats = aModel.statistics();
    int colWithHighVariant = 0;

    for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
        if (listStats[j].isLowVariant() == false) {
            colWithHighVariant++;
        }
    }

    final Map<String, String> context = new HashMap<String, String>() {
        {
            put(UserProfileConstants.USER_TAG, user);
            put(UserProfileConstants.ALGORITHM_TAG, algorithm);
        }
    };

    Map<Integer, String> lineNoWithVariantBasedAnomalyDetection = new HashMap<Integer, String>();
    for (int i = 0; i < normalizedMat.getRowDimension(); i++) {
        MLCallbackResult aResult = new MLCallbackResult();
        aResult.setAnomaly(false);
        aResult.setContext(context);

        for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
            //LOG.info("mean for j=" + j + " is:" + listStats[j].getMean());
            //LOG.info("stddev for j=" + j + " is:" + listStats[j].getStddev());
            if (listStats[j].isLowVariant() == true) {
                //LOG.info(listOfCmds[j] + " is low variant");
                if (normalizedMat.getEntry(i, j) > listStats[j].getMean()) {
                    lineNoWithVariantBasedAnomalyDetection.put(i, "lowVariantAnomaly");
                    aResult.setAnomaly(true);
                    aResult.setTimestamp(userActivity.timestamp());
                    aResult.setFeature(listStats[j].getCommandName());
                    aResult.setAlgorithm(UserProfileConstants.EIGEN_DECOMPOSITION_ALGORITHM);
                    List<String> datapoints = new ArrayList<String>();
                    double[] rowVals = inputData.getRow(i);
                    for (double rowVal : rowVals)
                        datapoints.add(rowVal + "");
                    aResult.setDatapoints(datapoints);
                    aResult.setId(user);
                    //mlCallbackResults.add(aResult);
                } /*else {
                  aResult.setAnomaly(false);
                  aResult.setTimestamp(userActivity.timestamp());
                  mlCallbackResults.add(aResult);
                  }*/
            }
        }
        mlCallbackResults.add(i, aResult);
        //return results;
    }

    //LOG.info("results size here: " + results.length);

    //LOG.info("col with high variant: " + colWithHighVariant);
    RealMatrix finalMatWithoutLowVariantFeatures = new Array2DRowRealMatrix(normalizedMat.getRowDimension(),
            colWithHighVariant);
    //LOG.info("size of final test data: " + finalMatWithoutLowVariantFeatures.getRowDimension() +"x"+ finalMatWithoutLowVariantFeatures.getColumnDimension());
    int finalMatrixRow = 0;
    int finalMatrixCol = 0;
    for (int i = 0; i < normalizedMat.getRowDimension(); i++) {
        for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
            if (listStats[j].isLowVariant() == false) {
                finalMatWithoutLowVariantFeatures.setEntry(finalMatrixRow, finalMatrixCol,
                        normalizedMat.getEntry(i, j));
                finalMatrixCol++;
            }
        }
        finalMatrixCol = 0;
        finalMatrixRow++;
    }
    RealVector[] pcs = aModel.principalComponents();
    //LOG.info("pc size: " + pcs.getRowDimension() +"x" + pcs.getColumnDimension());

    RealMatrix finalInputMatTranspose = finalMatWithoutLowVariantFeatures.transpose();

    for (int i = 0; i < finalMatWithoutLowVariantFeatures.getRowDimension(); i++) {
        if (lineNoWithVariantBasedAnomalyDetection.get(i) == null) {
            //MLCallbackResult result = new MLCallbackResult();
            MLCallbackResult result = mlCallbackResults.get(i);
            //result.setContext(context);
            for (int sz = 0; sz < pcs.length; sz++) {
                double[] pc1 = pcs[sz].toArray();
                RealMatrix pc1Mat = new Array2DRowRealMatrix(pc1);
                RealMatrix transposePC1Mat = pc1Mat.transpose();
                RealMatrix testData = pc1Mat.multiply(transposePC1Mat)
                        .multiply(finalInputMatTranspose.getColumnMatrix(i));
                //LOG.info("testData size: " + testData.getRowDimension() + "x" + testData.getColumnDimension());
                RealMatrix testDataTranspose = testData.transpose();
                //LOG.info("testData transpose size: " + testDataTranspose.getRowDimension() + "x" + testDataTranspose.getColumnDimension());
                RealVector iRowVector = testDataTranspose.getRowVector(0);
                //RealVector pc1Vector = transposePC1Mat.getRowVector(sz);
                RealVector pc1Vector = transposePC1Mat.getRowVector(0);
                double distanceiRowAndPC1 = iRowVector.getDistance(pc1Vector);
                //LOG.info("distance from pc sz: " + sz + " " + distanceiRowAndPC1 + " " + model.getMaxL2Norm().getEntry(sz));
                //LOG.info("model.getMaxL2Norm().getEntry(sz):" + model.getMaxL2Norm().getEntry(sz));
                if (distanceiRowAndPC1 > aModel.maximumL2Norm().getEntry(sz)) {
                    //LOG.info("distance from pc sz: " + sz + " " + distanceiRowAndPC1 + " " + model.getMaxL2Norm().getEntry(sz));
                    result.setAnomaly(true);
                    result.setFeature(aModel.statistics()[sz].getCommandName());
                    result.setTimestamp(System.currentTimeMillis());
                    result.setAlgorithm(UserProfileConstants.EIGEN_DECOMPOSITION_ALGORITHM);
                    List<String> datapoints = new ArrayList<String>();
                    double[] rowVals = inputData.getRow(i);
                    for (double rowVal : rowVals)
                        datapoints.add(rowVal + "");
                    result.setDatapoints(datapoints);
                    result.setId(user);
                }
            }
            mlCallbackResults.set(i, result);
        }
    }
    return mlCallbackResults;
}

From source file: com.github.tteofili.looseen.yay.SGM.java

/**
 * Performs weight learning from the training examples using a (configurable) mini-batch gradient descent algorithm.
 *
 * @param samples the training examples
 * @return the final cost with the updated weights
 * @throws Exception if mini-batch gradient descent fails to converge or a numerical error occurs
 */
private double learnWeights(Sample... samples) throws Exception {

    int iterations = 0;

    double cost = Double.MAX_VALUE;

    int j = 0;

    // momentum
    RealMatrix vb = MatrixUtils.createRealMatrix(biases[0].getRowDimension(), biases[0].getColumnDimension());
    RealMatrix vb2 = MatrixUtils.createRealMatrix(biases[1].getRowDimension(), biases[1].getColumnDimension());
    RealMatrix vw = MatrixUtils.createRealMatrix(weights[0].getRowDimension(), weights[0].getColumnDimension());
    RealMatrix vw2 = MatrixUtils.createRealMatrix(weights[1].getRowDimension(),
            weights[1].getColumnDimension());

    long start = System.currentTimeMillis();
    int c = 1;
    RealMatrix x = MatrixUtils.createRealMatrix(configuration.batchSize, samples[0].getInputs().length);
    RealMatrix y = MatrixUtils.createRealMatrix(configuration.batchSize, samples[0].getOutputs().length);
    while (true) {

        int i = 0;
        for (int k = j * configuration.batchSize; k < j * configuration.batchSize
                + configuration.batchSize; k++) {
            Sample sample = samples[k % samples.length];
            x.setRow(i, sample.getInputs());
            y.setRow(i, sample.getOutputs());
            i++;
        }
        j++;

        long time = (System.currentTimeMillis() - start) / 1000;
        if (iterations % (1 + (configuration.maxIterations / 100)) == 0 && time > 60 * c) {
            c += 1;
            //                System.out.println("cost: " + cost + ", accuracy: " + evaluate(this) + " after " + iterations + " iterations in " + (time / 60) + " minutes (" + ((double) iterations / time) + " ips)");
        }

        RealMatrix w0t = weights[0].transpose();
        RealMatrix w1t = weights[1].transpose();

        RealMatrix hidden = rectifierFunction.applyMatrix(x.multiply(w0t));
        hidden.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public double visit(int row, int column, double value) {
                return value + biases[0].getEntry(0, column);
            }

            @Override
            public double end() {
                return 0;
            }
        });
        RealMatrix scores = hidden.multiply(w1t);
        scores.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public double visit(int row, int column, double value) {
                return value + biases[1].getEntry(0, column);
            }

            @Override
            public double end() {
                return 0;
            }
        });

        RealMatrix probs = scores.copy();
        int len = scores.getColumnDimension() - 1;
        for (int d = 0; d < configuration.window - 1; d++) {
            int startColumn = d * len / (configuration.window - 1);
            RealMatrix subMatrix = scores.getSubMatrix(0, scores.getRowDimension() - 1, startColumn,
                    startColumn + x.getColumnDimension());
            for (int sm = 0; sm < subMatrix.getRowDimension(); sm++) {
                probs.setSubMatrix(softmaxActivationFunction.applyMatrix(subMatrix.getRowMatrix(sm)).getData(),
                        sm, startColumn);
            }
        }

        RealMatrix correctLogProbs = MatrixUtils.createRealMatrix(x.getRowDimension(), 1);
        correctLogProbs.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public double visit(int row, int column, double value) {
                return -Math.log(probs.getEntry(row, getMaxIndex(y.getRow(row))));
            }

            @Override
            public double end() {
                return 0;
            }
        });
        double dataLoss = correctLogProbs.walkInOptimizedOrder(new RealMatrixPreservingVisitor() {
            private double d = 0;

            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public void visit(int row, int column, double value) {
                d += value;
            }

            @Override
            public double end() {
                return d;
            }
        }) / samples.length;

        double reg = 0d;
        reg += weights[0].walkInOptimizedOrder(new RealMatrixPreservingVisitor() {
            private double d = 0d;

            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public void visit(int row, int column, double value) {
                d += Math.pow(value, 2);
            }

            @Override
            public double end() {
                return d;
            }
        });
        reg += weights[1].walkInOptimizedOrder(new RealMatrixPreservingVisitor() {
            private double d = 0d;

            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public void visit(int row, int column, double value) {
                d += Math.pow(value, 2);
            }

            @Override
            public double end() {
                return d;
            }
        });

        double regLoss = 0.5 * configuration.regularizationLambda * reg;
        double newCost = dataLoss + regLoss;
        if (iterations == 0) {
            //                System.out.println("started with cost = " + dataLoss + " + " + regLoss + " = " + newCost);
        }

        if (Double.POSITIVE_INFINITY == newCost) {
            throw new Exception("failed to converge at iteration " + iterations + " with alpha "
                    + configuration.alpha + " : cost going from " + cost + " to " + newCost);
        } else if (iterations > 1
                && (newCost < configuration.threshold || iterations > configuration.maxIterations)) {
            cost = newCost;
            //                System.out.println("successfully converged after " + (iterations - 1) + " iterations (alpha:" + configuration.alpha + ",threshold:" + configuration.threshold + ") with cost " + newCost);
            break;
        } else if (Double.isNaN(newCost)) {
            throw new Exception("failed to converge at iteration " + iterations + " with alpha "
                    + configuration.alpha + " : cost calculation underflow");
        }

        // update registered cost
        cost = newCost;

        // calculate the derivatives to update the parameters

        RealMatrix dscores = probs.copy();
        dscores.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public double visit(int row, int column, double value) {
                return (y.getEntry(row, column) == 1 ? (value - 1) : value) / samples.length;
            }

            @Override
            public double end() {
                return 0;
            }
        });

        // get derivative on second layer
        RealMatrix dW2 = hidden.transpose().multiply(dscores);

        // regularize dw2
        dW2.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public double visit(int row, int column, double value) {
                return value + configuration.regularizationLambda * w1t.getEntry(row, column);
            }

            @Override
            public double end() {
                return 0;
            }
        });

        RealMatrix db2 = MatrixUtils.createRealMatrix(biases[1].getRowDimension(),
                biases[1].getColumnDimension());
        dscores.walkInOptimizedOrder(new RealMatrixPreservingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public void visit(int row, int column, double value) {
                db2.setEntry(0, column, db2.getEntry(0, column) + value);
            }

            @Override
            public double end() {
                return 0;
            }
        });

        RealMatrix dhidden = dscores.multiply(weights[1]);
        dhidden.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public double visit(int row, int column, double value) {
                return value < 0 ? 0 : value;
            }

            @Override
            public double end() {
                return 0;
            }
        });

        RealMatrix db = MatrixUtils.createRealMatrix(biases[0].getRowDimension(),
                biases[0].getColumnDimension());
        dhidden.walkInOptimizedOrder(new RealMatrixPreservingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public void visit(int row, int column, double value) {
                db.setEntry(0, column, db.getEntry(0, column) + value);
            }

            @Override
            public double end() {
                return 0;
            }
        });

        // get derivative on first layer
        RealMatrix dW = x.transpose().multiply(dhidden);

        // regularize
        dW.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
            @Override
            public void start(int rows, int columns, int startRow, int endRow, int startColumn, int endColumn) {

            }

            @Override
            public double visit(int row, int column, double value) {
                return value + configuration.regularizationLambda * w0t.getEntry(row, column);
            }

            @Override
            public double end() {
                return 0;
            }
        });

        RealMatrix dWt = dW.transpose();
        RealMatrix dWt2 = dW2.transpose();

        if (configuration.useNesterovMomentum) {

            // update nesterov momentum
            final RealMatrix vbPrev = vb.copy();
            final RealMatrix vb2Prev = vb2.copy();
            final RealMatrix vwPrev = vw.copy();
            final RealMatrix vw2Prev = vw2.copy();

            vb.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * db.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            vb2.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * db2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            vw.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * dWt.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            vw2.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * dWt2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            // update bias
            biases[0].walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.mu * vbPrev.getEntry(row, column)
                            + (1 + configuration.mu) * vb.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            biases[1].walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.mu * vb2Prev.getEntry(row, column)
                            + (1 + configuration.mu) * vb2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            // update the weights
            weights[0].walkInOptimizedOrder(new RealMatrixChangingVisitor() {

                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.mu * vwPrev.getEntry(row, column)
                            + (1 + configuration.mu) * vw.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            weights[1].walkInOptimizedOrder(new RealMatrixChangingVisitor() {

                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.mu * vw2Prev.getEntry(row, column)
                            + (1 + configuration.mu) * vw2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });
        } else if (configuration.useMomentum) {
            // update momentum
            vb.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * db.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            vb2.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * db2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            vw.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * dWt.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            vw2.walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return configuration.mu * value - configuration.alpha * dWt2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            // update bias
            biases[0].walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value + vb.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            biases[1].walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value + vb2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            // update the weights
            weights[0].walkInOptimizedOrder(new RealMatrixChangingVisitor() {

                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value + vw.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            weights[1].walkInOptimizedOrder(new RealMatrixChangingVisitor() {

                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value + vw2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });
        } else {
            // standard parameter update

            // update bias
            biases[0].walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.alpha * db.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            biases[1].walkInOptimizedOrder(new RealMatrixChangingVisitor() {
                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.alpha * db2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            // update the weights
            weights[0].walkInOptimizedOrder(new RealMatrixChangingVisitor() {

                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.alpha * dWt.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });

            weights[1].walkInOptimizedOrder(new RealMatrixChangingVisitor() {

                @Override
                public void start(int rows, int columns, int startRow, int endRow, int startColumn,
                        int endColumn) {

                }

                @Override
                public double visit(int row, int column, double value) {
                    return value - configuration.alpha * dWt2.getEntry(row, column);
                }

                @Override
                public double end() {
                    return 0;
                }
            });
        }

        iterations++;
    }

    return cost;
}

From source file: org.drugis.addis.presentation.SMAASerializer.java

private void insertPerCriterionMeasurement(ObjectMapper mapper, FullJointMeasurements m,
        ArrayNode performancesNode) {
    PerCriterionMeasurements measurements = (PerCriterionMeasurements) m;
    for (Criterion criterion : measurements.getCriteria()) {
        ObjectNode measurementNode = (ObjectNode) mapper.createObjectNode();
        measurementNode.put("criterion", toSlug(criterion.getName()));

        ObjectNode performanceNode = (ObjectNode) mapper.createObjectNode();
        CriterionMeasurement criterionMeasurement = measurements.getCriterionMeasurement(criterion);

        Class<? extends CriterionMeasurement> measurementType = criterionMeasurement.getClass();
        if (measurementType.equals(RelativeLogitGaussianCriterionMeasurement.class)
                || measurementType.equals(RelativeGaussianCriterionMeasurement.class)) {
            ObjectNode parameterNode = (ObjectNode) mapper.createObjectNode();
            String type = measurementType.equals(RelativeLogitGaussianCriterionMeasurement.class)
                    ? "relative-logit-normal"
                    : "relative-normal";
            performanceNode.put("type", type);

            GaussianMeasurement baseline;
            MultivariateGaussianCriterionMeasurement relativeMeasurement;

            CriterionMeasurement measurement = measurementType.cast(criterionMeasurement);
            if (measurementType.equals(RelativeLogitGaussianCriterionMeasurement.class)) {
                RelativeLogitGaussianCriterionMeasurement tmp = (RelativeLogitGaussianCriterionMeasurement) measurement;
                baseline = tmp.getGaussianMeasurement().getBaselineMeasurement();
                relativeMeasurement = tmp.getGaussianMeasurement().getRelativeMeasurement();
            } else {
                RelativeGaussianCriterionMeasurement tmp = (RelativeGaussianCriterionMeasurement) measurement;
                baseline = tmp.getBaselineMeasurement();
                relativeMeasurement = tmp.getRelativeMeasurement();
            }

            // Add baseline
            ObjectNode baselineNode = (ObjectNode) mapper.createObjectNode();
            baselineNode.put("type", "dnorm");
            baselineNode.put("name", toSlug(d_analysis.getBaseline().getLabel()));
            baselineNode.put("mu", baseline.getMean());
            baselineNode.put("sigma", baseline.getStDev());
            parameterNode.put("baseline", baselineNode);

            // Add relative
            ObjectNode relativeNode = (ObjectNode) mapper.createObjectNode();
            relativeNode.put("type", "dmnorm");
            ObjectNode relativeMuNode = (ObjectNode) mapper.createObjectNode();

            ObjectNode relativeCovNode = (ObjectNode) mapper.createObjectNode();
            ArrayNode relativeCovRowNames = (ArrayNode) mapper.createArrayNode();
            ArrayNode relativeCovColNames = (ArrayNode) mapper.createArrayNode();

            for (int i = 0; i < relativeMeasurement.getAlternatives().size(); ++i) {
                String alternative = toSlug(relativeMeasurement.getAlternatives().get(i).getName());
                relativeCovRowNames.add(alternative);
                relativeCovColNames.add(alternative);
                relativeMuNode.put(alternative, relativeMeasurement.getMeanVector().getEntry(i));
            }
            relativeCovNode.put("colnames", relativeCovColNames);
            relativeCovNode.put("rownames", relativeCovRowNames);

            ArrayNode relativeCovDataNode = (ArrayNode) mapper.createArrayNode();
            RealMatrix covarianceMatrix = relativeMeasurement.getCovarianceMatrix();
            for (int i = 0; i < covarianceMatrix.getRowDimension(); ++i) {
                ArrayNode row = (ArrayNode) mapper.createArrayNode();
                for (int j = 0; j < covarianceMatrix.getRowDimension(); ++j) {
                    row.add(covarianceMatrix.getRow(i)[j]);
                }
                relativeCovDataNode.add(row);
            }
            relativeCovNode.put("data", relativeCovDataNode);

            relativeNode.put("mu", relativeMuNode);
            relativeNode.put("cov", relativeCovNode);

            parameterNode.put("relative", relativeNode);

            performanceNode.put("parameters", parameterNode);
        }
        measurementNode.put("performance", performanceNode);

        performancesNode.add(measurementNode);
    }
}

From source file: org.knime.al.util.noveltydetection.knfst.MatrixFunctions.java

public static RealVector rowSums(final RealMatrix matrix) {
    final RealVector rowSums = MatrixUtils.createRealVector(new double[matrix.getRowDimension()]);
    for (int r = 0; r < matrix.getRowDimension(); r++) {
        final double[] row = matrix.getRow(r);
        for (final double cell : row) {
            rowSums.addToEntry(r, cell);
        }
    }
    return rowSums;
}
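
A rough usage sketch of the helper above (the matrix values and the class name RowSumsDemo are illustrative; it assumes Commons Math 3 on the classpath and simply inlines the same row-summing loop):

import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class RowSumsDemo {
    public static void main(String[] args) {
        RealMatrix m = MatrixUtils.createRealMatrix(new double[][] {
                { 1.0, 2.0 },
                { 3.0, 4.0 }
        });
        // Same idea as rowSums above: accumulate each row returned by getRow.
        RealVector sums = MatrixUtils.createRealVector(new double[m.getRowDimension()]);
        for (int r = 0; r < m.getRowDimension(); r++) {
            for (double cell : m.getRow(r)) {
                sums.addToEntry(r, cell);
            }
        }
        System.out.println(sums); // expected {3; 7}
    }
}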

From source file: put.ci.cevo.framework.algorithms.ApacheCMAES.java

/**
 * Update of the covariance matrix C.
 *
 * @param hsig    Flag indicating a small correction.
 * @param bestArx Fitness-sorted matrix of the argument vectors producing the current offspring.
 * @param arz     Unsorted matrix containing the Gaussian random values of the current offspring.
 * @param arindex Indices indicating the fitness-order of the current offspring.
 * @param xold    xmean matrix of the previous generation.
 */
private void updateCovariance(boolean hsig, final RealMatrix bestArx, final RealMatrix arz, final int[] arindex,
        final RealMatrix xold) {
    double negccov = 0;
    if (ccov1 + ccovmu > 0) {
        final RealMatrix arpos = bestArx.subtract(repmat(xold, 1, mu)).scalarMultiply(1 / sigma); // mu difference vectors
        final RealMatrix roneu = pc.multiply(pc.transpose()).scalarMultiply(ccov1); // rank one update
        // minor correction if hsig==false
        double oldFac = hsig ? 0 : ccov1 * cc * (2 - cc);
        oldFac += 1 - ccov1 - ccovmu;
        if (isActiveCMA) {
            // Adapt covariance matrix C active CMA
            negccov = (1 - ccovmu) * 0.25 * mueff / (FastMath.pow(dimension + 2, 1.5) + 2 * mueff);
            // keep at least 0.66 in all directions, small popsize are most
            // critical
            final double negminresidualvariance = 0.66;
            // where to make up for the variance loss
            final double negalphaold = 0.5;
            // prepare vectors, compute negative updating matrix Cneg
            final int[] arReverseIndex = reverse(arindex);
            RealMatrix arzneg = selectColumns(arz, MathArrays.copyOf(arReverseIndex, mu));
            RealMatrix arnorms = sqrt(sumRows(square(arzneg)));
            final int[] idxnorms = sortedIndices(arnorms.getRow(0));
            final RealMatrix arnormsSorted = selectColumns(arnorms, idxnorms);
            final int[] idxReverse = reverse(idxnorms);
            final RealMatrix arnormsReverse = selectColumns(arnorms, idxReverse);
            arnorms = divide(arnormsReverse, arnormsSorted);
            final int[] idxInv = inverse(idxnorms);
            final RealMatrix arnormsInv = selectColumns(arnorms, idxInv);
            // check and set learning rate negccov
            final double negcovMax = (1 - negminresidualvariance)
                    / square(arnormsInv).multiply(weights).getEntry(0, 0);
            if (negccov > negcovMax) {
                negccov = negcovMax;
            }
            arzneg = times(arzneg, repmat(arnormsInv, dimension, 1));
            final RealMatrix artmp = BD.multiply(arzneg);
            final RealMatrix Cneg = artmp.multiply(diag(weights)).multiply(artmp.transpose());
            oldFac += negalphaold * negccov;
            C = C.scalarMultiply(oldFac).add(roneu) // regard old matrix
                    .add(arpos.scalarMultiply( // plus rank one update
                            ccovmu + (1 - negalphaold) * negccov) // plus rank mu update
                            .multiply(times(repmat(weights, 1, dimension), arpos.transpose())))
                    .subtract(Cneg.scalarMultiply(negccov));
        } else {
            // Adapt covariance matrix C - nonactive
            C = C.scalarMultiply(oldFac) // regard old matrix
                    .add(roneu) // plus rank one update
                    .add(arpos.scalarMultiply(ccovmu) // plus rank mu update
                            .multiply(times(repmat(weights, 1, dimension), arpos.transpose())));
        }
    }
    updateBDFast(negccov);
}