Example usage for org.apache.commons.math3.linear RealVector getDistance

List of usage examples for org.apache.commons.math3.linear RealVector getDistance

Introduction

On this page you can find example usage for org.apache.commons.math3.linear RealVector getDistance.

Prototype

public double getDistance(RealVector v) throws DimensionMismatchException 

Document

Distance between two vectors.
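
As a quick orientation, the minimal sketch below (not taken from any of the projects listed under Usage) shows that getDistance returns the Euclidean (L2) distance between two vectors, which is the same value as subtracting them and taking the norm.

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

// Minimal sketch: getDistance(v) is the Euclidean (L2) distance,
// equal to this.subtract(v).getNorm().
public class GetDistanceSketch {
    public static void main(String[] args) {
        RealVector a = new ArrayRealVector(new double[] { 0, 3 });
        RealVector b = new ArrayRealVector(new double[] { 4, 0 });

        System.out.println(a.getDistance(b));        // 5.0
        System.out.println(a.subtract(b).getNorm()); // 5.0
    }
}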

Usage

From source file:Theta.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here

    double[] u = { 1, 1 };
    double[] v = { 7, 9 };

    RealVector vectorU = new ArrayRealVector(u);
    RealVector vectorV = new ArrayRealVector(v);

    double distance = vectorU.getDistance(vectorV);
    System.out.println(distance);

    //RealVector projected=vectorU.projection(vectorV);

    //System.out.println(projected.toString());

}
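
For these inputs the difference vector is (6, 8), so the program prints the Euclidean distance 10.0.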

From source file:game.utils.Utils.java

public static double getDistance(String type, RealVector v1, RealVector v2) {
    switch (type.toLowerCase()) {
    case "l1":
        return v1.getL1Distance(v2);
    case "l2":
        return v1.getDistance(v2);
    case "linf":
        return v1.getLInfDistance(v2);
    }
    return -1;
}
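
A hypothetical call to this helper could look like the following (the vectors are chosen only for illustration):

RealVector v1 = new ArrayRealVector(new double[] { 1, 2, 3 });
RealVector v2 = new ArrayRealVector(new double[] { 4, 6, 3 });

double l1 = Utils.getDistance("l1", v1, v2);     // |3| + |4| + |0| = 7.0
double l2 = Utils.getDistance("l2", v1, v2);     // sqrt(9 + 16 + 0) = 5.0
double linf = Utils.getDistance("linf", v1, v2); // max(3, 4, 0) = 4.0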

From source file:com.cloudera.oryx.kmeans.common.Centers.java

/**
 * Returns the minimum squared Euclidean distance between the given
 * {@code Vector} and a point contained in this instance.
 *
 * @param point The point
 * @return The minimum squared Euclidean distance from the point 
 */
public Distance getDistance(RealVector point) {
    double min = Double.POSITIVE_INFINITY;
    int index = -1;
    for (int i = 0; i < centers.size(); i++) {
        RealVector c = centers.get(i);
        double distance = c.getDistance(point);
        double distanceSquared = distance * distance;
        if (distanceSquared < min) {
            min = distanceSquared;
            index = i;
        }
    }
    return new Distance(min, index);
}
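
Note that getDistance returns the unsquared Euclidean distance, which is why the loop squares it before comparing. A standalone sketch of the same closest-center pattern, independent of the Oryx Centers class:

import java.util.Arrays;
import java.util.List;

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

// Sketch only: find the index of the closest center and the squared
// Euclidean distance to it, mirroring the loop above.
public class ClosestCenterSketch {
    public static void main(String[] args) {
        List<RealVector> centers = Arrays.<RealVector>asList(
                new ArrayRealVector(new double[] { 0, 0 }),
                new ArrayRealVector(new double[] { 5, 5 }));
        RealVector point = new ArrayRealVector(new double[] { 4, 4 });

        double min = Double.POSITIVE_INFINITY;
        int index = -1;
        for (int i = 0; i < centers.size(); i++) {
            double distance = centers.get(i).getDistance(point);
            if (distance * distance < min) {
                min = distance * distance;
                index = i;
            }
        }
        // Expected: closest center 1, squared distance ~2.0
        System.out.println("closest center " + index + ", squared distance " + min);
    }
}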

From source file:edu.oregonstate.eecs.mcplan.ml.GaussianMixtureModel.java

public double distance(final RealVector a, final RealVector b) {
    return a.getDistance(b);
}

From source file:edu.oregonstate.eecs.mcplan.ml.GaussianMixtureModel.java

private boolean hasConverged(final int c, final double pi, final RealVector mu, final RealMatrix Sigma) {
    return (pi - pi_[c]) < epsilon_ && mu.getDistance(mu_[c]) < epsilon_
            && Sigma.subtract(Sigma_[c]).getFrobeniusNorm() < epsilon_;
}

From source file:eagle.security.userprofile.model.eigen.UserProfileEigenModeler.java

private RealMatrix computeMaxDistanceOnPCs(int index) {

    RealMatrix pc = new Array2DRowRealMatrix(principalComponents[index].toArray());
    RealMatrix transposePC1 = pc.transpose();
    RealMatrix finalDataTranspose = finalMatrixWithoutLowVariantCmds.transpose();

    RealMatrix trainingData = pc.multiply(transposePC1).multiply(finalDataTranspose);
    RealMatrix trainingDataTranspose = trainingData.transpose();

    double maxDistance = 0.0;
    double minDistance = 0.0;
    RealVector p1 = null, p2 = null;
    if (LOG.isDebugEnabled())
        LOG.debug("Training data transpose size: " + trainingDataTranspose.getRowDimension() + "x"
                + trainingDataTranspose.getColumnDimension());
    for (int i = 0; i < trainingDataTranspose.getRowDimension(); i++) {
        RealVector iRowVector = new ArrayRealVector(trainingDataTranspose.getRow(i));
        RealVector transposePC1Vect = transposePC1.getRowVector(0);
        double distance = iRowVector.getDistance(transposePC1Vect);
        if (distance > maxDistance) {
            maxDistance = distance;
            p1 = iRowVector;
            p2 = transposePC1Vect;
        }
        if (distance < minDistance)
            minDistance = distance;
    }
    maximumL2Norm.setEntry(index, maxDistance);
    minimumL2Norm.setEntry(index, minDistance);
    minVector = p1;
    maxVector = p2;
    return trainingDataTranspose;
}
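
Here getDistance measures the Euclidean distance between each row of the projected training data and the first row vector of the transposed principal component; the largest and smallest distances found are stored in maximumL2Norm and minimumL2Norm and are later used as thresholds by the anomaly evaluator.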

From source file:eagle.security.userprofile.impl.UserProfileAnomalyEigenEvaluator.java

@Override
public List<MLCallbackResult> detect(final String user, final String algorithm,
        UserActivityAggModel userActivity, UserProfileEigenModel aModel) {
    RealMatrix inputData = userActivity.matrix();
    LOG.warn("EigenBasedAnomalyDetection predictAnomaly called with dimension: " + inputData.getRowDimension()
            + "x" + inputData.getColumnDimension());

    if (aModel == null) {
        LOG.warn(
                "nothing to do as the input model does not have required values, returning from evaluating this algorithm..");
        return null;
    }

    List<MLCallbackResult> mlCallbackResults = new ArrayList<MLCallbackResult>();
    RealMatrix normalizedMat = normalizeData(inputData, aModel);

    UserCommandStatistics[] listStats = aModel.statistics();
    int colWithHighVariant = 0;

    for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
        if (listStats[j].isLowVariant() == false) {
            colWithHighVariant++;
        }
    }

    final Map<String, String> context = new HashMap<String, String>() {
        {
            put(UserProfileConstants.USER_TAG, user);
            put(UserProfileConstants.ALGORITHM_TAG, algorithm);
        }
    };

    Map<Integer, String> lineNoWithVariantBasedAnomalyDetection = new HashMap<Integer, String>();
    for (int i = 0; i < normalizedMat.getRowDimension(); i++) {
        MLCallbackResult aResult = new MLCallbackResult();
        aResult.setAnomaly(false);
        aResult.setContext(context);

        for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
            //LOG.info("mean for j=" + j + " is:" + listStats[j].getMean());
            //LOG.info("stddev for j=" + j + " is:" + listStats[j].getStddev());
            if (listStats[j].isLowVariant() == true) {
                //LOG.info(listOfCmds[j] + " is low variant");
                if (normalizedMat.getEntry(i, j) > listStats[j].getMean()) {
                    lineNoWithVariantBasedAnomalyDetection.put(i, "lowVariantAnomaly");
                    aResult.setAnomaly(true);
                    aResult.setTimestamp(userActivity.timestamp());
                    aResult.setFeature(listStats[j].getCommandName());
                    aResult.setAlgorithm(UserProfileConstants.EIGEN_DECOMPOSITION_ALGORITHM);
                    List<String> datapoints = new ArrayList<String>();
                    double[] rowVals = inputData.getRow(i);
                    for (double rowVal : rowVals)
                        datapoints.add(rowVal + "");
                    aResult.setDatapoints(datapoints);
                    aResult.setId(user);
                    //mlCallbackResults.add(aResult);
                } /*else {
                  aResult.setAnomaly(false);
                  aResult.setTimestamp(userActivity.timestamp());
                  mlCallbackResults.add(aResult);
                  }*/
            }
        }
        mlCallbackResults.add(i, aResult);
        //return results;
    }

    //LOG.info("results size here: " + results.length);

    //LOG.info("col with high variant: " + colWithHighVariant);
    RealMatrix finalMatWithoutLowVariantFeatures = new Array2DRowRealMatrix(normalizedMat.getRowDimension(),
            colWithHighVariant);
    //LOG.info("size of final test data: " + finalMatWithoutLowVariantFeatures.getRowDimension() +"x"+ finalMatWithoutLowVariantFeatures.getColumnDimension());
    int finalMatrixRow = 0;
    int finalMatrixCol = 0;
    for (int i = 0; i < normalizedMat.getRowDimension(); i++) {
        for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
            if (listStats[j].isLowVariant() == false) {
                finalMatWithoutLowVariantFeatures.setEntry(finalMatrixRow, finalMatrixCol,
                        normalizedMat.getEntry(i, j));
                finalMatrixCol++;
            }
        }
        finalMatrixCol = 0;
        finalMatrixRow++;
    }
    RealVector[] pcs = aModel.principalComponents();
    //LOG.info("pc size: " + pcs.getRowDimension() +"x" + pcs.getColumnDimension());

    RealMatrix finalInputMatTranspose = finalMatWithoutLowVariantFeatures.transpose();

    for (int i = 0; i < finalMatWithoutLowVariantFeatures.getRowDimension(); i++) {
        if (lineNoWithVariantBasedAnomalyDetection.get(i) == null) {
            //MLCallbackResult result = new MLCallbackResult();
            MLCallbackResult result = mlCallbackResults.get(i);
            //result.setContext(context);
            for (int sz = 0; sz < pcs.length; sz++) {
                double[] pc1 = pcs[sz].toArray();
                RealMatrix pc1Mat = new Array2DRowRealMatrix(pc1);
                RealMatrix transposePC1Mat = pc1Mat.transpose();
                RealMatrix testData = pc1Mat.multiply(transposePC1Mat)
                        .multiply(finalInputMatTranspose.getColumnMatrix(i));
                //LOG.info("testData size: " + testData.getRowDimension() + "x" + testData.getColumnDimension());
                RealMatrix testDataTranspose = testData.transpose();
                //LOG.info("testData transpose size: " + testDataTranspose.getRowDimension() + "x" + testDataTranspose.getColumnDimension());
                RealVector iRowVector = testDataTranspose.getRowVector(0);
                //RealVector pc1Vector = transposePC1Mat.getRowVector(sz);
                RealVector pc1Vector = transposePC1Mat.getRowVector(0);
                double distanceiRowAndPC1 = iRowVector.getDistance(pc1Vector);
                //LOG.info("distance from pc sz: " + sz + " " + distanceiRowAndPC1 + " " + model.getMaxL2Norm().getEntry(sz));
                //LOG.info("model.getMaxL2Norm().getEntry(sz):" + model.getMaxL2Norm().getEntry(sz));
                if (distanceiRowAndPC1 > aModel.maximumL2Norm().getEntry(sz)) {
                    //LOG.info("distance from pc sz: " + sz + " " + distanceiRowAndPC1 + " " + model.getMaxL2Norm().getEntry(sz));
                    result.setAnomaly(true);
                    result.setFeature(aModel.statistics()[sz].getCommandName());
                    result.setTimestamp(System.currentTimeMillis());
                    result.setAlgorithm(UserProfileConstants.EIGEN_DECOMPOSITION_ALGORITHM);
                    List<String> datapoints = new ArrayList<String>();
                    double[] rowVals = inputData.getRow(i);
                    for (double rowVal : rowVals)
                        datapoints.add(rowVal + "");
                    result.setDatapoints(datapoints);
                    result.setId(user);
                }
            }
            mlCallbackResults.set(i, result);
        }
    }
    return mlCallbackResults;
}
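
In the second pass above, each test row is projected onto a principal component and getDistance compares the projected row vector with the component's row vector; a row whose distance exceeds the model's stored maximum L2 norm for that component is flagged as an anomaly.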

From source file:de.bund.bfr.math.LeastSquaresOptimization.java

@Override
public Result optimize(int nParameterSpace, int nOptimizations, boolean stopWhenSuccessful,
        Map<String, Double> minStartValues, Map<String, Double> maxStartValues, int maxIterations,
        DoubleConsumer progressListener, ExecutionContext exec) throws CanceledExecutionException {
    if (exec != null) {
        exec.checkCanceled();
    }

    progressListener.accept(0.0);

    List<ParamRange> ranges = MathUtils.getParamRanges(parameters, minStartValues, maxStartValues,
            nParameterSpace);
    RealVector targetVector = new ArrayRealVector(Doubles.toArray(targetValues));
    List<StartValues> startValuesList = MathUtils.createStartValuesList(ranges, nOptimizations,
            values -> targetVector
                    .getDistance(new ArrayRealVector(optimizerFunction.value(Doubles.toArray(values)))),
            progress -> progressListener.accept(0.5 * progress), exec);
    LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer();
    Result result = new Result();
    AtomicInteger count = new AtomicInteger(0);

    for (StartValues startValues : startValuesList) {
        if (exec != null) {
            exec.checkCanceled();
        }

        progressListener.accept(0.5 * count.get() / startValuesList.size() + 0.5);

        try {
            LeastSquaresBuilder builder = createLeastSquaresBuilder(startValues.getValues(), maxIterations);

            builder.checker((iteration, previous, current) -> {
                double currentProgress = (double) iteration / (double) maxIterations;

                if (exec != null) {
                    try {
                        exec.checkCanceled();
                    } catch (CanceledExecutionException e) {
                        return true;
                    }
                }

                progressListener.accept(0.5 * (count.get() + currentProgress) / startValuesList.size() + 0.5);
                return iteration == maxIterations;
            });

            LeastSquaresOptimizer.Optimum optimizerResults = optimizer.optimize(builder.build());

            if (exec != null) {
                exec.checkCanceled();
            }

            double cost = optimizerResults.getCost();

            if (result.sse == null || cost * cost < result.sse) {
                result = getResults(optimizerResults);

                if (result.sse == 0.0) {
                    break;
                }

                if (result.r2 != null && result.r2 > 0.0 && stopWhenSuccessful) {
                    break;
                }
            }
        } catch (TooManyEvaluationsException | TooManyIterationsException | ConvergenceException e) {
        }

        count.incrementAndGet();
    }

    return result;
}
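
The getDistance call in this optimizer scores each candidate set of start values by the Euclidean distance between the target vector and the model output. A minimal sketch of that scoring idea, using a hypothetical one-parameter model y = slope * x:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

// Sketch only: rank a candidate parameter by the Euclidean distance between
// observed targets and the predictions of a hypothetical model y = slope * x.
public class StartValueScoreSketch {
    public static void main(String[] args) {
        double[] x = { 1, 2, 3, 4 };
        RealVector target = new ArrayRealVector(new double[] { 2.1, 3.9, 6.2, 7.8 });

        double slope = 2.0; // candidate start value
        double[] predicted = new double[x.length];
        for (int i = 0; i < x.length; i++) {
            predicted[i] = slope * x[i];
        }

        // A smaller distance suggests a better starting point for the optimizer.
        double score = target.getDistance(new ArrayRealVector(predicted));
        System.out.println("score for slope=" + slope + ": " + score);
    }
}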

From source file:com.analog.lyric.dimple.test.solvers.sumproduct.TestSampledFactors.java

/**
 * Adapted from MATLAB test4 in tests/algoGaussian/testSampledFactors.m
 */
@Test
public void sampledComplexProduct() {
    // NOTE: test may fail if seed is changed! We keep the number of samples down so that the test doesn't
    // take too long. Increasing the samples produces better results.

    testRand.setSeed(42);

    try (CurrentModel cur = using(new FactorGraph())) {
        final Complex a = complex("a");
        final Complex b = complex("b");
        final Complex c = product(a, b);

        double[] aMean = new double[] { 10, 10 };
        RealMatrix aCovariance = randCovariance(2);
        a.setPrior(new MultivariateNormal(aMean, aCovariance.getData()));

        double[] bMean = new double[] { -20, 20 };
        RealMatrix bCovariance = randCovariance(2);
        b.setPrior(new MultivariateNormalParameters(bMean, bCovariance.getData()));

        GaussianRandomGenerator normalGenerator = new GaussianRandomGenerator(testRand);
        CorrelatedRandomVectorGenerator aGenerator = new CorrelatedRandomVectorGenerator(aMean, aCovariance,
                1e-12, normalGenerator);
        CorrelatedRandomVectorGenerator bGenerator = new CorrelatedRandomVectorGenerator(bMean, bCovariance,
                1e-12, normalGenerator);

        StorelessCovariance expectedCov = new StorelessCovariance(2);

        final int nSamples = 10000;

        RealVector expectedMean = MatrixUtils.createRealVector(new double[2]);
        double[] cSample = new double[2];

        for (int i = 0; i < nSamples; ++i) {
            double[] aSample = aGenerator.nextVector();
            double[] bSample = bGenerator.nextVector();

            // Compute complex product
            cSample[0] = aSample[0] * bSample[0] - aSample[1] * bSample[1];
            cSample[1] = aSample[0] * bSample[1] + aSample[1] * bSample[0];

            expectedMean.addToEntry(0, cSample[0]);
            expectedMean.addToEntry(1, cSample[1]);

            expectedCov.increment(cSample);
        }

        expectedMean.mapDivideToSelf(nSamples); // normalize

        SumProductSolverGraph sfg = requireNonNull(cur.graph.setSolverFactory(new SumProductSolver()));
        sfg.setOption(GibbsOptions.numSamples, nSamples);

        sfg.solve();

        MultivariateNormalParameters cBelief = requireNonNull(c.getBelief());

        RealVector observedMean = MatrixUtils.createRealVector(cBelief.getMean());
        double scaledMeanDistance = expectedMean.getDistance(observedMean) / expectedMean.getNorm();

        //         System.out.format("expectedMean = %s\n", expectedMean);
        //         System.out.format("observedMean = %s\n", observedMean);
        //         System.out.println(scaledMeanDistance);

        assertEquals(0.0, scaledMeanDistance, .02);

        RealMatrix expectedCovariance = expectedCov.getCovarianceMatrix();
        RealMatrix observedCovariance = MatrixUtils.createRealMatrix(cBelief.getCovariance());
        RealMatrix diffCovariance = expectedCovariance.subtract(observedCovariance);

        double scaledCovarianceDistance = diffCovariance.getNorm() / expectedCovariance.getNorm();

        //         System.out.println(expectedCovariance);
        //         System.out.println(expectedCovariance.getNorm());
        //         System.out.println(diffCovariance);
        //         System.out.println(diffCovariance.getNorm());
        //         System.out.println(diffCovariance.getNorm() / expectedCovariance.getNorm());

        assertEquals(0.0, scaledCovarianceDistance, .2);
    }
}
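
The assertion on the mean turns getDistance into a scale-free error by dividing by the norm of the expected vector. A minimal sketch of that pattern with illustrative numbers:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

// Sketch only: relative error between an expected and an observed vector,
// computed as getDistance(...) / getNorm().
public class RelativeErrorSketch {
    public static void main(String[] args) {
        RealVector expected = new ArrayRealVector(new double[] { 10.0, -20.0 });
        RealVector observed = new ArrayRealVector(new double[] { 10.1, -19.8 });

        double relativeError = expected.getDistance(observed) / expected.getNorm();
        System.out.println(relativeError); // small when the vectors nearly agree
    }
}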

From source file:org.lambda3.indra.core.function.EuclideanRelatednessFunction.java

@Override
public double sim(RealVector r1, RealVector r2, boolean sparse) {
    return r1.getDistance(r2);
}