Example usage for org.apache.commons.math3.util Precision compareTo

Introduction

On this page you can find example usage for org.apache.commons.math3.util Precision.compareTo.

Prototype

public static int compareTo(final double x, final double y, final int maxUlps) 

Document

Compares two numbers given some amount of allowed error.
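
Note that Precision also provides an overload compareTo(final double x, final double y, final double eps) that uses an absolute tolerance instead of a ULP count; several of the examples below pass a double tolerance and therefore use that overload. The following is a minimal, self-contained sketch of both forms (the class name and values are illustrative, not taken from the examples below):

import org.apache.commons.math3.util.Precision;

public class PrecisionCompareToDemo {
    public static void main(String[] args) {
        double a = 0.1 + 0.2; // 0.30000000000000004 because of binary rounding
        double b = 0.3;

        // ULP-based overload: 0.1 + 0.2 and 0.3 are adjacent doubles,
        // so with maxUlps = 1 they compare as equal
        System.out.println(Precision.compareTo(a, b, 1));        // 0

        // eps-based overload: equal because |a - b| <= 1e-9
        System.out.println(Precision.compareTo(a, b, 1e-9));     // 0

        // outside the allowed error the usual -1 / 0 / 1 ordering applies
        System.out.println(Precision.compareTo(1.0, 2.0, 1e-9)); // -1
    }
}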

Usage

From source file:com.google.cloud.genomics.dataflow.functions.verifybamid.SolverTest.java

@Test
public void testGridSearch() {
    Interval interval = Solver.gridSearch(new Parabola(0.25), 0.0, 1.0, 0.1);
    assertEquals(Precision.compareTo(interval.getInf(), 0.1, 1), 0);
    assertEquals(Precision.compareTo(interval.getSup(), 0.3, 1), 0);

    interval = Solver.gridSearch(new Parabola(1.2), 0.0, 1.0, 0.1);
    assertEquals(Precision.compareTo(interval.getInf(), 0.9, 1), 0);
    assertEquals(Precision.compareTo(interval.getSup(), 1.0, 1), 0);

    interval = Solver.gridSearch(new Parabola(1.2), 0.0, 1.0, 0.3);
    assertEquals(Precision.compareTo(interval.getInf(), 0.9, 1), 0);
    assertEquals(Precision.compareTo(interval.getSup(), 1.0, 1), 0);
}

From source file:com.google.cloud.genomics.dataflow.functions.LikelihoodFnTest.java

@Test
public void testValue() {
    final double tolerance = 0.0000001; // slop factor for floating point comparisons
    final double alpha = 0.25;
    final int errPhred = 10; // P(err) = 0.1
    final double pRef = 0.6;

    Position position1 = new Position().setReferenceName("1").setPosition(123L);

    /*
     * Observe single REF read
     */
    ImmutableMap.Builder<Position, ReadCounts> countsBuilder = ImmutableMap.builder();
    ReadCounts rc = new ReadCounts();
    rc.setRefFreq(pRef);
    ReadQualityCount rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    Map<Position, ReadCounts> readCounts = countsBuilder.build();
    LikelihoodFn fn = new LikelihoodFn(readCounts);
    // Likelihood of a single REF with the parameters above
    final double likRef = 0.3354133333;
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likRef), tolerance), 0);

    /*
     * Observe single NONREF read
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    fn = new LikelihoodFn(readCounts);
    // Likelihood of a single NONREF with the parameters above
    final double likNonref = 0.20368;
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likNonref), tolerance), 0);

    /*
     * Observe single OTHER read
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.OTHER);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    fn = new LikelihoodFn(readCounts);
    // Likelihood of a single OTHER with the parameters above
    final double likOther = 0.03850666667;
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likOther), tolerance), 0);

    // Likelihood for reads at 2 different positions should be product of
    // likelihoods for individual reads
    Position position2 = new Position().setReferenceName("1").setPosition(124L);
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position2, rc);
    readCounts = countsBuilder.build();
    fn = new LikelihoodFn(readCounts);
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likRef * likNonref), tolerance), 0);
}

From source file:com.google.cloud.genomics.dataflow.functions.verifybamid.LikelihoodFnTest.java

@Test
public void testValue() {
    final double tolerance = 0.0000001; // slop factor for floating point comparisons
    final double alpha = 0.25;
    final int errPhred = 10; // P(err) = 0.1
    final double pRef = 0.6;

    Position position1 = Position.newBuilder().setReferenceName("1").setPosition(123L).build();

    /*
     * Observe single REF read
     */
    ImmutableMap.Builder<Position, ReadCounts> countsBuilder = ImmutableMap.builder();
    ReadCounts rc = new ReadCounts();
    rc.setRefFreq(pRef);
    ReadQualityCount rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    Map<Position, ReadCounts> readCounts = countsBuilder.build();
    LikelihoodFn fn = new LikelihoodFn(readCounts);
    // Likelihood of a single REF with the parameters above
    final double likRef = 0.3354133333;
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likRef), tolerance), 0);

    /*
     * Observe single NONREF read
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    fn = new LikelihoodFn(readCounts);
    // Likelihood of a single NONREF with the parameters above
    final double likNonref = 0.20368;
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likNonref), tolerance), 0);

    /*
     * Observe single OTHER read
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.OTHER);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    fn = new LikelihoodFn(readCounts);
    // Likelihood of a single OTHER with the parameters above
    final double likOther = 0.03850666667;
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likOther), tolerance), 0);

    // Likelihood for reads at 2 different positions should be product of
    // likelihoods for individual reads
    Position position2 = Position.newBuilder().setReferenceName("1").setPosition(124L).build();
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    rc = new ReadCounts();
    rc.setRefFreq(pRef);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(errPhred);
    rqc.setCount(1);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position2, rc);
    readCounts = countsBuilder.build();
    fn = new LikelihoodFn(readCounts);
    assertEquals(Precision.compareTo(fn.value(alpha), Math.log(likRef * likNonref), tolerance), 0);
}

From source file:com.google.cloud.genomics.dataflow.functions.verifybamid.SolverTest.java

@Test
public void testMaximize() {
    assertEquals(Precision.compareTo(
            Solver.maximize(new Parabola(0.47), 0.0, 1.0, 0.1, 0.00001, 0.00001, 100, 100), 0.47, 0.00001), 0);
}

From source file:edu.cudenver.bios.matrix.test.TestMatrixOrthonormalization.java

/**
 * Verify that A = QR for the Q and R matrices produced by the
 * orthonormalization.
 */
public void testQRshouldBeA() {
    RealMatrix Q = norm.getQ();
    RealMatrix R = norm.getR();

    // verify that QR = A
    RealMatrix shouldBeOriginalMatrix = Q.multiply(R);
    // Make sure that QR has same dimension as original A matrix
    if (shouldBeOriginalMatrix.getRowDimension() != data.length
            || shouldBeOriginalMatrix.getColumnDimension() != data[0].length) {
        fail();
    }
    // make sure elements of QR match elements of A (within tolerance)
    for (int r = 0; r < shouldBeOriginalMatrix.getRowDimension(); r++) {
        for (int c = 0; c < shouldBeOriginalMatrix.getColumnDimension(); c++) {
            if (Precision.compareTo(shouldBeOriginalMatrix.getEntry(r, c), data[r][c], TOLERANCE) != 0)
                fail();
        }
    }

    assertTrue(true);
}

From source file:edu.cudenver.bios.matrix.test.TestMatrixOrthonormalization.java

/**
 * Verify that Q'Q = I for the Q matrix produced by the
 * orthonormalization.
 */
public void testQQisIdentity() {
    RealMatrix Q = norm.getQ();

    // verify that Q'Q = identity
    RealMatrix shouldBeIdentityMatrix = Q.transpose().multiply(Q);
    // make sure the matrix is square
    if (!shouldBeIdentityMatrix.isSquare()) {
        fail();
    }
    // make sure the diagonal elements are one (within tolerance), and off diagonals
    // are zero (within tolerance)
    for (int r = 0; r < shouldBeIdentityMatrix.getRowDimension(); r++) {
        for (int c = 0; c < shouldBeIdentityMatrix.getColumnDimension(); c++) {
            double shouldBeValue = (r == c) ? 1 : 0;
            if (Precision.compareTo(shouldBeIdentityMatrix.getEntry(r, c), shouldBeValue, TOLERANCE) != 0)
                fail();
        }
    }
    assertTrue(true);
}

From source file:com.google.cloud.genomics.dataflow.functions.verifybamid.SolverTest.java

@Test
public void testSolverOnKnownLikelihoodCases() {
    int phred = 200;

    Position position1 = Position.newBuilder().setReferenceName("1").setPosition(123L).build();

    /*
     * Observe 900 REF reads and 100 NONREF
     * P(REF) = 0.8
     * error probability is near 0
     * Most likely explanation should be ~20% contamination
     * (if P(REF) were 0.5, we'd have peaks at 10% (nonref homozygous contaminant)
     * and 20% (heterozygous contaminant))
     */
    ImmutableMap.Builder<Position, ReadCounts> countsBuilder = ImmutableMap.builder();
    ReadCounts rc = new ReadCounts();
    rc.setRefFreq(0.8);
    ReadQualityCount rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(phred);
    rqc.setCount(900);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(phred);
    rqc.setCount(100);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.OTHER);
    rqc.setQuality(1);
    rqc.setCount(2);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    Map<Position, ReadCounts> readCounts = countsBuilder.build();
    assertEquals(Precision.compareTo(
            Solver.maximize(new LikelihoodFn(readCounts), 0.0, 0.5, 0.05, 0.0001, 0.0001, 100, 100), 0.2,
            0.0001), 0);

    /*
     * Make sure things are symmetrical.  Observe 900 NONREF reads and 100 REF
     * P(NONREF) = 0.8 (i.e. P(REF) = 0.2)
     * error probability is near 0
     * Most likely explanation should be ~20% contamination
     * (if P(REF) were 0.5, we'd have peaks at 10% (nonref homozygous contaminant)
     * and 20% (heterozygous contaminant))
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(0.2);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(phred);
    rqc.setCount(900);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(phred);
    rqc.setCount(100);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    assertEquals(Precision.compareTo(
            Solver.maximize(new LikelihoodFn(readCounts), 0.0, 0.5, 0.05, 0.0001, 0.0001, 100, 100), 0.2,
            0.0001), 0);

    /*
     * Assume a heterozygous desired base pair with a homozygous contaminant.
     * Observe 450 NONREF reads and 550 REF
     * P(REF) = 0.5
     * error probability is near 0
     * Most likely explanation should be ~10% contamination
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(0.5);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(phred);
    rqc.setCount(450);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(phred);
    rqc.setCount(550);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    assertEquals(Precision.compareTo(
            Solver.maximize(new LikelihoodFn(readCounts), 0.0, 0.5, 0.05, 0.0001, 0.0001, 100, 100), 0.1,
            0.0001), 0);
}

From source file:com.google.cloud.genomics.dataflow.utils.SolverTest.java

@Test
public void testSolverOnKnownLikelihoodCases() {
    int phred = 200;

    Position position1 = new Position().setReferenceName("1").setPosition(123L);

    /*
     * Observe 900 REF reads and 100 NONREF
     * P(REF) = 0.8
     * error probability is near 0
     * Most likely explanation should be ~20% contamination
     * (if P(REF) were 0.5, we'd have peaks at 10% (nonref homozygous contaminant)
     * and 20% (heterozygous contaminant))
     */
    ImmutableMap.Builder<Position, ReadCounts> countsBuilder = ImmutableMap.builder();
    ReadCounts rc = new ReadCounts();
    rc.setRefFreq(0.8);
    ReadQualityCount rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(phred);
    rqc.setCount(900);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(phred);
    rqc.setCount(100);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.OTHER);
    rqc.setQuality(1);
    rqc.setCount(2);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    Map<Position, ReadCounts> readCounts = countsBuilder.build();
    assertEquals(Precision.compareTo(
            Solver.maximize(new LikelihoodFn(readCounts), 0.0, 0.5, 0.05, 0.0001, 0.0001, 100, 100), 0.2,
            0.0001), 0);

    /*
     * Make sure things are symmetrical.  Observe 900 NONREF reads and 100 REF
     * P(NONREF) = 0.8 (i.e. P(REF) = 0.2)
     * error probability is near 0
     * Most likely explanation should be ~20% contamination
     * (if P(REF) were 0.5, we'd have peaks at 10% (nonref homozygous contaminant)
     * and 20% (heterozygous contaminant))
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(0.2);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(phred);
    rqc.setCount(900);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(phred);
    rqc.setCount(100);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    assertEquals(Precision.compareTo(
            Solver.maximize(new LikelihoodFn(readCounts), 0.0, 0.5, 0.05, 0.0001, 0.0001, 100, 100), 0.2,
            0.0001), 0);

    /*
     * Assume a heterozygous desired base pair with a homozygous contaminant.
     * Observe 450 NONREF reads and 550 REF
     * P(REF) = 0.5
     * error probability is near 0
     * Most likely explanation should be ~10% contamination
     */
    countsBuilder = ImmutableMap.builder();
    rc = new ReadCounts();
    rc.setRefFreq(0.5);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.NONREF);
    rqc.setQuality(phred);
    rqc.setCount(450);
    rc.addReadQualityCount(rqc);
    rqc = new ReadQualityCount();
    rqc.setBase(Base.REF);
    rqc.setQuality(phred);
    rqc.setCount(550);
    rc.addReadQualityCount(rqc);
    countsBuilder.put(position1, rc);
    readCounts = countsBuilder.build();
    assertEquals(Precision.compareTo(
            Solver.maximize(new LikelihoodFn(readCounts), 0.0, 0.5, 0.05, 0.0001, 0.0001, 100, 100), 0.1,
            0.0001), 0);
}

From source file:com.winterwell.maths.stats.algorithms.KalmanFilterTest.java

@Test
public void testConstant() {
    // simulates a simple process with a constant state and no control input

    double constantValue = 10d;
    double measurementNoise = 0.1d;
    double processNoise = 1e-5d;

    // A = [ 1 ]
    Matrix A = new Matrix1D(1);
    // no control input
    RealMatrix B = null;
    // H = [ 1 ]
    Matrix H = new Matrix1D(1);
    // x = [ 10 ]
    Vector x = new X(constantValue);
    // Q = [ 1e-5 ]
    Matrix Q = new Matrix1D(processNoise);
    // R = [ 0.1 ]
    Matrix R = new Matrix1D(measurementNoise);

    //        ProcessModel pm
    //            = new DefaultProcessModel(A, B, Q,
    //                                      new ArrayRealVector(new double[] { constantValue }), null);
    //        MeasurementModel mm = new DefaultMeasurementModel(H, R);

    KalmanFilter filter = new KalmanFilter(1, 1);
    filter.setTransitionMatrix(A);
    filter.setTransitionNoise(Q);
    filter.setEmissionMatrix(H);
    filter.setEmissionNoise(R);

    Assert.assertEquals(1, filter.getMeasurementDimension());
    Assert.assertEquals(1, filter.getStateDimension());

    MatrixUtils.equals(Q, filter.getTransitionNoise());

    //        // check the initial state
    //        double[] expectedInitialState = new double[] { constantValue };
    //        assertVectorEquals(expectedInitialState, filter.getStateEstimation());

    Gaussian hiddenState = new Gaussian(new X(constantValue), new Matrix1D(0));

    Vector pNoise = new X(0);
    Vector mNoise = new X(0);

    RandomGenerator rand = new JDKRandomGenerator();
    // iterate 60 steps
    for (int i = 0; i < 60; i++) {
        hiddenState = filter.predict(hiddenState, null);

        // Simulate the process
        pNoise.set(0, processNoise * rand.nextGaussian());

        // x = A * x + p_noise
        x = MatrixUtils.apply(A, x).add(pNoise);

        // Simulate the measurement
        mNoise.set(0, measurementNoise * rand.nextGaussian());

        // z = H * x + m_noise
        Vector z = MatrixUtils.apply(H, x).add(mNoise);

        hiddenState = filter.correct(hiddenState, z);

        // estimation error shouldn't be larger than the measurement noise
        double diff = FastMath.abs(constantValue - hiddenState.getMean().get(0));
        // System.out.println(diff);
        Assert.assertTrue(Precision.compareTo(diff, measurementNoise, 1e-6) < 0);
    }

    // error covariance should be already very low (< 0.02)
    Assert.assertTrue(Precision.compareTo(hiddenState.getCovar().get(0, 0), 0.02d, 1e-6) < 0);
}

From source file:com.winterwell.maths.stats.algorithms.KalmanFilterTest.java

@Test
public void testConstantAcceleration() {
    // simulates a vehicle, accelerating at a constant rate (0.1 m/s)

    // discrete time interval
    double dt = 0.1d;
    // position measurement noise (meter)
    double measurementNoise = 10d;
    // acceleration noise (meter/sec^2)
    double accelNoise = 0.2d;

    // A = [ 1 dt ]
    //     [ 0  1 ]
    RealMatrix A = new Array2DRowRealMatrix(new double[][] { { 1, dt }, { 0, 1 } });

    // B = [ dt^2/2 ]
    //     [ dt     ]
    RealMatrix Bnull = new Array2DRowRealMatrix(new double[][] { { FastMath.pow(dt, 2d) / 2d }, { dt } });

    // H = [ 1 0 ]
    RealMatrix H = new Array2DRowRealMatrix(new double[][] { { 1d, 0d } });

    // x = [ 0 0 ]
    RealVector x = new ArrayRealVector(new double[] { 0, 0 });

    RealMatrix tmp = new Array2DRowRealMatrix(
            new double[][] { { FastMath.pow(dt, 4d) / 4d, FastMath.pow(dt, 3d) / 2d },
                    { FastMath.pow(dt, 3d) / 2d, FastMath.pow(dt, 2d) } });

    // Q = [ dt^4/4 dt^3/2 ]
    //     [ dt^3/2 dt^2   ]
    RealMatrix Q = tmp.scalarMultiply(FastMath.pow(accelNoise, 2));

    // P0 = [ 1 1 ]
    //      [ 1 1 ]
    RealMatrix P0 = new Array2DRowRealMatrix(new double[][] { { 1, 1 }, { 1, 1 } });

    // R = [ measurementNoise^2 ]
    RealMatrix R = new Array2DRowRealMatrix(new double[] { FastMath.pow(measurementNoise, 2) });

    // constant control input, increase velocity by 0.1 m/s per cycle
    double uv = 0.1d * dt;
    RealVector u = new ArrayRealVector(new double[] { uv * uv / 2, uv });

    ProcessModel pm = new DefaultProcessModel(A, Bnull, Q, x, P0);
    MeasurementModel mm = new DefaultMeasurementModel(H, R);
    KalmanFilter filter = new KalmanFilter(pm, mm);

    Assert.assertEquals(1, filter.getMeasurementDimension());
    Assert.assertEquals(2, filter.getStateDimension());

    Gaussian state = new Gaussian(mtj(x), mtj(P0));
    MatrixUtils.equals(mtj(P0), state.getCovar());

    // check the initial state
    double[] expectedInitialState = new double[] { 0.0, 0.0 };
    //        assertVectorEquals(expectedInitialState, filter.getStateEstimation());

    RandomGenerator rand = new JDKRandomGenerator();

    RealVector tmpPNoise = new ArrayRealVector(new double[] { FastMath.pow(dt, 2d) / 2d, dt });

    // iterate 60 steps
    for (int i = 0; i < 60; i++) {
        state = filter.predict(state, mtj(u));

        // Simulate the process
        RealVector pNoise = tmpPNoise.mapMultiply(accelNoise * rand.nextGaussian());

        // x = A * x + B * u + pNoise
        x = A.operate(x).add(u).add(pNoise);

        // Simulate the measurement
        double mNoise = measurementNoise * rand.nextGaussian();

        // z = H * x + m_noise
        RealVector z = H.operate(x).mapAdd(mNoise);

        state = filter.correct(state, mtj(z));

        // estimation error shouldn't be larger than the measurement noise
        double diff = FastMath.abs(x.getEntry(0) - state.getMean().get(0));
        Assert.assertTrue(Precision.compareTo(diff, measurementNoise, 1e-6) < 0);
    }

    // error covariance of the velocity should be already very low (< 0.1)
    Assert.assertTrue(Precision.compareTo(state.getCovar().get(1, 1), 0.1d, 1e-6) < 0);
}