Example usage for org.apache.commons.math3.random GaussianRandomGenerator GaussianRandomGenerator

Introduction

This page lists example usages of the org.apache.commons.math3.random.GaussianRandomGenerator constructor.

Prototype

public GaussianRandomGenerator(final RandomGenerator generator) 

Document

Create a new generator.
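
Before the project snippets below, here is a minimal, self-contained sketch of the basic pattern (not taken from any of the listed projects; the class name and seed value are illustrative): wrap an existing RandomGenerator and draw standard normal deviates from it.

import org.apache.commons.math3.random.GaussianRandomGenerator;
import org.apache.commons.math3.random.JDKRandomGenerator;

public class GaussianRandomGeneratorExample {
    public static void main(String[] args) {
        // Wrap any RandomGenerator implementation; here a JDK-backed one with a fixed seed.
        JDKRandomGenerator rng = new JDKRandomGenerator();
        rng.setSeed(12345L);

        // The wrapped generator produces normalized Gaussian samples (mean 0, standard deviation 1).
        GaussianRandomGenerator gaussian = new GaussianRandomGenerator(rng);
        for (int i = 0; i < 5; i++) {
            System.out.println(gaussian.nextNormalizedDouble());
        }
    }
}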

Usage

From source file:com.itemanalysis.psychometrics.mixture.MvNormalComponentDistribution.java

public void generateStartValues(RealMatrix x, RealMatrix mean, RealMatrix cov) {
    // Wrap a JDK random source with a normalized Gaussian generator.
    JDKRandomGenerator jg = new JDKRandomGenerator();
    NormalizedRandomGenerator rg = new GaussianRandomGenerator(jg);
    // Draw a correlated random vector with the given mean and covariance as start values.
    CorrelatedRandomVectorGenerator sg = new CorrelatedRandomVectorGenerator(mean.getColumn(0), cov, 0.00001, rg);
    mu = new Array2DRowRealMatrix(sg.nextVector());
}

From source file:com.analog.lyric.dimple.test.solvers.sumproduct.TestSampledFactors.java

/**
 * Adapted from MATLAB test4 in tests/algoGaussian/testSampledFactors.m
 */
@Test
public void sampledComplexProduct() {
    // NOTE: test may fail if seed is changed! We keep the number of samples down so that the test doesn't
    // take too long. Increasing the samples produces better results.

    testRand.setSeed(42);

    try (CurrentModel cur = using(new FactorGraph())) {
        final Complex a = complex("a");
        final Complex b = complex("b");
        final Complex c = product(a, b);

        double[] aMean = new double[] { 10, 10 };
        RealMatrix aCovariance = randCovariance(2);
        a.setPrior(new MultivariateNormal(aMean, aCovariance.getData()));

        double[] bMean = new double[] { -20, 20 };
        RealMatrix bCovariance = randCovariance(2);
        b.setPrior(new MultivariateNormalParameters(bMean, bCovariance.getData()));

        GaussianRandomGenerator normalGenerator = new GaussianRandomGenerator(testRand);
        CorrelatedRandomVectorGenerator aGenerator = new CorrelatedRandomVectorGenerator(aMean, aCovariance,
                1e-12, normalGenerator);
        CorrelatedRandomVectorGenerator bGenerator = new CorrelatedRandomVectorGenerator(bMean, bCovariance,
                1e-12, normalGenerator);

        StorelessCovariance expectedCov = new StorelessCovariance(2);

        final int nSamples = 10000;

        RealVector expectedMean = MatrixUtils.createRealVector(new double[2]);
        double[] cSample = new double[2];

        for (int i = 0; i < nSamples; ++i) {
            double[] aSample = aGenerator.nextVector();
            double[] bSample = bGenerator.nextVector();

            // Compute complex product
            cSample[0] = aSample[0] * bSample[0] - aSample[1] * bSample[1];
            cSample[1] = aSample[0] * bSample[1] + aSample[1] * bSample[0];

            expectedMean.addToEntry(0, cSample[0]);
            expectedMean.addToEntry(1, cSample[1]);

            expectedCov.increment(cSample);
        }

        expectedMean.mapDivideToSelf(nSamples); // normalize

        SumProductSolverGraph sfg = requireNonNull(cur.graph.setSolverFactory(new SumProductSolver()));
        sfg.setOption(GibbsOptions.numSamples, nSamples);

        sfg.solve();

        MultivariateNormalParameters cBelief = requireNonNull(c.getBelief());

        RealVector observedMean = MatrixUtils.createRealVector(cBelief.getMean());
        double scaledMeanDistance = expectedMean.getDistance(observedMean) / expectedMean.getNorm();

        //         System.out.format("expectedMean = %s\n", expectedMean);
        //         System.out.format("observedMean = %s\n", observedMean);
        //         System.out.println(scaledMeanDistance);

        assertEquals(0.0, scaledMeanDistance, .02);

        RealMatrix expectedCovariance = expectedCov.getCovarianceMatrix();
        RealMatrix observedCovariance = MatrixUtils.createRealMatrix(cBelief.getCovariance());
        RealMatrix diffCovariance = expectedCovariance.subtract(observedCovariance);

        double scaledCovarianceDistance = diffCovariance.getNorm() / expectedCovariance.getNorm();

        //         System.out.println(expectedCovariance);
        //         System.out.println(expectedCovariance.getNorm());
        //         System.out.println(diffCovariance);
        //         System.out.println(diffCovariance.getNorm());
        //         System.out.println(diffCovariance.getNorm() / expectedCovariance.getNorm());

        assertEquals(0.0, scaledCovarianceDistance, .2);
    }
}

From source file:inputHandling.DataGenCorrGaussCommons.java

@Override
public TupleList genData(int dimensions, int tupleCount) {
    logger.info("Generating gaussian correlated Data with " + tupleCount + " Tuples in " + dimensions
            + " dimensions, Coeff.: -" + this.coeff);
    genMatrices(dimensions);
    RealMatrix covariance = MatrixUtils.createRealMatrix(cov);
    RandomGenerator rg = new JDKRandomGenerator(Math.round(seed));
    GaussianRandomGenerator rawGenerator = new GaussianRandomGenerator(rg);
    double small = 1.0e-12 * covariance.getNorm();
    CorrelatedRandomVectorGenerator generator = new CorrelatedRandomVectorGenerator(mean, covariance, small,
            rawGenerator);

    // Generate the Tuples
    TupleList tupleList = new TupleList(dimensions);
    for (int j = 0; j < tupleCount; j++) {
        double[] randomVector = generator.nextVector();
        Tuple tuple = new Tuple(randomVector);
        tupleList.add(tuple);
    }
    return tupleList;
}

From source file:inputHandling.DataGenAntiCorrGaussCommons.java

@Override
public TupleList genData(int dimensions, int tupleCount) {
    logger.info("Generating gaussian anti-correlated Data with " + tupleCount + " Tuples in " + dimensions
            + " dimensions, Coeff.: -" + this.coeff);
    genMatrices(dimensions);
    RealMatrix covariance = MatrixUtils.createRealMatrix(cov);
    RandomGenerator rg = new JDKRandomGenerator(Math.round(seed));
    GaussianRandomGenerator rawGenerator = new GaussianRandomGenerator(rg);
    double small = 1.0e-12 * covariance.getNorm();
    CorrelatedRandomVectorGenerator generator = new CorrelatedRandomVectorGenerator(mean, covariance, small,
            rawGenerator);

    TupleList tupleList = new TupleList(dimensions);
    // Reflect values about the mean to obtain anti-correlation: even-numbered tuples
    // flip their even coordinates, odd-numbered tuples their odd coordinates.
    for (int j = 0; j < tupleCount; j++) {
        double[] randomVector = generator.nextVector();
        for (int i = 0; i < dimensions; i++) {
            if (j % 2 == i % 2) {
                randomVector[i] = 2 * mean[i] - randomVector[i];
            }
        }
        Tuple tuple = new Tuple(randomVector);
        tupleList.add(tuple);
    }
    return tupleList;
}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

/**
 * Item parameters and true results from Kolen's STUIRT program.
 */
@Test
public void haebaraTest1() {
    System.out.println("Haebara Test 1: Actual Distribution");

    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();

    irmX.put("i1", new Irm3PL(0.4551, -0.7101, 0.2087, 1.7));
    irmX.put("i2", new Irm3PL(0.5839, -0.8567, 0.2038, 1.7));
    irmX.put("i3", new Irm3PL(0.7544, 0.0212, 0.1600, 1.7));
    irmX.put("i4", new Irm3PL(0.6633, 0.0506, 0.1240, 1.7));
    irmX.put("i5", new Irm3PL(1.0690, 0.9610, 0.2986, 1.7));
    irmX.put("i6", new Irm3PL(0.9672, 0.1950, 0.0535, 1.7));
    irmX.put("i7", new Irm3PL(0.3479, 2.2768, 0.1489, 1.7));
    irmX.put("i8", new Irm3PL(1.4579, 1.0241, 0.2453, 1.7));
    irmX.put("i9", new Irm3PL(1.8811, 1.4062, 0.1992, 1.7));
    irmX.put("i10", new Irm3PL(0.7020, 2.2401, 0.0853, 1.7));
    irmX.put("i11", new Irm3PL(1.4080, 1.5556, 0.0789, 1.7));
    irmX.put("i12", new Irm3PL(1.2993, 2.1589, 0.1075, 1.7));

    irmY.put("i1", new Irm3PL(0.4416, -1.3349, 0.1559, 1.7));
    irmY.put("i2", new Irm3PL(0.5730, -1.3210, 0.1913, 1.7));
    irmY.put("i3", new Irm3PL(0.5987, -0.7098, 0.1177, 1.7));
    irmY.put("i4", new Irm3PL(0.6041, -0.3539, 0.0818, 1.7));
    irmY.put("i5", new Irm3PL(0.9902, 0.5320, 0.3024, 1.7));
    irmY.put("i6", new Irm3PL(0.8081, -0.1156, 0.0648, 1.7));
    irmY.put("i7", new Irm3PL(0.4140, 2.5538, 0.2410, 1.7));
    irmY.put("i8", new Irm3PL(1.3554, 0.5811, 0.2243, 1.7));
    irmY.put("i9", new Irm3PL(1.0417, 0.9392, 0.1651, 1.7));
    irmY.put("i10", new Irm3PL(0.6336, 1.8960, 0.0794, 1.7));
    irmY.put("i11", new Irm3PL(1.1347, 1.0790, 0.0630, 1.7));
    irmY.put("i12", new Irm3PL(0.9255, 2.1337, 0.1259, 1.7));

    double[] points = { -4.0000, -3.1110, -2.2220, -1.3330, -0.4444, 0.4444, 1.3330, 2.2220, 3.1110, 4.0000 };
    double[] xDensity = { 0.0001008, 0.002760, 0.03021, 0.1420, 0.3149, 0.3158, 0.1542, 0.03596, 0.003925,
            0.0001862 };
    double[] yDensity = { 0.0001173, 0.003242, 0.03449, 0.1471, 0.3148, 0.3110, 0.1526, 0.03406, 0.002510,
            0.0001116 };
    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    HaebaraMethod hb = new HaebaraMethod(irmX, irmY, distX, distY, EquatingCriterionType.Q1Q2);
    hb.setPrecision(6);
    double[] startValues = { 0, 1 };

    int numIterpolationPoints = 2 * 2;//two dimensions A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), GoalType.MINIMIZE, SimpleBounds.unbounded(2),
            new InitialGuess(startValues));

    double[] hbCoefficients = optimum.getPoint();
    hb.setIntercept(hbCoefficients[0]);
    hb.setScale(hbCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + hbCoefficients[0] + "  A = " + hbCoefficients[1]);

    assertEquals("  Intercept test", -0.471281, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.067800, hb.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.IrtScaleLinking.java

public void computeCoefficients() {
    raschFamily = checkRaschModel();

    ms.setPrecision(precision);
    mm.setPrecision(precision);
    hb.setPrecision(precision);
    sl.setPrecision(precision);

    if (raschFamily) {

        double[] sv = { mm.getIntercept() };

        UnivariateOptimizer underlying = new BrentOptimizer(1e-10, 1e-14);
        JDKRandomGenerator g = new JDKRandomGenerator();

        //Haebara method
        MultiStartUnivariateOptimizer optimizer = new MultiStartUnivariateOptimizer(underlying, 5, g);//Five random starts to Brent optimizer.
        UnivariatePointValuePair hbPair = optimizer.optimize(new MaxEval(500),
                new UnivariateObjectiveFunction(hb), GoalType.MINIMIZE, new SearchInterval(-4, 4),
                new InitialGuess(sv));
        hb.setIntercept(hbPair.getPoint());
        hb.setScale(1.0);
        fHB = hbPair.getValue();

        //Stocking-Lord method
        UnivariatePointValuePair slPair = optimizer.optimize(new MaxEval(500),
                new UnivariateObjectiveFunction(sl), GoalType.MINIMIZE, new SearchInterval(-4, 4),
                new InitialGuess(sv));
        sl.setIntercept(slPair.getPoint());
        sl.setScale(1.0);
        fSL = slPair.getValue();

    } else {

        double[] hbStartValues = { mm.getIntercept(), mm.getScale() };
        double[] slStartValues = { mm.getIntercept(), mm.getScale() };

        if (useUncmin) {
            DefaultUncminOptimizer optimizer = new DefaultUncminOptimizer();

            try {

                optimizer.minimize(hb, hbStartValues);
                double[] param = optimizer.getParameters();
                fHB = optimizer.getFunctionValue();
                hb.setIntercept(param[0]);

                if (param.length > 1) {
                    hb.setScale(param[1]);
                } else {
                    hb.setScale(1.0);//Rasch family of models
                }

                optimizer.minimize(sl, slStartValues);
                param = optimizer.getParameters();
                fSL = optimizer.getFunctionValue();
                sl.setIntercept(param[0]);

                if (param.length > 1) {
                    sl.setScale(param[1]);
                } else {
                    sl.setScale(1.0);//Rasch family of models
                }

            } catch (UncminException ex) {
                ex.printStackTrace();
            }
        } else {

            int numIterpolationPoints = 2 * 2;//two dimensions A and B
            BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
            RandomGenerator g = new JDKRandomGenerator();
            RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2,
                    new GaussianRandomGenerator(g));
            MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10,
                    generator);
            PointValuePair hbOptimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(hb),
                    GoalType.MINIMIZE, SimpleBounds.unbounded(2), new InitialGuess(hbStartValues));

            double[] hbCoefficients = hbOptimum.getPoint();
            hb.setIntercept(hbCoefficients[0]);
            hb.setScale(hbCoefficients[1]);
            fHB = hbOptimum.getValue();

            PointValuePair slOptimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl),
                    GoalType.MINIMIZE, SimpleBounds.unbounded(2), new InitialGuess(slStartValues));

            double[] slCoefficients = slOptimum.getPoint();
            sl.setIntercept(slCoefficients[0]);
            sl.setScale(slCoefficients[1]);
            fSL = slOptimum.getValue();

        }

    }

}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

/**
 * Item parameters, quadrature points and weights, and "true" linking coefficients were obtained
 * from example1 in Kolen's STUIRT program. However, the Stocking-Lord procedure was changed in
 * the STUIRT example to produce symmetric optimization of the criterion function (i.e. SY BI BI
 * instead of SY BI NO).
 *
 * This test runs the Stocking-Lord procedure twice: once with the UNCMIN optimizer and once with
 * the BOBYQA optimizer. Results from each are compared to the STUIRT results and to each other.
 *
 */
@Test
public void stockingLordTest0() {
    System.out.println("StockingLordMethod Test 0: Actual Distribution");
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();

    irmX.put("i1", new Irm3PL(0.4551, -0.7101, 0.2087, 1.7));
    irmX.put("i2", new Irm3PL(0.5839, -0.8567, 0.2038, 1.7));
    irmX.put("i3", new Irm3PL(0.7544, 0.0212, 0.1600, 1.7));
    irmX.put("i4", new Irm3PL(0.6633, 0.0506, 0.1240, 1.7));
    irmX.put("i5", new Irm3PL(1.0690, 0.9610, 0.2986, 1.7));
    irmX.put("i6", new Irm3PL(0.9672, 0.1950, 0.0535, 1.7));
    irmX.put("i7", new Irm3PL(0.3479, 2.2768, 0.1489, 1.7));
    irmX.put("i8", new Irm3PL(1.4579, 1.0241, 0.2453, 1.7));
    irmX.put("i9", new Irm3PL(1.8811, 1.4062, 0.1992, 1.7));
    irmX.put("i10", new Irm3PL(0.7020, 2.2401, 0.0853, 1.7));
    irmX.put("i11", new Irm3PL(1.4080, 1.5556, 0.0789, 1.7));
    irmX.put("i12", new Irm3PL(1.2993, 2.1589, 0.1075, 1.7));

    irmY.put("i1", new Irm3PL(0.4416, -1.3349, 0.1559, 1.7));
    irmY.put("i2", new Irm3PL(0.5730, -1.3210, 0.1913, 1.7));
    irmY.put("i3", new Irm3PL(0.5987, -0.7098, 0.1177, 1.7));
    irmY.put("i4", new Irm3PL(0.6041, -0.3539, 0.0818, 1.7));
    irmY.put("i5", new Irm3PL(0.9902, 0.5320, 0.3024, 1.7));
    irmY.put("i6", new Irm3PL(0.8081, -0.1156, 0.0648, 1.7));
    irmY.put("i7", new Irm3PL(0.4140, 2.5538, 0.2410, 1.7));
    irmY.put("i8", new Irm3PL(1.3554, 0.5811, 0.2243, 1.7));
    irmY.put("i9", new Irm3PL(1.0417, 0.9392, 0.1651, 1.7));
    irmY.put("i10", new Irm3PL(0.6336, 1.8960, 0.0794, 1.7));
    irmY.put("i11", new Irm3PL(1.1347, 1.0790, 0.0630, 1.7));
    irmY.put("i12", new Irm3PL(0.9255, 2.1337, 0.1259, 1.7));

    double[] points = { -4.0000, -3.1110, -2.2220, -1.3330, -0.4444, 0.4444, 1.3330, 2.2220, 3.1110, 4.0000 };
    double[] xDensity = { 0.0001008, 0.002760, 0.03021, 0.1420, 0.3149, 0.3158, 0.1542, 0.03596, 0.003925,
            0.0001862 };
    double[] yDensity = { 0.0001173, 0.003242, 0.03449, 0.1471, 0.3148, 0.3110, 0.1526, 0.03406, 0.002510,
            0.0001116 };
    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    StockingLordMethod sl = new StockingLordMethod(irmX, irmY, distX, distY, EquatingCriterionType.Q1Q2);
    sl.setPrecision(6);
    double[] startValues = { 0, 1 };
    double f = 0;

    //Run test with UNCMIN optimizer
    DefaultUncminOptimizer optimizer = new DefaultUncminOptimizer();
    double[] param1 = null;
    double[] param2 = null;
    try {
        optimizer.minimize(sl, startValues);
        param1 = optimizer.getParameters();
        f = optimizer.getFunctionValue();
        sl.setIntercept(param1[0]);
        sl.setScale(param1[1]);

    } catch (UncminException ex) {
        ex.printStackTrace();
    }

    //Check UNCMIN values against results from STUIRT.
    System.out.println("  UNCMIN Optimization");
    System.out.println("  Iterations: ");
    System.out.println("  fmin: " + f);
    System.out.println("  B = " + sl.getIntercept() + "  A = " + sl.getScale());

    assertEquals("  Intercept test", -0.487619, sl.getIntercept(), 1e-6);
    assertEquals("  Scale test", 1.083417, sl.getScale(), 1e-6);

    //Run test with BOBYQA optimizer
    int numIterpolationPoints = 2 * 2;//two dimensions A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer2 = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum = optimizer2.optimize(new MaxEval(1000), new ObjectiveFunction(sl),
            GoalType.MINIMIZE, SimpleBounds.unbounded(2), new InitialGuess(startValues));

    param2 = optimum.getPoint();
    sl.setIntercept(param2[0]);
    sl.setScale(param2[1]);

    //Check BOBYQA values against results from STUIRT.
    System.out.println();
    System.out.println("  BOBYQA Optimization");
    System.out.println("  Iterations: " + optimizer2.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + param2[0] + "  A = " + param2[1]);

    assertEquals("  Intercept test", -0.487619, sl.getIntercept(), 1e-6);
    assertEquals("  Scale test", 1.083417, sl.getScale(), 1e-6);

    //Compare results from each optimizer
    assertEquals("  Intercept test", param1[0], param2[0], 1e-6);
    assertEquals("  Scale test", param1[1], param2[1], 1e-6);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

@Test
public void haebaraTest2() {
    System.out.println("Haebara Test 2: Uniform Distribution");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    UniformDistributionApproximation uniform = new UniformDistributionApproximation(-4, 4, 10);

    HaebaraMethod hb = new HaebaraMethod(irmX, irmY, uniform, uniform, EquatingCriterionType.Q1Q2);
    hb.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numIterpolationPoints = 2 * 2;//two dimensions A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), org.apache.commons.math3.optim.nonlinear.scalar.GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] slCoefficients = optimum.getPoint();
    hb.setIntercept(slCoefficients[0]);
    hb.setScale(slCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + slCoefficients[0] + "  A = " + slCoefficients[1]);

    assertEquals("  Intercept test", -0.4303, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0894, hb.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

@Test
public void stockingLordTest1() {
    System.out.println("StockingLordMethod Test 1: Actual Distribution");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    StockingLordMethod sl = new StockingLordMethod(irmX, irmY, distX, distY, EquatingCriterionType.Q1Q2);
    sl.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numIterpolationPoints = 2 * 2;//two dimensions A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl), GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] slCoefficients = optimum.getPoint();
    sl.setIntercept(slCoefficients[0]);
    sl.setScale(slCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + slCoefficients[0] + "  A = " + slCoefficients[1]);

    assertEquals("  Intercept test", -0.4788, sl.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0911, sl.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

@Test
public void haebaraTest3() {
    System.out.println("Haebara Test 3: Normal Distribution");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    NormalDistributionApproximation normal = new NormalDistributionApproximation(0, 1, -4, 4, 10);

    HaebaraMethod hb = new HaebaraMethod(irmX, irmY, normal, normal, EquatingCriterionType.Q1Q2);
    hb.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numIterpolationPoints = 2 * 2;//two dimensions A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), org.apache.commons.math3.optim.nonlinear.scalar.GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] slCoefficients = optimum.getPoint();
    hb.setIntercept(slCoefficients[0]);
    hb.setScale(slCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + slCoefficients[0] + "  A = " + slCoefficients[1]);

    assertEquals("  Intercept test", -0.4658, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0722, hb.getScale(), 1e-4);

    System.out.println();

}