Example usage for org.apache.commons.math3.optim SimpleBounds unbounded

List of usage examples for org.apache.commons.math3.optim SimpleBounds unbounded

Introduction

On this page you can find example usage for org.apache.commons.math3.optim SimpleBounds unbounded.

Prototype

public static SimpleBounds unbounded(int dim) 

Source Link

Document

Factory method that creates instance of this class that represents unbounded ranges.

Usage

From source file:edu.utexas.cs.tactex.tariffoptimization.OptimizerWrapperApacheBOBYQA.java

@Override
public TreeMap<Double, TariffSpecification> findOptimum(TariffUtilityEstimate tariffUtilityEstimate,
        int NUM_RATES, int numEval) {

    // Search over NUM_RATES rate offsets with BOBYQA, starting from the
    // all-zeros vector (the fixed-rate tariff's offset).
    double[] startingVertex = new double[NUM_RATES]; // start from the fixed-rate tariff's offset
    Arrays.fill(startingVertex, 0.0);
    //Arrays.fill(startingVertex, 0.5 * INITIAL_TRUST_REGION_RADIUS);
    //Arrays.fill(startingVertex, 1 * INITIAL_TRUST_REGION_RADIUS);

    // BOBYQA's recommended number of interpolation points is 2n+1 for an
    // n-dimensional problem.
    final int numIterpolationPoints = 2 * NUM_RATES + 1; // BOBYQA recommends 2n+1 points
    BOBYQAOptimizer optimizer = new BOBYQAOptimizer(numIterpolationPoints, INITIAL_TRUST_REGION_RADIUS,
            STOPPING_TRUST_REGION_RADIUS);

    // needed since one optimization found positive 
    // charges (paying customer to consume...)
    // NOTE(review): boundaries is currently unused — the bounded SimpleBounds
    // argument below is commented out in favor of an unbounded search.
    double[][] boundaries = createBoundaries(NUM_RATES);

    final PointValuePair optimum = optimizer.optimize(new MaxEval(numEval),
            new ObjectiveFunction(new OptimizerWrapperApacheObjective(tariffUtilityEstimate)),
            GoalType.MAXIMIZE, new InitialGuess(startingVertex),
            //new SimpleBounds(boundaries[0], boundaries[1]));
            SimpleBounds.unbounded(NUM_RATES));

    // Map the achieved utility to the tariff spec that produced it.
    // (optimum.getKey() is the optimal point; getValue() the objective value.)
    TreeMap<Double, TariffSpecification> eval2TOUTariff = new TreeMap<Double, TariffSpecification>();
    eval2TOUTariff.put(optimum.getValue(), tariffUtilityEstimate.getCorrespondingSpec(optimum.getKey()));
    return eval2TOUTariff;
}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

/**
 * Item parameters and true results from Kolen's STUIRT program.
 */
@Test
public void haebaraTest1() {
    System.out.println("Haebara Test 1: Actual Distribution");

    // Item response models for form X and form Y. LinkedHashMap preserves
    // insertion order so the common items pair up by position/name.
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();

    // 3PL item parameters from Kolen's STUIRT example — presumably
    // (discrimination, difficulty, guessing, D) with D = 1.7, the usual
    // logistic scaling constant. TODO confirm against Irm3PL's constructor.
    irmX.put("i1", new Irm3PL(0.4551, -0.7101, 0.2087, 1.7));
    irmX.put("i2", new Irm3PL(0.5839, -0.8567, 0.2038, 1.7));
    irmX.put("i3", new Irm3PL(0.7544, 0.0212, 0.1600, 1.7));
    irmX.put("i4", new Irm3PL(0.6633, 0.0506, 0.1240, 1.7));
    irmX.put("i5", new Irm3PL(1.0690, 0.9610, 0.2986, 1.7));
    irmX.put("i6", new Irm3PL(0.9672, 0.1950, 0.0535, 1.7));
    irmX.put("i7", new Irm3PL(0.3479, 2.2768, 0.1489, 1.7));
    irmX.put("i8", new Irm3PL(1.4579, 1.0241, 0.2453, 1.7));
    irmX.put("i9", new Irm3PL(1.8811, 1.4062, 0.1992, 1.7));
    irmX.put("i10", new Irm3PL(0.7020, 2.2401, 0.0853, 1.7));
    irmX.put("i11", new Irm3PL(1.4080, 1.5556, 0.0789, 1.7));
    irmX.put("i12", new Irm3PL(1.2993, 2.1589, 0.1075, 1.7));

    irmY.put("i1", new Irm3PL(0.4416, -1.3349, 0.1559, 1.7));
    irmY.put("i2", new Irm3PL(0.5730, -1.3210, 0.1913, 1.7));
    irmY.put("i3", new Irm3PL(0.5987, -0.7098, 0.1177, 1.7));
    irmY.put("i4", new Irm3PL(0.6041, -0.3539, 0.0818, 1.7));
    irmY.put("i5", new Irm3PL(0.9902, 0.5320, 0.3024, 1.7));
    irmY.put("i6", new Irm3PL(0.8081, -0.1156, 0.0648, 1.7));
    irmY.put("i7", new Irm3PL(0.4140, 2.5538, 0.2410, 1.7));
    irmY.put("i8", new Irm3PL(1.3554, 0.5811, 0.2243, 1.7));
    irmY.put("i9", new Irm3PL(1.0417, 0.9392, 0.1651, 1.7));
    irmY.put("i10", new Irm3PL(0.6336, 1.8960, 0.0794, 1.7));
    irmY.put("i11", new Irm3PL(1.1347, 1.0790, 0.0630, 1.7));
    irmY.put("i12", new Irm3PL(0.9255, 2.1337, 0.1259, 1.7));

    // Quadrature points and weights approximating each form's ability
    // distribution ("actual distribution" variant of the test).
    double[] points = { -4.0000, -3.1110, -2.2220, -1.3330, -0.4444, 0.4444, 1.3330, 2.2220, 3.1110, 4.0000 };
    double[] xDensity = { 0.0001008, 0.002760, 0.03021, 0.1420, 0.3149, 0.3158, 0.1542, 0.03596, 0.003925,
            0.0001862 };
    double[] yDensity = { 0.0001173, 0.003242, 0.03449, 0.1471, 0.3148, 0.3110, 0.1526, 0.03406, 0.002510,
            0.0001116 };
    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    // Haebara characteristic-curve linking, minimized over the two linking
    // coefficients (intercept B, scale A) starting at the identity transform.
    HaebaraMethod hb = new HaebaraMethod(irmX, irmY, distX, distY, EquatingCriterionType.Q1Q2);
    hb.setPrecision(6);
    double[] startValues = { 0, 1 };

    int numIterpolationPoints = 2 * 2;//two dimensions A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    // 10 random restarts around BOBYQA to avoid a local minimum.
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), GoalType.MINIMIZE, SimpleBounds.unbounded(2),
            new InitialGuess(startValues));

    double[] hbCoefficients = optimum.getPoint();
    hb.setIntercept(hbCoefficients[0]);
    hb.setScale(hbCoefficients[1]);

    // NOTE(review): label says "Iterations" but getEvaluations() reports
    // function evaluations.
    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + hbCoefficients[0] + "  A = " + hbCoefficients[1]);

    // Expected values from Kolen's STUIRT program (see javadoc above).
    assertEquals("  Intercept test", -0.471281, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.067800, hb.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.IrtScaleLinking.java

/**
 * Computes the scale-linking coefficients (intercept and, for non-Rasch
 * models, scale) for each supported method, storing results in the method
 * objects and in fHB/fSL (the achieved criterion values).
 *
 * Rasch-family models: only the intercept is estimated (scale fixed at 1),
 * using a univariate Brent search. Otherwise both coefficients are
 * estimated, either with the UNCMIN optimizer or with multi-start BOBYQA.
 */
public void computeCoefficients() {
    raschFamily = checkRaschModel();

    // Propagate the requested numeric precision to all four methods
    // (ms, mm, hb, sl — presumably mean/sigma, mean/mean, Haebara, and
    // Stocking-Lord; TODO confirm from field declarations).
    ms.setPrecision(precision);
    mm.setPrecision(precision);
    hb.setPrecision(precision);
    sl.setPrecision(precision);

    if (raschFamily) {

        // One-dimensional problem: intercept only, seeded from mean/mean.
        double[] sv = { mm.getIntercept() };

        UnivariateOptimizer underlying = new BrentOptimizer(1e-10, 1e-14);
        JDKRandomGenerator g = new JDKRandomGenerator();

        //Haebara method
        MultiStartUnivariateOptimizer optimizer = new MultiStartUnivariateOptimizer(underlying, 5, g);//Five random starts to Brent optimizer.
        UnivariatePointValuePair hbPair = optimizer.optimize(new MaxEval(500),
                new UnivariateObjectiveFunction(hb), GoalType.MINIMIZE, new SearchInterval(-4, 4),
                new InitialGuess(sv));
        hb.setIntercept(hbPair.getPoint());
        hb.setScale(1.0);
        fHB = hbPair.getValue();

        //Stocking-Lord method (same optimizer instance reused)
        UnivariatePointValuePair slPair = optimizer.optimize(new MaxEval(500),
                new UnivariateObjectiveFunction(sl), GoalType.MINIMIZE, new SearchInterval(-4, 4),
                new InitialGuess(sv));
        sl.setIntercept(slPair.getPoint());
        sl.setScale(1.0);
        fSL = slPair.getValue();

    } else {

        // Two-dimensional problem: intercept and scale, seeded from mean/mean.
        double[] hbStartValues = { mm.getIntercept(), mm.getScale() };
        double[] slStartValues = { mm.getIntercept(), mm.getScale() };

        if (useUncmin) {
            DefaultUncminOptimizer optimizer = new DefaultUncminOptimizer();

            try {

                optimizer.minimize(hb, hbStartValues);
                double[] param = optimizer.getParameters();
                fHB = optimizer.getFunctionValue();
                hb.setIntercept(param[0]);

                if (param.length > 1) {
                    hb.setScale(param[1]);
                } else {
                    hb.setScale(1.0);//Rasch family of models
                }

                optimizer.minimize(sl, slStartValues);
                param = optimizer.getParameters();
                fSL = optimizer.getFunctionValue();
                sl.setIntercept(param[0]);

                if (param.length > 1) {
                    sl.setScale(param[1]);
                } else {
                    sl.setScale(1.0);//Rasch family of models
                }

            } catch (UncminException ex) {
                // NOTE(review): failure is only logged; fHB/fSL and the
                // method coefficients keep their prior values in that case.
                ex.printStackTrace();
            }
        } else {

            int numIterpolationPoints = 2 * 2;//two dimensions A and B
            BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
            RandomGenerator g = new JDKRandomGenerator();
            RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2,
                    new GaussianRandomGenerator(g));
            // 10 random restarts around BOBYQA, unbounded search in (B, A).
            MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10,
                    generator);
            PointValuePair hbOptimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(hb),
                    GoalType.MINIMIZE, SimpleBounds.unbounded(2), new InitialGuess(hbStartValues));

            double[] hbCoefficients = hbOptimum.getPoint();
            hb.setIntercept(hbCoefficients[0]);
            hb.setScale(hbCoefficients[1]);
            fHB = hbOptimum.getValue();

            PointValuePair slOptimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl),
                    GoalType.MINIMIZE, SimpleBounds.unbounded(2), new InitialGuess(slStartValues));

            double[] slCoefficients = slOptimum.getPoint();
            sl.setIntercept(slCoefficients[0]);
            sl.setScale(slCoefficients[1]);
            fSL = slOptimum.getValue();

        }

    }

}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

/**
 * Item parameters, quadrature points and weights, and "true" linking coefficients were obtained
 * from example1 in Kolen's STUIRT program. However, the Stocking-Lord procedure was changed in
 * the STUIRT example to produce symmetric optimization of the criterion function (i.e. SY BI BI
 * instead of SY BI NO).
 *
 * This test runs the Stocking-Lord procedure twice: One with the UNCMIN optimizer and once with
 * the BOBYQA optimizer. Results from each are compared to the STUIRT results and to each other.
 *
 */
@Test
public void stockingLordTest0() {
    System.out.println("StockingLordMethod Test 0: Actual Distribution");
    // Item response models for form X and form Y, keyed by item name in
    // insertion order (LinkedHashMap) so common items pair up.
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();

    // 3PL item parameters from example1 of Kolen's STUIRT program —
    // presumably (discrimination, difficulty, guessing, D = 1.7);
    // TODO confirm against Irm3PL's constructor.
    irmX.put("i1", new Irm3PL(0.4551, -0.7101, 0.2087, 1.7));
    irmX.put("i2", new Irm3PL(0.5839, -0.8567, 0.2038, 1.7));
    irmX.put("i3", new Irm3PL(0.7544, 0.0212, 0.1600, 1.7));
    irmX.put("i4", new Irm3PL(0.6633, 0.0506, 0.1240, 1.7));
    irmX.put("i5", new Irm3PL(1.0690, 0.9610, 0.2986, 1.7));
    irmX.put("i6", new Irm3PL(0.9672, 0.1950, 0.0535, 1.7));
    irmX.put("i7", new Irm3PL(0.3479, 2.2768, 0.1489, 1.7));
    irmX.put("i8", new Irm3PL(1.4579, 1.0241, 0.2453, 1.7));
    irmX.put("i9", new Irm3PL(1.8811, 1.4062, 0.1992, 1.7));
    irmX.put("i10", new Irm3PL(0.7020, 2.2401, 0.0853, 1.7));
    irmX.put("i11", new Irm3PL(1.4080, 1.5556, 0.0789, 1.7));
    irmX.put("i12", new Irm3PL(1.2993, 2.1589, 0.1075, 1.7));

    irmY.put("i1", new Irm3PL(0.4416, -1.3349, 0.1559, 1.7));
    irmY.put("i2", new Irm3PL(0.5730, -1.3210, 0.1913, 1.7));
    irmY.put("i3", new Irm3PL(0.5987, -0.7098, 0.1177, 1.7));
    irmY.put("i4", new Irm3PL(0.6041, -0.3539, 0.0818, 1.7));
    irmY.put("i5", new Irm3PL(0.9902, 0.5320, 0.3024, 1.7));
    irmY.put("i6", new Irm3PL(0.8081, -0.1156, 0.0648, 1.7));
    irmY.put("i7", new Irm3PL(0.4140, 2.5538, 0.2410, 1.7));
    irmY.put("i8", new Irm3PL(1.3554, 0.5811, 0.2243, 1.7));
    irmY.put("i9", new Irm3PL(1.0417, 0.9392, 0.1651, 1.7));
    irmY.put("i10", new Irm3PL(0.6336, 1.8960, 0.0794, 1.7));
    irmY.put("i11", new Irm3PL(1.1347, 1.0790, 0.0630, 1.7));
    irmY.put("i12", new Irm3PL(0.9255, 2.1337, 0.1259, 1.7));

    // Quadrature points and weights for each form's ability distribution.
    double[] points = { -4.0000, -3.1110, -2.2220, -1.3330, -0.4444, 0.4444, 1.3330, 2.2220, 3.1110, 4.0000 };
    double[] xDensity = { 0.0001008, 0.002760, 0.03021, 0.1420, 0.3149, 0.3158, 0.1542, 0.03596, 0.003925,
            0.0001862 };
    double[] yDensity = { 0.0001173, 0.003242, 0.03449, 0.1471, 0.3148, 0.3110, 0.1526, 0.03406, 0.002510,
            0.0001116 };
    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    // Stocking-Lord linking; start at the identity transform (B = 0, A = 1).
    StockingLordMethod sl = new StockingLordMethod(irmX, irmY, distX, distY, EquatingCriterionType.Q1Q2);
    sl.setPrecision(6);
    double[] startValues = { 0, 1 };
    double f = 0;

    //Run test with UNCMIN optimizer
    DefaultUncminOptimizer optimizer = new DefaultUncminOptimizer();
    double[] param1 = null;
    double[] param2 = null;
    try {
        optimizer.minimize(sl, startValues);
        param1 = optimizer.getParameters();
        f = optimizer.getFunctionValue();
        sl.setIntercept(param1[0]);
        sl.setScale(param1[1]);

    } catch (UncminException ex) {
        // Failure is only logged; the assertions below would then fail on
        // stale/default coefficient values.
        ex.printStackTrace();
    }

    //Check UNCMIN values against results from STUIRT.
    System.out.println("  UNCMIN Optimization");
    System.out.println("  Iterations: ");
    System.out.println("  fmin: " + f);
    System.out.println("  B = " + sl.getIntercept() + "  A = " + sl.getScale());

    assertEquals("  Intercept test", -0.487619, sl.getIntercept(), 1e-6);
    assertEquals("  Scale test", 1.083417, sl.getScale(), 1e-6);

    //Run test with BOBYQA optimizer
    int numIterpolationPoints = 2 * 2;//two dimensions A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numIterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    // 10 random restarts around BOBYQA, unbounded search in (B, A).
    MultiStartMultivariateOptimizer optimizer2 = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum = optimizer2.optimize(new MaxEval(1000), new ObjectiveFunction(sl),
            GoalType.MINIMIZE, SimpleBounds.unbounded(2), new InitialGuess(startValues));

    param2 = optimum.getPoint();
    sl.setIntercept(param2[0]);
    sl.setScale(param2[1]);

    //Check BOBYQA values against results from STUIRT.
    System.out.println();
    System.out.println("  BOBYQA Optimization");
    // NOTE(review): label says "Iterations" but getEvaluations() reports
    // function evaluations.
    System.out.println("  Iterations: " + optimizer2.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + param2[0] + "  A = " + param2[1]);

    assertEquals("  Intercept test", -0.487619, sl.getIntercept(), 1e-6);
    assertEquals("  Scale test", 1.083417, sl.getScale(), 1e-6);

    //Compare results from each optimizer
    assertEquals("  Intercept test", param1[0], param2[0], 1e-6);
    assertEquals("  Scale test", param1[1], param2[1], 1e-6);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

@Test
public void haearaTest2() {
    System.out.println("Haebara Test 2: Uniform Distribution");

    // Build item response models for both forms, keyed "V0".."Vn-1" in the
    // same order as the parameter arrays (LinkedHashMap keeps that order).
    LinkedHashMap<String, ItemResponseModel> formXItems = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> formYItems = new LinkedHashMap<String, ItemResponseModel>();
    for (int i = 0; i < aX.length; i++) {
        String itemName = "V" + i;
        formXItems.put(itemName, new Irm3PL(aX[i], bX[i], cX[i], 1.0));
        formYItems.put(itemName, new Irm3PL(aY[i], bY[i], cY[i], 1.0));
    }

    // Uniform quadrature over [-4, 4] with 10 points for both forms.
    UniformDistributionApproximation uniform = new UniformDistributionApproximation(-4, 4, 10);

    HaebaraMethod hb = new HaebaraMethod(formXItems, formYItems, uniform, uniform, EquatingCriterionType.Q1Q2);
    hb.setPrecision(4);

    // Two linking coefficients — intercept B and scale A — starting at the
    // identity transform.
    double[] initialGuess = { 0, 1 };
    BOBYQAOptimizer bobyqa = new BOBYQAOptimizer(2 * 2);//two dimensions A and B
    RandomVectorGenerator startPointGenerator = new UncorrelatedRandomVectorGenerator(2,
            new GaussianRandomGenerator(new JDKRandomGenerator()));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(bobyqa, 10,
            startPointGenerator);

    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), org.apache.commons.math3.optim.nonlinear.scalar.GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(initialGuess));

    double[] coefficients = optimum.getPoint();
    hb.setIntercept(coefficients[0]);
    hb.setScale(coefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + coefficients[0] + "  A = " + coefficients[1]);

    assertEquals("  Intercept test", -0.4303, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0894, hb.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

@Test
public void stockingLordTest1() {
    System.out.println("StockingLordMethod Test 1: Actual Distribution");

    // Build item response models for both forms, keyed "V0".."Vn-1" in the
    // same order as the parameter arrays (LinkedHashMap keeps that order).
    LinkedHashMap<String, ItemResponseModel> formXItems = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> formYItems = new LinkedHashMap<String, ItemResponseModel>();
    for (int i = 0; i < aX.length; i++) {
        String itemName = "V" + i;
        formXItems.put(itemName, new Irm3PL(aX[i], bX[i], cX[i], 1.0));
        formYItems.put(itemName, new Irm3PL(aY[i], bY[i], cY[i], 1.0));
    }

    // Quadrature approximations of each form's ability distribution.
    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    StockingLordMethod sl = new StockingLordMethod(formXItems, formYItems, distX, distY,
            EquatingCriterionType.Q1Q2);
    sl.setPrecision(4);

    // Two linking coefficients — intercept B and scale A — starting at the
    // identity transform.
    double[] initialGuess = { 0, 1 };
    BOBYQAOptimizer bobyqa = new BOBYQAOptimizer(2 * 2);//two dimensions A and B
    RandomVectorGenerator startPointGenerator = new UncorrelatedRandomVectorGenerator(2,
            new GaussianRandomGenerator(new JDKRandomGenerator()));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(bobyqa, 10,
            startPointGenerator);

    PointValuePair optimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl), GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(initialGuess));

    double[] coefficients = optimum.getPoint();
    sl.setIntercept(coefficients[0]);
    sl.setScale(coefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + coefficients[0] + "  A = " + coefficients[1]);

    assertEquals("  Intercept test", -0.4788, sl.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0911, sl.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

@Test
public void haebaraTest3() {
    System.out.println("Haebara Test 3: Normal Distribution");

    // Build item response models for both forms, keyed "V0".."Vn-1" in the
    // same order as the parameter arrays (LinkedHashMap keeps that order).
    LinkedHashMap<String, ItemResponseModel> formXItems = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> formYItems = new LinkedHashMap<String, ItemResponseModel>();
    for (int i = 0; i < aX.length; i++) {
        String itemName = "V" + i;
        formXItems.put(itemName, new Irm3PL(aX[i], bX[i], cX[i], 1.0));
        formYItems.put(itemName, new Irm3PL(aY[i], bY[i], cY[i], 1.0));
    }

    // Standard-normal quadrature over [-4, 4] with 10 points, shared by
    // both forms.
    NormalDistributionApproximation normal = new NormalDistributionApproximation(0, 1, -4, 4, 10);

    HaebaraMethod hb = new HaebaraMethod(formXItems, formYItems, normal, normal, EquatingCriterionType.Q1Q2);
    hb.setPrecision(4);

    // Two linking coefficients — intercept B and scale A — starting at the
    // identity transform.
    double[] initialGuess = { 0, 1 };
    BOBYQAOptimizer bobyqa = new BOBYQAOptimizer(2 * 2);//two dimensions A and B
    RandomVectorGenerator startPointGenerator = new UncorrelatedRandomVectorGenerator(2,
            new GaussianRandomGenerator(new JDKRandomGenerator()));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(bobyqa, 10,
            startPointGenerator);

    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), org.apache.commons.math3.optim.nonlinear.scalar.GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(initialGuess));

    double[] coefficients = optimum.getPoint();
    hb.setIntercept(coefficients[0]);
    hb.setScale(coefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + coefficients[0] + "  A = " + coefficients[1]);

    assertEquals("  Intercept test", -0.4658, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0722, hb.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

@Test
public void stockingLordTest2() {
    System.out.println("StockingLordMethod Test 2: Uniform Distribution");

    // Build item response models for both forms, keyed "V0".."Vn-1" in the
    // same order as the parameter arrays (LinkedHashMap keeps that order).
    LinkedHashMap<String, ItemResponseModel> formXItems = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> formYItems = new LinkedHashMap<String, ItemResponseModel>();
    for (int i = 0; i < aX.length; i++) {
        String itemName = "V" + i;
        formXItems.put(itemName, new Irm3PL(aX[i], bX[i], cX[i], 1.0));
        formYItems.put(itemName, new Irm3PL(aY[i], bY[i], cY[i], 1.0));
    }

    // Uniform quadrature over [-4, 4] with 10 points for both forms.
    UniformDistributionApproximation uniform = new UniformDistributionApproximation(-4, 4, 10);

    StockingLordMethod sl = new StockingLordMethod(formXItems, formYItems, uniform, uniform,
            EquatingCriterionType.Q1Q2);
    sl.setPrecision(4);

    // Two linking coefficients — intercept B and scale A — starting at the
    // identity transform.
    double[] initialGuess = { 0, 1 };
    BOBYQAOptimizer bobyqa = new BOBYQAOptimizer(2 * 2);//two dimensions A and B
    RandomVectorGenerator startPointGenerator = new UncorrelatedRandomVectorGenerator(2,
            new GaussianRandomGenerator(new JDKRandomGenerator()));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(bobyqa, 10,
            startPointGenerator);

    PointValuePair optimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl), GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(initialGuess));

    double[] coefficients = optimum.getPoint();
    sl.setIntercept(coefficients[0]);
    sl.setScale(coefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + coefficients[0] + "  A = " + coefficients[1]);

    assertEquals("  Intercept test", -0.4532, sl.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0962, sl.getScale(), 1e-4);

    System.out.println();

}

From source file:edu.cmu.tetrad.sem.GeneralizedSemEstimator.java

/**
 * Optimizes {@code function} starting from {@code values} using the
 * algorithm selected by {@code optimizer} and returns the best point found.
 *
 * <p>Optimizer codes: 1 = Powell (tight tolerances), 2 = Nelder-Mead
 * simplex, 3 = BOBYQA (unbounded), 4 = CMA-ES, 5 = Powell (loose
 * tolerances), 6 = Powell (maximizing). All codes except 6 minimize.
 *
 * @param function  objective to optimize
 * @param values    starting point; its length fixes the problem dimension
 * @param optimizer algorithm selector, 1..6 (see above)
 * @return the optimal point found
 * @throws IllegalArgumentException if {@code optimizer} is not in 1..6
 *         (previously an unknown code left {@code pair} null and threw an
 *         undiagnosed NullPointerException below)
 */
private double[] optimize(MultivariateFunction function, double[] values, int optimizer) {
    PointValuePair pair;

    if (optimizer == 1) {
        //            0.01, 0.000001
        //2.0D * FastMath.ulp(1.0D), 1e-8
        MultivariateOptimizer search = new PowellOptimizer(1e-7, 1e-7);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new MaxEval(100000));
    } else if (optimizer == 2) {
        MultivariateOptimizer search = new SimplexOptimizer(1e-7, 1e-7);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new MaxEval(100000), new NelderMeadSimplex(values.length));
    } else if (optimizer == 3) {
        // 2n+1 interpolation points is the value recommended for BOBYQA.
        int dim = values.length;
        int additionalInterpolationPoints = 0;
        final int numInterpolationPoints = 2 * dim + 1 + additionalInterpolationPoints;

        BOBYQAOptimizer search = new BOBYQAOptimizer(numInterpolationPoints);
        pair = search.optimize(new MaxEval(100000), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new InitialGuess(values), SimpleBounds.unbounded(dim));
    } else if (optimizer == 4) {
        // NOTE(review): Sigma is an all-zeros array here, giving CMA-ES a
        // degenerate initial step size — confirm this is intentional.
        MultivariateOptimizer search = new CMAESOptimizer(3000000, .05, false, 0, 0, new MersenneTwister(),
                false, new SimplePointChecker<PointValuePair>(0.5, 0.5));
        pair = search.optimize(new MaxEval(30000), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new InitialGuess(values), new CMAESOptimizer.Sigma(new double[values.length]),
                new CMAESOptimizer.PopulationSize(1000));
    } else if (optimizer == 5) {
        //            0.01, 0.000001
        //2.0D * FastMath.ulp(1.0D), 1e-8
        MultivariateOptimizer search = new PowellOptimizer(.05, .05);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new MaxEval(100000));
    } else if (optimizer == 6) {
        MultivariateOptimizer search = new PowellOptimizer(1e-7, 1e-7);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MAXIMIZE,
                new MaxEval(10000));
    } else {
        // Fail fast with a diagnostic instead of an NPE at pair.getPoint().
        throw new IllegalArgumentException("Unrecognized optimizer code: " + optimizer);
    }

    return pair.getPoint();
}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

@Test
public void haebaraTest4() {
    System.out.println("Haebara Test 4: Actual Distribution -backwards");

    // Build item response models for both forms, keyed "V0".."Vn-1" in the
    // same order as the parameter arrays (LinkedHashMap keeps that order).
    LinkedHashMap<String, ItemResponseModel> formXItems = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> formYItems = new LinkedHashMap<String, ItemResponseModel>();
    for (int i = 0; i < aX.length; i++) {
        String itemName = "V" + i;
        formXItems.put(itemName, new Irm3PL(aX[i], bX[i], cX[i], 1.0));
        formYItems.put(itemName, new Irm3PL(aY[i], bY[i], cY[i], 1.0));
    }

    // Quadrature approximations of each form's ability distribution.
    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    // Q1 criterion — the "backwards" variant of this test.
    HaebaraMethod hb = new HaebaraMethod(formXItems, formYItems, distX, distY, EquatingCriterionType.Q1);
    hb.setPrecision(4);

    // Two linking coefficients — intercept B and scale A — starting at the
    // identity transform.
    double[] initialGuess = { 0, 1 };
    BOBYQAOptimizer bobyqa = new BOBYQAOptimizer(2 * 2);//two dimensions A and B
    RandomVectorGenerator startPointGenerator = new UncorrelatedRandomVectorGenerator(2,
            new GaussianRandomGenerator(new JDKRandomGenerator()));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(bobyqa, 10,
            startPointGenerator);

    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), org.apache.commons.math3.optim.nonlinear.scalar.GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(initialGuess));

    double[] coefficients = optimum.getPoint();
    hb.setIntercept(coefficients[0]);
    hb.setScale(coefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + coefficients[0] + "  A = " + coefficients[1]);

    assertEquals("  Intercept test", -0.4710, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0798, hb.getScale(), 1e-4);

    System.out.println();

}