Example usage for org.apache.commons.math3.optim MaxEval MaxEval

Introduction

This page collects example usages of the MaxEval constructor from the org.apache.commons.math3.optim package. MaxEval is an OptimizationData argument that caps the number of objective-function evaluations an optimizer may perform.

Prototype

public MaxEval(int max) 
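
Before the project examples, here is a minimal, self-contained sketch of the constructor in use. The optimizer, objective function, and numeric values below are illustrative choices rather than code from any of the projects; the point is that MaxEval is passed as one of the OptimizationData arguments to optimize(...) and bounds how many times the objective function may be evaluated.

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.NelderMeadSimplex;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.SimplexOptimizer;

public class MaxEvalDemo {
    public static void main(String[] args) {
        // Minimize f(x, y) = (x - 1)^2 + (y + 2)^2, whose minimum is at (1, -2).
        MultivariateFunction f = new MultivariateFunction() {
            public double value(double[] p) {
                double dx = p[0] - 1;
                double dy = p[1] + 2;
                return dx * dx + dy * dy;
            }
        };

        SimplexOptimizer optimizer = new SimplexOptimizer(1e-10, 1e-12);

        // The optimizer may call f.value() at most 1000 times; if the budget
        // is exhausted before convergence it throws TooManyEvaluationsException.
        PointValuePair optimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(f),
                GoalType.MINIMIZE, new InitialGuess(new double[] { 0, 0 }), new NelderMeadSimplex(2));

        System.out.println("Minimum at (" + optimum.getPoint()[0] + ", " + optimum.getPoint()[1]
                + "), value = " + optimum.getValue());
    }
}

For an effectively unbounded budget, the class also provides the factory method MaxEval.unlimited(), equivalent to new MaxEval(Integer.MAX_VALUE) as used in the BinomialFitter example below.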

Usage

From source file:edu.cmu.tetrad.search.Ling.java

private void optimizeNonGaussianity(final int rowIndex, final DoubleMatrix2D dataSetMatrix, final double[][] W,
        List<Mapping> allMappings) {
    final List<Mapping> mappings = mappingsForRow(rowIndex, allMappings);

    MultivariateFunction function = new MultivariateFunction() {
        public double value(double[] values) {
            for (int i = 0; i < values.length; i++) {
                Mapping mapping = mappings.get(i);
                W[mapping.getI()][mapping.getJ()] = values[i];
            }

            double v = ngFullData(rowIndex, dataSetMatrix, W);

            if (Double.isNaN(v))
                return 10000;

            return -(v);
        }
    };

    {
        double[] values = new double[mappings.size()];

        for (int k = 0; k < mappings.size(); k++) {
            Mapping mapping = mappings.get(k);
            values[k] = W[mapping.getI()][mapping.getJ()];
        }

        MultivariateOptimizer search = new PowellOptimizer(1e-7, 1e-7);

        PointValuePair pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function),
                GoalType.MINIMIZE, new MaxEval(100000));

        values = pair.getPoint();

        for (int k = 0; k < mappings.size(); k++) {
            Mapping mapping = mappings.get(k);
            W[mapping.getI()][mapping.getJ()] = values[k];
        }
    }

}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

@Test
public void stockingLordTest1() {
    System.out.println("StockingLordMethod Test 1: Actual Distribution");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    StockingLordMethod sl = new StockingLordMethod(irmX, irmY, distX, distY, EquatingCriterionType.Q1Q2);
    sl.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numInterpolationPoints = 2 * 2; // two dimensions, A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numInterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl), GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] slCoefficients = optimum.getPoint();
    sl.setIntercept(slCoefficients[0]);
    sl.setScale(slCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + slCoefficients[0] + "  A = " + slCoefficients[1]);

    assertEquals("  Intercept test", -0.4788, sl.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0911, sl.getScale(), 1e-4);

    System.out.println();

}

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

@Test
public void haebaraTest3() {
    System.out.println("Haebara Test 3: Normal Distribution");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    NormalDistributionApproximation normal = new NormalDistributionApproximation(0, 1, -4, 4, 10);

    HaebaraMethod hb = new HaebaraMethod(irmX, irmY, normal, normal, EquatingCriterionType.Q1Q2);
    hb.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numInterpolationPoints = 2 * 2; // two dimensions, A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numInterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), org.apache.commons.math3.optim.nonlinear.scalar.GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] slCoefficients = optimum.getPoint();
    hb.setIntercept(slCoefficients[0]);
    hb.setScale(slCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + slCoefficients[0] + "  A = " + slCoefficients[1]);

    assertEquals("  Intercept test", -0.4658, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0722, hb.getScale(), 1e-4);

    System.out.println();

}

From source file:gdsc.smlm.ij.plugins.EMGainAnalysis.java

/**
 * Fit the EM-gain distribution (Gaussian * Gamma)
 *
 * @param h
 *            The distribution
 */
private void fit(int[] h) {
    final int[] limits = limits(h);
    final double[] x = getX(limits);
    final double[] y = getY(h, limits);

    Plot2 plot = new Plot2(TITLE, "ADU", "Frequency");
    double yMax = Maths.max(y);
    plot.setLimits(limits[0], limits[1], 0, yMax);
    plot.setColor(Color.black);
    plot.addPoints(x, y, Plot2.DOT);
    Utils.display(TITLE, plot);

    // Estimate the remaining parameters.
    // Assuming a gamma distribution(shape, scale), mean = shape * scale, where
    // scale = gain and shape = photons, so photons = mean / gain.
    double mean = getMean(h) - bias;
    // Note: if the bias is too high then the mean will be negative. Just move the bias.
    while (mean < 0) {
        bias -= 1;
        mean += 1;
    }
    double photons = mean / gain;

    if (simulate)
        Utils.log("Simulated bias=%d, gain=%s, noise=%s, photons=%s", (int) _bias, Utils.rounded(_gain),
                Utils.rounded(_noise), Utils.rounded(_photons));

    Utils.log("Estimate bias=%d, gain=%s, noise=%s, photons=%s", (int) bias, Utils.rounded(gain),
            Utils.rounded(noise), Utils.rounded(photons));

    final int max = (int) x[x.length - 1];
    double[] g = pdf(max, photons, gain, noise, (int) bias);

    plot.setColor(Color.blue);
    plot.addPoints(x, g, Plot2.LINE);
    Utils.display(TITLE, plot);

    // Perform a fit
    CustomPowellOptimizer o = new CustomPowellOptimizer(1e-6, 1e-16, 1e-6, 1e-16);
    double[] startPoint = new double[] { photons, gain, noise, bias };
    final int maxEval = 3000;

    // Set bounds
    double[] lower = new double[] { 0, 0.5 * gain, 0, bias - noise };
    double[] upper = new double[] { (limits[1] - limits[0]) / gain, 2 * gain, gain, bias + noise };

    // Restart until converged.
    // TODO - Maybe fix this with a better optimiser. This needs to be tested on real data.
    PointValuePair solution = null;
    for (int iter = 0; iter < 3; iter++) {
        IJ.showStatus("Fitting histogram ... Iteration " + iter);

        try {
            // Basic Powell optimiser
            MultivariateFunction fun = getFunction(limits, y, max, maxEval);
            PointValuePair optimum = o.optimize(new MaxEval(maxEval), new ObjectiveFunction(fun),
                    GoalType.MINIMIZE,
                    new InitialGuess((solution == null) ? startPoint : solution.getPointRef()));
            if (solution == null || optimum.getValue() < solution.getValue()) {
                solution = optimum;
            }
        } catch (Exception e) {
            // Ignore failures; the bounded fit below is still attempted
        }
        try {
            // Bounded Powell optimiser
            MultivariateFunction fun = getFunction(limits, y, max, maxEval);
            MultivariateFunctionMappingAdapter adapter = new MultivariateFunctionMappingAdapter(fun, lower,
                    upper);
            PointValuePair optimum = o.optimize(new MaxEval(maxEval), new ObjectiveFunction(adapter),
                    GoalType.MINIMIZE, new InitialGuess(adapter
                            .boundedToUnbounded((solution == null) ? startPoint : solution.getPointRef())));
            double[] point = adapter.unboundedToBounded(optimum.getPointRef());
            optimum = new PointValuePair(point, optimum.getValue());

            if (solution == null || optimum.getValue() < solution.getValue()) {
                solution = optimum;
            }
        } catch (Exception e) {
            // Ignore failures; keep the best solution found so far
        }
    }

    IJ.showStatus("");
    IJ.showProgress(1);

    if (solution == null) {
        Utils.log("Failed to fit the distribution");
        return;
    }

    double[] point = solution.getPointRef();
    photons = point[0];
    gain = point[1];
    noise = point[2];
    bias = (int) Math.round(point[3]);
    String label = String.format("Fitted bias=%d, gain=%s, noise=%s, photons=%s", (int) bias,
            Utils.rounded(gain), Utils.rounded(noise), Utils.rounded(photons));
    Utils.log(label);

    if (simulate) {
        Utils.log("Relative Error bias=%s, gain=%s, noise=%s, photons=%s",
                Utils.rounded(relativeError(bias, _bias)), Utils.rounded(relativeError(gain, _gain)),
                Utils.rounded(relativeError(noise, _noise)), Utils.rounded(relativeError(photons, _photons)));
    }

    // Show the PoissonGammaGaussian approximation
    double[] f = null;
    if (showApproximation) {
        f = new double[x.length];
        PoissonGammaGaussianFunction fun = new PoissonGammaGaussianFunction(1.0 / gain, noise);
        final double expected = photons * gain;
        for (int i = 0; i < f.length; i++) {
            f[i] = fun.likelihood(x[i] - bias, expected);
            //System.out.printf("x=%d, g=%f, f=%f, error=%f\n", (int) x[i], g[i], f[i],
            //      gdsc.smlm.fitting.utils.DoubleEquality.relativeError(g[i], f[i]));
        }
        yMax = Maths.maxDefault(yMax, f);
    }

    // Replot
    g = pdf(max, photons, gain, noise, (int) bias);
    plot = new Plot2(TITLE, "ADU", "Frequency");
    plot.setLimits(limits[0], limits[1], 0, yMax * 1.05);
    plot.setColor(Color.black);
    plot.addPoints(x, y, Plot2.DOT);
    plot.setColor(Color.red);
    plot.addPoints(x, g, Plot2.LINE);

    plot.addLabel(0, 0, label);

    if (showApproximation) {
        plot.setColor(Color.blue);
        plot.addPoints(x, f, Plot2.LINE);
    }

    Utils.display(TITLE, plot);
}

From source file:edu.cmu.tetrad.sem.GeneralizedSemEstimator.java

private double[] optimize(MultivariateFunction function, double[] values, int optimizer) {
    PointValuePair pair = null;

    if (optimizer == 1) {
        //            0.01, 0.000001
        //2.0D * FastMath.ulp(1.0D), 1e-8
        MultivariateOptimizer search = new PowellOptimizer(1e-7, 1e-7);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new MaxEval(100000));
    } else if (optimizer == 2) {
        MultivariateOptimizer search = new SimplexOptimizer(1e-7, 1e-7);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new MaxEval(100000), new NelderMeadSimplex(values.length));
    } else if (optimizer == 3) {
        int dim = values.length;
        int additionalInterpolationPoints = 0;
        final int numInterpolationPoints = 2 * dim + 1 + additionalInterpolationPoints;

        BOBYQAOptimizer search = new BOBYQAOptimizer(numInterpolationPoints);
        pair = search.optimize(new MaxEval(100000), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new InitialGuess(values), SimpleBounds.unbounded(dim));
    } else if (optimizer == 4) {
        MultivariateOptimizer search = new CMAESOptimizer(3000000, .05, false, 0, 0, new MersenneTwister(),
                false, new SimplePointChecker<PointValuePair>(0.5, 0.5));
        pair = search.optimize(new MaxEval(30000), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new InitialGuess(values), new CMAESOptimizer.Sigma(new double[values.length]),
                new CMAESOptimizer.PopulationSize(1000));
    } else if (optimizer == 5) {
        //            0.01, 0.000001
        //2.0D * FastMath.ulp(1.0D), 1e-8
        MultivariateOptimizer search = new PowellOptimizer(.05, .05);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MINIMIZE,
                new MaxEval(100000));
    } else if (optimizer == 6) {
        MultivariateOptimizer search = new PowellOptimizer(1e-7, 1e-7);
        pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function), GoalType.MAXIMIZE,
                new MaxEval(10000));
    }

    return pair.getPoint();
}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

@Test
public void stockingLordTest2() {
    System.out.println("StockingLordMethod Test 2: Uniform Distribution");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    UniformDistributionApproximation uniform = new UniformDistributionApproximation(-4, 4, 10);

    StockingLordMethod sl = new StockingLordMethod(irmX, irmY, uniform, uniform, EquatingCriterionType.Q1Q2);
    sl.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numInterpolationPoints = 2 * 2; // two dimensions, A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numInterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl), GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] slCoefficients = optimum.getPoint();
    sl.setIntercept(slCoefficients[0]);
    sl.setScale(slCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + slCoefficients[0] + "  A = " + slCoefficients[1]);

    assertEquals("  Intercept test", -0.4532, sl.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0962, sl.getScale(), 1e-4);

    System.out.println();

}

From source file:gdsc.smlm.fitting.BinomialFitter.java

/**
 * Fit the binomial distribution (n,p) to the cumulative histogram. Performs fitting assuming a fixed n value and
 * attempts to optimise p.
 * 
 * @param histogram
 *            The input histogram
 * @param mean
 *            The histogram mean (used to estimate p). Calculated if NaN.
 * @param n
 *            The n to evaluate
 * @param zeroTruncated
 *            True if the model should ignore n=0 (zero-truncated binomial)
 * @return The best fit (n, p)
 * @throws IllegalArgumentException
 *             If any of the input data values are negative
 * @throws IllegalArgumentException
 *             If fitting a zero-truncated binomial and there are no values above zero
 */
public PointValuePair fitBinomial(double[] histogram, double mean, int n, boolean zeroTruncated) {
    if (Double.isNaN(mean))
        mean = getMean(histogram);

    if (zeroTruncated && histogram[0] > 0) {
        log("Fitting zero-truncated histogram but there are zero values - Renormalising to ignore zero");
        double cumul = 0;
        for (int i = 1; i < histogram.length; i++)
            cumul += histogram[i];
        if (cumul == 0)
            throw new IllegalArgumentException(
                    "Fitting zero-truncated histogram but there are no non-zero values");
        histogram[0] = 0;
        for (int i = 1; i < histogram.length; i++)
            histogram[i] /= cumul;
    }

    int nFittedPoints = Math.min(histogram.length, n + 1) - ((zeroTruncated) ? 1 : 0);
    if (nFittedPoints < 1) {
        log("No points to fit (%d): Histogram.length = %d, n = %d, zero-truncated = %b", nFittedPoints,
                histogram.length, n, zeroTruncated);
        return null;
    }

    // The model is only fitting the probability p
    // For a binomial n*p = mean => p = mean/n
    double[] initialSolution = new double[] { FastMath.min(mean / n, 1) };

    // Create the function
    BinomialModelFunction function = new BinomialModelFunction(histogram, n, zeroTruncated);

    double[] lB = new double[1];
    double[] uB = new double[] { 1 };
    SimpleBounds bounds = new SimpleBounds(lB, uB);

    // Fit
    // CMAESOptimizer or BOBYQAOptimizer support bounds

    // CMAESOptimiser based on Matlab code:
    // https://www.lri.fr/~hansen/cmaes.m
    // Take the defaults from the Matlab documentation
    int maxIterations = 2000;
    double stopFitness = 0; //Double.NEGATIVE_INFINITY;
    boolean isActiveCMA = true;
    int diagonalOnly = 0;
    int checkFeasableCount = 1;
    RandomGenerator random = new Well19937c();
    boolean generateStatistics = false;
    ConvergenceChecker<PointValuePair> checker = new SimpleValueChecker(1e-6, 1e-10);
    // The sigma determines the search range for the variables. It should be 1/3 of the initial search region.
    OptimizationData sigma = new CMAESOptimizer.Sigma(new double[] { (uB[0] - lB[0]) / 3 });
    OptimizationData popSize = new CMAESOptimizer.PopulationSize((int) (4 + Math.floor(3 * Math.log(2))));

    try {
        PointValuePair solution = null;
        boolean noRefit = maximumLikelihood;
        if (n == 1 && zeroTruncated) {
            // No need to fit
            solution = new PointValuePair(new double[] { 1 }, 0);
            noRefit = true;
        } else {
            GoalType goalType = (maximumLikelihood) ? GoalType.MAXIMIZE : GoalType.MINIMIZE;

            // Iteratively fit
            CMAESOptimizer opt = new CMAESOptimizer(maxIterations, stopFitness, isActiveCMA, diagonalOnly,
                    checkFeasableCount, random, generateStatistics, checker);
            for (int iteration = 0; iteration <= fitRestarts; iteration++) {
                try {
                    // Start from the initial solution
                    PointValuePair result = opt.optimize(new InitialGuess(initialSolution),
                            new ObjectiveFunction(function), goalType, bounds, sigma, popSize,
                            new MaxIter(maxIterations), new MaxEval(maxIterations * 2));
                    //System.out.printf("CMAES Iter %d initial = %g (%d)\n", iteration, result.getValue(),
                    //      opt.getEvaluations());
                    if (solution == null || result.getValue() < solution.getValue()) {
                        solution = result;
                    }
                } catch (TooManyEvaluationsException e) {
                    // Evaluation budget exhausted; continue with the next restart
                } catch (TooManyIterationsException e) {
                    // Iteration budget exhausted; continue with the next restart
                }
                if (solution == null)
                    continue;
                try {
                    // Also restart from the current optimum
                    PointValuePair result = opt.optimize(new InitialGuess(solution.getPointRef()),
                            new ObjectiveFunction(function), goalType, bounds, sigma, popSize,
                            new MaxIter(maxIterations), new MaxEval(maxIterations * 2));
                    //System.out.printf("CMAES Iter %d restart = %g (%d)\n", iteration, result.getValue(),
                    //      opt.getEvaluations());
                    if (result.getValue() < solution.getValue()) {
                        solution = result;
                    }
                } catch (TooManyEvaluationsException e) {
                    // Budget exhausted; keep the best solution found so far
                } catch (TooManyIterationsException e) {
                    // Budget exhausted; keep the best solution found so far
                }
            }
            if (solution == null)
                return null;
        }

        if (noRefit) {
            // Although we fit the log-likelihood, return the sum-of-squares to allow 
            // comparison across different n
            double p = solution.getPointRef()[0];
            double ss = 0;
            double[] obs = function.p;
            double[] exp = function.getP(p);
            for (int i = 0; i < obs.length; i++)
                ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
            return new PointValuePair(solution.getPointRef(), ss);
        }
        // We can do an LVM refit if the number of fitted points is more than 1
        else if (nFittedPoints > 1) {
            // Improve the SS fit with a gradient-based LVM optimizer
            LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer();
            try {
                final BinomialModelFunctionGradient gradientFunction = new BinomialModelFunctionGradient(
                        histogram, n, zeroTruncated);
                PointVectorValuePair lvmSolution = optimizer.optimize(new MaxIter(3000),
                        new MaxEval(Integer.MAX_VALUE),
                        new ModelFunctionJacobian(new MultivariateMatrixFunction() {
                            public double[][] value(double[] point) throws IllegalArgumentException {
                                return gradientFunction.jacobian(point);
                            }
                        }), new ModelFunction(gradientFunction), new Target(gradientFunction.p),
                        new Weight(gradientFunction.getWeights()), new InitialGuess(solution.getPointRef()));

                double ss = 0;
                double[] obs = gradientFunction.p;
                double[] exp = lvmSolution.getValue();
                for (int i = 0; i < obs.length; i++)
                    ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
                // Check the pValue is valid since the LVM is not bounded.
                double p = lvmSolution.getPointRef()[0];
                if (ss < solution.getValue() && p <= 1 && p >= 0) {
                    //log("Re-fitting improved the SS from %s to %s (-%s%%)",
                    //      Utils.rounded(solution.getValue(), 4), Utils.rounded(ss, 4),
                    //      Utils.rounded(100 * (solution.getValue() - ss) / solution.getValue(), 4));
                    return new PointValuePair(lvmSolution.getPoint(), ss);
                }
            } catch (TooManyIterationsException e) {
                log("Failed to re-fit: Too many iterations (%d)", optimizer.getIterations());
            } catch (ConvergenceException e) {
                log("Failed to re-fit: %s", e.getMessage());
            } catch (Exception e) {
                // Ignore this ...
            }
        }

        return solution;
    } catch (Exception e) {
        log("Failed to fit Binomial distribution with N=%d : %s", n, e.getMessage());
    }
    return null;
}
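
The restart loop above swallows TooManyEvaluationsException, which is what commons-math3 optimizers throw when the budget set by MaxEval runs out before convergence. A minimal standalone sketch of that behaviour (the objective function and limits here are illustrative, not taken from BinomialFitter):

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.NelderMeadSimplex;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.SimplexOptimizer;

public class MaxEvalBudgetDemo {
    public static void main(String[] args) {
        // The Rosenbrock function needs far more than 10 evaluations to converge.
        MultivariateFunction rosenbrock = new MultivariateFunction() {
            public double value(double[] p) {
                double a = 1 - p[0];
                double b = p[1] - p[0] * p[0];
                return a * a + 100 * b * b;
            }
        };

        SimplexOptimizer optimizer = new SimplexOptimizer(1e-10, 1e-12);
        try {
            optimizer.optimize(new MaxEval(10), new ObjectiveFunction(rosenbrock),
                    GoalType.MINIMIZE, new InitialGuess(new double[] { -1, 1 }),
                    new NelderMeadSimplex(2));
        } catch (TooManyEvaluationsException e) {
            // The MaxEval budget was exhausted before the optimizer converged.
            System.out.println("Gave up after " + optimizer.getEvaluations() + " evaluations");
        }
    }
}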

From source file:com.itemanalysis.psychometrics.irt.equating.HaebaraMethodTest.java

@Test
public void haebaraTest4() {
    System.out.println("Haebara Test 4: Actual Distribution -backwards");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    UserSuppliedDistributionApproximation distX = new UserSuppliedDistributionApproximation(points, xDensity);
    UserSuppliedDistributionApproximation distY = new UserSuppliedDistributionApproximation(points, yDensity);

    HaebaraMethod hb = new HaebaraMethod(irmX, irmY, distX, distY, EquatingCriterionType.Q1);
    hb.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numInterpolationPoints = 2 * 2; // two dimensions, A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numInterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    org.apache.commons.math3.optim.PointValuePair optimum = optimizer.optimize(new MaxEval(1000),
            new ObjectiveFunction(hb), org.apache.commons.math3.optim.nonlinear.scalar.GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] hbCoefficients = optimum.getPoint();
    hb.setIntercept(hbCoefficients[0]);
    hb.setScale(hbCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + hbCoefficients[0] + "  A = " + hbCoefficients[1]);

    assertEquals("  Intercept test", -0.4710, hb.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0798, hb.getScale(), 1e-4);

    System.out.println();

}

From source file:edu.cmu.tetrad.search.Mimbuild2.java

private void optimizeNonMeasureVariancesQuick(Node[][] indicators, TetradMatrix measurescov,
        TetradMatrix latentscov, double[][] loadings, int[][] indicatorIndices) {
    int count = 0;

    for (int i = 0; i < indicators.length; i++) {
        for (int j = i; j < indicators.length; j++) {
            count++;
        }
    }

    for (int i = 0; i < indicators.length; i++) {
        for (int j = 0; j < indicators[i].length; j++) {
            count++;
        }
    }

    double[] values = new double[count];
    count = 0;

    for (int i = 0; i < indicators.length; i++) {
        for (int j = i; j < indicators.length; j++) {
            values[count++] = latentscov.get(i, j);
        }
    }

    for (int i = 0; i < indicators.length; i++) {
        for (int j = 0; j < indicators[i].length; j++) {
            values[count++] = loadings[i][j];
        }
    }

    Function1 function1 = new Function1(indicatorIndices, measurescov, loadings, latentscov, count);
    MultivariateOptimizer search = new PowellOptimizer(1e-7, 1e-7);

    PointValuePair pair = search.optimize(new InitialGuess(values), new ObjectiveFunction(function1),
            GoalType.MINIMIZE, new MaxEval(100000));

    minimum = pair.getValue();
}

From source file:com.itemanalysis.psychometrics.irt.equating.StockingLordMethodTest.java

@Test
public void stockingLordTest3() {
    System.out.println("StockingLordMethod Test 3: Normal Distribution");
    int n = aX.length;
    LinkedHashMap<String, ItemResponseModel> irmX = new LinkedHashMap<String, ItemResponseModel>();
    LinkedHashMap<String, ItemResponseModel> irmY = new LinkedHashMap<String, ItemResponseModel>();
    ItemResponseModel irm;

    for (int i = 0; i < n; i++) {
        String name = "V" + i;
        irm = new Irm3PL(aX[i], bX[i], cX[i], 1.0);
        irmX.put(name, irm);

        irm = new Irm3PL(aY[i], bY[i], cY[i], 1.0);
        irmY.put(name, irm);
    }

    NormalDistributionApproximation normal = new NormalDistributionApproximation(0, 1, -4, 4, 10);

    StockingLordMethod sl = new StockingLordMethod(irmX, irmY, normal, normal, EquatingCriterionType.Q1Q2);
    sl.setPrecision(4);
    double[] startValues = { 0, 1 };

    int numInterpolationPoints = 2 * 2; // two dimensions, A and B
    BOBYQAOptimizer underlying = new BOBYQAOptimizer(numInterpolationPoints);
    RandomGenerator g = new JDKRandomGenerator();
    RandomVectorGenerator generator = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum = optimizer.optimize(new MaxEval(1000), new ObjectiveFunction(sl), GoalType.MINIMIZE,
            SimpleBounds.unbounded(2), new InitialGuess(startValues));

    double[] slCoefficients = optimum.getPoint();
    sl.setIntercept(slCoefficients[0]);
    sl.setScale(slCoefficients[1]);

    System.out.println("  Iterations: " + optimizer.getEvaluations());
    System.out.println("  fmin: " + optimum.getValue());
    System.out.println("  B = " + slCoefficients[0] + "  A = " + slCoefficients[1]);

    assertEquals("  Intercept test", -0.4788, sl.getIntercept(), 1e-4);
    assertEquals("  Scale test", 1.0901, sl.getScale(), 1e-4);

    System.out.println();

}