List of usage examples for org.apache.commons.math3.optim.MaxEval

Constructor: public MaxEval(int max), where max is the maximal number of evaluations of the objective function allowed during an optimisation.
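Before the per-project examples, here is a minimal, self-contained sketch of the common pattern: MaxEval is passed as OptimizationData to an optimizer's optimize(...) call to cap the number of objective-function evaluations, and the optimizer throws TooManyEvaluationsException if the cap is reached before convergence. The objective function and the budget of 1000 below are illustrative choices, not taken from the examples that follow.

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.NelderMeadSimplex;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.SimplexOptimizer;

public class MaxEvalExample {
    public static void main(String[] args) {
        // Objective: a paraboloid with its minimum at (1, -2). Illustrative only.
        MultivariateFunction paraboloid = new MultivariateFunction() {
            @Override
            public double value(double[] p) {
                return (p[0] - 1) * (p[0] - 1) + (p[1] + 2) * (p[1] + 2);
            }
        };
        SimplexOptimizer optimizer = new SimplexOptimizer(1e-10, 1e-12);
        try {
            PointValuePair best = optimizer.optimize(
                    new MaxEval(1000), // budget: at most 1000 objective evaluations
                    new ObjectiveFunction(paraboloid), GoalType.MINIMIZE,
                    new InitialGuess(new double[] { 0, 0 }), new NelderMeadSimplex(2));
            System.out.println("Minimum at (" + best.getPoint()[0] + ", " + best.getPoint()[1] + ")");
        } catch (TooManyEvaluationsException e) {
            // Thrown when the MaxEval budget is exhausted before convergence.
            System.out.println("No convergence within 1000 evaluations");
        }
    }
}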
From source file: gdsc.smlm.ij.plugins.pcpalm.PCPALMFitting.java

/**
 * Fits the correlation curve with r>0 to the clustered model using the estimated density and precision.
 * Parameters must be fit within a tolerance of the starting values.
 *
 * @param gr
 * @param sigmaS
 *            The estimated precision
 * @param proteinDensity
 *            The estimated protein density
 * @return The fitted parameters [precision, density, clusterRadius, clusterDensity]
 */
private double[] fitEmulsionModel(double[][] gr, double sigmaS, double proteinDensity, String resultColour) {
    final EmulsionModelFunctionGradient myFunction = new EmulsionModelFunctionGradient();
    emulsionModel = myFunction;
    log("Fitting %s: Estimated precision = %f nm, estimated protein density = %g um^-2",
            emulsionModel.getName(), sigmaS, proteinDensity * 1e6);
    emulsionModel.setLogging(true);
    for (int i = offset; i < gr[0].length; i++) {
        // Only fit the curve above the estimated resolution (points below it will be subject to error)
        if (gr[0][i] > sigmaS * fitAboveEstimatedPrecision)
            emulsionModel.addPoint(gr[0][i], gr[1][i]);
    }
    double[] parameters;
    // The model is: sigma, density, range, amplitude, alpha
    double[] initialSolution = new double[] { sigmaS, proteinDensity, sigmaS * 5, 1, sigmaS * 5 };
    int evaluations = 0;
    // Constrain the fitting to be close to the estimated precision (sigmaS) and protein density.
    // LVM fitting does not support constrained fitting so use a bounded optimiser.
    SumOfSquaresModelFunction emulsionModelMulti = new SumOfSquaresModelFunction(emulsionModel);
    double[] x = emulsionModelMulti.x;
    double[] y = emulsionModelMulti.y;
    // Range should be equal to the first time the g(r) curve crosses 1
    for (int i = 0; i < x.length; i++)
        if (y[i] < 1) {
            initialSolution[4] = initialSolution[2] = (i > 0) ? (x[i - 1] + x[i]) * 0.5 : x[i];
            break;
        }
    // Put some bounds around the initial guess. Use the fitting tolerance (in %) if provided.
    double limit = (fittingTolerance > 0) ? 1 + fittingTolerance / 100 : 2;
    double[] lB = new double[] { initialSolution[0] / limit, initialSolution[1] / limit, 0, 0, 0 };
    // The amplitude and range should not extend beyond the limits of the g(r) curve.
    // TODO - Find out the expected range for the alpha parameter.
    double[] uB = new double[] { initialSolution[0] * limit, initialSolution[1] * limit, Maths.max(x),
            Maths.max(gr[1]), Maths.max(x) * 2 };
    log("Fitting %s using a bounded search: %s < precision < %s & %s < density < %s",
            emulsionModel.getName(), Utils.rounded(lB[0], 4), Utils.rounded(uB[0], 4),
            Utils.rounded(lB[1] * 1e6, 4), Utils.rounded(uB[1] * 1e6, 4));
    PointValuePair constrainedSolution = runBoundedOptimiser(gr, initialSolution, lB, uB, emulsionModelMulti);
    if (constrainedSolution == null)
        return null;
    parameters = constrainedSolution.getPointRef();
    evaluations = boundedEvaluations;
    // Refit using a LVM
    if (useLSE) {
        log("Re-fitting %s using a gradient optimisation", emulsionModel.getName());
        LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer();
        PointVectorValuePair lvmSolution;
        try {
            lvmSolution = optimizer.optimize(new MaxIter(3000), new MaxEval(Integer.MAX_VALUE),
                    new ModelFunctionJacobian(new MultivariateMatrixFunction() {
                        public double[][] value(double[] point) throws IllegalArgumentException {
                            return myFunction.jacobian(point);
                        }
                    }), new ModelFunction(myFunction), new Target(myFunction.getY()),
                    new Weight(myFunction.getWeights()), new InitialGuess(parameters));
            evaluations += optimizer.getEvaluations();
            double ss = 0;
            double[] obs = emulsionModel.getY();
            double[] exp = lvmSolution.getValue();
            for (int i = 0; i < obs.length; i++)
                ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
            if (ss < constrainedSolution.getValue()) {
                log("Re-fitting %s improved the SS from %s to %s (-%s%%)", emulsionModel.getName(),
                        Utils.rounded(constrainedSolution.getValue(), 4), Utils.rounded(ss, 4),
                        Utils.rounded(100 * (constrainedSolution.getValue() - ss)
                                / constrainedSolution.getValue(), 4));
                parameters = lvmSolution.getPoint();
            }
        } catch (TooManyIterationsException e) {
            log("Failed to re-fit %s: Too many iterations (%d)", emulsionModel.getName(),
                    optimizer.getIterations());
        } catch (ConvergenceException e) {
            log("Failed to re-fit %s: %s", emulsionModel.getName(), e.getMessage());
        }
    }
    emulsionModel.setLogging(false);
    // Ensure the width is positive
    parameters[0] = Math.abs(parameters[0]);
    //parameters[2] = Math.abs(parameters[2]);
    double ss = 0;
    double[] obs = emulsionModel.getY();
    double[] exp = emulsionModel.value(parameters);
    for (int i = 0; i < obs.length; i++)
        ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
    ic3 = Maths.getInformationCriterion(ss, emulsionModel.size(), parameters.length);
    final double fitSigmaS = parameters[0];
    final double fitProteinDensity = parameters[1];
    final double domainRadius = parameters[2]; // The radius of the cluster domain
    final double domainDensity = parameters[3]; // The density of the cluster domain
    final double coherence = parameters[4]; // The coherence length between circles
    // This is from the PC-PALM paper. It may not be correct for the emulsion model.
    final double nCluster = 2 * domainDensity * Math.PI * domainRadius * domainRadius * fitProteinDensity;
    double e1 = parameterDrift(sigmaS, fitSigmaS);
    double e2 = parameterDrift(proteinDensity, fitProteinDensity);
    log(" %s fit: SS = %f. cAIC = %f. %d evaluations", emulsionModel.getName(), ss, ic3, evaluations);
    log(" %s parameters:", emulsionModel.getName());
    log(" Average precision = %s nm (%s%%)", Utils.rounded(fitSigmaS, 4), Utils.rounded(e1, 4));
    log(" Average protein density = %s um^-2 (%s%%)", Utils.rounded(fitProteinDensity * 1e6, 4),
            Utils.rounded(e2, 4));
    log(" Domain radius = %s nm", Utils.rounded(domainRadius, 4));
    log(" Domain density = %s", Utils.rounded(domainDensity, 4));
    log(" Domain coherence = %s", Utils.rounded(coherence, 4));
    log(" nCluster = %s", Utils.rounded(nCluster, 4));
    // Check the fitted parameters are within tolerance of the initial estimates
    valid2 = true;
    if (fittingTolerance > 0 && (Math.abs(e1) > fittingTolerance || Math.abs(e2) > fittingTolerance)) {
        log(" Failed to fit %s within tolerance (%s%%): Average precision = %f nm (%s%%), average protein density = %g um^-2 (%s%%)",
                emulsionModel.getName(), Utils.rounded(fittingTolerance, 4), fitSigmaS, Utils.rounded(e1, 4),
                fitProteinDensity * 1e6, Utils.rounded(e2, 4));
        valid2 = false;
    }
    // Check extra parameters. Domain radius should be higher than the precision. Density should be positive
    if (domainRadius < fitSigmaS) {
        log(" Failed to fit %s: Domain radius is smaller than the average precision (%s < %s)",
                emulsionModel.getName(), Utils.rounded(domainRadius, 4), Utils.rounded(fitSigmaS, 4));
        valid2 = false;
    }
    if (domainDensity < 0) {
        log(" Failed to fit %s: Domain density is negative (%s)", emulsionModel.getName(),
                Utils.rounded(domainDensity, 4));
        valid2 = false;
    }
    if (ic3 > ic1) {
        log(" Failed to fit %s - Information Criterion has increased %s%%", emulsionModel.getName(),
                Utils.rounded((100 * (ic3 - ic1) / ic1), 4));
        valid2 = false;
    }
    addResult(emulsionModel.getName(), resultColour, valid2, fitSigmaS, fitProteinDensity, domainRadius,
            domainDensity, nCluster, coherence, ic3);
    return parameters;
}
From source file: nl.systemsgenetics.cellTypeSpecificAlleleSpecificExpression.BetaBinomialTest.java

public BetaBinomialTest(ArrayList<IndividualSnpData> all_individuals) throws Exception {
    boolean debug = true;
    // basic information, get the zero instance, was checked as the same in ReadAsLinesIntoIndividualSNPdata
    snpName = all_individuals.get(0).getSnpName();
    chromosome = all_individuals.get(0).getChromosome();
    position = all_individuals.get(0).getPosition();
    genotype = all_individuals.get(0).genotype;
    ArrayList<IndividualSnpData> het_individuals;
    het_individuals = UtilityMethods.isolateHeterozygotesFromIndividualSnpData(all_individuals);
    numberOfHets = het_individuals.size();
    hetSampleNames = new ArrayList<String>();
    asRef = new ArrayList<Integer>();
    asAlt = new ArrayList<Integer>();
    asNo = new ArrayList<Integer>();
    dispersion = new ArrayList<Double>();
    int total_overlap = 0;
    for (IndividualSnpData temp_het : het_individuals) {
        // Do nothing if there is no data in het_individuals
        hetSampleNames.add(temp_het.getSampleName());
        asRef.add(temp_het.getRefNum());
        asAlt.add(temp_het.getAltNum());
        asNo.add(temp_het.getNoNum());
        dispersion.add(temp_het.getDispersion());
        // this is used to check if we will continue with calculations.
        total_overlap += temp_het.getRefNum() + temp_het.getAltNum();
    }
    if ((total_overlap >= GlobalVariables.minReads) && (numberOfHets >= GlobalVariables.minHets)) {
        // There is data to perform the binomial test, perform it.
        if (GlobalVariables.verbosity >= 100) {
            System.out.println();
            System.out.println("---- Starting beta binomial LRT test estimate ----");
            System.out.println("SNP name: " + snpName);
            System.out.println("at: chr" + chromosome + ":" + position);
            System.out.println("--------------------------------------------------");
            System.out.println("debug:");
            System.out.println("Num of hets: " + Integer.toString(numberOfHets));
            System.out.println(het_individuals.get(0).getSnpName());
            System.out.println(total_overlap);
            System.out.println("asRef: " + asRef.toString());
            System.out.println("asAlt: " + asAlt.toString());
            System.out.println("dispersion: " + dispersion.toString());
            System.out.println("Starting Null estimation");
        }
        Integer[] asRefArray = asRef.toArray(new Integer[asRef.size()]);
        Integer[] asAltArray = asAlt.toArray(new Integer[asAlt.size()]);
        Double[] dispArray = dispersion.toArray(new Double[dispersion.size()]);
        BetaBinomNullLikelihood betaBinomNull;
        betaBinomNull = new BetaBinomNullLikelihood(asRefArray, asAltArray, dispArray);
        nullLogLik = betaBinomNull.value(new double[] { 0.5 });
        if (GlobalVariables.verbosity >= 100) {
            System.out.println("Starting Alt estimation");
        }
        BetaBinomAltLikelihood betaBinomAlt;
        betaBinomAlt = new BetaBinomAltLikelihood(asRefArray, asAltArray, dispArray);
        NelderMeadSimplex simplex;
        simplex = new NelderMeadSimplex(2);
        SimplexOptimizer optimizer = new SimplexOptimizer(GlobalVariables.simplexThreshold,
                GlobalVariables.simplexThreshold); // numbers are to which precision you want it to be done.
        PointValuePair solutionAlt = optimizer.optimize(new ObjectiveFunction(betaBinomAlt),
                new MaxEval(GlobalVariables.maximumIterations), simplex, GoalType.MINIMIZE,
                new InitialGuess(new double[] { 0.5, 0.5 }), new SearchInterval(-1000.0, 1000.0));
        double[] valueAlt = solutionAlt.getPoint();
        altLogLik = betaBinomAlt.value(valueAlt);
        iterations = optimizer.getIterations();
        alphaParam = valueAlt[0];
        betaParam = valueAlt[1];
        binomRatio = valueAlt[0] / (valueAlt[0] + valueAlt[1]);
        // chi squared statistic is determined based on both null and alt loglikelihoods.
        chiSq = 2.0 * (nullLogLik - altLogLik);
        // determine P value based on distribution
        ChiSquaredDistribution distribution = new ChiSquaredDistribution(1);
        pVal = 1 - distribution.cumulativeProbability(chiSq);
        if (GlobalVariables.verbosity >= 10) {
            System.out.println("\n--- Beta Binomial Test Statistics: ---");
            System.out.println("LogLik of Alt converged to a threshold of "
                    + Double.toString(GlobalVariables.simplexThreshold));
            System.out.println("\tAlpha parameter: " + Double.toString(valueAlt[0]));
            System.out.println("\tBeta parameter: " + Double.toString(valueAlt[1]));
            System.out.println("\tIterations to converge: " + Integer.toString(iterations) + "\n");
            System.out.println("\tNull log likelihood: " + Double.toString(nullLogLik));
            System.out.println("\tAlt log likelihood: " + Double.toString(altLogLik) + "\n");
            System.out.println("\tChisq statistic: " + Double.toString(chiSq));
            System.out.println("\tP value: " + Double.toString(pVal));
            System.out.println("------------------------------------------");
            // TODO, I want to format this properly, but not necessary
        }
        testPerformed = true;
        // This below is to compare everything to python created by WASP
        // Will put the backup back into python, just to make sure.
        if (GlobalVariables.verbosity >= 100) {
            System.out.println("loglikRef = " + Double.toString(altLogLik));
            System.out.println("loglik = 0");
            for (int i = 0; i < asRef.size(); i++) {
                System.out.println("loglik += AS_betabinom_loglike(" + "[math.log("
                        + Double.toString(alphaParam) + ") - math.log(" + Double.toString(alphaParam) + " + "
                        + Double.toString(betaParam) + "), " + "math.log(" + Double.toString(betaParam)
                        + ") - math.log(" + Double.toString(alphaParam) + " + " + Double.toString(betaParam)
                        + ")]," + Double.toString(dispersion.get(i)) + "," + Double.toString(asRef.get(i))
                        + "," + Double.toString(asAlt.get(i)) + "," + "0.980198, 0.005)");
            }
            System.out.println("if allclose(-1.0 * loglik, loglikRef): correctTest += 1");
            // I find exactly the correct values after running it through WASP implementation in python,
            // after one check. Using this as a foundation stone to write some tests:
            for (int i = 0; i < asRef.size(); i++) {
                String command = "Assert.assertEquals( " + "likelihoodFunctions.BetaBinomLogLik("
                        + String.format("%.15g", dispArray[i]) + "," + String.format("%.15g", alphaParam) + ","
                        + String.format("%.15g", betaParam) + "," + Integer.toString(asRefArray[i]) + ","
                        + Integer.toString(asAltArray[i]) + ")" + ", "
                        + Double.toString(LikelihoodFunctions.BetaBinomLogLik(dispArray[i], alphaParam,
                                betaParam, asRefArray[i], asAltArray[i]))
                        + ");";
                System.out.println(command);
            }
        }
    }
}
From source file: nl.systemsgenetics.cellTypeSpecificAlleleSpecificExpression.BetaBinomOverdispInSample.java

@SuppressWarnings("empty-statement")
public BetaBinomOverdispInSample(String filename) throws FileNotFoundException, IOException {
    sampleName = filename;
    if (GlobalVariables.verbosity >= 10) {
        System.out.println("\n--- Starting beta binomial dispersion estimate ---");
        System.out.println("AS File: " + sampleName);
        System.out.println("--------------------------------------------------");
    }
    ArrayList<IndividualSnpData> allSnps = new ArrayList<IndividualSnpData>();
    File file = new File(filename);
    FileReader fr = new FileReader(file);
    BufferedReader br = new BufferedReader(fr);
    String line;
    while ((line = br.readLine()) != null) {
        allSnps.add(new IndividualSnpData(file.getAbsolutePath(), line));
    }
    br.close();
    fr.close();
    ArrayList<IndividualSnpData> hets;
    hets = UtilityMethods.isolateHeterozygotesFromIndividualSnpData(allSnps);
    int numOfHets = hets.size();
    int[] asRef = new int[numOfHets];
    int[] asAlt = new int[numOfHets];
    int totalRef = 0;
    int totalAlt = 0;
    for (int i = 0; i < numOfHets; i++) {
        asRef[i] = hets.get(i).getRefNum();
        asAlt[i] = hets.get(i).getAltNum();
        totalRef += asRef[i];
        totalAlt += asAlt[i];
    }
    if (GlobalVariables.verbosity >= 10) {
        System.out.println("sample loaded");
        System.out.println("\ttotal_ref = " + totalRef);
        System.out.println("\ttotal_alt = " + totalAlt);
    }
    BetaBinomLikelihoodForOverdispersion betaBinom = new BetaBinomLikelihoodForOverdispersion(asRef, asAlt);
    NelderMeadSimplex simplex;
    simplex = new NelderMeadSimplex(1);
    SimplexOptimizer optimizer = new SimplexOptimizer(GlobalVariables.simplexThreshold,
            GlobalVariables.simplexThreshold); // numbers are to which precision you want it to be done.
    PointValuePair solution = optimizer.optimize(new ObjectiveFunction(betaBinom),
            new MaxEval(GlobalVariables.maximumIterations), simplex, GoalType.MINIMIZE,
            new InitialGuess(new double[] { 0.5 }), new SearchInterval(0.0, 1.0));
    overdispersion = solution.getFirst();
    if (GlobalVariables.verbosity >= 10) {
        System.out.println("Log likelihood converged to a threshold of "
                + Double.toString(GlobalVariables.simplexThreshold));
        System.out.println("\tDispersion sigma: " + Double.toString(overdispersion[0]));
        System.out.println("\tLog likelihood: " + Double.toString(betaBinom.value(overdispersion)));
    }
}
From source file: nl.systemsgenetics.cellTypeSpecificAlleleSpecificExpression.CTSBetaBinomialTest.java

public CTSBetaBinomialTest(ArrayList<IndividualSnpData> all_individuals) throws Exception {
    // basic information, get the zero instance.
    snpName = all_individuals.get(0).getSnpName();
    chromosome = all_individuals.get(0).getChromosome();
    position = all_individuals.get(0).getPosition();
    // isolate heterozygote individuals.
    ArrayList<IndividualSnpData> het_individuals = UtilityMethods
            .isolateHeterozygotesFromIndividualSnpData(all_individuals);
    numberOfHets = het_individuals.size();
    hetSampleNames = new ArrayList<String>();
    asRef = new ArrayList<Integer>();
    asAlt = new ArrayList<Integer>();
    asNo = new ArrayList<Integer>();
    dispersion = new ArrayList<Double>();
    cellProp = new ArrayList<Double>();
    int total_overlap = 0;
    // Get the basic data without doing any tests.
    for (IndividualSnpData temp_het : het_individuals) {
        // Do nothing if there is no data in het_individuals
        hetSampleNames.add(temp_het.getSampleName());
        asRef.add(temp_het.getRefNum());
        asAlt.add(temp_het.getAltNum());
        asNo.add(temp_het.getNoNum());
        dispersion.add(temp_het.getDispersion());
        cellProp.add(temp_het.getCellTypeProp());
        // this is used to check if we will continue with calculations.
        // BASED on the minHets and MinReads
        total_overlap += temp_het.getRefNum() + temp_het.getAltNum();
    }
    // Check if we do a test.
    if ((total_overlap >= GlobalVariables.minReads) && (numberOfHets >= GlobalVariables.minHets)) {
        // There is data to perform the binomial test, perform it.
        if (GlobalVariables.verbosity >= 100) {
            System.out.println();
            System.out.println("SNP name: " + snpName);
            System.out.println("at: chr" + chromosome + ":" + position);
            System.out.println("------------------------------------------------------");
            System.out.println("Num of hets: " + Integer.toString(numberOfHets));
            System.out.println(het_individuals.get(0).getSnpName());
            System.out.println(total_overlap);
            System.out.println("asRef: " + asRef.toString());
            System.out.println("asAlt: " + asAlt.toString());
            System.out.println("dispersion: " + dispersion.toString());
            System.out.println("cellProp: " + cellProp.toString());
            System.out.println("Starting non-CTS Beta binomial Null estimation");
        }
        /*
         Going to do this in a two step procedure:
         1. Determine a 2 parameter null the same way as what is done in the beta binomial alt,
            with starting values: This will be the actual null model in the CTS beta binomial output
         2. Determine the cell type specific proportion, but now we use a cell type specific model
            as the alternative.
        */
        Integer[] asRefArray = asRef.toArray(new Integer[asRef.size()]);
        Integer[] asAltArray = asAlt.toArray(new Integer[asAlt.size()]);
        Double[] dispArray = dispersion.toArray(new Double[dispersion.size()]);
        Double[] cellPropArray = cellProp.toArray(new Double[cellProp.size()]);
        try {
            BetaBinomAltLikelihood betaBinomNull;
            betaBinomNull = new BetaBinomAltLikelihood(asRefArray, asAltArray, dispArray);
            NelderMeadSimplex simplex;
            simplex = new NelderMeadSimplex(2);
            SimplexOptimizer optimizer = new SimplexOptimizer(GlobalVariables.simplexThreshold,
                    GlobalVariables.simplexThreshold); // numbers are to which precision you want it to be done.
            PointValuePair solutionNull = optimizer.optimize(new ObjectiveFunction(betaBinomNull),
                    new MaxEval(GlobalVariables.maximumIterations), simplex, GoalType.MINIMIZE,
                    new InitialGuess(new double[] { 0.5, 0.5 }), new SearchInterval(-1000.0, 1000.0));
            double[] valueNull = solutionNull.getPoint();
            nullLogLik = betaBinomNull.value(valueNull);
            nulliterations = optimizer.getIterations();
            NullAlphaParam = valueNull[0];
            NullBetaParam = valueNull[1];
            NullbinomRatio = valueNull[0] / (valueNull[0] + valueNull[1]);
            if (GlobalVariables.verbosity >= 100) {
                System.out.println("LogLik of Null converged to a threshold of "
                        + Double.toString(GlobalVariables.simplexThreshold));
                System.out.println("\tNull Alpha parameter: " + Double.toString(valueNull[0]));
                System.out.println("\tNull Beta parameter: " + Double.toString(valueNull[1]));
                System.out.println("\tIterations to converge: " + Integer.toString(nulliterations) + "\n");
            }
            // CHECK WHAT THE version does in terms of loglik.
            CTSbetaBinomialAltLikelihoodVersion2 CTSbetaBinomAlt;
            CTSbetaBinomAlt = new CTSbetaBinomialAltLikelihoodVersion2(asRefArray, asAltArray, dispArray,
                    cellPropArray);
            // Going to make some initialGuesses, because sometimes
            // this falls into a local minimum with the MLE, when only the
            // null parameters are used as a starting point.
            double nonCTSprop;
            nonCTSprop = valueNull[0] / (valueNull[0] + valueNull[1]);
            ArrayList<InitialGuess> GuessList = new ArrayList<InitialGuess>();
            GuessList.add(new InitialGuess(new double[] { 0.0, nonCTSprop }));
            GuessList.add(new InitialGuess(new double[] { 0.25, 0.5 }));
            GuessList.add(new InitialGuess(new double[] { 0.75, 0.5 }));
            simplex = new NelderMeadSimplex(2);
            ArrayList<double[]> OptimizerResults = new ArrayList<double[]>();
            Double[] OptimizedLogLik = new Double[8];
            int i = 0;
            int lowestIndices = -1;
            double lowestLogLik = 0;
            for (InitialGuess IGuess : GuessList) {
                PointValuePair solutionAlt = optimizer.optimize(new ObjectiveFunction(CTSbetaBinomAlt),
                        new MaxEval(500), simplex, GoalType.MINIMIZE, IGuess, new SearchInterval(0, 1));
                double[] valueAlt = solutionAlt.getPoint();
                OptimizerResults.add(valueAlt);
                OptimizedLogLik[i] = CTSbetaBinomAlt.value(valueAlt);
                // determine lowest loglik.
                if (i == 0) {
                    lowestIndices = 0;
                    lowestLogLik = OptimizedLogLik[i];
                } else if (OptimizedLogLik[i] < lowestLogLik) {
                    lowestIndices = i;
                    lowestLogLik = OptimizedLogLik[i];
                }
                if (GlobalVariables.verbosity >= 100) {
                    System.out.println("\nAlt Loglik convergence of starting coordinates"
                            + Arrays.toString(IGuess.getInitialGuess()));
                    System.out.println("\tFinal parameters: " + Arrays.toString(valueAlt));
                    System.out.println("\tLogLik: " + Double.toString(OptimizedLogLik[i]));
                }
                i++;
            }
            // Now select the lowest LogLik.
            double[] bestParams = OptimizerResults.get(lowestIndices);
            altLogLik = OptimizedLogLik[lowestIndices];
            binomRatioCellType = bestParams[0];
            binomRatioResidual = bestParams[1];
            // chi squared statistic is determined based on both null and alt loglikelihoods.
            chiSq = LikelihoodFunctions.ChiSqFromLogLik(nullLogLik, altLogLik);
            // determine P value based on distribution
            pVal = LikelihoodFunctions.determinePvalFrom1DFchiSq(chiSq);
            if (GlobalVariables.verbosity >= 10) {
                System.out.println("\n--- Starting cell type specific beta binomial LRT test estimate ---");
                System.out.println("LogLik of Alt converged to a threshold of "
                        + Double.toString(GlobalVariables.simplexThreshold) + "\n");
                System.out.println("\tCellType Binomial ratio: " + Double.toString(binomRatioCellType) + "\n");
                System.out.println("\tResidual Binomial ratio: " + Double.toString(binomRatioResidual) + "\n");
                System.out.println("\tNull log likelihood: " + Double.toString(nullLogLik));
                System.out.println("\tAlt log likelihood: " + Double.toString(altLogLik) + "\n");
                System.out.println("\tChisq statistic: " + Double.toString(chiSq));
                System.out.println("\tP value: " + Double.toString(pVal));
                System.out.println("-----------------------------------------------------------------------");
            }
            testConverged = true;
        } catch (TooManyEvaluationsException e) {
            if (GlobalVariables.verbosity >= 1) {
                System.out.println("WARNING: Did not converge to a solution for SNP " + snpName
                        + " in cell type specific beta binomial");
                System.out.println(" After " + Integer.toString(GlobalVariables.maximumIterations)
                        + " iterations.");
                System.out.println(" Continuing with the next.");
            }
        }
        // Finally test was done, so we say the test was performed.
        testPerformed = true;
    }
}
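Note how this test manages its evaluation budget: the null fit uses MaxEval(GlobalVariables.maximumIterations), each multi-start alternative fit uses a tighter MaxEval(500) per starting point, and the whole block is wrapped in a catch for TooManyEvaluationsException so that a SNP whose fit exhausts the budget is recorded as non-converged instead of aborting the run.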
From source file: nl.systemsgenetics.cellTypeSpecificAlleleSpecificExpression.CTSbinomialTest.java

public CTSbinomialTest(ArrayList<IndividualSnpData> all_individuals) throws Exception {
    // basic information, get the zero instance.
    snpName = all_individuals.get(0).getSnpName();
    chromosome = all_individuals.get(0).getChromosome();
    position = all_individuals.get(0).getPosition();
    ArrayList<IndividualSnpData> het_individuals = UtilityMethods
            .isolateHeterozygotesFromIndividualSnpData(all_individuals);
    numberOfHets = het_individuals.size();
    hetSampleNames = new ArrayList<String>();
    asRef = new ArrayList<Integer>();
    asAlt = new ArrayList<Integer>();
    asNo = new ArrayList<Integer>();
    cellProp = new ArrayList<Double>();
    int total_overlap = 0;
    int ref_total = 0;
    for (IndividualSnpData temp_het : het_individuals) {
        // Do nothing if there is no data in het_individuals
        hetSampleNames.add(temp_het.getSampleName());
        asRef.add(temp_het.getRefNum());
        asAlt.add(temp_het.getAltNum());
        asNo.add(temp_het.getNoNum());
        cellProp.add(temp_het.getCellTypeProp());
        // this is used to check if we will continue with calculations.
        total_overlap += temp_het.getRefNum() + temp_het.getAltNum();
        ref_total += temp_het.getRefNum();
    }
    if ((total_overlap >= GlobalVariables.minReads) && (numberOfHets >= GlobalVariables.minHets)) {
        Integer[] asRefArray = asRef.toArray(new Integer[asRef.size()]);
        Integer[] asAltArray = asAlt.toArray(new Integer[asAlt.size()]);
        Double[] cellPropArray = cellProp.toArray(new Double[cellProp.size()]);
        // Just using the alternative likelihood of the binomial test here
        // as our null loglikelihood.
        try {
            CTSnullBinomialLikelihood CTSbinomNull = new CTSnullBinomialLikelihood(asRefArray, asAltArray,
                    cellPropArray);
            double nullProp = ((double) ref_total / (double) total_overlap);
            double[] nullInput = { nullProp };
            nullLogLik = CTSbinomNull.value(nullInput);
            if (GlobalVariables.verbosity >= 100) {
                System.out.println("LogLik of NULL");
                System.out.println("\tResidual ratio: " + Double.toString(nullInput[0]));
                System.out.println("\tnullLogLik: " + Double.toString(nullLogLik));
            }
            // Now do MLE of the alternative test;
            // more computationally intensive than the previous one.
            CTSaltBinomialLikelihood CTSbinom = new CTSaltBinomialLikelihood(asRefArray, asAltArray,
                    cellPropArray);
            NelderMeadSimplex simplex;
            simplex = new NelderMeadSimplex(2, 1.0, 1.0, 2.0, 0.25, 0.25);
            SimplexOptimizer optimizer = new SimplexOptimizer(GlobalVariables.simplexThreshold,
                    GlobalVariables.simplexThreshold);
            PointValuePair solutionAlt = optimizer.optimize(new ObjectiveFunction(CTSbinom),
                    new MaxEval(GlobalVariables.maximumIterations), simplex, GoalType.MINIMIZE,
                    new InitialGuess(new double[] { 0, nullInput[0] }), new SearchInterval(0.0, 1.0));
            double[] valueAlt = solutionAlt.getPoint();
            /*
             In simulations it was found that in about 30% of the cases a solution was not found,
             so we try again from other starting values when the optimizer stopped after very
             few iterations.
            */
            if ((optimizer.getIterations() <= 5)) {
                if (GlobalVariables.verbosity >= 100) {
                    System.out.println("\nfirst starting point was already in a minimum.");
                    System.out.println("Trying with other starting values.");
                }
                ArrayList<double[]> StartingValueList = new ArrayList<double[]>();
                // These are the starting values to try
                StartingValueList.add(new double[] { 0.5, 0.0 });
                StartingValueList.add(new double[] { 0.5, 0.5 });
                StartingValueList.add(new double[] { 0.0, 0.0 });
                StartingValueList.add(new double[] { 0.75, 0.0 });
                StartingValueList.add(new double[] { 0.0, 0.75 });
                for (double[] startingValue : StartingValueList) {
                    SimplexOptimizer newOptimizer = new SimplexOptimizer(GlobalVariables.simplexThreshold,
                            GlobalVariables.simplexThreshold);
                    PointValuePair newSolutionAlt = newOptimizer.optimize(new ObjectiveFunction(CTSbinom),
                            new MaxEval(GlobalVariables.maximumIterations), simplex, GoalType.MINIMIZE,
                            new InitialGuess(startingValue), new SearchInterval(0.0, 1.0));
                    double[] newValueAlt = newSolutionAlt.getPoint();
                    if (CTSbinom.value(newValueAlt) < CTSbinom.value(valueAlt)) {
                        if (GlobalVariables.verbosity >= 100 && newOptimizer.getIterations() >= 5) {
                            System.out.println("New starting values are a better fit to the data");
                            System.out.println("keeping results of the new starting values.\n");
                        }
                        // Assign the new values to the actual used ones.
                        valueAlt = newValueAlt;
                        optimizer = newOptimizer;
                        break;
                    }
                }
            }
            altLogLik = CTSbinom.value(valueAlt);
            iterations = optimizer.getIterations();
            MLEratioCellType = valueAlt[1];
            MLEratioResidual = valueAlt[0];
            // chi squared statistic is determined based on both null and alt loglikelihoods.
            chiSq = LikelihoodFunctions.ChiSqFromLogLik(nullLogLik, altLogLik);
            // determine P value based on distribution
            pVal = LikelihoodFunctions.determinePvalFrom1DFchiSq(chiSq);
            if (GlobalVariables.verbosity >= 10) {
                System.out.println("\n--- Starting cell type specific binomial LRT test estimate ---");
                System.out.println("LogLik of Alt converged to a threshold of "
                        + Double.toString(GlobalVariables.simplexThreshold));
                System.out.println("\tCelltype ratio: " + Double.toString(valueAlt[0]));
                System.out.println("\tResidual ratio: " + Double.toString(valueAlt[1]));
                System.out.println("\tIterations to converge: " + Integer.toString(iterations) + "\n");
                System.out.println("\tNull log likelihood: " + Double.toString(nullLogLik));
                System.out.println("\tAlt log likelihood: " + Double.toString(altLogLik) + "\n");
                System.out.println("\tChisq statistic: " + Double.toString(chiSq));
                System.out.println("\tP value: " + Double.toString(pVal));
                System.out.println("--------------------------------------------------------------");
            }
            testConverged = true;
        } catch (TooManyEvaluationsException e) {
            if (GlobalVariables.verbosity >= 1) {
                System.out.println("WARNING: Did not converge to a solution for SNP: " + snpName
                        + " in cell type specific beta binomial");
                System.out.println(" After " + Integer.toString(GlobalVariables.maximumIterations)
                        + " iterations.");
                System.out.println(" Continuing with the next.");
                System.out.println("--------------------------------------------------------------");
            }
        }
        // Add some extra values that will make sure that there could be some kind of non-convergence.
        testPerformed = true;
    }
}
From source file: norbert.mynemo.core.selection.UserRecommenderSelector.java

/**
 * Evaluates the given recommender type for several configurations. The number of neighbors for
 * each evaluation is chosen via an optimizer. The number of evaluations performed depends on the
 * number of steps necessary for the evaluation results to converge. The number of evaluations
 * performed is also bounded by an internal maximum.
 *
 * <p>
 * The given recommender type must be part of the user similarity based family.
 *
 * @return all evaluations done during the selection process
 */
public Collection<RecommenderEvaluation> select(RecommenderType type, double minimumCoverage) {
    checkArgument(type.getFamily() == RecommenderFamily.USER_SIMILARITY_BASED);
    checkArgument(0 <= minimumCoverage && minimumCoverage <= 1,
            "The minimum coverage must be between 0 and 1.");
    final UserBasedRecommenderEvaluationFunction function = new UserBasedRecommenderEvaluationFunction(
            configuration, type, minimumCoverage);
    // find a good initial guess: test 10%, 20%, ..., 90% of the maximum neighbors
    double secondBestInitialValue = 1;
    double bestInitialValue = 0;
    double bestEvalResult = Integer.MAX_VALUE;
    for (double factor = 0.1; factor <= 0.9; factor += 0.1) {
        double currentNumNeighbors = maxNeighbors * factor;
        double evalResult = function.value(currentNumNeighbors);
        if (evalResult < bestEvalResult) {
            bestEvalResult = evalResult;
            secondBestInitialValue = bestInitialValue;
            bestInitialValue = currentNumNeighbors;
        }
    }
    // initialize the quality parameters of the CMAES optimizer
    final double minNeighbors = 1;
    final double initialGuessNeighbors = bestInitialValue;
    final double sigmaNeighbors = Math.min(Math.abs(bestInitialValue - secondBestInitialValue),
            Math.min(maxNeighbors - bestInitialValue, bestInitialValue - 1));
    // not sure about that
    final int populationSize = (int) (Math.log10(maxNeighbors) / Math.log10(2));
    // inhibit the exception throw if the maximum number of evaluations is reached
    final int maxCmaesEvaluations = Integer.MAX_VALUE;
    // not sure about that
    final int maxCmaesIterations = (int) (Math.log10(maxNeighbors) / Math.log10(2));
    // initialize the other parameters
    final ConvergenceChecker<PointValuePair> checker = new MaxIterationChecker<PointValuePair>(20);
    final ObjectiveFunction objectiveFunction = new ObjectiveFunction(function);
    final CMAESOptimizer optimizer = new CMAESOptimizer(maxCmaesIterations, 1.0, true, 0, 0,
            new JDKRandomGenerator(), false, checker);
    final MaxEval maxEval = new MaxEval(maxCmaesEvaluations);
    final GoalType goalType = GoalType.MINIMIZE;
    final SimpleBounds bounds = new SimpleBounds(new double[] { minNeighbors, minNeighbors },
            new double[] { maxNeighbors, maxNeighbors });
    final InitialGuess initialGuess = new InitialGuess(
            new double[] { initialGuessNeighbors, initialGuessNeighbors });
    final CMAESOptimizer.PopulationSize popSize = new CMAESOptimizer.PopulationSize(populationSize);
    final CMAESOptimizer.Sigma sigma = new CMAESOptimizer.Sigma(
            new double[] { sigmaNeighbors, sigmaNeighbors });
    // run the optimizer
    optimizer.optimize(objectiveFunction, goalType, initialGuess, popSize, sigma, bounds, maxEval);
    return function.getEvaluations();
}
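Here MaxEval is deliberately neutralised: as the inline comment says, maxCmaesEvaluations is set to Integer.MAX_VALUE to inhibit the TooManyEvaluationsException throw, and termination is delegated instead to the MaxIterationChecker convergence checker passed to the CMAESOptimizer constructor.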
From source file: objenome.goal.numeric.OptimizeMultivariate.java

@Override
public synchronized void solve(Objenome o, List<SetNumericValue> variables) {
    this.variables = variables;
    objenome = o;
    if (numStarts == -1)
        numStarts = variables.size() * 2;
    double[] lower = new double[variables.size()];
    double[] upper = new double[variables.size()];
    double[] mid = new double[variables.size()];
    int j = 0;
    for (SetNumericValue n : variables) {
        lower[j] = n.getMin().doubleValue();
        upper[j] = n.getMax().doubleValue();
        mid[j] = (lower[j] + upper[j]) * 0.5f;
        j++;
    }
    // TODO add MultiStart
    MultivariateOptimizer optimize = new BOBYQAOptimizer(variables.size() * 2);
    // new PowellOptimizer(0.01, 0.05);
    RandomGenerator rng = getRandomGenerator();
    MultiStartMultivariateOptimizer multiOptimize = new MultiStartMultivariateOptimizer(optimize, numStarts,
            new UncorrelatedRandomVectorGenerator(variables.size(), new UniformRandomGenerator(rng)));
    PointValuePair result = multiOptimize.optimize(new MaxEval(evaluations), new SimpleBounds(lower, upper),
            goal, new InitialGuess(mid), new ObjectiveFunction(this));
    apply(result.getPointRef());
    bestValue = result.getValue();
}
From source file: org.compevol.ssgd.MaximumLikelihood.java

@Override
public void run() {
    final Bounds<Double> bounds = variables.getBounds();
    final double[] lower = new double[bounds.getBoundsDimension()];
    final double[] upper = new double[bounds.getBoundsDimension()];
    final double[] sigma = new double[variables.getDimension()];
    Arrays.fill(sigma, 1.0);
    for (int i = 0; i < variables.getDimension(); ++i) {
        lower[i] = bounds.getLowerLimit(i) / variables.getParameterValue(i);
        upper[i] = bounds.getUpperLimit(i) / variables.getParameterValue(i);
    }
    final PointValuePair result = optimizer.optimize(
            new CMAESOptimizer.PopulationSize(4 + 3 * (int) Math.log(variables.getDimension())),
            new CMAESOptimizer.Sigma(sigma), GoalType.MAXIMIZE, new ObjectiveFunction(likelihood),
            new InitialGuess(initial), new SimpleBounds(lower, upper), new MaxEval(Integer.MAX_VALUE));
    System.out.println(variables);
    System.out.println(result.getValue());
}
From source file: org.dawnsci.plotting.tools.diffraction.BeamCenterRefinement.java

/**
 * Run optimisation of sector region position in a separate job.
 *
 * @param startPosition Initial position of sector region
 */
public void optimize(final double[] startPosition) {
    final int cmaesLambda = this.cmaesLambda;
    final double[] cmaesInputSigma = this.cmaesInputSigma;
    final int cmaesMaxIterations = this.cmaesMaxIterations;
    final int cmaesCheckFeasableCount = this.cmaesCheckFeasableCount;
    final ConvergenceChecker<PointValuePair> cmaesChecker = this.cmaesChecker;
    final BeamCenterRefinement function = this;
    Job job = new Job("Beam Position Refinement") {
        @Override
        protected IStatus run(IProgressMonitor monitor) {
            function.setInitPeaks(initPeaks);
            function.setMonitor(monitor);
            final double[] lB = new double[] { startPosition[0] - 20, startPosition[1] - 20 };
            final double[] uB = new double[] { startPosition[0] + 20, startPosition[1] + 20 };
            CMAESOptimizer beamPosOptimizer = new CMAESOptimizer(cmaesMaxIterations, 0.0, true, 0,
                    cmaesCheckFeasableCount, new Well19937a(), false, cmaesChecker);
            final PointValuePair result = beamPosOptimizer.optimize(new MaxEval(cmaesMaxIterations),
                    new ObjectiveFunction(function), GoalType.MAXIMIZE,
                    new CMAESOptimizer.PopulationSize(cmaesLambda), new CMAESOptimizer.Sigma(cmaesInputSigma),
                    new SimpleBounds(lB, uB), new InitialGuess(startPosition));
            final double[] newBeamPosition = result.getPoint();
            logger.info("Optimiser terminated at beam position ({}, {}) with the value {}",
                    new Object[] { newBeamPosition[0], newBeamPosition[1], result.getValue() });
            Display.getDefault().syncExec(new Runnable() {
                @Override
                public void run() {
                    ((IDiffractionMetadata) dataset.getMetadata()).getDetector2DProperties()
                            .setBeamCentreCoords(newBeamPosition);
                }
            });
            return Status.OK_STATUS;
        }
    };
    job.schedule();
}
From source file: org.dawnsci.plotting.tools.powdercheck.PowderCheckJob.java

private List<PowderCheckResult> fitPeaksToTrace(final Dataset xIn, final Dataset yIn, Dataset baselineIn) {
    resultList.clear();
    List<HKL> spacings = CalibrationFactory.getCalibrationStandards().getCalibrant().getHKLs();
    final double[] qVals = new double[spacings.size()];
    for (int i = 0; i < spacings.size(); i++) {
        if (xAxis == XAxis.ANGLE)
            qVals[i] = 2 * Math.toDegrees(Math.asin((metadata.getDiffractionCrystalEnvironment()
                    .getWavelength() / (2 * spacings.get(i).getDNano() * 10))));
        else
            qVals[i] = (Math.PI * 2) / (spacings.get(i).getDNano() * 10);
    }
    double qMax = xIn.max().doubleValue();
    double qMin = xIn.min().doubleValue();
    List<Double> qList = new ArrayList<Double>();
    int count = 0;
    for (double q : qVals) {
        if (q > qMax || q < qMin)
            continue;
        count++;
        qList.add(q);
    }
    double minPeak = Collections.min(qList);
    double maxPeak = Collections.max(qList);
    int minXidx = ROISliceUtils.findPositionOfClosestValueInAxis(xIn, minPeak) - EDGE_PIXEL_NUMBER;
    int maxXidx = ROISliceUtils.findPositionOfClosestValueInAxis(xIn, maxPeak) + EDGE_PIXEL_NUMBER;
    int maxSize = xIn.getSize();
    minXidx = minXidx < 0 ? 0 : minXidx;
    maxXidx = maxXidx > maxSize - 1 ? maxSize - 1 : maxXidx;
    final Dataset x = xIn.getSlice(new int[] { minXidx }, new int[] { maxXidx }, null);
    final Dataset y = yIn.getSlice(new int[] { minXidx }, new int[] { maxXidx }, null);
    y.setName("Fit");
    Dataset baseline = baselineIn.getSlice(new int[] { minXidx }, new int[] { maxXidx }, null);
    List<APeak> peaks = Generic1DFitter.fitPeaks(x, y, Gaussian.class, count + 10);
    List<PowderCheckResult> initResults = new ArrayList<PowderCheckResult>();
    CompositeFunction cf = new CompositeFunction();
    for (APeak peak : peaks)
        cf.addFunction(peak);
    double limit = findMatchLimit(qList, cf);
    while (cf.getNoOfFunctions() != 0 && !qList.isEmpty())
        findMatches(initResults, qList, cf, limit);
    final CompositeFunction cfFinal = compositeFunctionFromResults(initResults);
    double[] initParam = new double[cfFinal.getFunctions().length * 3];
    {
        int i = 0;
        for (IFunction func : cfFinal.getFunctions()) {
            initParam[i++] = func.getParameter(0).getValue();
            initParam[i++] = func.getParameter(1).getValue();
            initParam[i++] = func.getParameter(2).getValue();
        }
    }
    final Dataset yfit = DatasetFactory.zeros(x, Dataset.FLOAT64);
    MultivariateOptimizer opt = new SimplexOptimizer(REL_TOL, ABS_TOL);
    MultivariateFunction fun = new MultivariateFunction() {
        @Override
        public double value(double[] arg0) {
            int j = 0;
            for (IFunction func : cfFinal.getFunctions()) {
                double[] p = func.getParameterValues();
                p[0] = arg0[j++];
                p[1] = arg0[j++];
                p[2] = arg0[j++];
                func.setParameterValues(p);
            }
            for (int i = 0; i < yfit.getSize(); i++) {
                yfit.set(cfFinal.val(x.getDouble(i)), i);
            }
            return y.residual(yfit);
        }
    };
    opt.optimize(new InitialGuess(initParam), GoalType.MINIMIZE, new ObjectiveFunction(fun),
            new MaxEval(MAX_EVAL), new NelderMeadSimplex(initParam.length));
    Dataset fit = Maths.add(yfit, baseline);
    fit.setName("Fit");
    Dataset residual = Maths.subtract(y, yfit);
    residual.setName("Residual");
    system.updatePlot1D(x, Arrays.asList(new IDataset[] { fit, residual }), null);
    setPlottingSystemAxes();
    for (int i = 0; i < cfFinal.getNoOfFunctions(); i++) {
        resultList.add(new PowderCheckResult(cfFinal.getFunction(i), initResults.get(i).getCalibrantQValue()));
    }
    return resultList;
}