List of usage examples for org.apache.commons.math3.analysis.UnivariateFunction
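UnivariateFunction is the single-method callback interface (double value(double x)) in org.apache.commons.math3.analysis that the Commons Math integrators, root solvers and univariate optimizers accept. The examples collected below all follow the same pattern: the quantity of interest is wrapped in an anonymous UnivariateFunction (capturing any required final locals) and handed to an integrator, solver or optimizer. As a minimal orientation sketch before the real-world examples (the class name, function and interval here are illustrative, not taken from any of the sources below):

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.integration.SimpsonIntegrator;

public class UnivariateFunctionIntro {
    public static void main(String[] args) {
        // Implement the single value(double) method; here f(x) = exp(-x^2).
        UnivariateFunction f = new UnivariateFunction() {
            public double value(double x) {
                return Math.exp(-x * x);
            }
        };
        System.out.println(f.value(1.0)); // direct evaluation
        // The same object can be handed to any Commons Math integrator.
        double area = new SimpsonIntegrator().integrate(1000, f, 0, 1);
        System.out.println(area); // ~0.7468
    }
}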
From source file:gdsc.smlm.function.PoissonGammaGaussianFunction.java
/**
 * Compute the likelihood
 *
 * @param o
 *            The observed count
 * @param e
 *            The expected count
 * @return The likelihood
 */
public double likelihood(final double o, final double e) {
    // Use the same variables as the Python code
    final double cij = o;
    final double eta = alpha * e; // convert to photons

    if (sigma == 0) {
        // No convolution with a Gaussian. Simply evaluate for a Poisson-Gamma distribution.

        // Any observed count above zero
        if (cij > 0.0) {
            // The observed count converted to photons
            final double nij = alpha * cij;

            if (eta * nij > 10000) {
                // Approximate Bessel function i1(x) when using large x:
                // i1(x) ~ exp(x)/sqrt(2*pi*x)
                // However the entire equation is logged (creating transform),
                // evaluated then raised to e to prevent overflow error on
                // large exp(x)
                final double transform = 0.5 * Math.log(alpha * eta / cij) - nij - eta
                        + 2 * Math.sqrt(eta * nij) - Math.log(twoSqrtPi * Math.pow(eta * nij, 0.25));
                return FastMath.exp(transform);
            } else {
                // Second part of equation 135
                return Math.sqrt(alpha * eta / cij) * FastMath.exp(-nij - eta)
                        * Bessel.I1(2 * Math.sqrt(eta * nij));
            }
        } else if (cij == 0.0) {
            return FastMath.exp(-eta);
        } else {
            return 0;
        }
    } else if (useApproximation) {
        return mortensenApproximation(cij, eta);
    } else {
        // This code is the full evaluation of equation 7 from the supplementary information
        // of the paper Chao, et al (2013) Nature Methods 10, 335-338.
        // It is the full evaluation of a Poisson-Gamma-Gaussian convolution PMF.
        final double sk = sigma; // Read noise
        final double g = 1.0 / alpha; // Gain
        final double z = o; // Observed pixel value
        final double vk = eta; // Expected number of photons

        // Compute the integral to infinity of:
        // exp( -((z-u)/(sqrt(2)*s)).^2 - u/g ) * sqrt(vk*u/g) .* besseli(1, 2 * sqrt(vk*u/g)) ./ u;

        final double vk_g = vk * alpha; // vk / g
        final double sqrt2sigma = Math.sqrt(2) * sk;

        // Specify the function to integrate
        UnivariateFunction f = new UnivariateFunction() {
            public double value(double u) {
                return eval(sqrt2sigma, z, vk_g, g, u);
            }
        };

        // Integrate to infinity is not necessary. The convolution of the function with the
        // Gaussian should be adequately sampled using a nxSD around the maximum.
        // Find a bracket containing the maximum
        double lower, upper;
        double maxU = Math.max(1, o);
        double rLower = maxU;
        double rUpper = maxU + 1;
        double f1 = f.value(rLower);
        double f2 = f.value(rUpper);

        // Calculate the simple integral and the range
        double sum = f1 + f2;
        boolean searchUp = f2 > f1;

        if (searchUp) {
            while (f2 > f1) {
                f1 = f2;
                rUpper += 1;
                f2 = f.value(rUpper);
                sum += f2;
            }
            maxU = rUpper - 1;
        } else {
            // Ensure that u stays above zero
            while (f1 > f2 && rLower > 1) {
                f2 = f1;
                rLower -= 1;
                f1 = f.value(rLower);
                sum += f1;
            }
            maxU = (rLower > 1) ? rLower + 1 : rLower;
        }

        lower = Math.max(0, maxU - 5 * sk);
        upper = maxU + 5 * sk;

        if (useSimpleIntegration && lower > 0) {
            // If we are not at the zero boundary then we can use a simple integration by adding the
            // remaining points in the range
            for (double u = rLower - 1; u >= lower; u -= 1) {
                sum += f.value(u);
            }
            for (double u = rUpper + 1; u <= upper; u += 1) {
                sum += f.value(u);
            }
        } else {
            // Use Legendre-Gauss integrator
            try {
                final double relativeAccuracy = 1e-4;
                final double absoluteAccuracy = 1e-8;
                final int minimalIterationCount = 3;
                final int maximalIterationCount = 32;
                final int integrationPoints = 16;

                // Use an integrator that does not use the boundary since u=0 is undefined (divide by zero)
                UnivariateIntegrator i = new IterativeLegendreGaussIntegrator(integrationPoints,
                        relativeAccuracy, absoluteAccuracy, minimalIterationCount, maximalIterationCount);
                sum = i.integrate(2000, f, lower, upper);
            } catch (TooManyEvaluationsException ex) {
                return mortensenApproximation(cij, eta);
            }
        }

        // Compute the final probability
        //final double f1 = z / sqrt2sigma;
        return (FastMath.exp(-vk) / (sqrt2pi * sk)) * (FastMath.exp(-(f1 * f1)) + sum);
    }
}
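The full-evaluation branch above is the canonical integration pattern on this page: the integrand's parameters (sqrt2sigma, z, vk_g, g) are captured as final locals by an anonymous UnivariateFunction, the integration window is limited to a few standard deviations around the maximum, and IterativeLegendreGaussIntegrator is used because Gauss-Legendre nodes are strictly interior, so the singular point u = 0 is never evaluated. Below is a stripped-down sketch of that pattern with a placeholder integrand standing in for the Poisson-Gamma-Gaussian term (the values of z and sigma, and the class name, are illustrative only):

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.integration.IterativeLegendreGaussIntegrator;
import org.apache.commons.math3.analysis.integration.UnivariateIntegrator;
import org.apache.commons.math3.exception.TooManyEvaluationsException;

public class EndpointFreeIntegration {
    public static void main(String[] args) {
        final double z = 10, sigma = 2; // illustrative values, not from the source
        UnivariateFunction f = new UnivariateFunction() {
            public double value(double u) {
                // Undefined at u = 0 (division by zero), but Gauss-Legendre nodes never touch the endpoints.
                return Math.exp(-(z - u) * (z - u) / (2 * sigma * sigma)) * Math.sqrt(u) / u;
            }
        };
        UnivariateIntegrator i = new IterativeLegendreGaussIntegrator(
                16,   // integration points per interval
                1e-4, // relative accuracy
                1e-8, // absolute accuracy
                3, 32);
        try {
            // Integrate a window of a few sigma around the peak, as in the source.
            double sum = i.integrate(2000, f, Math.max(0, z - 5 * sigma), z + 5 * sigma);
            System.out.println(sum);
        } catch (TooManyEvaluationsException ex) {
            System.out.println("integration budget exhausted"); // the source falls back to an approximation here
        }
    }
}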
From source file:ca.mcgill.networkdynamics.geoinference.evaluation.CrossValidationScorer.java
public static double computeAuc(TDoubleList errors) {
    double[] normalizedErrors = new double[errors.size()];
    int[] errorsPerKm = new int[MAX_KM];
    for (int i = 0; i < errors.size(); ++i) {
        int error = (int) (Math.round(errors.get(i)));
        errorsPerKm[error]++;
    }

    // The accumulated sum of errors per km
    int[] errorsBelowEachKm = new int[errorsPerKm.length];
    for (int i = 0; i < errorsBelowEachKm.length; ++i) {
        errorsBelowEachKm[i] = errorsPerKm[i];
        if (i > 0)
            errorsBelowEachKm[i] += errorsBelowEachKm[i - 1];
    }

    final double[] cdf = new double[errorsBelowEachKm.length];
    double dSize = errors.size(); // to avoid casting all the time
    for (int i = 0; i < cdf.length; ++i)
        cdf[i] = errorsBelowEachKm[i] / dSize;

    final double maxLogKm = Math.log10(MAX_KM - 1);

    // At this point, the CDF is defined on [0, 20038], so we first need to
    // log-scale the x-values and then normalize them into [0, 1]
    UnivariateFunction logNormalizedScaledCdf = new UnivariateFunction() {
        public double value(double x) {
            // First, unscale by the log(MAX_DIST) so the value is just
            // Math.log10(x)
            double unscaled = x * maxLogKm;
            // Second, invert the log transformation
            double errorInKm = Math.pow(10, unscaled);
            // Now look up the CDF value for that error, i.e. the probability
            // of having an error less than this amount
            double prob = cdf[(int) (Math.round(errorInKm))];
            return prob;
        }
    };

    TrapezoidIntegrator ti = new TrapezoidIntegrator();
    double auc = ti.integrate(10_000_000, logNormalizedScaledCdf, 0, 1);
    return auc;
}
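Here the UnivariateFunction is simply a lookup into a precomputed CDF over a log-scaled, [0, 1]-normalized error axis, and TrapezoidIntegrator turns that lookup into an area-under-the-curve score. A condensed sketch of the same idea with a tiny, made-up CDF array (the numbers are illustrative, not geolocation results):

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.integration.TrapezoidIntegrator;

public class AucSketch {
    public static void main(String[] args) {
        // Hypothetical CDF sampled at 1 km steps.
        final double[] cdf = { 0.0, 0.2, 0.45, 0.7, 0.85, 0.95, 1.0 };
        final double maxLogKm = Math.log10(cdf.length - 1);
        UnivariateFunction logScaledCdf = new UnivariateFunction() {
            public double value(double x) {
                double errorInKm = Math.pow(10, x * maxLogKm); // undo the [0, 1] log scaling
                return cdf[(int) Math.round(errorInKm)];       // CDF lookup at that error
            }
        };
        double auc = new TrapezoidIntegrator().integrate(10_000_000, logScaledCdf, 0, 1);
        System.out.println("AUC = " + auc);
    }
}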
From source file:it.unibo.alchemist.modelchecker.AlchemistASMC.java
private static int computeMinimum(final double interval, final double confidence) {
    final UnivariateFunction f = new UnivariateFunction() {
        @Override
        public double value(final double n) {
            double t;
            if (Math.ceil(n) == FastMath.floor(n)) {
                t = new TDistribution((int) n).inverseCumulativeProbability(1 - confidence / 2);
            } else {
                double t1 = new TDistribution((int) FastMath.ceil(n))
                        .inverseCumulativeProbability((1 - confidence / 2)) * (n - Math.floor(n));
                double t2 = new TDistribution((int) FastMath.floor(n))
                        .inverseCumulativeProbability((1 - confidence / 2)) * (Math.ceil(n) - n);
                t = t1 + t2;
            }
            double value = 2 * t / n;
            return value - interval;
        }
    };
    final BisectionSolver bs = new BisectionSolver();
    return (int) Math.ceil(bs.solve(Integer.MAX_VALUE, f, 1, Integer.MAX_VALUE));
}
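computeMinimum inverts the confidence-interval half-width formula by treating it as a root-finding problem: the UnivariateFunction returns (achieved width at n) minus the target interval, and BisectionSolver finds the n where that difference crosses zero. The same pattern with a simpler, swapped-in normal-approximation formula for the sample size (the constants and class name are illustrative, not from the source):

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.solvers.BisectionSolver;

public class SampleSizeByBisection {
    public static void main(String[] args) {
        final double interval = 0.05; // target width of the confidence interval
        final double z = 1.96;        // ~97.5% normal quantile, i.e. a 95% two-sided interval
        // f(n) = achieved width - target width; the root is the minimum sample size.
        UnivariateFunction f = new UnivariateFunction() {
            public double value(double n) {
                return 2 * z / Math.sqrt(n) - interval;
            }
        };
        BisectionSolver bs = new BisectionSolver();
        double n = bs.solve(1000, f, 1, 1e9);
        System.out.println("minimum n = " + (int) Math.ceil(n)); // about 6147
    }
}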
From source file:edu.rice.cs.bioinfo.programs.phylonet.commands.SearchBranchLengthsMaxGTProb.java
@Override protected String produceResult() { StringBuffer result = new StringBuffer(); final List<Tree> geneTrees = new ArrayList<Tree>(); final List<Integer> counter = new ArrayList<Integer>(); for (NetworkNonEmpty geneTree : _geneTrees) { String phylonetGeneTree = NetworkTransformer.toENewickTree(geneTree); NewickReader nr = new NewickReader(new StringReader(phylonetGeneTree)); STITree<Double> newtr = new STITree<Double>(true); try {/*from w w w . ja va 2 s . c om*/ nr.readTree(newtr); } catch (Exception e) { errorDetected.execute(e.getMessage(), this._motivatingCommand.getLine(), this._motivatingCommand.getColumn()); } boolean found = false; int index = 0; for (Tree tr : geneTrees) { if (Trees.haveSameRootedTopology(tr, newtr)) { found = true; break; } index++; } if (found) { counter.set(index, counter.get(index) + 1); } else { geneTrees.add(newtr); counter.add(1); } } NetworkFactoryFromRNNetwork transformer = new NetworkFactoryFromRNNetwork(); final Network<Double> speciesNetwork = transformer.makeNetwork(_speciesNetwork); /* * Make the branch length of every edge initially 1 if no initial user value is specified. * Make the hybrid prob of every hybrid edge initially .5 if no initial user value is specified. */ for (NetNode<Double> parent : speciesNetwork.bfs()) { for (NetNode<Double> child : parent.getChildren()) { double initialBL = child.getParentDistance(parent); if (initialBL == NetNode.NO_DISTANCE || Double.isNaN(initialBL)) // no specification from user on branch length initialBL = 1.0; child.setParentDistance(parent, initialBL); if (child.getParentCount() == 2) { for (NetNode<Double> hybridParent : child.getParents()) { if (child.getParentProbability(hybridParent) == 1.0) // no specification from user on hybrid prob { child.setParentProbability(parent, 0.5); } } } } } /* * Try to assign branch lengths and hybrid probs to increase GTProb from the initial network. * Except branch lengths of leaf edges. They don't impact GTProb. */ // def: a round is an attempt to tweak each branch length and each hybrid prob. boolean continueRounds = true; // keep trying to improve network final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>( _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter)); // records the GTProb of the network at all times int assigmentRound = 0; for (; assigmentRound < _assigmentRounds && continueRounds; assigmentRound++) { /* * Prepare a random ordering of network edge examinations each of which attempts to change a branch length or hybrid prob to improve the GTProb score. */ double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents(); List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here. Will execute them one by one later. 
// add branch length adjustments to the list for (final NetNode<Double> parent : speciesNetwork.bfs()) { for (final NetNode<Double> child : parent.getChildren()) { if (!parent.isRoot()) // jd tmp continue; if (child.isLeaf()) // leaf edge, skip continue; assigmentActions.add(new Proc() { public void execute() { UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedBranchLength) { // brent suggests a new branch length double incumbentBranchLength = child.getParentDistance(parent); // mutate and see if it yields an improved network child.setParentDistance(parent, suggestedBranchLength); double lnProb = _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter); RnNewickPrinter<Double> rnNewickPrinter = new RnNewickPrinter<Double>(); StringWriter sw = new StringWriter(); rnNewickPrinter.print(speciesNetwork, sw); // String inferredNetwork = sw.toString(); // System.out.println(inferredNetwork + "\t" + lnProb); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change { lnGtProbOfSpeciesNetwork.setContents(lnProb); // System.out.println("(improved)"); } else // didn't improve, roll back change { child.setParentDistance(parent, incumbentBranchLength); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(.000000000001, .0000000000000001); // very small numbers so we control when brent stops, not brent. try { optimizer.optimize(_maxAssigmentAttemptsPerBranchParam, functionToOptimize, GoalType.MAXIMIZE, Double.MIN_VALUE, _maxBranchLength); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } // System.out.println("-----------------------------------------------------------------------"); } }); } } // add hybrid probs to hybrid edges for (final NetNode<Double> child : speciesNetwork.bfs()) // find every hybrid node { if (child.isRoot()) // calling getParentNumber on root causes NPE. Bug workaround. continue; if (child.getParentCount() == 2) // hybrid node { Iterator<NetNode<Double>> hybridParents = child.getParents().iterator(); final NetNode<Double> hybridParent1 = hybridParents.next(); final NetNode<Double> hybridParent2 = hybridParents.next(); assigmentActions.add(new Proc() { public void execute() { UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedProb) { double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1); // try new pair of hybrid probs child.setParentProbability(hybridParent1, suggestedProb); child.setParentProbability(hybridParent2, 1.0 - suggestedProb); double lnProb = _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it { lnGtProbOfSpeciesNetwork.setContents(lnProb); } else // change did not improve, roll back { child.setParentProbability(hybridParent1, incumbentHybridProbParent1); child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(.000000000001, .0000000000000001); // very small numbers so we control when brent stops, not brent. 
try { optimizer.optimize(_maxAssigmentAttemptsPerBranchParam, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } } }); } } // Collections.shuffle(assigmentActions); // randomize the order we will try to adjust network edge properties for (Proc assigment : assigmentActions) // for each change attempt, perform attempt { assigment.execute(); } if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt to last around, stop trying to find a better assignment { continueRounds = false; } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt to improvement threshold to continue searching { double improvementPercentage = Math.pow(Math.E, (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching { continueRounds = false; } } else { throw new IllegalStateException("Should never have decreased prob."); } } RnNewickPrinter<Double> rnNewickPrinter = new RnNewickPrinter<Double>(); StringWriter sw = new StringWriter(); rnNewickPrinter.print(speciesNetwork, sw); String inferredNetwork = sw.toString(); this.richNewickGenerated(inferredNetwork); result.append( "\nTotal log probability: " + lnGtProbOfSpeciesNetwork.getContents() + ": " + inferredNetwork); return result.toString(); }
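Each Proc above wraps a single network parameter (a branch length or a hybridization probability) in a UnivariateFunction whose value() both reports the likelihood and greedily keeps or rolls back the trial value, then lets BrentOptimizer drive the search with deliberately tiny tolerances so the evaluation budget, not Brent's convergence test, decides when to stop. The same keep-or-roll-back pattern recurs in the NetworkLikelihoodFromGTTBL, NetworkPseudoLikelihoodFromGTT and InferMLNetworkFromSequences entries below. A bare-bones sketch of the pattern, using the same optimize(maxEval, function, GoalType, lower, upper) call shape as the code above (which appears to be the since-deprecated pre-3.1 org.apache.commons.math3.optimization API); the objective here is a stand-in parabola, not a gene-tree likelihood:

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
import org.apache.commons.math3.optimization.GoalType;
import org.apache.commons.math3.optimization.univariate.BrentOptimizer;

public class BrentKeepOrRollback {
    static double bestScore = Double.NEGATIVE_INFINITY;
    static double branchLength = 1.0; // current parameter value

    public static void main(String[] args) {
        UnivariateFunction functionToOptimize = new UnivariateFunction() {
            public double value(double suggested) {
                double incumbent = branchLength;
                branchLength = suggested; // mutate the model with the trial value
                double score = -(suggested - 3.0) * (suggested - 3.0); // stand-in objective, peak at 3
                if (score > bestScore) {
                    bestScore = score;        // improved: keep the change
                } else {
                    branchLength = incumbent; // worse: roll it back
                }
                return score;
            }
        };
        // Very small tolerances so we control when Brent stops, not Brent.
        BrentOptimizer optimizer = new BrentOptimizer(1e-12, 1e-16);
        try {
            optimizer.optimize(100, functionToOptimize, GoalType.MAXIMIZE, Double.MIN_VALUE, 10.0);
        } catch (TooManyEvaluationsException e) {
            // evaluation budget exhausted; the best accepted value is already stored
        }
        System.out.println("branchLength = " + branchLength + ", score = " + bestScore);
    }
}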
From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkLikelihoodFromGTTBL.java
protected double findOptimalBranchLength(final Network<Object> speciesNetwork, final Map<String, List<String>> species2alleles, final List gts, final List gtCorrespondence, final Set<String> singleAlleleSpecies) { boolean continueRounds = true; // keep trying to improve network if (_pair2time == null) { computePairwiseCoalesceTime(gts, species2alleles); }/*from w ww.j av a2 s .co m*/ //System.out.println("\n"+speciesNetwork); final Map<NetNode, Double> node2constraints = new Hashtable<NetNode, Double>(); computeNodeHeightUpperbound(speciesNetwork, node2constraints); final Map<NetNode<Object>, Double> node2height = new Hashtable<NetNode<Object>, Double>(); initializeNetwork(speciesNetwork, node2constraints, node2height); double initialProb = computeProbability(speciesNetwork, gts, species2alleles, gtCorrespondence); final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(initialProb); // records the GTProb of the network at all times final Container<Map<NetNode<Object>, Double>> node2heightContainer = new Container<Map<NetNode<Object>, Double>>( node2height); int roundIndex = 0; for (; roundIndex < _maxRounds && continueRounds; roundIndex++) { double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents(); List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here. Will execute them one by one later. for (final NetNode<Object> child : speciesNetwork.getNetworkNodes()) // find every hybrid node { Iterator<NetNode<Object>> hybridParents = child.getParents().iterator(); final NetNode hybridParent1 = hybridParents.next(); final NetNode hybridParent2 = hybridParents.next(); assigmentActions.add(new Proc() { public void execute() { UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedProb) { double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1); child.setParentProbability(hybridParent1, suggestedProb); child.setParentProbability(hybridParent2, 1.0 - suggestedProb); double lnProb = computeProbability(speciesNetwork, gts, species2alleles, gtCorrespondence); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it { lnGtProbOfSpeciesNetwork.setContents(lnProb); } else // change did not improve, roll back { child.setParentProbability(hybridParent1, incumbentHybridProbParent1); child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent. 
try { optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } } }); } for (final NetNode<Object> node : Networks.postTraversal(speciesNetwork)) { if (node.isLeaf()) { continue; } assigmentActions.add(new Proc() { public void execute() { final Container<Double> minHeight = new Container<Double>(0.0); final Container<Double> maxHeight = new Container<Double>(Double.MAX_VALUE); for (NetNode<Object> child : node.getChildren()) { double childHeight = node2heightContainer.getContents().get(child); minHeight.setContents(Math.max(minHeight.getContents(), childHeight)); } double minParent = Double.MAX_VALUE; if (!node.isRoot()) { for (NetNode<Object> parent : node.getParents()) { double parentHeight = node2heightContainer.getContents().get(parent); minParent = Math.min(minParent, parentHeight); } } else { minParent = minHeight.getContents() + _maxBranchLength; } maxHeight.setContents(Math.min(minParent, node2constraints.get(node))); //System.out.println("\nChanging node " + node.getName() + " from " + minHeight.getContents() + " to " + maxHeight.getContents()); UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedHeight) { // brent suggests a new branch length double incumbentHeight = node2heightContainer.getContents().get(node); for (NetNode<Object> child : node.getChildren()) { child.setParentDistance(node, suggestedHeight - node2heightContainer.getContents().get(child)); } if (!node.isRoot()) { for (NetNode<Object> parent : node.getParents()) { node.setParentDistance(parent, node2heightContainer.getContents().get(parent) - suggestedHeight); } } double lnProb = computeProbability(speciesNetwork, gts, species2alleles, gtCorrespondence); //System.out.print("suggest: "+ suggestedHeight + " " + lnProb + " vs. " + lnGtProbOfSpeciesNetwork.getContents() + ": "); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change { lnGtProbOfSpeciesNetwork.setContents(lnProb); node2heightContainer.getContents().put(node, suggestedHeight); //System.out.println( " better "); } else // didn't improve, roll back change { for (NetNode<Object> child : node.getChildren()) { child.setParentDistance(node, incumbentHeight - node2heightContainer.getContents().get(child)); } if (!node.isRoot()) { for (NetNode<Object> parent : node.getParents()) { node.setParentDistance(parent, node2heightContainer.getContents().get(parent) - incumbentHeight); } } //System.out.println( " worse "); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent. 
try { optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, minHeight.getContents(), maxHeight.getContents()); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } //System.out.println(network2String(speciesNetwork) + " " + lnGtProbOfSpeciesNetwork.getContents()); } }); } Collections.shuffle(assigmentActions); for (Proc assigment : assigmentActions) // for each change attempt, perform attempt { assigment.execute(); } if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt to last around, stop trying to find a better assignment { continueRounds = false; } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt to improvement threshold to continue searching { double improvementPercentage = Math.pow(Math.E, (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round //System.out.println(improvementPercentage + " vs. " + _improvementThreshold); if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching { continueRounds = false; } } else { throw new IllegalStateException("Should never have decreased prob."); } } //System.out.print("\n" + lnGtProbOfSpeciesNetwork.getContents() + ": " + speciesNetwork); return lnGtProbOfSpeciesNetwork.getContents(); }
From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkPseudoLikelihoodFromGTT.java
protected double findOptimalBranchLength(final Network<Object> speciesNetwork, final Map<String, List<String>> species2alleles, final List tripleFrequencies, final List gtCorrespondence, final Set<String> singleAlleleSpecies) { boolean continueRounds = true; // keep trying to improve network for (NetNode<Object> node : speciesNetwork.dfs()) { for (NetNode<Object> parent : node.getParents()) { node.setParentDistance(parent, 1.0); if (node.isNetworkNode()) { node.setParentProbability(parent, 0.5); }//from ww w.jav a 2 s. c om } } Set<NetNode> node2ignoreForBL = findEdgeHavingNoBL(speciesNetwork); double initalProb = computeProbability(speciesNetwork, tripleFrequencies, species2alleles, gtCorrespondence); if (_printDetails) System.out.println(speciesNetwork.toString() + " : " + initalProb); final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(initalProb); // records the GTProb of the network at all times int roundIndex = 0; for (; roundIndex < _maxRounds && continueRounds; roundIndex++) { /* * Prepare a random ordering of network edge examinations each of which attempts to change a branch length or hybrid prob to improve the GTProb score. */ double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents(); List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here. Will execute them one by one later. for (final NetNode<Object> parent : edu.rice.cs.bioinfo.programs.phylonet.structs.network.util.Networks .postTraversal(speciesNetwork)) { for (final NetNode<Object> child : parent.getChildren()) { if (node2ignoreForBL.contains(child)) { continue; } assigmentActions.add(new Proc() { public void execute() { UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedBranchLength) { double incumbentBranchLength = child.getParentDistance(parent); child.setParentDistance(parent, suggestedBranchLength); double lnProb = computeProbability(speciesNetwork, tripleFrequencies, species2alleles, gtCorrespondence); //System.out.println(speciesNetwork + ": " + lnProb); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change { lnGtProbOfSpeciesNetwork.setContents(lnProb); } else // didn't improve, roll back change { child.setParentDistance(parent, incumbentBranchLength); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent. 
try { optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, Double.MIN_VALUE, _maxBranchLength); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } if (_printDetails) System.out.println( speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents()); } }); } } for (final NetNode<Object> child : speciesNetwork.getNetworkNodes()) // find every hybrid node { Iterator<NetNode<Object>> hybridParents = child.getParents().iterator(); final NetNode hybridParent1 = hybridParents.next(); final NetNode hybridParent2 = hybridParents.next(); assigmentActions.add(new Proc() { public void execute() { UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedProb) { double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1); child.setParentProbability(hybridParent1, suggestedProb); child.setParentProbability(hybridParent2, 1.0 - suggestedProb); double lnProb = computeProbability(speciesNetwork, tripleFrequencies, species2alleles, gtCorrespondence); //System.out.println(speciesNetwork + ": " + lnProb); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it { lnGtProbOfSpeciesNetwork.setContents(lnProb); } else // change did not improve, roll back { child.setParentProbability(hybridParent1, incumbentHybridProbParent1); child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent. try { optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } if (_printDetails) System.out.println( speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents()); } }); } // add hybrid probs to hybrid edges Collections.shuffle(assigmentActions); for (Proc assigment : assigmentActions) // for each change attempt, perform attempt { assigment.execute(); } if (_printDetails) { System.out.println("Round end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"); System.out .println(speciesNetwork.toString() + "\n" + lnGtProbOfSpeciesNetwork.getContents() + "\n"); } if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt to last around, stop trying to find a better assignment { continueRounds = false; } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt to improvement threshold to continue searching { double improvementPercentage = Math.pow(Math.E, (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching { continueRounds = false; } } else { throw new IllegalStateException("Should never have decreased prob."); } } //System.out.println(speciesNetwork + " " + lnGtProbOfSpeciesNetwork.getContents()); return lnGtProbOfSpeciesNetwork.getContents(); }
From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.InferMLNetworkFromSequences.java
protected double findNonUltrametricOptimalBranchLength(final String[] gtTaxa, final Network<Object> speciesNetwork, final Map<String, String> allele2species, final List<Tuple<char[], Integer>> sequences, final RateModel rModel, final double theta) { boolean continueRounds = true; // keep trying to improve network for (NetNode<Object> node : speciesNetwork.dfs()) { for (NetNode<Object> parent : node.getParents()) { node.setParentDistance(parent, theta); if (node.isNetworkNode()) { //node.setParentDistance(parent,0.0); node.setParentProbability(parent, 0.5); }// ww w . j a v a 2 s. com } } //long start = System.currentTimeMillis(); double initalProb = computeProbability(gtTaxa, speciesNetwork, allele2species, sequences, rModel, theta); //System.out.println(initalProb); //System.out.print("\n"+(System.currentTimeMillis()-start)); //computeProbability(speciesNetwork, distinctTrees, species2alleles, nbTreeAndCountAndBinaryIDList); final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(initalProb); // records the GTProb of the network at all times //final Container<Integer> callCount = new Container<Integer>(0); int roundIndex = 0; for (; roundIndex < _maxRounds && continueRounds; roundIndex++) { /* * Prepare a random ordering of network edge examinations each of which attempts to change a branch length or hybrid prob to improve the GTProb score. */ double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents(); List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here. Will execute them one by one later. for (final NetNode<Object> parent : edu.rice.cs.bioinfo.programs.phylonet.structs.network.util.Networks .postTraversal(speciesNetwork)) { for (final NetNode<Object> child : parent.getChildren()) { assigmentActions.add(new Proc() { public void execute() { UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedBranchLength) { //System.out.print(" l/"+child.getName()+" "); //callCount.setContents(callCount.getContents()+1); double incumbentBranchLength = child.getParentDistance(parent); child.setParentDistance(parent, suggestedBranchLength); double lnProb = computeProbability(gtTaxa, speciesNetwork, allele2species, sequences, rModel, theta); //System.out.println("Changing branch ("+parent.getName()+","+child.getName()+") to " + suggestedBranchLength); //System.out.println(network2String(speciesNetwork)+": " + lnProb); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change { lnGtProbOfSpeciesNetwork.setContents(lnProb); } else // didn't improve, roll back change { child.setParentDistance(parent, incumbentBranchLength); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent. 
try { optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, Double.MIN_VALUE, _maxBranchLength); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } } }); } } for (final NetNode<Object> child : speciesNetwork.getNetworkNodes()) // find every hybrid node { Iterator<NetNode<Object>> hybridParents = child.getParents().iterator(); final NetNode hybridParent1 = hybridParents.next(); final NetNode hybridParent2 = hybridParents.next(); assigmentActions.add(new Proc() { public void execute() { UnivariateFunction functionToOptimize = new UnivariateFunction() { public double value(double suggestedProb) { //callCount.setContents(callCount.getContents()+1); //System.out.print(" p/"+child.getName()+" "); double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1); child.setParentProbability(hybridParent1, suggestedProb); child.setParentProbability(hybridParent2, 1.0 - suggestedProb); double lnProb = computeProbability(gtTaxa, speciesNetwork, allele2species, sequences, rModel, theta); //System.out.println("Changing node probability to "+ suggestedProb); //System.out.println(network2String(speciesNetwork)+": " + lnProb); //System.out.println(Math.abs(computeProbability(speciesNetwork, distinctTrees, species2alleles, nbTreeAndCountAndBinaryIDList) - lnProb)); if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it { lnGtProbOfSpeciesNetwork.setContents(lnProb); } else // change did not improve, roll back { child.setParentProbability(hybridParent1, incumbentHybridProbParent1); child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1); } return lnProb; } }; BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent. try { optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0); } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded { } //System.out.println(network2String(speciesNetwork) + " : " + lnGtProbOfSpeciesNetwork.getContents()); } }); } // add hybrid probs to hybrid edges if (_seed == null) { //Collections.shuffle(assigmentActions); } for (Proc assigment : assigmentActions) // for each change attempt, perform attempt { assigment.execute(); } if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt to last around, stop trying to find a better assignment { continueRounds = false; } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt to improvement threshold to continue searching { double improvementPercentage = Math.pow(Math.E, (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching { continueRounds = false; } } else { throw new IllegalStateException("Should never have decreased prob."); } } //System.out.println(callCount.getContents()); //System.out.println(computeProbability(speciesNetwork, distinctTrees, species2alleles, nbTreeAndCountAndBinaryIDList) + " vs. " + lnGtProbOfSpeciesNetwork.getContents()); return lnGtProbOfSpeciesNetwork.getContents(); }
From source file:gdsc.smlm.results.PeakResult.java
/**
 * Compute the function I1 using numerical integration. See Mortensen, et al (2010) Nature Methods 7, 377-383, SI
 * equation 43.
 *
 * <pre>
 * I1 = 1 + sum [ ln(t) / (1 + t/rho) ] dt
 *    = - sum [ t * ln(t) / (t + rho) ] dt
 * </pre>
 *
 * Where sum is the integral between 0 and 1. In the case of rho=0 the function returns 1.
 *
 * @param rho
 * @param integrationPoints
 *            the number of integration points for the LegendreGaussIntegrator
 * @return the I1 value
 */
private static double computeI1(final double rho, int integrationPoints) {
    if (rho == 0)
        return 1;

    final double relativeAccuracy = 1e-4;
    final double absoluteAccuracy = 1e-8;
    final int minimalIterationCount = 3;
    final int maximalIterationCount = 32;

    // Use an integrator that does not use the boundary since log(0) is undefined.
    UnivariateIntegrator i = new IterativeLegendreGaussIntegrator(integrationPoints, relativeAccuracy,
            absoluteAccuracy, minimalIterationCount, maximalIterationCount);

    // Specify the function to integrate
    UnivariateFunction f = new UnivariateFunction() {
        public double value(double x) {
            return x * Math.log(x) / (x + rho);
        }
    };
    final double i1 = -i.integrate(2000, f, 0, 1);
    //System.out.printf("I1 = %f (%d)\n", i1, i.getEvaluations());

    // The alternative form below requires more evaluations and sometimes does not converge,
    // presumably because log(x) changes rapidly as x -> 0, whereas x log(x) in the function above
    // is more stable
    //	UnivariateFunction f2 = new UnivariateFunction()
    //	{
    //		@Override
    //		public double value(double x)
    //		{
    //			return Math.log(x) / ( 1 + x / rho);
    //		}
    //	};
    //	double i2 = 1 + i.integrate(2000, f2, 0, 1);
    //	System.out.printf("I1 (B) = %f (%d)\n", i2, i.getEvaluations());

    return i1;
}
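Because UnivariateFunction has a single abstract method, on Java 8+ the integrand above can also be written as a lambda; the integral itself is unchanged. A sketch of computeI1 in that style, using the same accuracy constants as above (the class name and the sample call in main are illustrative):

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.integration.IterativeLegendreGaussIntegrator;
import org.apache.commons.math3.analysis.integration.UnivariateIntegrator;

public class I1Lambda {
    static double computeI1(double rho, int integrationPoints) {
        if (rho == 0)
            return 1;
        UnivariateIntegrator i = new IterativeLegendreGaussIntegrator(
                integrationPoints, 1e-4, 1e-8, 3, 32);
        // x * ln(x) / (x + rho); Gauss-Legendre never samples x = 0, where ln(x) is undefined.
        UnivariateFunction f = x -> x * Math.log(x) / (x + rho);
        return -i.integrate(2000, f, 0, 1);
    }

    public static void main(String[] args) {
        System.out.println(computeI1(0.5, 16));
    }
}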
From source file:eu.crisis_economics.abm.fund.FundTest.java
/** * This test is in two parts.//from w w w. j a va2 s . c o m * Part 1: a new fund is created with an initial deposit. This deposit is * reserved as private equity. A collection of household stubs are * created with ample starting deposits. For a number of business * cycles, the households all withdraw or contribute random sums to * the fund. The size of these sums is drawn from a Gaussian * distribution with custom mean and variance. After a fixed number * of business cycles have elapsed, the total amount of cash owned * by each household (be this in fund investments or bank deposits) * is counted. Since the fund has no investment sources and cannot * make a profit, it is expected that no household ever loses or * gains assets (cash plus fund account value). At every step, it is * further asserted that the equity of the fund is not significantly * different to the private (constant) equity. * Part 2: the fund and all households are destroyed and respawned. Households * begin with ample deposits. The test now continues as in Part 1, except * that the fund deposit account is directly injected with exogenous * profits during each business cycle. The size of these profits is * drawn from a Gaussian distribution with custom mean and variance. * At the conclusion of Part 2, the amount of cash in the system * is tallied. It is then asserted that the sum of all household deposits * and fund account values is equal to the initial cash in the system * plus total exogenous profits. * Part 3: as Part 2, except the fund now makes exogenous losses as well as * profits. In this test, the fund is deliberately taken to the edge of * its private equity due to market losses. */ public void testPrivateEquityConstancyAndConservationOfCash() { /* * Test parameters */ final int numberOfBusinessCycles = 1000; final double meanFundProfit = 1.e6, fundProfitVariance = 2.e5, meanHouseholdInvestment = 0., householdInvestmentVariances = 1000., householdInitialDeposits = 1.e7, fundInitialDeposits = 0.; Random dice = new Random(12345); myFund = new MutualFund(myBank, new HomogeneousPortfolioWeighting(), new FundamentalistStockReturnExpectationFunction(), clearingHouse, new NoSmoothingAlgorithm()); // Fresh fund myFund.debit(fundInitialDeposits); final Contract exogenousReturns = myFund.mDepositor.depositAccount; Assert.assertEquals(myFund.getEquity(), fundInitialDeposits); for (int i = 0; i < myHouseholds.length; ++i) { myHouseholds[i] = // Ample household liquidity new HouseholdStub(myFund, myBank, householdInitialDeposits); Assert.assertEquals(myHouseholds[i].getTotalAssets(), householdInitialDeposits, 1.e-5); } /* * Part 1 */ for (int i = 0; i < numberOfBusinessCycles; ++i) { // Random household investments and withdrawals for (int j = 0; j < N_HOUSEHOLDS; ++j) { final double investment = dice.nextGaussian() * householdInvestmentVariances + meanHouseholdInvestment; try { if (investment > 0.) myFund.invest(myHouseholds[j], investment); else if (investment < 0.) { final double maximumWithdrawal = myFund.getBalance(myHouseholds[j]); if (maximumWithdrawal == 0.) continue; myFund.requestWithdrawal(myHouseholds[j], Math.min(-investment, maximumWithdrawal)); } } catch (final InsufficientFundsException noFunds) { Assert.fail(); } } myFund.preClearingProcessing(); // No profits from fund investments. 
try { myFund.postClearingProcessing(); } catch (final InsufficientFundsException unexpectedException) { Assert.fail(); } System.out.printf("MutualFund assets: %16.10g\n", myFund.getTotalAssets()); System.out.printf("MutualFund liabilities: %16.10g\n", myFund.getTotalLiabilities()); System.out.printf("MutualFund equity: %16.10g\n", myFund.getEquity()); // Assertions final double postClearingEquity = myFund.getEquity(); Assert.assertEquals(postClearingEquity, fundInitialDeposits, 1.e-5); continue; } // Assert no cash has been created System.out.printf("household deposits [open] fund investments [open]" + " deposits [close] fund investments [close] sum [close]\n"); for (int i = 0; i < N_HOUSEHOLDS; ++i) { // Assert no money creation final double closingHouseholdDeposits = myHouseholds[i].bankAccount.getBalance(), closingFundAccountValue = myFund.getBalance(myHouseholds[i]); System.out.printf("%5d %16.10g %16.10g %16.10g %16.10g %16.10g\n", i, householdInitialDeposits, 0., closingHouseholdDeposits, closingFundAccountValue, closingHouseholdDeposits + closingFundAccountValue); Assert.assertEquals(closingHouseholdDeposits + closingFundAccountValue, householdInitialDeposits, 1.e-6); } /* * Part 2 */ // Rerun with exogenous fund profits double totalFundProfits = 0.; for (int i = 0; i < numberOfBusinessCycles; ++i) { // Random household investments and withdrawals for (int j = 0; j < N_HOUSEHOLDS; ++j) { final double investment = dice.nextGaussian() * householdInvestmentVariances + meanHouseholdInvestment; try { if (investment > 0.) myFund.invest(myHouseholds[j], investment); else if (investment < 0.) { final double maximumWithdrawal = myFund.getBalance(myHouseholds[j]); if (maximumWithdrawal == 0.) continue; myFund.requestWithdrawal(myHouseholds[j], Math.min(-investment, maximumWithdrawal)); } } catch (final InsufficientFundsException noFunds) { Assert.fail(); } } myFund.preClearingProcessing(); final double fundProfits = dice.nextGaussian() * fundProfitVariance + meanFundProfit; exogenousReturns.setValue(exogenousReturns.getValue() + fundProfits); totalFundProfits += fundProfits; try { myFund.postClearingProcessing(); } catch (final InsufficientFundsException unexpectedException) { Assert.fail(); } System.out.printf("MutualFund profits: %16.10g\n", fundProfits); System.out.printf("MutualFund assets: %16.10g\n", myFund.getTotalAssets()); System.out.printf("MutualFund liabilities: %16.10g\n", myFund.getTotalLiabilities()); System.out.printf("MutualFund equity: %16.10g\n", myFund.getEquity()); System.out.println("Number of shares: " + myFund.mInvestmentAccount.getNumberOfEmittedShares()); // Assertions final double postClearingEquity = myFund.getEquity(); Assert.assertEquals(postClearingEquity, fundInitialDeposits, 1.e-1); continue; } // Tally the total amount of cash in the system. 
double totalHouseholdProfits = 0.; System.out.printf("household deposits [open] fund investments [open]" + " deposits [close] fund investments [close] sum [close]\n"); for (int i = 0; i < N_HOUSEHOLDS; ++i) { // Assert no money creation final double closingHouseholdDeposits = myHouseholds[i].bankAccount.getBalance(), closingFundAccountValue = myFund.getBalance(myHouseholds[i]); System.out.printf("%5d %16.10g %16.10g %16.10g %16.10g %16.10g\n", i, householdInitialDeposits, 0., closingHouseholdDeposits, closingFundAccountValue, closingHouseholdDeposits + closingFundAccountValue); totalHouseholdProfits += (closingHouseholdDeposits + closingFundAccountValue) - householdInitialDeposits; } System.out.printf("Aggregate household profits: %16.10g. MutualFund profits: %16.10g\n", totalHouseholdProfits, totalFundProfits); Assert.assertEquals(totalHouseholdProfits / Math.pow(10., (int) Math.log10(totalHouseholdProfits)), totalFundProfits / Math.pow(10., (int) Math.log10(totalFundProfits)), 1.e-12); /* * Part 3 */ // Rerun with predetermined fund losses. UnivariateFunction forcedFundAssetPosition = new UnivariateFunction() { @Override public double value(double time) { return fundInitialDeposits + 1.e-5 + householdInvestmentVariances * (1. + Math.sin(2. * Math.PI * time / 50.)); } }; for (int i = 0; i < numberOfBusinessCycles; ++i) { // Random household investments and withdrawals for (int j = 0; j < N_HOUSEHOLDS; ++j) { final double investment = dice.nextGaussian() * householdInvestmentVariances + meanHouseholdInvestment; try { if (investment > 0.) { myFund.invest(myHouseholds[j], investment); } else if (investment < 0.) { final double maximumWithdrawal = myFund.getBalance(myHouseholds[j]); if (maximumWithdrawal == 0.) continue; double withdrawal = Math.min(-investment, maximumWithdrawal); myFund.requestWithdrawal(myHouseholds[j], withdrawal); } } catch (final InsufficientFundsException noFunds) { Assert.fail(); } } myFund.preClearingProcessing(); final double profitsNow = forcedFundAssetPosition.value(i) - exogenousReturns.getValue(); totalFundProfits += profitsNow; exogenousReturns.setValue(forcedFundAssetPosition.value(i)); try { myFund.postClearingProcessing(); } catch (final InsufficientFundsException unexpectedException) { Assert.fail(); } System.out.printf("MutualFund profits: %16.10g\n", profitsNow); System.out.printf("MutualFund assets: %16.10g\n", myFund.getTotalAssets()); System.out.printf("MutualFund liabilities: %16.10g\n", myFund.getTotalLiabilities()); System.out.printf("MutualFund equity: %16.10g\n", myFund.getEquity()); System.out.println("Number of shares: " + myFund.mInvestmentAccount.getNumberOfEmittedShares()); // Assertions final double postClearingEquity = myFund.getEquity(); Assert.assertEquals(postClearingEquity, fundInitialDeposits, 1.e-1); continue; } // Tally the total amount of cash in the system. 
totalHouseholdProfits = 0.; System.out.printf("household deposits [open] fund investments [open]" + " deposits [close] fund investments [close] sum [close]\n"); for (int i = 0; i < N_HOUSEHOLDS; ++i) { // Assert no money creation final double closingHouseholdDeposits = myHouseholds[i].bankAccount.getBalance(), closingFundAccountValue = myFund.getBalance(myHouseholds[i]); System.out.printf("%5d %16.10g %16.10g %16.10g %16.10g %16.10g\n", i, householdInitialDeposits, 0., closingHouseholdDeposits, closingFundAccountValue, closingHouseholdDeposits + closingFundAccountValue); totalHouseholdProfits += (closingHouseholdDeposits + closingFundAccountValue) - householdInitialDeposits; } System.out.printf("Aggregate household profits: %16.10g. MutualFund profits: %16.10g\n", totalHouseholdProfits, totalFundProfits); Assert.assertEquals( totalHouseholdProfits / Math.pow(10., (int) Math.log10(Math.abs(totalHouseholdProfits))), totalFundProfits / Math.pow(10., (int) Math.log10(Math.abs(totalFundProfits))), 1.e-12); return; }
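Unlike the other examples on this page, FundTest never hands its UnivariateFunction to an integrator or optimizer: forcedFundAssetPosition is just a deterministic signal whose value(i) is read once per business cycle to force the fund's exogenous asset position. A trimmed sketch of that usage (the base and amplitude constants are illustrative, not the test's values):

import org.apache.commons.math3.analysis.UnivariateFunction;

public class ForcedAssetPosition {
    public static void main(String[] args) {
        final double base = 1.0e7, amplitude = 1000.0;
        UnivariateFunction forcedPosition = new UnivariateFunction() {
            public double value(double time) {
                // Sinusoidal target with a 50-cycle period, as in the test above.
                return base + amplitude * (1.0 + Math.sin(2.0 * Math.PI * time / 50.0));
            }
        };
        double current = forcedPosition.value(0);
        for (int cycle = 1; cycle <= 5; cycle++) {
            double target = forcedPosition.value(cycle);
            double exogenousProfit = target - current; // injected (or withdrawn) this cycle
            current = target;
            System.out.printf("cycle %d: profit %.2f%n", cycle, exogenousProfit);
        }
    }
}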
From source file:gdsc.smlm.model.AiryPSFModel.java
private static synchronized void createAiryDistribution() {
    if (spline != null)
        return;

    final double relativeAccuracy = 1e-4;
    final double absoluteAccuracy = 1e-8;
    final int minimalIterationCount = 3;
    final int maximalIterationCount = 32;

    UnivariateIntegrator integrator = new SimpsonIntegrator(relativeAccuracy, absoluteAccuracy,
            minimalIterationCount, maximalIterationCount);
    UnivariateFunction f = new UnivariateFunction() {
        public double value(double x) {
            // The pattern profile is in one dimension.
            // Multiply by the perimeter of a circle to convert to 2D volume then normalise by 4 pi
            //return AiryPattern.intensity(x) * 2 * Math.PI * x / (4 * Math.PI);
            return AiryPattern.intensity(x) * 0.5 * x;
        }
    };

    // Integrate up to a set number of dark rings
    int samples = 1000;
    final double step = RINGS[SAMPLE_RINGS] / samples;
    double to = 0, from = 0;
    r = new double[samples + 1];
    sum = new double[samples + 1];
    for (int i = 1; i < sum.length; i++) {
        from = to;
        r[i] = to = step * i;
        sum[i] = integrator.integrate(2000, f, from, to) + sum[i - 1];
    }

    if (DoubleEquality.relativeError(sum[samples], POWER[SAMPLE_RINGS]) > 1e-3)
        throw new RuntimeException("Failed to create the Airy distribution");

    SplineInterpolator si = new SplineInterpolator();
    spline = si.interpolate(sum, r);
}
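createAiryDistribution integrates the same UnivariateFunction piecewise with SimpsonIntegrator to build the cumulative radial power of the Airy pattern, then spline-interpolates radius as a function of cumulative power (sum as x, r as y), so the cumulative distribution can later be inverted. A self-contained sketch of that cumulative-integral-plus-spline pattern, with a Gaussian radial profile standing in for AiryPattern.intensity (grid size and range are illustrative):

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.integration.SimpsonIntegrator;
import org.apache.commons.math3.analysis.integration.UnivariateIntegrator;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;

public class CumulativeDistributionSpline {
    public static void main(String[] args) {
        UnivariateIntegrator integrator = new SimpsonIntegrator(1e-4, 1e-8, 3, 32);
        // Stand-in radial profile: 2D Gaussian mass per ring, r * exp(-r^2 / 2).
        UnivariateFunction f = new UnivariateFunction() {
            public double value(double r) {
                return r * Math.exp(-r * r / 2);
            }
        };
        int samples = 100;
        double step = 5.0 / samples;
        double[] r = new double[samples + 1];
        double[] sum = new double[samples + 1];
        for (int i = 1; i < sum.length; i++) {
            r[i] = step * i;
            // Accumulate the integral piecewise so sum[i] is the CDF value at r[i].
            sum[i] = integrator.integrate(2000, f, r[i - 1], r[i]) + sum[i - 1];
        }
        // Interpolate radius as a function of cumulative probability (an inverse CDF).
        PolynomialSplineFunction spline = new SplineInterpolator().interpolate(sum, r);
        System.out.println(spline.value(0.5)); // radius containing 50% of the mass, ~1.177
    }
}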