Example usage for org.apache.commons.math MathException getMessage

List of usage examples for org.apache.commons.math MathException getMessage

Introduction

On this page you can find example usages of org.apache.commons.math MathException#getMessage.

Prototype

@Override
public String getMessage() 

Source Link

Usage

From source file:dr.math.distributions.GammaDistribution.java

/**
 * Ad hoc test driver for the gamma distribution code: exercises the two
 * quantile implementations at extreme and central probabilities, prints the
 * inverse CDF of a commons-math GammaDistributionImpl over a probability
 * grid, and reports the sample mean and variance of 100,000 draws from
 * GammaDistribution(0.01, 100.0).
 */
public static void main(String[] args) {

    // Quantiles at the far left tail, the median, and the far right tail.
    testQuantile(1e-10, 0.878328435043444, 0.0013696236839573005);
    testQuantile(0.5, 0.878328435043444, 0.0013696236839573005);
    testQuantile(1.0 - 1e-10, 0.878328435043444, 0.0013696236839573005);

    // Same probabilities through the commons-math based implementation.
    testQuantileCM(1e-10, 0.878328435043444, 0.0013696236839573005);
    testQuantileCM(0.5, 0.878328435043444, 0.0013696236839573005);
    testQuantileCM(1.0 - 1e-10, 0.878328435043444, 0.0013696236839573005);

    // Print inverse CDF values over a grid of probabilities in (0, 1).
    for (double i = 0.0125; i < 1.0; i += 0.025) {
        System.out.print(i + ": ");
        try {
            System.out.println(new GammaDistributionImpl(0.878328435043444, 0.0013696236839573005)
                    .inverseCumulativeProbability(i));
        } catch (MathException e) {
            // Report failures per grid point instead of aborting the sweep.
            System.out.println(e.getMessage());
        }
    }

    // Sample-based sanity check: empirical mean and variance of 100,000 draws
    // from GammaDistribution(0.01, 100.0).
    GammaDistribution gamma = new GammaDistribution(0.01, 100.0);
    double[] samples = new double[100000];
    double sum = 0.0;
    for (int i = 0; i < samples.length; i++) {
        samples[i] = gamma.nextGamma();
        sum += samples[i];
    }
    double mean = sum / (double) samples.length;
    System.out.println("Mean = " + mean);
    double variance = 0.0;
    for (int i = 0; i < samples.length; i++) {
        variance += Math.pow((samples[i] - mean), 2.0);
    }
    // NOTE: this is the biased (population) variance estimator (divides by n).
    variance = variance / (double) samples.length;
    System.out.println("Variance = " + variance);

    // The K-S based test battery below is intentionally disabled.
    //        System.out
    //                .println("K-S critical values: 1.22(10%), 1.36(5%), 1.63(1%)\n");
    //
    //        int iters = 30000;
    //        testExpGamma(1.0, 0.01, 7, iters);
    //        testExpGamma(1.0, 0.01, 5, iters);
    //        testExpGamma(2.0, 0.01, 10000, iters);
    //        testExpGamma(1.0, 0.01, 10000, iters);
    //        testExpGamma(0.1, 0.01, 10000, iters);
    //        testExpGamma(0.01, 0.01, 10000, iters);
    //
    //        testExpGamma(2.0, 0.01, 10, iters);
    //        testExpGamma(1.5, 0.01, 10, iters);
    //        testExpGamma(1.0, 0.01, 10, iters);
    //        testExpGamma(0.9, 0.01, 10, iters);
    //        testExpGamma(0.5, 0.01, 10, iters);
    //        testExpGamma(0.4, 0.01, 10, iters);
    //        testExpGamma(0.3, 0.01, 10, iters);
    //        testExpGamma(0.2, 0.01, 10, iters);
    //        testExpGamma(0.1, 0.01, 10, iters);
    //
    //        // test distributions with severe bias, where rejection sampling doesn't
    //        // work anymore
    //        testExpGamma2(2.0, 0.01, 1, iters, 0.112946);
    //        testExpGamma2(2.0, 0.01, 0.1, iters, 0.328874);
    //        testExpGamma2(2.0, 0.01, 0.01, iters, 1.01255);
    //        testExpGamma2(1.0, 0.01, 0.0003, iters, 5.781);
    //        testExpGamma2(4.0, 0.01, 0.0003, iters, 5.79604);
    //        testExpGamma2(20.0, 0.01, 0.0003, iters, 5.87687);
    //        testExpGamma2(10.0, 0.01, 0.01, iters, 1.05374);
    //        testExpGamma2(1.0, 0.01, 0.05, iters, 0.454734);
    //        // test the basic Gamma distribution
    //        test(1.0, 1.0, iters);
    //        test(2.0, 1.0, iters);
    //        test(3.0, 1.0, iters);
    //        test(4.0, 1.0, iters);
    //        test(100.0, 1.0, iters);
    //        testAddition(0.5, 1.0, 2, iters);
    //        testAddition(0.25, 1.0, 4, iters);
    //        testAddition(0.1, 1.0, 10, iters);
    //        testAddition(10, 1.0, 10, iters);
    //        testAddition(20, 1.0, 10, iters);
    //        test(0.001, 1.0, iters);
    //        test(1.0, 2.0, iters);
    //        test(10.0, 1.0, iters);
    //        test(16.0, 1.0, iters);
    //        test(16.0, 0.1, iters);
    //        test(100.0, 1.0, iters);
    //        test(0.5, 1.0, iters);
    //        test(0.5, 0.1, iters);
    //        test(0.1, 1.0, iters);
    //        test(0.9, 1.0, iters);
    //        // test distributions with milder biases, and compare with results from
    //        // simple rejection sampling
    //        testExpGamma(2.0, 0.000001, 1000000, iters);
    //        testExpGamma(2.0, 0.000001, 100000, iters);
    //        testExpGamma(2.0, 0.000001, 70000, iters);
    //        testExpGamma(10.0, 0.01, 7, iters);
    //        testExpGamma(10.0, 0.01, 5, iters);
    //        testExpGamma(1.0, 0.01, 100, iters);
    //        testExpGamma(1.0, 0.01, 10, iters);
    //        testExpGamma(1.0, 0.01, 7, iters / 3);
    //        testExpGamma(1.0, 0.01, 5, iters / 3);
    //        testExpGamma(1.0, 0.00001, 1000000, iters);
    //        testExpGamma(1.0, 0.00001, 100000, iters);
    //        testExpGamma(1.0, 0.00001, 10000, iters);
    //        testExpGamma(1.0, 0.00001, 5000, iters / 3); /*
    //                                           * this one takes some
    //                                           * time
    //                                           */
    //        testExpGamma(2.0, 1.0, 0.5, iters);
    //        testExpGamma(2.0, 1.0, 1.0, iters);
    //        testExpGamma(2.0, 1.0, 2.0, iters);
    //        testExpGamma(3.0, 3.0, 2.0, iters);
    //        testExpGamma(10.0, 3.0, 5.0, iters);
    //        testExpGamma(1.0, 3.0, 5.0, iters);
    //        testExpGamma(1.0, 10.0, 5.0, iters);
    //        testExpGamma(2.0, 10.0, 5.0, iters);
    //        // test the basic Gamma distribution
    //        test(1.0, 1.0, iters);
    //        test(2.0, 1.0, iters);
    //        test(3.0, 1.0, iters);
    //        test(4.0, 1.0, iters);
    //        test(100.0, 1.0, iters);
    //        testAddition(0.5, 1.0, 2, iters);
    //        testAddition(0.25, 1.0, 4, iters);
    //        testAddition(0.1, 1.0, 10, iters);
    //        testAddition(10, 1.0, 10, iters);
    //        testAddition(20, 1.0, 10, iters);
    //        test(0.001, 1.0, iters);
    //        test(1.0, 2.0, iters);
    //        test(10.0, 1.0, iters);
    //        test(16.0, 1.0, iters);
    //        test(16.0, 0.1, iters);
    //        test(100.0, 1.0, iters);
    //        test(0.5, 1.0, iters);
    //        test(0.5, 0.1, iters);
    //        test(0.1, 1.0, iters);
    //        test(0.9, 1.0, iters);

}

From source file:ch.algotrader.option.SABR.java

/**
 * Perfors a SABR calibartion based on specified volatilities.
 *
 * @return SABRSmileVO The SABR smile/*from w  ww .j  av a2  s .c om*/
 */
public static SABRSmileVO calibrate(final Double[] strikes, final Double[] volatilities, final double atmVol,
        final double forward, final double years) throws SABRException {

    MultivariateRealFunction estimateRhoAndVol = x -> {

        double r = x[0];
        double v = x[1];
        double alpha = findAlpha(forward, forward, atmVol, years, beta, x[0], x[1]);
        double sumErrors = 0;

        for (int i = 0; i < volatilities.length; i++) {

            double modelVol = vol(forward, strikes[i], years, alpha, beta, r, v);
            sumErrors += Math.pow(modelVol - volatilities[i], 2);
        }

        if (Math.abs(r) > 1) {
            sumErrors = 1e100;
        }

        return sumErrors;
    };

    NelderMead nelderMead = new NelderMead();
    RealPointValuePair result;
    try {
        result = nelderMead.optimize(estimateRhoAndVol, GoalType.MINIMIZE, new double[] { -0.5, 2.6 });
    } catch (MathException ex) {
        throw new SABRException(ex.getMessage(), ex);
    }

    double rho = result.getPoint()[0];
    double volVol = result.getPoint()[1];

    SABRSmileVO params = new SABRSmileVO();
    params.setYears(years);
    params.setRho(rho);
    params.setVolVol(volVol);
    params.setAlpha(findAlpha(forward, forward, atmVol, years, beta, rho, volVol));
    params.setAtmVol(atmVol);

    return params;
}

From source file:clus.heuristic.FTest.java

/**
 * Returns the critical F value for significance level {@code sig} and
 * denominator degrees of freedom {@code df}, using the shared commons-math
 * F-distribution. Falls back to 0.0 (with a message on stderr) when the
 * inverse CDF computation fails.
 */
public static double getCriticalFCommonsMath(double sig, double df) {
    try {
        m_FDist.setDenominatorDegreesOfFreedom(df);
        final double quantile = 1 - sig;
        return m_FDist.inverseCumulativeProbability(quantile);
    } catch (MathException e) {
        System.err.println("F-Distribution error: " + e.getMessage());
        return 0.0;
    }
}

From source file:beast.evolution.speciation.CalibratedYuleInitialTree.java

/**
 * Initializes this tree by building a temporary CalibratedYuleModel from the
 * configured calibration points and asking it for a compatible initial tree,
 * which is then copied into m_initial.
 */
@Override
public void initStateNodes() {
    // Would have been nice to use the MCMC CalibratedYuleModel beastObject directly, but at this point
    // it does not exist since the tree being initialized is one of its arguments. So, build a temporary
    // one using the initializer tree.

    final List<CalibrationPoint> cals = calibrationsInput.get();

    final CalibratedYuleModel cym = new CalibratedYuleModel();
    for (final CalibrationPoint cal : cals) {
        cym.setInputValue("calibrations", cal);
    }
    cym.setInputValue("tree", this);
    cym.setInputValue("type", CalibratedYuleModel.Type.NONE);
    cym.initAndValidate();

    Tree t;
    try {
        t = cym.compatibleInitialTree();
    } catch (MathException e) {
        // Preserve the original exception as the cause instead of discarding
        // the stack trace (previously only the message was kept).
        throw new IllegalArgumentException(e.getMessage(), e);
    }
    m_initial.get().assignFromWithoutID(t);
}

From source file:ch.algotrader.entity.security.OptionImpl.java

/**
 * Computes the leverage of this option as
 * {@code (underlying price / option price) * delta}.
 *
 * @param marketDataEvent           current market data for the option itself
 * @param underlyingMarketDataEvent current market data for the underlying
 * @param currentTime               valuation time; must not be null
 * @throws LeverageCalculationException if market data is missing or the
 *                                      delta computation fails
 */
@Override
public double getLeverage(MarketDataEventI marketDataEvent, MarketDataEventI underlyingMarketDataEvent,
        Date currentTime) {

    Validate.notNull(currentTime, "Current time is null");

    if (marketDataEvent == null) {
        throw new LeverageCalculationException(getSymbol() + " market data not available");
    }
    if (underlyingMarketDataEvent == null) {
        throw new LeverageCalculationException(getSymbol() + " underlying market data not available");
    }

    double currentValue = marketDataEvent.getCurrentValueDouble();
    // BUG FIX: the underlying's value must be read from the underlying's
    // market data event; previously it re-read the option's own event,
    // making the null check above pointless and the leverage wrong.
    double underlyingCurrentValue = underlyingMarketDataEvent.getCurrentValueDouble();

    try {
        double delta = OptionUtil.getDelta(this, currentValue, underlyingCurrentValue, currentTime);
        return underlyingCurrentValue / currentValue * delta;
    } catch (MathException e) {
        throw new LeverageCalculationException(getSymbol() + ": " + e.getMessage(), e);
    }
}

From source file:clus.pruning.C45Pruner.java

/**
 * Computes the z-score of the standard normal distribution at probability
 * (1 - m_ConfidenceFactor).
 *
 * @throws ClusException if the inverse CDF computation fails
 */
public double computeZScore() throws ClusException {
    try {
        return DistributionFactory.newInstance().createNormalDistribution()
                .inverseCumulativeProbability(1 - m_ConfidenceFactor);
    } catch (MathException e) {
        throw new ClusException(e.getMessage());
    }
}

From source file:com.krawler.esp.project.task.TaskCPMWithPERTImpl.java

/**
 * Returns the probability that the project completes within the desired
 * duration, modeling the total duration as normal with mean
 * {@code sumOfExpected} and variance {@code sumOfVariance} (PERT).
 *
 * @throws ServiceException if the cumulative probability cannot be computed
 */
@Override
public double getProbability(Connection conn, String projectID, double desiredDuration, double sumOfExpected,
        double sumOfVariance) throws ServiceException {
    try {
        final double stdDev = Math.sqrt(sumOfVariance);
        NormalDistribution nd = new NormalDistributionImpl(sumOfExpected, stdDev);
        return nd.cumulativeProbability(desiredDuration);
    } catch (MathException ex) {
        throw ServiceException.FAILURE("Math Exception while calculating probability :: " + ex.getMessage(),
                ex);
    }
}

From source file:clus.ext.hierarchical.WHTDStatistic.java

/**
 * Recursively appends, for every predicted class term, the hypergeometric
 * statistics computed from the global and validation populations.
 */
public void getExtraInfoRec(ClassTerm node, double[] discrmean, StringBuffer out) {
    if (m_Validation != null) {
        int idx = node.getIndex();
        if (discrmean[idx] > 0.5) {
            // Class idx is predicted; compute its tail probability.
            int pop_tot = round(m_Global.getTotalWeight());
            int pop_cls = round(m_Global.getTotalWeight() * m_Global.m_Means[idx]);
            int rule_tot = round(m_Validation.getTotalWeight());
            int rule_cls = round(m_Validation.getTotalWeight() * m_Validation.m_Means[idx]);
            int upper = Math.min(rule_tot, pop_cls);
            int min_this = rule_tot - (pop_tot - pop_cls);
            int lower = Math.max(rule_cls, min_this);
            HypergeometricDistribution dist = m_Fac.createHypergeometricDistribution(pop_tot, pop_cls,
                    rule_tot);
            try {
                double stat = dist.cumulativeProbability(lower, upper);
                out.append(node.toStringHuman(getHier()) + ":");
                out.append(" pop_tot = " + pop_tot);
                out.append(" pop_cls = " + pop_cls);
                out.append(" rule_tot = " + rule_tot);
                out.append(" rule_cls = " + rule_cls);
                out.append(" upper = " + upper);
                out.append(" prob = " + ClusFormat.SIX_AFTER_DOT.format(stat));
                // out.append(" siglevel = "+m_SigLevel);
                out.append("\n");
            } catch (MathException me) {
                System.err.println("Math error: " + me.getMessage());
            }
        }
    }
    // Recurse into all children regardless of this node's prediction.
    for (int child = 0; child < node.getNbChildren(); child++) {
        getExtraInfoRec((ClassTerm) node.getChild(child), discrmean, out);
    }
}

From source file:clus.ext.hierarchical.WHTDStatistic.java

/**
 * Clears predicted classes whose hypergeometric tail probability on the
 * validation data reaches m_SigLevel (i.e. keeps only significant ones).
 */
public void performSignificanceTest() {
    if (m_Validation == null) {
        return;
    }
    for (int i = 0; i < m_DiscrMean.length; i++) {
        if (!m_DiscrMean[i]) {
            continue;
        }
        // Class i is predicted; compute the hypergeometric bounds.
        int pop_tot = round(m_Global.getTotalWeight());
        int pop_cls = round(m_Global.getTotalWeight() * m_Global.m_Means[i]);
        int rule_tot = round(m_Validation.getTotalWeight());
        int rule_cls = round(m_Validation.getTotalWeight() * m_Validation.m_Means[i]);
        int upper = Math.min(rule_tot, pop_cls);
        int nb_other = pop_tot - pop_cls;
        int min_this = rule_tot - nb_other;
        int lower = Math.max(rule_cls, min_this);
        if (rule_cls < min_this || lower > upper) {
            // Inconsistent counts: dump diagnostics but keep going.
            System.err.println("BUG?");
            System.out.println("rule = " + m_Validation.getTotalWeight() * m_Validation.m_Means[i]);
            System.out.println("pop_tot = " + pop_tot + " pop_cls = " + pop_cls + " rule_tot = "
                    + rule_tot + " rule_cls = " + rule_cls);
        }
        HypergeometricDistribution dist = m_Fac.createHypergeometricDistribution(pop_tot, pop_cls,
                rule_tot);
        try {
            // Drop the prediction when it is not significant.
            if (dist.cumulativeProbability(lower, upper) >= m_SigLevel) {
                m_DiscrMean[i] = false;
            }
        } catch (MathException me) {
            System.err.println("Math error: " + me.getMessage());
        }
    }
}

From source file:beast.evolution.tree.RandomTree.java

/**
 * Initializes the tree state node: collects MRCAPrior calibrations from the
 * inputs and from connected outputs, derives monophyly constraints and height
 * bounds from them, simulates a random starting tree under the configured
 * population function, and (when present) copies the result into m_initial.
 */
@SuppressWarnings("unchecked")
@Override
public void initStateNodes() {
    // find taxon sets we are dealing with
    taxonSets = new ArrayList<>();
    m_bounds = new ArrayList<>();
    distributions = new ArrayList<>();
    taxonSetIDs = new ArrayList<>();
    lastMonophyletic = 0;

    // Taxon names come either from the explicit taxa input or the taxon set.
    if (taxaInput.get() != null) {
        taxa.addAll(taxaInput.get().getTaxaNames());
    } else {
        taxa.addAll(m_taxonset.get().asStringList());
    }

    // pick up constraints from outputs, m_inititial input tree and output tree, if any
    List<MRCAPrior> calibrations = new ArrayList<>();
    calibrations.addAll(calibrationsInput.get());
    //       for (BEASTObject beastObject : outputs) {
    //       // pick up constraints in outputs
    //      if (beastObject instanceof MRCAPrior && !calibrations.contains(beastObject)) {
    //         calibrations.add((MRCAPrior) beastObject);
    //      } else  if (beastObject instanceof Tree) {
    //           // pick up constraints in outputs if output tree
    //         Tree tree = (Tree) beastObject;
    //         if (tree.m_initial.get() == this) {
    //               for (BEASTObject beastObject2 : tree.outputs) {
    //                  if (beastObject2 instanceof MRCAPrior && !calibrations.contains(beastObject2)) {
    //                     calibrations.add((MRCAPrior) beastObject2);
    //                  }                      
    //               }
    //         }
    //      }
    //      
    //   }
    // pick up constraints in m_initial tree
    for (final Object beastObject : getOutputs()) {
        if (beastObject instanceof MRCAPrior && !calibrations.contains(beastObject)) {
            calibrations.add((MRCAPrior) beastObject);
        }
    }
    if (m_initial.get() != null) {
        for (final Object beastObject : m_initial.get().getOutputs()) {
            if (beastObject instanceof MRCAPrior && !calibrations.contains(beastObject)) {
                calibrations.add((MRCAPrior) beastObject);
            }
        }
    }

    // Convert each calibration into a taxon set + height bounds (+ optional
    // monophyly constraint).
    for (final MRCAPrior prior : calibrations) {
        final TaxonSet taxonSet = prior.taxonsetInput.get();
        if (taxonSet != null && !prior.onlyUseTipsInput.get()) {
            final Set<String> usedTaxa = new LinkedHashSet<>();
            if (taxonSet.asStringList() == null) {
                taxonSet.initAndValidate();
            }
            for (final String taxonID : taxonSet.asStringList()) {
                if (!taxa.contains(taxonID)) {
                    throw new IllegalArgumentException("Taxon <" + taxonID
                            + "> could not be found in list of taxa. Choose one of " + taxa);
                }
                usedTaxa.add(taxonID);
            }
            final ParametricDistribution distr = prior.distInput.get();
            final Bound bounds = new Bound();
            if (distr != null) {
                // Initialize predecessors bottom-up so the distribution is
                // usable before querying its quantiles.
                List<BEASTInterface> beastObjects = new ArrayList<>();
                distr.getPredecessors(beastObjects);
                for (int i = beastObjects.size() - 1; i >= 0; i--) {
                    beastObjects.get(i).initAndValidate();
                }
                try {
                    // Bounds are the distribution's support (quantiles at 0
                    // and 1), shifted by the offset.
                    bounds.lower = distr.inverseCumulativeProbability(0.0) + distr.offsetInput.get();
                    bounds.upper = distr.inverseCumulativeProbability(1.0) + distr.offsetInput.get();
                } catch (MathException e) {
                    // Non-fatal: the prior simply contributes no bound.
                    Log.warning.println("At RandomTree::initStateNodes, bound on MRCAPrior could not be set "
                            + e.getMessage());
                }
            }

            if (prior.isMonophyleticInput.get()) {
                // add any monophyletic constraint
                taxonSets.add(lastMonophyletic, usedTaxa);
                distributions.add(lastMonophyletic, distr);
                m_bounds.add(lastMonophyletic, bounds);
                taxonSetIDs.add(prior.getID());
                lastMonophyletic++;
            } else {
                // only calibrations with finite bounds are added
                if (!Double.isInfinite(bounds.lower) || !Double.isInfinite(bounds.upper)) {
                    taxonSets.add(usedTaxa);
                    distributions.add(distr);
                    m_bounds.add(bounds);
                    taxonSetIDs.add(prior.getID());
                }
            }
        }
    }

    // assume all calibration constraints are MonoPhyletic
    // TODO: verify that this is a reasonable assumption
    lastMonophyletic = taxonSets.size();

    // sort constraints such that if taxon set i is subset of taxon set j, then i < j
    for (int i = 0; i < lastMonophyletic; i++) {
        for (int j = i + 1; j < lastMonophyletic; j++) {

            Set<String> intersection = new LinkedHashSet<>(taxonSets.get(i));
            intersection.retainAll(taxonSets.get(j));

            if (intersection.size() > 0) {
                final boolean isSubset = taxonSets.get(i).containsAll(taxonSets.get(j));
                final boolean isSubset2 = taxonSets.get(j).containsAll(taxonSets.get(i));
                // sanity check: make sure either
                // o taxonset1 is subset of taxonset2 OR
                // o taxonset1 is superset of taxonset2 OR
                // o taxonset1 does not intersect taxonset2
                if (!(isSubset || isSubset2)) {
                    throw new IllegalArgumentException(
                            "333: Don't know how to generate a Random Tree for taxon sets that intersect, "
                                    + "but are not inclusive. Taxonset " + taxonSetIDs.get(i) + " and "
                                    + taxonSetIDs.get(j));
                }
                // swap i & j if b1 subset of b2
                // NOTE: all four parallel lists must stay in lockstep.
                if (isSubset) {
                    swap(taxonSets, i, j);
                    swap(distributions, i, j);
                    swap(m_bounds, i, j);
                    swap(taxonSetIDs, i, j);
                }
            }
        }
    }

    // build tree of mono constraints such that j is parent of i if i is a subset of j but i+1,i+2,...,j-1 are not.
    // The last one, standing for the virtual "root" of all monophyletic clades is not associated with an actual clade
    final int[] parent = new int[lastMonophyletic];
    children = new List[lastMonophyletic + 1];
    for (int i = 0; i < lastMonophyletic + 1; i++) {
        children[i] = new ArrayList<>();
    }
    for (int i = 0; i < lastMonophyletic; i++) {
        int j = i + 1;
        // The sorting above guarantees any superset of i appears after i.
        while (j < lastMonophyletic && !taxonSets.get(j).containsAll(taxonSets.get(i))) {
            j++;
        }
        parent[i] = j;
        children[j].add(i);
    }

    // make sure upper bounds of a child does not exceed the upper bound of its parent
    for (int i = lastMonophyletic - 1; i >= 0; --i) {
        if (parent[i] < lastMonophyletic) {
            if (m_bounds.get(i).upper > m_bounds.get(parent[i]).upper) {
                // Shrink by a tiny epsilon so the child stays strictly below.
                m_bounds.get(i).upper = m_bounds.get(parent[i]).upper - 1e-100;
            }
        }
    }

    final PopulationFunction popFunction = populationFunctionInput.get();

    simulateTree(taxa, popFunction);
    if (rootHeightInput.get() != null) {
        // Rescale the simulated tree to the requested root height.
        scaleToFit(rootHeightInput.get() / root.getHeight(), root);
    }

    nodeCount = 2 * taxa.size() - 1;
    internalNodeCount = taxa.size() - 1;
    leafNodeCount = taxa.size();

    HashMap<String, Integer> taxonToNR = null;
    // preserve node numbers where possible
    if (m_initial.get() != null) {
        if (leafNodeCount == m_initial.get().getLeafNodeCount()) {
            // dont ask me how the initial tree is rubbish  (i.e. 0:0.0)
            taxonToNR = new HashMap<>();
            for (Node n : m_initial.get().getExternalNodes()) {
                taxonToNR.put(n.getID(), n.getNr());
            }
        }
    } else {
        taxonToNR = new HashMap<>();
        String[] taxa = getTaxaNames();
        for (int k = 0; k < taxa.length; ++k) {
            taxonToNR.put(taxa[k], k);
        }
    }
    // multiple simulation tries may produce an excess of nodes with invalid nr's. reset those.
    setNodesNrs(root, 0, new int[1], taxonToNR);

    initArrays();

    if (m_initial.get() != null) {
        m_initial.get().assignFromWithoutID(this);
    }
    // Re-check monophyly of every constraint against the final tree.
    for (int k = 0; k < lastMonophyletic; ++k) {
        final MRCAPrior p = calibrations.get(k);
        if (p.isMonophyleticInput.get()) {
            final TaxonSet taxonSet = p.taxonsetInput.get();
            if (taxonSet == null) {
                throw new IllegalArgumentException("Something is wrong with constraint " + p.getID()
                        + " -- a taxonset must be specified if a monophyletic constraint is enforced.");
            }
            final Set<String> usedTaxa = new LinkedHashSet<>();
            if (taxonSet.asStringList() == null) {
                taxonSet.initAndValidate();
            }
            usedTaxa.addAll(taxonSet.asStringList());
            /* int c = */ traverse(root, usedTaxa, taxonSet.getTaxonCount(), new int[1]);
            // boolean b = c == nrOfTaxa + 127;
        }
    }
}