List of usage examples for org.apache.commons.math.distribution TDistributionImpl TDistributionImpl
public TDistributionImpl(double degreesOfFreedom)
From source file:geogebra.common.kernel.statistics.AlgoTMean2Estimate.java
/**
 * Computes the margin of error for a 2-sample t-estimate; this is the
 * half-width of the confidence interval.
 *
 * @param v1
 *            first sample variance
 * @param n1
 *            first sample n
 * @param v2
 *            second sample variance
 * @param n2
 *            second sample n
 * @param confLevel
 *            confidence level
 * @param pooled
 *            whether the two sample variances are pooled
 * @return margin of error for 2 mean interval estimate
 * @throws MathException
 *             if the critical t-value cannot be computed
 */
private double getMarginOfError(double v1, double n1, double v2, double n2,
        double confLevel, boolean pooled) throws MathException {

    // Only the standard error differs between the pooled and unpooled
    // cases; compute it first.
    double se;
    if (pooled) {
        double pooledVariance = ((n1 - 1) * v1 + (n2 - 1) * v2)
                / (n1 + n2 - 2);
        se = Math.sqrt(pooledVariance * (1d / n1 + 1d / n2));
    } else {
        se = Math.sqrt((v1 / n1) + (v2 / n2));
    }

    // Critical t-value computation was duplicated verbatim in both
    // branches of the original; do it once here.
    tDist = new TDistributionImpl(getDegreeOfFreedom(v1, v2, n1, n2, pooled));
    double a = tDist.inverseCumulativeProbability((confLevel + 1d) / 2);
    return a * se;
}
From source file:geogebra.kernel.statistics.AlgoDistribution.java
TDistribution getTDistribution(double param) { if (t == null) t = new TDistributionImpl(param); else//from w ww . j av a 2s . c om t.setDegreesOfFreedom(param); return t; }
From source file:edu.utexas.cs.tactex.servercustomers.factoredcustomer.ProbabilityDistribution.java
/**
 * Builds a probability-distribution sampler from an XML element. The
 * element's "distribution" attribute selects the distribution family; the
 * remaining attributes supply its parameters (attribute names vary per
 * family, e.g. "mean"/"stdDev" for NORMAL, "dof" for STUDENT/CHISQUARED).
 * Finally the sampler is reseeded from the shared random-seed repository.
 *
 * NOTE(review): assumes the required attributes are present and parseable
 * as doubles — Double.parseDouble throws NumberFormatException otherwise;
 * confirm upstream XML validation.
 */
ProbabilityDistribution(FactoredCustomerService service, Element xml) {
    // Lazily resolve the shared seed repository from the Spring context.
    if (null == randomSeedRepo)
        randomSeedRepo = (RandomSeedRepo) SpringApplicationContext.getBean("randomSeedRepo");
    type = Enum.valueOf(DistType.class, xml.getAttribute("distribution"));
    switch (type) {
    case POINTMASS:
    case DEGENERATE:
        // Single fixed value.
        param1 = Double.parseDouble(xml.getAttribute("value"));
        sampler = new DegenerateSampler(param1);
        break;
    case UNIFORM:
        param1 = Double.parseDouble(xml.getAttribute("low"));
        param2 = Double.parseDouble(xml.getAttribute("high"));
        sampler = new UniformSampler(param1, param2);
        break;
    case INTERVAL:
        // Normal(mean, stdDev) truncated to [low, high].
        param1 = Double.parseDouble(xml.getAttribute("mean"));
        param2 = Double.parseDouble(xml.getAttribute("stdDev"));
        param3 = Double.parseDouble(xml.getAttribute("low"));
        param4 = Double.parseDouble(xml.getAttribute("high"));
        sampler = new IntervalSampler(param1, param2, param3, param4);
        break;
    case NORMAL:
    case GAUSSIAN:
        param1 = Double.parseDouble(xml.getAttribute("mean"));
        param2 = Double.parseDouble(xml.getAttribute("stdDev"));
        sampler = new ContinuousSampler(new NormalDistributionImpl(param1, param2));
        break;
    case STDNORMAL:
        // Standard normal: fixed mean 0, stdDev 1 — no attributes read.
        param1 = 0;
        param2 = 1;
        sampler = new ContinuousSampler(new NormalDistributionImpl(param1, param2));
        break;
    case LOGNORMAL:
        param1 = Double.parseDouble(xml.getAttribute("expMean"));
        param2 = Double.parseDouble(xml.getAttribute("expStdDev"));
        sampler = new LogNormalSampler(param1, param2);
        break;
    case CAUCHY:
        param1 = Double.parseDouble(xml.getAttribute("median"));
        param2 = Double.parseDouble(xml.getAttribute("scale"));
        sampler = new ContinuousSampler(new CauchyDistributionImpl(param1, param2));
        break;
    case BETA:
        param1 = Double.parseDouble(xml.getAttribute("alpha"));
        param2 = Double.parseDouble(xml.getAttribute("beta"));
        sampler = new ContinuousSampler(new BetaDistributionImpl(param1, param2));
        break;
    case BINOMIAL:
        // "trials" is parsed as double then narrowed to int for the ctor.
        param1 = Double.parseDouble(xml.getAttribute("trials"));
        param2 = Double.parseDouble(xml.getAttribute("success"));
        sampler = new DiscreteSampler(new BinomialDistributionImpl((int) param1, param2));
        break;
    case POISSON:
        param1 = Double.parseDouble(xml.getAttribute("lambda"));
        sampler = new DiscreteSampler(new PoissonDistributionImpl(param1));
        break;
    case CHISQUARED:
        param1 = Double.parseDouble(xml.getAttribute("dof"));
        sampler = new ContinuousSampler(new ChiSquaredDistributionImpl(param1));
        break;
    case EXPONENTIAL:
        param1 = Double.parseDouble(xml.getAttribute("mean"));
        sampler = new ContinuousSampler(new ExponentialDistributionImpl(param1));
        break;
    case GAMMA:
        param1 = Double.parseDouble(xml.getAttribute("alpha"));
        param2 = Double.parseDouble(xml.getAttribute("beta"));
        sampler = new ContinuousSampler(new GammaDistributionImpl(param1, param2));
        break;
    case WEIBULL:
        param1 = Double.parseDouble(xml.getAttribute("alpha"));
        param2 = Double.parseDouble(xml.getAttribute("beta"));
        sampler = new ContinuousSampler(new WeibullDistributionImpl(param1, param2));
        break;
    case STUDENT:
        // Student's t with "dof" degrees of freedom.
        param1 = Double.parseDouble(xml.getAttribute("dof"));
        sampler = new ContinuousSampler(new TDistributionImpl(param1));
        break;
    case SNEDECOR:
        // F-distribution with numerator/denominator dof d1, d2.
        param1 = Double.parseDouble(xml.getAttribute("d1"));
        param2 = Double.parseDouble(xml.getAttribute("d2"));
        sampler = new ContinuousSampler(new FDistributionImpl(param1, param2));
        break;
    default:
        throw new Error("Invalid probability distribution type!");
    }
    // Deterministically reseed the sampler from the shared repository so
    // runs are reproducible per (id, "Sampler") key.
    sampler.reseedRandomGenerator(service.getRandomSeedRepo()
            .getRandomSeed("factoredcustomer.ProbabilityDistribution", SeedIdGenerator.getId(), "Sampler")
            .getValue());
}
From source file:geogebra.common.kernel.statistics.AlgoDistribution.java
/** * @param param/*from w ww.java2 s . co m*/ * degrees of freedom * @return T-distribution */ protected TDistribution getTDistribution(double param) { if (t == null || t.getDegreesOfFreedom() != param) t = new TDistributionImpl(param); return t; }
From source file:org.apache.mahout.freqtermsets.fpgrowth.FPGrowth.java
/**
 * Computes the half-width of the confidence interval for the mean of the
 * observations in {@code summaryStatistics} at the given significance
 * level, using the t-distribution with n-1 degrees of freedom.
 *
 * @param summaryStatistics accumulated sample statistics
 * @param significance significance level (e.g. 0.05 for a 95% interval)
 * @return confidence-interval half-width
 * @throws MathException if the critical t-value cannot be computed
 */
private double getConfidenceIntervalHalfWidth(SummaryStatistics summaryStatistics,
        double significance) throws MathException {
    long n = summaryStatistics.getN();
    TDistributionImpl tDist = new TDistributionImpl(n - 1);
    double critical = tDist.inverseCumulativeProbability(1.0 - significance / 2);
    return critical * summaryStatistics.getStandardDeviation() / Math.sqrt(n);
}
From source file:org.caleydo.view.tourguide.impl.PAGEAlgorithm.java
/**
 * Computes a p-value for the gene set from its PAGE z-score.
 * Returns NaN when no z-score is available or no genes overlap the
 * fold-change data.
 */
@Override
protected float computePValueImpl(Set<Integer> geneSet, IProgressMonitor monitor) {
    float z = compute(geneSet, monitor);
    if (Float.isNaN(z))
        return Float.NaN;
    // m = number of genes in the set that have fold-change data; it is
    // used as the degrees of freedom below.
    int m = Sets.intersection(foldChanges.keySet(), geneSet).size();
    if (m == 0)
        return Float.NaN;
    TDistributionImpl t = new TDistributionImpl(m);
    // NOTE(review): this evaluates the t PDF at z and reports it as the
    // p-value; a conventional p-value would use the tail probability
    // (cumulativeProbability). Confirm this is intentional.
    float pValue = (float) t.density(z);
    return pValue;
}
From source file:org.gwaspi.statistics.ChiSqrBoundaryCalculator.java
protected static void calculateChisqrBoundaryByFormula() throws IOException, MathException { FileWriter repFW = new FileWriter(boundaryPath); BufferedWriter repBW = new BufferedWriter(repFW); NetcdfFile ncfile = NetcdfFile.open(netCDFFile); List<Dimension> dims = ncfile.getDimensions(); Dimension sizeDim = dims.get(0); Dimension simsDim = dims.get(1); String varName = "distributions"; Variable distributions = ncfile.findVariable(varName); try {//from w w w . j av a 2 s .c o m for (int i = 0; i < pointsNb; i++) { //distributions(i:i:1, 0:simsNb:1) ArrayDouble.D2 rdDoubleArrayD2 = (ArrayDouble.D2) distributions .read(i + ":" + i + ":1, 0:" + (simsDim.getLength() - 1) + ":1"); ArrayDouble.D1 rdDoubleArrayD1 = (D1) rdDoubleArrayD2.reduce(); double sampleSize = rdDoubleArrayD2.getSize(); double currentTot = 0; double[] allValues = new double[(int) sampleSize]; for (int j = 0; j < sampleSize; j++) { allValues[j] = rdDoubleArrayD1.get(j); currentTot += rdDoubleArrayD1.get(j); } StandardDeviation stdDev = new StandardDeviation(); double stdDevValue = stdDev.evaluate(allValues); double currentAvg = currentTot / simNb; TDistributionImpl tDistImpl = new TDistributionImpl(sampleSize - 1); double tInvCumulProb = tDistImpl.inverseCumulativeProbability(0.05d); double tCumulProb = tDistImpl.cumulativeProbability(0.05d); // confidenceInterval = (STDEV(Ys) / SQRT(COUNT(Ys))) * TINV(0.05, COUNT(Ys) - 1) double confidenceInterval = (stdDevValue / Math.sqrt(sampleSize)) * tInvCumulProb; double low95 = currentAvg - confidenceInterval; double top95 = currentAvg + confidenceInterval; StringBuilder sb = new StringBuilder(); sb.append(top95); sb.append(","); sb.append(currentAvg); sb.append(","); sb.append(low95); repBW.append(sb + "\n"); } } catch (IOException ex) { log.error("Cannot read data", ex); } catch (InvalidRangeException ex) { log.error("Cannot read data", ex); } repBW.close(); repFW.close(); log.info("Confidence boundary created for {} points", N); }
From source file:org.peerfact.impl.util.stats.ConfidenceInterval.java
/**
 * Returns the delta between the mean and the lower(x1)/upper(x2) bound as
 * a positive number. That is, the probabilistic bounds of x1 and x2 are
 * given by x1 <= mean <= x2 <=> mean-delta <= mean <= mean + delta
 *
 * @param sdev
 *            the given standard deviation
 * @param n
 *            the given sample size
 * @param alpha
 *            the given significance level
 * @return the upper/lower bound as positive number
 */
public static double getDeltaBound(double sdev, int n, double alpha) {
    TDistribution tDist = new TDistributionImpl(n - 1);
    // Two-sided critical probability for the requested significance level.
    double errorConfCoeff = 1d - (alpha / 2);
    try {
        double t = Math.abs(tDist.inverseCumulativeProbability(errorConfCoeff));
        return t * sdev / Math.sqrt(n);
    } catch (MathException e) {
        throw new IllegalStateException(e);
    }
}
From source file:org.renjin.Distributions.java
/**
 * R-style {@code dt}: density of Student's t distribution with {@code df}
 * degrees of freedom evaluated at {@code x}. Delegates log handling to
 * the shared helper {@code d}.
 *
 * @param x point at which to evaluate the density
 * @param df degrees of freedom
 * @param log if true, the log-density is returned (per helper {@code d})
 */
public static double dt(final double x, final double df, boolean log) {
    return d(new TDistributionImpl(df), x, log);
}
From source file:org.renjin.Distributions.java
/**
 * R-style {@code pt}: cumulative probability of Student's t distribution
 * with {@code df} degrees of freedom evaluated at {@code q}. Tail
 * selection and log scaling are handled by the shared helper {@code p}.
 *
 * @param q quantile at which to evaluate the CDF
 * @param df degrees of freedom
 * @param lowerTail if true, P(X <= q); otherwise the upper tail (per helper {@code p})
 * @param logP if true, the log-probability is returned (per helper {@code p})
 */
public static double pt(final double q, final double df, boolean lowerTail, boolean logP) {
    return p(new TDistributionImpl(df), q, lowerTail, logP);
}