Example usage for org.apache.commons.math3.special.Gamma.logGamma

Introduction

On this page you can find example usage for org.apache.commons.math3.special.Gamma.logGamma.

Prototype

public static double logGamma(double x) 

Document

Returns the value of log Γ(x) for x > 0.
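
A minimal sketch of a direct call (the expected value uses the identity Γ(n) = (n − 1)! for positive integers; the class and method are as in Commons Math 3, the demo class name is illustrative):

import org.apache.commons.math3.special.Gamma;

public class LogGammaDemo {
    public static void main(String[] args) {
        // log Γ(5) = log(4!) = log(24) ≈ 3.1780538303479458
        System.out.println(Gamma.logGamma(5.0));
        System.out.println(Math.log(24)); // same value, by the identity above
    }
}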

Usage

From source file:edu.byu.nlp.math.GammaFunctionsTest.java

/**
 * Test method for {@link edu.byu.nlp.math.GammaFunctions#logRisingFactorial(double, int)}.
 */
@Test
public void testlogRisingFactorial() {
    Delta d = Delta.delta(1e-10);
    assertThat(GammaFunctions.logRisingFactorial(10.3, 0)).isEqualTo(log(1.0), d);
    assertThat(GammaFunctions.logRisingFactorial(10.3, 1)).isEqualTo(log(10.3), d);
    assertThat(GammaFunctions.logRisingFactorial(10.3, 2)).isEqualTo(log(10.3 * 11.3), d);
    assertThat(GammaFunctions.logRisingFactorial(10.3, 3)).isEqualTo(log(10.3 * 11.3 * 12.3), d);
    assertThat(GammaFunctions.logRisingFactorial(10.3, 4)).isEqualTo(log(10.3 * 11.3 * 12.3 * 13.3), d);

    // At least one case below the threshold should be computed manually by the
    // implementation; we compare it against the difference-of-logGamma method.
    int threshold = GammaFunctions.MANUALLY_COMPUTE_RISING_FACTORIAL_THRESHOLD;
    if (threshold > 0) {
        assertThat(GammaFunctions.logRisingFactorial(1.3, threshold - 1))
                .isEqualTo(Gamma.logGamma(1.3 + threshold - 1) - Gamma.logGamma(1.3), d);
    }

    // A test that is not computed manually.
    assertThat(GammaFunctions.logRisingFactorial(1.3, threshold + 100))
            .isEqualTo(Gamma.logGamma(1.3 + threshold + 100) - Gamma.logGamma(1.3), d);
}
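
The identity under test is that the rising factorial x(x+1)···(x+n−1) equals Γ(x+n)/Γ(x), so its log is a difference of two logGamma calls. A standalone sketch of that check (only Gamma.logGamma is from Commons Math; the class name is illustrative):

import org.apache.commons.math3.special.Gamma;

public class RisingFactorialCheck {
    public static void main(String[] args) {
        double x = 10.3;
        // direct product: log(10.3 * 11.3 * 12.3)
        double direct = Math.log(10.3 * 11.3 * 12.3);
        // via the Gamma function: log Γ(x + 3) - log Γ(x)
        double viaGamma = Gamma.logGamma(x + 3) - Gamma.logGamma(x);
        System.out.println(Math.abs(direct - viaGamma) < 1e-10); // true
    }
}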

From source file:jhn.eda.EDA.java

@Override
public double logLikelihood() throws Exception {
    if (!usingSymmetricAlpha)
        throw new UnsupportedOperationException(
                "Log likelihood computation is unreasonably slow with assymetric alphas");

    //NOTE: assumes symmetric alphas!
    final double alpha = alphas[0];
    final double Kalpha = numTopics * alpha;
    final double log_gamma_alpha = Gamma.logGamma(alpha);

    double ll = Gamma.logGamma(Kalpha);
    ll -= numTopics * log_gamma_alpha;

    int nonZeroTopics;
    int zeroTopics;

    IntIntCounter docTopicCounts;
    for (int docNum = 0; docNum < numDocs; docNum++) {
        docTopicCounts = docTopicCounts(docNum);

        // Deal with non-zero-count topics:
        for (Int2IntMap.Entry entry : docTopicCounts.int2IntEntrySet()) {
            ll += Gamma.logGamma(entry.getIntValue() + alpha);
        }

        // Deal with zero-count topics:
        nonZeroTopics = docTopicCounts.size();
        zeroTopics = numTopics - nonZeroTopics;
        ll += zeroTopics * log_gamma_alpha;

        ll -= Gamma.logGamma(tokens[docNum].length + Kalpha);

        for (int position = 0; position < docLengths[docNum]; position++) {
            double topicWordProb = Double.NaN;
            Iterator<TopicCount> it = typeTopicCounts.typeTopicCounts(tokens[docNum][position]);
            while (it.hasNext()) {
                TopicCount tc = it.next();
                if (tc.topic == topics[docNum][position]) {
                    topicWordProb = (double) tc.count / (double) topicCorpusTopicCounts.topicCount(tc.topic);
                    break;
                }
            }

            if (Double.isNaN(topicWordProb)) {
                System.err.println(topics[docNum][position]);
            } else {
                ll += FastMath.log(topicWordProb);
            }
        }
    }

    return ll;
}
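
The logGamma calls here come from the Dirichlet-multinomial (Pólya) distribution over a document's topic counts: for symmetric concentration α over K topics and counts n_k summing to N, log p(n | α) up to the multinomial coefficient is logΓ(Kα) − K·logΓ(α) + Σ_k logΓ(n_k + α) − logΓ(N + Kα). A self-contained sketch of that single term (the helper name and plain int[] counts are illustrative, not from the source):

import org.apache.commons.math3.special.Gamma;

public class DirichletMultinomialTerm {
    // log p(counts | symmetric alpha), up to the multinomial coefficient
    static double logTerm(int[] counts, double alpha) {
        int k = counts.length;
        int n = 0;
        double ll = Gamma.logGamma(k * alpha) - k * Gamma.logGamma(alpha);
        for (int c : counts) {
            ll += Gamma.logGamma(c + alpha);
            n += c;
        }
        return ll - Gamma.logGamma(n + k * alpha);
    }

    public static void main(String[] args) {
        System.out.println(logTerm(new int[] { 3, 0, 1 }, 0.1));
    }
}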

From source file:edu.byu.nlp.stats.DirichletMultinomialMLEOptimizable.java

/**
 * Equation 53 from http://research.microsoft.com/en-us/um/people/minka/papers/dirichlet/minka-dirichlet.pdf
 */
private static double computeLogLikelihood(double[][] data, double[] alpha, double alphaSum,
        double[] perIDataSums, int N, int K) {

    // this comes from the denominator of the second term
    // which can be factored out of the inner loop into 
    // prod_i prod_k (1/gamma(alpha_k))
    double[] logGammaOfAlpha = alpha.clone();
    for (int k = 0; k < K; k++) {
        logGammaOfAlpha[k] = Gamma.logGamma(logGammaOfAlpha[k]);
    }
    double logGammaOfAlphaSum = DoubleArrays.sum(logGammaOfAlpha);
    double llik = -logGammaOfAlphaSum * N;

    for (int i = 0; i < N; i++) {
        // first  term numerator
        llik += Gamma.logGamma(alphaSum);
        // first term denominator
        llik -= Gamma.logGamma(perIDataSums[i] + alphaSum);
        // second term
        for (int k = 0; k < K; k++) {
            // second term numerator
            llik += Gamma.logGamma(data[i][k] + alpha[k]);
            // second term denominator (factored out and precomputed)
        }
    }

    return llik;
}
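
For reference, the quantity being computed (Minka's Equation 53) is, in the notation of this method: log p(D | α) = Σ_i [ logΓ(Σ_k α_k) − logΓ(n_i + Σ_k α_k) + Σ_k ( logΓ(n_ik + α_k) − logΓ(α_k) ) ], where n_ik = data[i][k] and n_i = perIDataSums[i]. The −logΓ(α_k) terms are exactly the N · logGammaOfAlphaSum contribution hoisted out of the loop above.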

From source file:it.unibo.alchemist.model.implementations.timedistributions.WeibullTime.java

/**
 * Generates a {@link WeibullDistribution} given its mean and stdev.
 *
 * @param mean
 *            the mean
 * @param deviation
 *            the stdev
 * @param random
 *            the random generator
 * @return a new {@link WeibullDistribution}
 */
protected static WeibullDistribution weibullFromMean(final double mean, final double deviation,
        final RandomGenerator random) {
    final double t = FastMath.log((deviation * deviation) / (mean * mean) + 1);
    double kmin = 0, kmax = 1;
    while (Gamma.logGamma(1 + 2 * kmax) - 2 * Gamma.logGamma(1 + kmax) < t) {
        kmin = kmax;
        kmax *= 2;
    }
    double k = (kmin + kmax) / 2;
    while (kmin < k && k < kmax) {
        if (Gamma.logGamma(1 + 2 * k) - 2 * Gamma.logGamma(1 + k) < t) {
            kmin = k;
        } else {
            kmax = k;
        }
        k = (kmin + kmax) / 2;
    }
    final double shapeParameter = 1 / k;
    final double scaleParameter = mean / FastMath.exp(Gamma.logGamma(1 + k));
    return new WeibullDistribution(random, shapeParameter, scaleParameter,
            WeibullDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
}
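
The bisection rests on the Weibull moment identities mean = λ·Γ(1 + 1/s) and variance = λ²·[Γ(1 + 2/s) − Γ(1 + 1/s)²], so with k = 1/s the target shape satisfies logΓ(1 + 2k) − 2·logΓ(1 + k) = log(σ²/μ² + 1), which is the value t above. A quick check that the result reproduces the requested moments (a sketch assuming access to the protected helper, e.g. from a subclass; Well19937c is one of the generators Commons Math ships):

import org.apache.commons.math3.distribution.WeibullDistribution;
import org.apache.commons.math3.random.Well19937c;

WeibullDistribution d = weibullFromMean(2.0, 0.5, new Well19937c(42));
System.out.println(d.getNumericalMean());                // ~2.0
System.out.println(Math.sqrt(d.getNumericalVariance())); // ~0.5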

From source file:com.opengamma.strata.math.impl.statistics.distribution.NonCentralChiSquaredDistribution.java

/**
 * {@inheritDoc}
 */
@Override
public double getCDF(Double x) {
    ArgChecker.notNull(x, "x");
    if (x < 0) {
        return 0.0;
    }

    if ((_dofOverTwo + _lambdaOverTwo) > 1000) {
        return getFraserApproxCDF(x);
    }

    double regGammaStart = 0;
    double halfX = x / 2.0;
    double logX = Math.log(halfX);
    try {
        regGammaStart = Gamma.regularizedGammaP(_dofOverTwo + _k, halfX);
    } catch (MaxCountExceededException ex) {
        throw new MathException(ex);
    }

    double sum = _pStart * regGammaStart;
    double oldSum = Double.NEGATIVE_INFINITY;
    double p = _pStart;
    double regGamma = regGammaStart;
    double temp;
    int i = _k;

    // first add terms below _k
    while (i > 0 && Math.abs(sum - oldSum) / sum > _eps) {
        i--;
        p *= (i + 1) / _lambdaOverTwo;
        temp = (_dofOverTwo + i) * logX - halfX - Gamma.logGamma(_dofOverTwo + i + 1);
        regGamma += Math.exp(temp);
        oldSum = sum;
        sum += p * regGamma;
    }

    p = _pStart;
    regGamma = regGammaStart;
    oldSum = Double.NEGATIVE_INFINITY;
    i = _k;
    while (Math.abs(sum - oldSum) / sum > _eps) {
        i++;
        p *= _lambdaOverTwo / i;
        temp = (_dofOverTwo + i - 1) * logX - halfX - Gamma.logGamma(_dofOverTwo + i);
        regGamma -= Math.exp(temp);
        oldSum = sum;
        sum += p * regGamma;
    }

    return sum;
}
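
The series being summed is the standard expansion of the non-central chi-squared CDF, F(x; ν, λ) = Σ_{j≥0} e^{−λ/2} (λ/2)^j / j! · P(ν/2 + j, x/2), where P is the regularized lower incomplete gamma function. The loop starts near the Poisson mode j = _k and works outward in both directions, updating P via the recurrence P(a + 1, x) = P(a, x) − x^a e^{−x} / Γ(a + 1); the temp expression is the log of that correction term, which is where logGamma enters.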

From source file:com.analog.lyric.dimple.factorfunctions.ExchangeableDirichlet.java

private final double logBeta(double alpha) {
    return Gamma.logGamma(alpha) * _dimension - Gamma.logGamma(alpha * _dimension);
}
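
This is the log of the multivariate beta function for a symmetric parameter vector, B(α, ..., α) = Γ(α)^d / Γ(d·α), the normalizer of the exchangeable Dirichlet density. A minimal check of the closed form (values illustrative): for d = 2 and α = 1 the density is uniform on the simplex and the normalizer is 1, so the log is 0.

import org.apache.commons.math3.special.Gamma;

// B(1, 1) = Γ(1)Γ(1) / Γ(2) = 1, so its log is 0
int d = 2;
double alpha = 1.0;
System.out.println(Gamma.logGamma(alpha) * d - Gamma.logGamma(alpha * d)); // 0.0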

From source file:edu.byu.nlp.stats.SymmetricDirichletMultinomialMatrixMAPOptimizable.java

/**
 * Equation 53 from http://research.microsoft.com/en-us/um/people/minka/papers/dirichlet/minka-dirichlet.pdf
 */
private static double computeLogLikelihood(double[][] data, double alpha, double[] perIDataSums, double gammaA,
        double gammaB, int N, int K) {

    double alphaK = alpha * K;

    double llik = 0;

    // gamma priors
    if (gammaA > 0 && gammaB > 0) {
        llik += ((gammaA - 1) * Math.log(alpha)) - gammaB * alpha;
    }

    for (int i = 0; i < N; i++) {
        // first  term numerator
        llik += Gamma.logGamma(alphaK);
        // first term denominator
        llik -= Gamma.logGamma(perIDataSums[i] + alphaK);
        // second term
        for (int k = 0; k < K; k++) {
            // second term numerator
            llik += Gamma.logGamma(data[i][k] + alpha);
            // second term denominator (factored out and precomputed)
        }
    }

    // this comes from the denominator of the second term
    // which can be factored out of the inner loop into 
    // prod_i prod_k (1/gamma(alpha_k))
    llik -= Gamma.logGamma(alpha) * N * K;

    return llik;
}
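
Relative to the asymmetric version above, this adds a Gamma(a, b) prior on the shared concentration α: the term (a − 1)·log α − b·α is the log density of a Gamma distribution with shape gammaA and rate gammaB, up to its normalizing constant a·log b − logΓ(a), which is dropped because it does not depend on α and so does not affect the optimization.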

From source file:eu.amidst.core.exponentialfamily.EF_Gamma.java

/**
 * {@inheritDoc}
 */
@Override
public double computeLogNormalizer() {
    double alpha = this.naturalParameters.get(0) + 1;
    double beta = -this.naturalParameters.get(1);
    return Gamma.logGamma(alpha) - alpha * Math.log(beta);
}
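
In exponential-family form, the Gamma(α, β) density with rate β is exp((α − 1)·log x − β·x − A(η)), so the natural parameters are η₁ = α − 1 and η₂ = −β, and the log-normalizer is A = logΓ(α) − α·log β; the method inverts the natural parameters and evaluates exactly that expression.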

From source file:edu.cmu.tetrad.search.BicScore.java

@Override
public double localScore(int node, int parents[]) {

    // Number of categories for node.
    int c = numCategories[node];

    // Numbers of categories of parents.
    int[] dims = new int[parents.length];

    for (int p = 0; p < parents.length; p++) {
        dims[p] = numCategories[parents[p]];
    }

    // Number of parent states.
    int ps = 1;

    for (int p = 0; p < parents.length; p++) {
        ps *= dims[p];
    }

    // Conditional cell counts of data for node given parents(node).
    int n_jk[][] = new int[ps][c];
    int n_j[] = new int[ps];

    int[] parentValues = new int[parents.length];

    int[][] myParents = new int[parents.length][];
    for (int i = 0; i < parents.length; i++) {
        myParents[i] = data[parents[i]];
    }

    int[] myChild = data[node];

    for (int i = 0; i < sampleSize; i++) {
        for (int p = 0; p < parents.length; p++) {
            parentValues[p] = myParents[p][i];
        }

        int childValue = myChild[i];

        if (childValue == -99) {
            throw new IllegalStateException(
                    "Please remove or impute missing values (record " + i + ", column " + node + ")");
        }

        int rowIndex = getRowIndex(dims, parentValues);

        n_jk[rowIndex][childValue]++;
        n_j[rowIndex]++;
    }

    //Finally, compute the score
    double score = 0.0;

    for (int j = 0; j < ps; j++) {
        score -= Gamma.logGamma(n_j[j]);

        for (int k = 0; k < c; k++) {
            score += Gamma.logGamma(n_jk[j][k]);
        }
    }

    int numParams = ps * (c - 1);

    score += getPenaltyDiscount() * numParams * Math.log(getSampleSize());

    return score;
}
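
For the integer counts used here, logΓ(n) is simply log((n − 1)!), so Gamma.logGamma agrees with Commons Math's CombinatoricsUtils.factorialLog shifted by one (note that Gamma.logGamma returns NaN for non-positive arguments, so zero counts would need guarding). A quick equivalence check (the class name is illustrative):

import org.apache.commons.math3.special.Gamma;
import org.apache.commons.math3.util.CombinatoricsUtils;

public class LogGammaFactorialCheck {
    public static void main(String[] args) {
        int n = 7;
        // log Γ(7) = log(6!) ≈ 6.579251212010101
        System.out.println(Gamma.logGamma(n));
        System.out.println(CombinatoricsUtils.factorialLog(n - 1)); // same value
    }
}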

From source file:edu.cmu.tetrad.search.BDeuScore.java

@Override
public double localScore(int node, int parents[]) {

    // Number of categories for node.
    int c = numCategories[node];

    // Numbers of categories of parents.
    int[] dims = new int[parents.length];

    for (int p = 0; p < parents.length; p++) {
        dims[p] = numCategories[parents[p]];
    }

    // Number of parent states.
    int r = 1;

    for (int p = 0; p < parents.length; p++) {
        r *= dims[p];
    }

    // Conditional cell counts of data for node given parents(node).
    int n_jk[][] = new int[r][c];
    int n_j[] = new int[r];

    int[] parentValues = new int[parents.length];

    int[][] myParents = new int[parents.length][];
    for (int i = 0; i < parents.length; i++) {
        myParents[i] = data[parents[i]];
    }

    int[] myChild = data[node];

    for (int i = 0; i < sampleSize; i++) {
        for (int p = 0; p < parents.length; p++) {
            parentValues[p] = myParents[p][i];
        }

        int childValue = myChild[i];

        if (childValue == -99) {
            throw new IllegalStateException(
                    "Please remove or impute missing values (record " + i + ", column " + node + ")");
        }

        int rowIndex = getRowIndex(dims, parentValues);

        n_jk[rowIndex][childValue]++;
        n_j[rowIndex]++;
    }

    //Finally, compute the score
    double score = 0.0;

    score += getPriorForStructure(parents.length);

    final double cellPrior = getSamplePrior() / (c * r);
    final double rowPrior = getSamplePrior() / r;

    for (int j = 0; j < r; j++) {
        score -= Gamma.logGamma(rowPrior + n_j[j]);

        for (int k = 0; k < c; k++) {
            score += Gamma.logGamma(cellPrior + n_jk[j][k]);
        }
    }

    score += r * Gamma.logGamma(rowPrior);
    score -= c * r * Gamma.logGamma(cellPrior);

    return score;
}
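
Assembled, this is the BDeu local score with equivalent sample size N′ = getSamplePrior(): Σ_j [ logΓ(N′/r) − logΓ(N′/r + n_j) ] + Σ_j Σ_k [ logΓ(N′/(r·c) + n_jk) − logΓ(N′/(r·c)) ], plus the structure prior. The uniform row and cell priors N′/r and N′/(r·c) are what make Markov-equivalent structures receive equal scores.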