Example usage for java.lang Math log

List of usage examples for java.lang Math log

Introduction

On this page you can find example usage for java.lang Math.log.

Prototype

@HotSpotIntrinsicCandidate
public static double log(double a) 

Source Link

Document

Returns the natural logarithm (base e) of a double value.
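
A minimal standalone sketch of calling Math.log directly (the values in the comments are approximate, up to floating-point rounding):

public class MathLogDemo {
    public static void main(String[] args) {
        System.out.println(Math.log(Math.E));               // ~1.0  (natural log of e)
        System.out.println(Math.log(1.0));                  // 0.0
        System.out.println(Math.log(8.0) / Math.log(2.0));  // ~3.0  (log base 2 via change of base)
        System.out.println(Math.log(0.0));                  // -Infinity
        System.out.println(Math.log(-1.0));                 // NaN
    }
}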

Usage

From source file:edu.byu.nlp.stats.RandomGeneratorsTest.java

/**
 * Tests the property that, with a sufficient number of samples, the empirical distribution
 * of the samples approaches that specified by the parameters.
 *
 * The methodology used is Bayesian hypothesis testing. It is true that a utility function could
 * have been used here (e.g., log-loss, equivalent to K-L divergence), but then we would have to
 * set some arbitrary threshold as an acceptable metric. Instead, we can use the arbitrary scale
 * for Bayes factors given by Jeffreys.
 * 
 * The hypothesis test is designed as follows. H1 states that the observed data was generated using the
 * provided parameters \hat{\theta}, i.e., p(\theta | H1) = 1 iff \theta = \hat{\theta}. This is
 * to be interpreted as meaning that the code correctly samples from the provided distribution.
 * H2 states that all parameters are equally likely a priori, i.e., \theta | H2 ~ uniform Dirichlet.
 * Should this hypothesis be more likely than H1, there is a bug.
 * 
 * Assuming a uniform prior over H1 and H2, the posterior probabilities of the respective
 * hypotheses after observing x (a count vector) are p(H1 | x) \propto p(x | H1) and
 * p(H2 | x) \propto p(x | H2). Thus, we can use the Bayes factor, p(x | H1) / p(x | H2), to
 * compare hypotheses. According to Jeffreys' scale, a ratio above 100:1 gives decisive support
 * to the conclusion that there are no bugs.
 * 
 * Note that there is a relationship between the number of samples we draw, the dimensionality
 * of the parameter vector, and the strength of the factor.
 * 
 * With enough samples then the Bayes factor will either be much greater than 100 (no bugs) or
 * much less than 1.0/100.0 (bugs).
 * 
 * See http://idiom.ucsd.edu/~rlevy/lign251/fall2007/lecture_9.pdf.
 */
public double logBayesFactor(int[] counts, double[] thetaHat) {
    assertThat(counts.length).isEqualTo(thetaHat.length);

    // p(x | H1) = \int p(\theta | H1) \prod_i p(x_i | \theta) d\theta
    //           = \prod_i p(x_i | \hat{\theta})
    double logPOfXGivenH1 = 0.0;
    for (int i = 0; i < counts.length; i++) {
        logPOfXGivenH1 += counts[i] * Math.log(thetaHat[i]);
    }

    // p(x | H2) = \int p(\theta | H2) \prod_i p(x_i | \theta) d\theta
    // x | H2 ~ UniformDCM
    double logPOfXGivenH2 = dcmLogDensityWithUniformPrior(counts);
    return logPOfXGivenH1 - logPOfXGivenH2;
}

From source file:bachelorthesis.methods.detection.bayesian.BayesianDetection.java

private double[][] offlineCpd(Value[] data) {
    int n = data.length;
    double[] Q = new double[n];
    double[] g = new double[n];
    double[] G = new double[n];
    double[][] P = new double[n][n];

    Arrays.fill(g, Math.log(1.d / (data.length + 1)));
    G[0] = g[0];
    for (int i = 1; i < G.length; i++) {
        G[i] = Math.log((Math.exp(G[i - 1]) + Math.exp(g[i])));
    }
    for (double[] array : P) {
        Arrays.fill(array, Double.NEGATIVE_INFINITY);
    }

    P[n - 1][n - 1] = gaussianObsLogLikelihood(data, n - 1, n);
    Q[n - 1] = P[n - 1][n - 1];

    for (int t = n - 2; t >= 0; t--) {
        double p_next_cp = Double.NEGATIVE_INFINITY;
        for (int s = t; s < n - 1; s++) {
            P[t][s] = gaussianObsLogLikelihood(data, t, s + 1);
            double summand = P[t][s] + Q[s + 1] + g[s + 1 - t];
            p_next_cp = Math.log((Math.exp(p_next_cp) + Math.exp(summand)));
            if (summand - p_next_cp < BAYESIAN_TRUNCATE) {
                break;
            }
        }
        P[t][n - 1] = gaussianObsLogLikelihood(data, t, n);
        double antiG;
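        // antiG approximates log(1 - exp(G[n - 1 - t])): when G is very close to 0,
        // 1 - exp(G) ~ -G, so the else branch falls back to log(-G) for numerical safety.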
        if (G[n - 1 - t] < -1e-15) {
            antiG = Math.log(1.d - Math.exp(G[n - 1 - t]));
        } else {
            antiG = Math.log(-G[n - 1 - t]);
        }
        Q[t] = Math.log((Math.exp(p_next_cp) + Math.exp(P[t][n - 1] + antiG)));
    }

    double[][] Pcp = new double[n - 1][n - 1];
    for (double[] array : Pcp) {
        Arrays.fill(array, Double.NEGATIVE_INFINITY);
    }
    for (int t = 0; t < n - 1; t++) {
        Pcp[0][t] = P[0][t] + Q[t + 1] + g[t] - Q[0];
        if (Double.isNaN(Pcp[0][t])) {
            Pcp[0][t] = Double.NEGATIVE_INFINITY;
        }
    }
    for (int j = 1; j < n - 1; j++) {
        for (int t = j; t < n - 1; t++) {
            double[] tmp_cond = copyOfRange(Pcp[j - 1], j - 1, t);

            tmp_cond = add(tmp_cond, getSameEntryOfAllArrays(copyOfRange(P, j, t + 1), t));
            double summand = Q[t + 1];
            tmp_cond = forEach(tmp_cond, value -> value + summand);
            tmp_cond = add(tmp_cond, copyOfRange(g, 0, t - j + 1));
            double[] negativePart = forEach(copyOfRange(Q, j, t + 1), value -> -value);
            tmp_cond = add(tmp_cond, negativePart);

            double[] tempArray = forEach(tmp_cond, value -> Math.exp(value));
            Pcp[j][t] = Math.log(sum(tempArray));
            if (Double.isNaN(Pcp[j][t])) {
                Pcp[j][t] = Double.NEGATIVE_INFINITY;
            }
        }
    }
    return Pcp;
}
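
The repeated Math.log(Math.exp(x) + Math.exp(y)) pattern above can underflow or lose precision when the log values are far below zero. A minimal log-sum-exp sketch that computes the same quantity more stably (logSumExp is a hypothetical helper, not part of the original class):

private static double logSumExp(double a, double b) {
    if (a == Double.NEGATIVE_INFINITY) {
        return b;
    }
    if (b == Double.NEGATIVE_INFINITY) {
        return a;
    }
    double max = Math.max(a, b);
    // the differences are <= 0, so Math.exp cannot overflow here
    return max + Math.log(Math.exp(a - max) + Math.exp(b - max));
}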

From source file:bacter.model.ACGCoalescent.java

@Override
public double calculateLogP() {

    // Check whether conversion count exceeds bounds.
    if (acg.getTotalConvCount() < lowerCCBoundInput.get() || acg.getTotalConvCount() > upperCCBoundInput.get())
        return Double.NEGATIVE_INFINITY;

    logP = calculateClonalFrameLogP();
    double poissonMean = rhoInput.get().getValue() * acg.getClonalFrameLength()
            * (acg.getTotalConvertibleSequenceLength()
                    + acg.getConvertibleLoci().size() * (deltaInput.get().getValue() - 1.0));

    // Probability of conversion count:
    if (poissonMean > 0.0) {
        logP += -poissonMean + acg.getTotalConvCount() * Math.log(poissonMean);
        //      - GammaFunction.lnGamma(acg.getConvCount()+1);
    } else {
        if (acg.getTotalConvCount() > 0)
            logP = Double.NEGATIVE_INFINITY;
    }

    for (Locus locus : acg.getConvertibleLoci())
        for (Conversion conv : acg.getConversions(locus))
            logP += calculateConversionLogP(conv);

    // This N! takes into account the permutation invariance of
    // the individual conversions, and cancels with the N! in the
    // denominator of the Poissonian above.
    // logP += GammaFunction.lnGamma(acg.getConvCount() + 1);

    if (lowerCCBoundInput.get() > 0 || upperCCBoundInput.get() < Integer.MAX_VALUE) {
        try {
            logP -= new PoissonDistributionImpl(poissonMean).cumulativeProbability(lowerCCBoundInput.get(),
                    upperCCBoundInput.get());
        } catch (MathException e) {
            throw new RuntimeException("Error computing modification to ARG "
                    + "prior density required by conversion number constraint.");
        }
    }

    return logP;
}
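
The term -poissonMean + acg.getTotalConvCount() * Math.log(poissonMean) above is the Poisson log-probability with the -lnGamma(k + 1) term dropped, since (as the comments note) it cancels against the permutation factor. For reference, a self-contained sketch of the full Poisson log-pmf (poissonLogPmf is an illustrative helper, not part of this class):

private static double poissonLogPmf(int k, double mean) {
    double logFactorial = 0.0;
    for (int i = 2; i <= k; i++) {
        logFactorial += Math.log(i);
    }
    // log p(k | mean) = -mean + k * log(mean) - log(k!)
    return -mean + k * Math.log(mean) - logFactorial;
}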

From source file:com.norconex.commons.lang.unit.DataUnitFormatter.java

/**
 * Formats a data amount of the given unit to a human-readable 
 * representation.
 * @param amount the amount to format
 * @param unit the data unit type of the amount
 * @return formatted string
 */
public String format(long amount, DataUnit unit) {

    // If no unit specified, return as string without a suffix
    if (unit == null) {
        return Long.toString(amount);
    }

    // Use coarser unit if applicable to make value more human-readable
    DataUnit finalUnit = unit;
    long finalAmount = amount;
    int ordinalShift = 0;
    if (!fixedUnit) {
        ordinalShift = (int) (Math.log(amount) / Math.log(K));
        if (ordinalShift > 0) {
            finalUnit = DATA_UNITS[Math.min(unit.ordinal() + ordinalShift, DATA_UNITS.length - 1)];
            finalAmount = finalUnit.convert(amount, unit);
        }
    }

    // Find out decimals
    long decimals = 0;
    if (decimalPrecision > 0 && unit.ordinal() < finalUnit.ordinal()) {
        int previousOrdinal = finalUnit.ordinal() - 1;
        if (previousOrdinal >= 0) {
            long originalBytes = unit.toBytes(amount);
            long finalBytes = finalUnit.toBytes(finalAmount);
            long diff = originalBytes - finalBytes;
            DataUnit previousUnit = DATA_UNITS[previousOrdinal];
            long remainder = previousUnit.convert(diff, DataUnit.B);
            long base = remainder * (long) Math.pow(D, decimalPrecision);
            decimals = base / K;
        }
    }

    Locale finalLocale = locale;
    if (finalLocale == null) {
        finalLocale = Locale.getDefault();
    }
    StringBuilder b = new StringBuilder();
    b.append(NumberFormat.getIntegerInstance(finalLocale).format(finalAmount));
    if (decimals > 0) {
        b.append(DecimalFormatSymbols.getInstance(finalLocale).getDecimalSeparator());
        b.append(StringUtils.left(Long.toString(decimals), decimalPrecision));
    }
    b.append('\u00A0').append(finalUnit.toString());
    return b.toString();
}
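
The ordinalShift computation above uses Math.log(amount) / Math.log(K) to count how many factors of K fit into the amount, i.e. which coarser unit to display. A standalone sketch of the same idea with hypothetical names, ignoring locale and decimal handling:

private static String humanReadable(long bytes) {
    if (bytes < 1024) {
        return bytes + " B";
    }
    String[] units = { "KB", "MB", "GB", "TB", "PB", "EB" };
    // how many factors of 1024 fit into the amount
    int shift = (int) (Math.log(bytes) / Math.log(1024));
    shift = Math.min(shift, units.length);
    double value = bytes / Math.pow(1024, shift);
    return String.format("%.1f %s", value, units[shift - 1]);
}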

From source file:utils.RandomVariable.java

/**
 * Generate a random number from an exponential random variable (mean =
 * 1/lambda, variance = 1/lambda^2).
 *
 * @param lambda parameter of the exponential random variable.
 * @return a double.
 */
public static double exponential(double lambda) {
    double x = -1 / lambda * Math.log(rand());
    return x;
}
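
A quick sanity check of the inverse-transform sampler above, written as a standalone snippet (illustrative only; it inlines the same formula with java.util.Random and uses 1.0 - nextDouble() so the argument to Math.log stays in (0, 1]):

import java.util.Random;

public class ExponentialSamplerCheck {
    public static void main(String[] args) {
        Random rng = new Random(42);
        double lambda = 2.0;
        int n = 1_000_000;
        double sum = 0.0;
        for (int i = 0; i < n; i++) {
            sum += -Math.log(1.0 - rng.nextDouble()) / lambda;
        }
        System.out.println(sum / n); // should be close to 1/lambda = 0.5
    }
}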

From source file:com.opengamma.analytics.financial.model.finitedifference.ExponentialMeshing.java

/**
 * creates a non-uniform set of points according to the formula $x_i = \theta + \eta*\exp(\lambda z_i)$, where the points run from 
 * $x_0$ to $x_{N-1}$ (i.e. there are N points), $\eta = (x_{N-1} - x_0)/(\exp(\lambda) - 1)$ and $\theta = x_0 - \eta$. 
 * The points $z_i$ are as close as possible to uniform on (0,1) while allowing the <em>fixedPoints</em> to be in the set of points.
 * @param lowerBound The value of $x_0$
 * @param upperBound The value of $x_{N-1}$
 * @param nPoints Number of points (equal to N in the above formula). The number of points must exceed the number of fixed points by at least 2.
 * @param lambda Bunching parameter. $\lambda = 0$ is uniform, $\lambda > 0$ gives a high density of points near $x_0$ and $\lambda < 0$ gives a high density
 * of points near $x_{N-1}$
 * @param fixedPoints set of points that must be included. These must be within the lower and upper bound (exclusive) 
 */
public ExponentialMeshing(final double lowerBound, final double upperBound, final int nPoints,
        final double lambda, final double[] fixedPoints) {
    super(nPoints);
    Validate.isTrue(upperBound > lowerBound, "need upperBound>lowerBound");
    ArgumentChecker.notNull(fixedPoints, "null fixedPoints");
    _lambda = lambda;
    _l = lowerBound;
    _r = upperBound;

    _fpValues = FunctionUtils.unique(fixedPoints);

    int m = _fpValues.length;
    final double[] fp = new double[m];

    if (lambda == 0.0) {
        _linear = true;
        _theta = lowerBound;
        _eta = (upperBound - lowerBound);
        for (int ii = 0; ii < m; ii++) {
            fp[ii] = (fixedPoints[ii] - _theta) / _eta;
        }
    } else {
        _linear = false;
        _eta = (upperBound - lowerBound) / (Math.exp(lambda) - 1);
        _theta = lowerBound - _eta;
        for (int ii = 0; ii < m; ii++) {
            fp[ii] = Math.log((_fpValues[ii] - _theta) / _eta) / lambda;
        }
    }
    _um = new UniformMeshing(nPoints, fp);
}
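
A standalone sketch of the meshing formula from the javadoc, using uniform z_i and no fixed points; this illustrates the formula only and is not the class's implementation (which delegates the z_i to UniformMeshing and handles fixed points):

private static double[] exponentialMesh(double lowerBound, double upperBound, int nPoints, double lambda) {
    double[] x = new double[nPoints];
    if (lambda == 0.0) {
        // uniform spacing in the limiting case
        for (int i = 0; i < nPoints; i++) {
            x[i] = lowerBound + (upperBound - lowerBound) * i / (nPoints - 1.0);
        }
        return x;
    }
    double eta = (upperBound - lowerBound) / (Math.exp(lambda) - 1);
    double theta = lowerBound - eta;
    for (int i = 0; i < nPoints; i++) {
        double z = i / (nPoints - 1.0);
        x[i] = theta + eta * Math.exp(lambda * z); // x_0 = lowerBound, x_{N-1} = upperBound
    }
    return x;
}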

From source file:net.myrrix.online.eval.PrecisionRecallEvaluator.java

@Override
public EvaluationResult evaluate(final MyrrixRecommender recommender, final RescorerProvider provider,
        final Multimap<Long, RecommendedItem> testData) throws TasteException {

    final Mean precision = new Mean();
    final Mean recall = new Mean();
    final Mean ndcg = new Mean();
    final Mean meanAveragePrecision = new Mean();

    Processor<Long> processor = new Processor<Long>() {
        @Override
        public void process(Long userID, long count) {

            Collection<RecommendedItem> values = testData.get(userID);
            int numValues = values.size();
            if (numValues == 0) {
                return;
            }

            IDRescorer rescorer = provider == null ? null
                    : provider.getRecommendRescorer(new long[] { userID }, recommender);

            List<RecommendedItem> recs;
            try {
                recs = recommender.recommend(userID, numValues, rescorer);
            } catch (NoSuchUserException nsue) {
                // Probably OK, just removed all data for this user from training
                log.warn("User only in test data: {}", userID);
                return;
            } catch (TasteException te) {
                log.warn("Unexpected exception", te);
                return;
            }
            int numRecs = recs.size();

            Collection<Long> valueIDs = Sets.newHashSet();
            for (RecommendedItem rec : values) {
                valueIDs.add(rec.getItemID());
            }

            int intersectionSize = 0;
            double score = 0.0;
            double maxScore = 0.0;
            Mean precisionAtI = new Mean();
            double averagePrecision = 0.0;

            for (int i = 0; i < numRecs; i++) {
                RecommendedItem rec = recs.get(i);
                double value = LN2 / Math.log(2.0 + i); // 1 / log_2(1 + (i+1))
                if (valueIDs.contains(rec.getItemID())) {
                    intersectionSize++;
                    score += value;
                    precisionAtI.increment(1.0);
                    averagePrecision += precisionAtI.getResult();
                } else {
                    precisionAtI.increment(0.0);
                }
                maxScore += value;
            }
            averagePrecision /= numValues;

            synchronized (precision) {
                precision.increment(numRecs == 0 ? 0.0 : (double) intersectionSize / numRecs);
                recall.increment((double) intersectionSize / numValues);
                ndcg.increment(maxScore == 0.0 ? 0.0 : score / maxScore);
                meanAveragePrecision.increment(averagePrecision);
                if (count % 10000 == 0) {
                    log.info(new IRStatisticsImpl(precision.getResult(), recall.getResult(), ndcg.getResult(),
                            meanAveragePrecision.getResult()).toString());
                }
            }
        }
    };

    Paralleler<Long> paralleler = new Paralleler<Long>(testData.keySet().iterator(), processor, "PREval");
    try {
        if (Boolean.parseBoolean(System.getProperty("eval.parallel", "true"))) {
            paralleler.runInParallel();
        } else {
            paralleler.runInSerial();
        }
    } catch (InterruptedException ie) {
        throw new TasteException(ie);
    } catch (ExecutionException e) {
        throw new TasteException(e.getCause());
    }

    EvaluationResult result;
    if (precision.getN() > 0) {
        result = new IRStatisticsImpl(precision.getResult(), recall.getResult(), ndcg.getResult(),
                meanAveragePrecision.getResult());
    } else {
        result = null;
    }
    log.info(String.valueOf(result));
    return result;
}
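
The discount LN2 / Math.log(2.0 + i) used above equals 1 / log2(i + 2), the usual NDCG discount for a 0-based rank i. A minimal standalone sketch of the DCG accumulation (the names here are illustrative only):

private static double dcg(boolean[] relevantAtRank) {
    double ln2 = Math.log(2.0);
    double score = 0.0;
    for (int i = 0; i < relevantAtRank.length; i++) {
        if (relevantAtRank[i]) {
            score += ln2 / Math.log(2.0 + i); // 1 / log2(i + 2)
        }
    }
    return score;
}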

From source file:de.bund.bfr.math.MathUtils.java

public static Double getAic(int numParam, int numSample, double sse) {
    if (numSample <= numParam + 2) {
        return null;
    }

    return numSample * Math.log(sse / numSample) + 2.0 * (numParam + 1.0)
            + 2.0 * (numParam + 1.0) * (numParam + 2.0) / (numSample - numParam - 2.0);
}
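
This follows the small-sample corrected AIC (AICc) form for least-squares fits, n * ln(SSE / n) + 2 * (k + 1) + 2 * (k + 1) * (k + 2) / (n - k - 2), with k = numParam and n = numSample. An illustrative call with made-up numbers:

// 3 parameters, 20 samples, SSE = 4.2 (values chosen only for illustration)
Double aic = MathUtils.getAic(3, 20, 4.2);
System.out.println(aic); // approximately -20.5 for these numbers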

From source file:com.compomics.pepshell.view.statistics.RatioStatisticsPane.java

private CategoryDataset createRatioDataset(PepshellProtein aPepshellProtein) {
    DefaultCategoryDataset returnset = new DefaultCategoryDataset();
    //PepshellProtein protein = experimentGroup.get(0).getProteins().get(experimentGroup.get(0).getProteins().indexOf(aPepshellProtein));
    if (aPepshellProtein != null) {
        for (Experiment anExperiment : experimentGroup) {

            if (anExperiment.getProteins().indexOf(aPepshellProtein) != -1) {
                PepshellProtein protein = anExperiment.getProteins()
                        .get(anExperiment.getProteins().indexOf(aPepshellProtein));

                List<PeptideGroup> sortedCopy = protein.getPeptideGroups().stream()
                        .sorted(Comparator
                                .comparing(e -> e.getRepresentativePeptide().getBeginningProteinMatch()))
                        .collect(Collectors.toList());

                for (PeptideGroup aPeptideGroup : sortedCopy) {
                    PeptideInterface aPeptide = aPeptideGroup.getRepresentativePeptide();

                    if (aPeptide instanceof QuantedPeptide && ((QuantedPeptide) aPeptide).getRatio() != null) {
                        Double value = Math.log(((QuantedPeptide) aPeptide).getRatio()) / Math.log(2);
                        returnset.addValue(value, anExperiment.getExperimentName(),
                                String.valueOf(aPeptide.getBeginningProteinMatch()));
                        //this part could be put in its own method

                        for (Experiment checkList : experimentGroup) {

                            if (!checkList.equals(anExperiment)
                                    && returnset.getRowKeys().contains(checkList.getExperimentName())) {
                                if (returnset.getValue(checkList.getExperimentName(),
                                        String.valueOf(aPeptide.getBeginningProteinMatch())) == null) {
                                    returnset.addValue(null, checkList.getExperimentName(),
                                            String.valueOf(aPeptide.getBeginningProteinMatch()));
                                }
                            }
                        }
                    }

                }

            }
        }
    }

    return returnset;
}

From source file:main.java.utils.Utility.java

@SuppressWarnings("unused")
private static double exp(double mean) {
    return -mean * Math.log(Global.rand.nextDouble());
}