Example usage for java.lang.Math.log10

List of usage examples for java.lang.Math.log10

Introduction

On this page you can find usage examples for java.lang.Math.log10.

Prototype

@HotSpotIntrinsicCandidate
public static double log10(double a) 

Document

Returns the base 10 logarithm of a double value.
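For reference, a minimal standalone sketch of the method's behavior (not taken from the examples below; the class name is just for illustration):

public class Log10Demo {
    public static void main(String[] args) {
        System.out.println(Math.log10(1000.0));                   // 3.0 (exact for powers of ten)
        System.out.println(Math.log10(0.0));                      // -Infinity
        System.out.println(Math.log10(-1.0));                     // NaN
        System.out.println(Math.log10(Double.POSITIVE_INFINITY)); // Infinity
    }
}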

Usage

From source file:ca.mcgill.networkdynamics.geoinference.evaluation.CrossValidationScorer.java

public static double computeAuc(TDoubleList errors) {

    double[] normalizedErrors = new double[errors.size()];

    int[] errorsPerKm = new int[MAX_KM];

    for (int i = 0; i < errors.size(); ++i) {
        int error = (int) (Math.round(errors.get(i)));
        errorsPerKm[error]++;
    }

    // The accumulated sum of errors per km
    int[] errorsBelowEachKm = new int[errorsPerKm.length];
    for (int i = 0; i < errorsBelowEachKm.length; ++i) {
        errorsBelowEachKm[i] = errorsPerKm[i];
        if (i > 0)
            errorsBelowEachKm[i] += errorsBelowEachKm[i - 1];
    }

    final double[] cdf = new double[errorsBelowEachKm.length];
    double dSize = errors.size(); // to avoid casting all the time
    for (int i = 0; i < cdf.length; ++i)
        cdf[i] = errorsBelowEachKm[i] / dSize;

    final double maxLogKm = Math.log10(MAX_KM - 1);

    // At this point the CDF is defined over [0, 20038] km, so we first need
    // to log-scale the x-values and then normalize them into [0, 1]
    UnivariateFunction logNormalizedScaledCdf = new UnivariateFunction() {
        public double value(double x) {
            // First, unscale by the log(MAX_DIST) so the value is just
            // Math.log10(x)
            double unscaled = x * maxLogKm;

            // Second, invert the log transformation
            double errorInKm = Math.pow(10, unscaled);

            // Look up the CDF value: the probability of having an error
            // less than this amount
            double prob = cdf[(int) (Math.round(errorInKm))];

            return prob;
        }
    };

    TrapezoidIntegrator ti = new TrapezoidIntegrator();
    double auc = ti.integrate(10_000_000, logNormalizedScaledCdf, 0, 1);
    return auc;
}
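The integrand above undoes the log10 normalization before reading the CDF. A minimal sketch of that round trip, with maxKm as a stand-in for MAX_KM:

double maxKm = 20039;                     // stand-in for MAX_KM
double maxLogKm = Math.log10(maxKm - 1);
double km = 500.0;                        // some error distance
double x = Math.log10(km) / maxLogKm;     // forward: km -> normalized log coordinate in [0, 1]
double back = Math.pow(10, x * maxLogKm); // inverse, as done inside the integrand; back ~= 500.0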

From source file:org.wso2.carbon.device.mgt.core.search.mgt.impl.ProcessorImpl.java

@Override
public List<Device> getUpdatedDevices(long epochTime) throws SearchMgtException {

    if ((1 + (int) Math.floor(Math.log10(epochTime))) <= 10) {
        epochTime = epochTime * 1000;
    }
    QueryBuilder queryBuilder = new QueryBuilderImpl();
    try {
        QueryHolder query = queryBuilder.processUpdatedDevices(epochTime);
        DeviceManagementDAOFactory.openConnection();
        return searchDeviceDetailsTable(query);
    } catch (InvalidOperatorException e) {
        throw new SearchMgtException("Invalid operator was provided, so cannot execute the search.", e);
    } catch (SQLException e) {
        throw new SearchMgtException("Error occurred while managing database transactions.", e);
    } catch (SearchDAOException e) {
        throw new SearchMgtException("Error occurred while running the search operations for given time.", e);
    } finally {
        DeviceManagementDAOFactory.closeConnection();
    }
}
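The guard at the top uses the digit-count idiom: for a positive integer n, 1 + floor(log10(n)) is its number of decimal digits, so a timestamp with at most 10 digits is assumed to be in seconds and is promoted to milliseconds. A minimal sketch:

long epochTime = 1_500_000_000L;                          // 10 digits, i.e. seconds
int digits = 1 + (int) Math.floor(Math.log10(epochTime));
if (digits <= 10) {
    epochTime = epochTime * 1000;                         // now milliseconds
}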

From source file:io.sweers.palettehelper.ui.widget.ElasticDragDismissFrameLayout.java

private void dragScale(int scroll) {
    if (scroll == 0) {
        return;
    }

    totalDrag += scroll;

    // track the direction & set the pivot point for scaling
    // don't double track, i.e. if we start dragging down and then reverse, keep tracking as
    // dragging down until we reach the 'natural' position
    if (scroll < 0 && !draggingUp && !draggingDown) {
        draggingDown = true;
        if (shouldScale) {
            setPivotY(getHeight());
        }
    } else if (scroll > 0 && !draggingDown && !draggingUp) {
        draggingUp = true;
        if (shouldScale) {
            setPivotY(0f);
        }
    }
    // how far have we dragged relative to the distance to perform a dismiss
    // (0 to 1, where 1 = dismiss distance). Decreasing logarithmically as we approach the limit
    float dragFraction = (float) Math.log10(1 + (Math.abs(totalDrag) / dragDismissDistance));

    // calculate the desired translation given the drag fraction
    float dragTo = dragFraction * dragDismissDistance * dragElacticity;

    if (draggingUp) {
        // as we use the absolute magnitude when calculating the drag fraction, need to
        // re-apply the drag direction
        dragTo *= -1;
    }
    setTranslationY(dragTo);

    if (shouldScale) {
        final float scale = 1 - ((1 - dragDismissScale) * dragFraction);
        setScaleX(scale);
        setScaleY(scale);
    }

    // if we've reversed direction and gone past the settle point then clear the flags to
    // allow the list to get the scroll events & reset any transforms
    if ((draggingDown && totalDrag >= 0) || (draggingUp && totalDrag <= 0)) {
        totalDrag = dragTo = dragFraction = 0;
        draggingDown = draggingUp = false;
        setTranslationY(0f);
        setScaleX(1f);
        setScaleY(1f);
    }
    dispatchDragCallback(dragFraction, dragTo, Math.min(1f, Math.abs(totalDrag) / dragDismissDistance),
            totalDrag);
}
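The log10(1 + x) drag fraction grows quickly at first and then flattens, which is what gives the elastic feel. A minimal sketch with a stand-in dismiss distance:

float dismissDistance = 300f;                           // stand-in for dragDismissDistance
for (float drag : new float[] { 0f, 150f, 300f, 600f, 1200f }) {
    float fraction = (float) Math.log10(1 + drag / dismissDistance);
    System.out.printf("drag=%.0f -> fraction=%.3f%n", drag, fraction);
    // 0 -> 0.000, 150 -> 0.176, 300 -> 0.301, 600 -> 0.477, 1200 -> 0.699
}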

From source file:grafix.telas.MolduraAreaDados.java

private Point2D converterPontoNoPlotParaMoldura_EixoLog(final Point2D pontoPlot) {
    XYPlot plot = getPlot();
    ValueAxis vAxis = plot.getRangeAxis();
    ValueAxis dAxis = plot.getDomainAxis();
    double dX = dAxis.getUpperBound() - dAxis.getLowerBound();
    double dYlog = Math.log10(vAxis.getUpperBound()) - Math.log10(vAxis.getLowerBound());
    double proporcaoX = this.getWidth() / dX;
    double fracaoX = pontoPlot.getX() - dAxis.getLowerBound();
    double fracaoYlog = Math.log10(pontoPlot.getY()) - Math.log10(vAxis.getLowerBound());
    double coefYlog = fracaoYlog / dYlog;
    return new Point2D.Double(fracaoX * proporcaoX, this.getHeight() - (coefYlog * this.getHeight()));
}
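The Y coordinate conversion is a standard log-scale interpolation between the axis bounds. A minimal sketch with stand-in bounds and panel height:

double lower = 1.0, upper = 1000.0;                // stand-in axis bounds
double height = 600.0;                             // stand-in panel height
double y = 100.0;
double coef = (Math.log10(y) - Math.log10(lower))
        / (Math.log10(upper) - Math.log10(lower)); // 2/3 of the way up the log axis
double pixelY = height - coef * height;            // screen Y grows downwards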

From source file:com.act.lcms.db.io.report.IonAnalysisInterchangeModel.java

/**
 * This function is used to compute log frequency distribution of the ion model vs a metric.
 * @param metric The metric on which the frequency distribution is plotted
 * @return A map of a range to the count of molecules that get bucketed in that range
 */
public Map<Pair<Double, Double>, Integer> computeLogFrequencyDistributionOfMoleculeCountToMetric(
        METRIC metric) {
    Map<Pair<Double, Double>, Integer> rangeToHitCount = new HashMap<>();

    // This variable represents the total number of statistics that have zero values.
    Integer countOfZeroStats = 0;

    // Tracks the smallest bucket lower bound seen; used for the zero-value bucket below.
    Double minLogValue = Double.MAX_VALUE;

    for (ResultForMZ resultForMZ : this.getResults()) {
        for (HitOrMiss molecule : resultForMZ.getMolecules()) {

            Double power = 0.0;

            switch (metric) {
            case TIME:
                power = Math.log10(molecule.getTime());
                break;
            case INTENSITY:
                power = Math.log10(molecule.getIntensity());
                break;
            case SNR:
                power = Math.log10(molecule.getSnr());
                break;
            }

            if (power.equals(Double.NEGATIVE_INFINITY)) {
                // We know the statistic was 0 here.
                countOfZeroStats++;
                break;
            }

            Double floor = Math.floor(power);
            Double lowerBound = Math.pow(10.0, floor);
            Double upperBound = Math.pow(10.0, floor + 1);

            minLogValue = Math.min(minLogValue, lowerBound);
            Pair<Double, Double> key = Pair.of(lowerBound, upperBound);
            rangeToHitCount.compute(key, (k, v) -> (v == null) ? 1 : v + 1);
        }

        // We count the total number of zero statistics and put them in the 0 to minLog metric bucket.
        if (countOfZeroStats > 0) {
            Pair<Double, Double> key = Pair.of(0.0, minLogValue);
            rangeToHitCount.put(key, countOfZeroStats);
        }
    }

    return rangeToHitCount;
}
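The bucketing above groups each value into its decade, [10^floor(log10(v)), 10^(floor(log10(v)) + 1)). A minimal sketch for a single value:

double value = 2500.0;                        // e.g. an intensity
double floor = Math.floor(Math.log10(value)); // 3.0
Pair<Double, Double> bucket = Pair.of(Math.pow(10.0, floor), Math.pow(10.0, floor + 1));
// bucket = (1000.0, 10000.0)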

From source file:org.bigwiv.blastgraph.gui.graphvisualization.EWLayout.java

protected void calcAttraction(E e) {
    ValueEdge edge = (ValueEdge) e;

    double evalue = edge.getExpectValue();
    double ew; // edge weight

    if (evalue <= 0) {
        ew = 1;
    } else {
        ew = (Math.log10(maxEvalue) - Math.log10(evalue)) / (Math.log10(maxEvalue) - Math.log10(minEvalue));
    }

    Pair<V> endpoints = getGraph().getEndpoints(e);
    V v1 = endpoints.getFirst();
    V v2 = endpoints.getSecond();
    boolean v1_locked = isLocked(v1);
    boolean v2_locked = isLocked(v2);

    if (v1_locked && v2_locked) {
        // both locked, do nothing
        return;
    }
    Point2D p1 = transform(v1);
    Point2D p2 = transform(v2);
    if (p1 == null || p2 == null)
        return;
    double xDelta = p1.getX() - p2.getX();
    double yDelta = p1.getY() - p2.getY();

    double deltaLength = Math.max(EPSILON, Math.sqrt((xDelta * xDelta) + (yDelta * yDelta)));

    // force by edge weight
    double force = (deltaLength * deltaLength) * ew / attraction_constant;

    if (Double.isNaN(force)) {
        throw new IllegalArgumentException("Unexpected mathematical result in FRLayout:calcPositions [force]");
    }

    double dx = (xDelta / deltaLength) * force;
    double dy = (yDelta / deltaLength) * force;
    if (v1_locked == false) {
        VertexData fvd1 = getData(v1);
        fvd1.offset(-dx, -dy);
    }
    if (v2_locked == false) {
        VertexData fvd2 = getData(v2);
        fvd2.offset(dx, dy);
    }
}
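The edge weight maps the BLAST e-value onto [0, 1] on a log scale, so that edges with smaller (better) e-values pull harder. A minimal sketch with stand-in bounds:

double maxEvalue = 1e-5, minEvalue = 1e-50;          // stand-in bounds
double evalue = 1e-20;
double ew = (Math.log10(maxEvalue) - Math.log10(evalue))
        / (Math.log10(maxEvalue) - Math.log10(minEvalue));
// (-5 - (-20)) / (-5 - (-50)) = 15 / 45 = 0.33...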

From source file:com.bmwcarit.barefoot.markov.Filter.java

/**
 * Executes Hidden Markov Model (HMM) filter iteration that determines for a given measurement
 * sample <i>z<sub>t</sub></i>, which is a {@link Sample} object, and of a predecessor state
 * vector <i>S<sub>t-1</sub></i>, which is a set of {@link StateCandidate} objects, a state
 * vector <i>S<sub>t</sub></i> with filter and sequence probabilities set.
 * <p>
 * <b>Note:</b> The set of state candidates <i>S<sub>t-1</sub></i> is allowed to be empty. This
 * is either the initial case or an HMM break occurred, which means that no state candidates
 * representing the measurement sample could be found.
 *
 * @param predecessors State vector <i>S<sub>t-1</sub></i>, which may be empty.
 * @param sample Measurement sample <i>z<sub>t</sub></i>.
 * @param previous Previous measurement sample <i>z<sub>t-1</sub></i>.
 *
 * @return State vector <i>S<sub>t</sub></i>, which may be empty if an HMM break occurred.
 */
public Set<C> execute(Set<C> predecessors, S previous, S sample) {
    if (logger.isTraceEnabled()) {
        try {
            logger.trace("execute sample {}", sample.toJSON().toString());
        } catch (JSONException e) {
            logger.trace("execute sample (not JSON parsable sample: {})", e.getMessage());
        }
    }

    assert (predecessors != null);
    assert (sample != null);

    Set<C> result = new HashSet<>();
    Set<Tuple<C, Double>> candidates = candidates(predecessors, sample);
    logger.trace("{} state candidates", candidates.size());

    double normsum = 0;

    if (!predecessors.isEmpty()) {
        Set<C> states = new HashSet<>();
        for (Tuple<C, Double> candidate : candidates) {
            states.add(candidate.one());
        }

        Map<C, Map<C, Tuple<T, Double>>> transitions = transitions(new Tuple<>(previous, predecessors),
                new Tuple<>(sample, states));

        for (Tuple<C, Double> candidate : candidates) {
            C candidate_ = candidate.one();
            candidate_.seqprob(Double.NEGATIVE_INFINITY);

            if (logger.isTraceEnabled()) {
                try {
                    logger.trace("state candidate {} ({}) {}", candidate_.id(), candidate.two(),
                            candidate_.toJSON().toString());
                } catch (JSONException e) {
                    logger.trace("state candidate (not JSON parsable candidate: {})", e.getMessage());
                }
            }

            for (C predecessor : predecessors) {
                Tuple<T, Double> transition = transitions.get(predecessor).get(candidate_);

                if (transition == null || transition.two() == 0) {
                    continue;
                }

                candidate_.filtprob(candidate_.filtprob() + (transition.two() * predecessor.filtprob()));

                double seqprob = predecessor.seqprob() + Math.log10(transition.two())
                        + Math.log10(candidate.two());

                if (logger.isTraceEnabled()) {
                    try {
                        logger.trace("state transition {} -> {} ({}, {}, {}) {}", predecessor.id(),
                                candidate_.id(), predecessor.seqprob(), Math.log10(transition.two()),
                                Math.log10(candidate.two()), transition.one().toJSON().toString());
                    } catch (JSONException e) {
                        logger.trace("state transition (not JSON parsable transition: {})", e.getMessage());
                    }
                }

                if (seqprob > candidate_.seqprob()) {
                    candidate_.predecessor(predecessor);
                    candidate_.transition(transition.one());
                    candidate_.seqprob(seqprob);
                }
            }

            if (candidate_.predecessor() != null) {
                logger.trace("state candidate {} -> {} ({}, {})", candidate_.predecessor().id(),
                        candidate_.id(), candidate_.filtprob(), candidate_.seqprob());
            } else {
                logger.trace("state candidate - -> {} ({}, {})", candidate_.id(), candidate_.filtprob(),
                        candidate_.seqprob());
            }

            if (candidate_.filtprob() == 0) {
                continue;
            }

            candidate_.filtprob(candidate_.filtprob() * candidate.two());
            result.add(candidate_);

            normsum += candidate_.filtprob();
        }
    }

    if (!candidates.isEmpty() && result.isEmpty() && !predecessors.isEmpty()) {
        logger.info("HMM break - no state transitions");
    }

    if (result.isEmpty() || predecessors.isEmpty()) {
        for (Tuple<C, Double> candidate : candidates) {
            if (candidate.two() == 0) {
                continue;
            }
            C candidate_ = candidate.one();
            normsum += candidate.two();
            candidate_.filtprob(candidate.two());
            candidate_.seqprob(Math.log10(candidate.two()));
            result.add(candidate_);

            if (logger.isTraceEnabled()) {
                try {
                    logger.trace("state candidate {} ({}) {}", candidate_.id(), candidate.two(),
                            candidate_.toJSON().toString());
                } catch (JSONException e) {
                    logger.trace("state candidate (not JSON parsable candidate: {})", e.getMessage());
                }
            }
        }
    }

    if (result.isEmpty()) {
        logger.info("HMM break - no state emissions");
    }

    for (C candidate : result) {
        candidate.filtprob(candidate.filtprob() / normsum);
    }

    logger.trace("{} state candidates for state update", result.size());
    return result;
}
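The sequence probability is accumulated with Math.log10 rather than by multiplying probabilities directly, because products of many small probabilities underflow to zero. A minimal illustration:

double p1 = 1e-200, p2 = 1e-200;
double direct = p1 * p2;                           // 0.0 (underflows)
double logSpace = Math.log10(p1) + Math.log10(p2); // -400.0, still representable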

From source file:picard.analysis.TheoreticalSensitivityTest.java

@Test
public void testDeterministicQualityAndDepth() throws Exception {
    final double logOddsThreshold = 0.0;
    final double tolerance = 0.001;
    final int sampleSize = 1; //quality is deterministic, hence no sampling error
    for (int q = 5; q < 10; q++) {
        for (int n = 5; n < 10; n++) {
            final double minAltCount = 10 * n * Math.log10(2) / q; //alts required to call when log odds ratio threshold = 1
            double expectedResult = 0.0;

            final List<ArrayList<Double>> altCountProbabilities = TheoreticalSensitivity
                    .hetAltDepthDistribution(n + 1);
            for (int altCount = n; altCount > minAltCount; altCount--) {
                expectedResult += altCountProbabilities.get(n).get(altCount);
            }

            //deterministic weights that always yield q are 0.0 for 0 through q - 1 and 1.0 for q
            final double[] qualityDistribution = new double[q + 1];
            Arrays.fill(qualityDistribution, 0L);
            qualityDistribution[qualityDistribution.length - 1] = 1L;
            final double[] depthDistribution = new double[n + 1];
            Arrays.fill(depthDistribution, 0L);
            depthDistribution[depthDistribution.length - 1] = 1L;

            final double result = TheoreticalSensitivity.hetSNPSensitivity(depthDistribution,
                    qualityDistribution, sampleSize, logOddsThreshold);
            Assert.assertEquals(result, expectedResult, tolerance);
        }
    }
}

From source file:org.apache.marmotta.ucuenca.wk.commons.function.SemanticDistance.java

private double ngd(String a, String b) throws IOException, SQLException {
    int min = 0;
    int min2 = 1;
    a = a.trim();
    b = b.trim();

    if (a.compareToIgnoreCase(b) == min) {
        return 0;
    }

    //double n0 = getResultsCount(""+a+"");
    //double n1 = getResultsCount(""+b+"");
    //String c = ""+a+" "+b+"";
    double n0 = getResultsCount("\"" + a + "\"~10");
    double n1 = getResultsCount("\"" + b + "\"~10");
    String c = "\"" + a + " " + b + "\"~50";

    double n2 = getResultsCount(c);
    double m = 5029469;
    double distance = 0;
    int measure = 0;
    double l1 = Math.max(Math.log10(n0), Math.log10(n1)) - Math.log10(n2);
    double l2 = Math.log10(m) - Math.min(Math.log10(n0), Math.log10(n1));

    if (measure == min) {
        distance = l1 / l2;
    }
    if (measure == min2) {
        distance = 1 - (Math.log10(n2) / Math.log10(n0 + n1 - n2));
    }
    if (n0 == min || n1 == min || n2 == min) {
        distance = 1;
    }
    return distance;
}
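When measure == 0 this computes the usual normalized (web) distance from hit counts. A minimal sketch with stand-in counts:

double n0 = 10_000;   // hits for term a
double n1 = 5_000;    // hits for term b
double n2 = 1_000;    // hits for both terms together
double m = 5029469;   // corpus size used by the method
double distance = (Math.max(Math.log10(n0), Math.log10(n1)) - Math.log10(n2))
        / (Math.log10(m) - Math.min(Math.log10(n0), Math.log10(n1)));
// (4.0 - 3.0) / (6.70 - 3.70) = 0.33...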

From source file:uk.ac.diamond.scisoft.ncd.core.data.plots.LogLogPlotData.java

public Dataset getFitData(double[] params, IDataset axis) {
    double solvation = params[0];
    double correlation = params[1];
    double exponent = params[2];

    Dataset loglogAxis = getSaxsPlotAxis(axis);
    Dataset result = DatasetFactory.zeros(loglogAxis.getShape(), Dataset.FLOAT32);
    for (int i = 0; i < loglogAxis.getSize(); i++) {
        double axisVal = Math.pow(10.0, loglogAxis.getDouble(i));
        double func = solvation / (1.0 + Math.pow(axisVal * correlation, exponent));
        result.set(Math.log10(func), i);
    }

    return result;
}
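Each point of the fit is evaluated by undoing the log10 on the axis value, applying the model, and taking log10 of the result. A minimal sketch for one point with stand-in parameters:

double solvation = 1.0, correlation = 2.0, exponent = 4.0; // stand-in fit parameters
double logQ = -0.5;                                        // one value from the log-log axis
double q = Math.pow(10.0, logQ);                           // back to linear q
double func = solvation / (1.0 + Math.pow(q * correlation, exponent));
double fitValue = Math.log10(func);                        // what gets stored in the result dataset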