Example usage for java.lang.Math.log10

List of usage examples for java.lang.Math.log10

Introduction

On this page you can find example usages of java.lang.Math.log10.

Prototype

@HotSpotIntrinsicCandidate
public static double log10(double a) 

Source Link

Document

Returns the base 10 logarithm of a double value.
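
A minimal standalone sketch of the basic behavior (class name and sample values are illustrative only):

public class Log10Demo {
    public static void main(String[] args) {
        System.out.println(Math.log10(1000.0)); // 3.0, since 10^3 = 1000
        System.out.println(Math.log10(1.0));    // 0.0
        System.out.println(Math.log10(0.0));    // -Infinity
        System.out.println(Math.log10(-1.0));   // NaN: negative arguments have no real logarithm
    }
}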

Usage

From source file:org.esa.nest.dat.views.polarview.Axis.java

private static double getStepValue(double thevalue, boolean up) {
    final boolean negative = thevalue < 0.0D;
    double val = thevalue;
    if (negative)
        val = -val;
    final int exponent = (int) Math.floor(Math.log10(val));
    val *= FastMath.pow(10D, -exponent);
    int i;
    for (i = stepValues.length - 1; i > 0; i--) {
        if (val > stepValues[i])
            break;
    }
    if (up)
        val = stepValues[i + 1];
    else
        val = stepValues[i];
    val *= FastMath.pow(10D, exponent);
    if (negative)
        val = -val;
    return val;
}
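
The key idiom above is Math.floor(Math.log10(val)), which extracts the decimal exponent of a positive value so that its mantissa can be snapped to the nearest entry of the stepValues table. A minimal sketch of that decomposition (variable names and the sample value are illustrative only):

double value = 0.0347;
int exponent = (int) Math.floor(Math.log10(value)); // -2
double mantissa = value * Math.pow(10, -exponent);  // ≈ 3.47, always in [1, 10)
System.out.println(exponent + " / " + mantissa);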

From source file:org.geowebcache.s3.S3KeyBuilder.java

public String tileKey(long x, long y, long z, String gridSetId, String parametersId, MimeType mimeType) {
    StringBuilder key = new StringBuilder(256);
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("tileKey for %s %s %s %s %s %s", x, y, z, gridSetId, parametersId,
                mimeType.getMimeType()));
    }

    long shift = z / 2;
    long half = 2 << shift;
    int digits = 1;
    if (half > 10) {
        digits = (int) (Math.log10(half)) + 1;
    }
    long halfx = x / half;
    long halfy = y / half;

    String fileExtension = mimeType.getFileExtension();

    appendDimension(key, 2 * digits, x, y);

    key.append(File.separatorChar);
    appendDimension(key, digits, halfx, halfy);

    key.append(File.separatorChar);
    appendGridsetZoomLevelDir(gridSetId, z, key);
    appendParameters(parametersId, key);

    key.append('.');
    key.append(fileExtension);

    String keyString = key.toString();
    if (LOG.isDebugEnabled()) {
        LOG.debug(keyString);
    }
    return keyString;
}
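
Here Math.log10 is used to count the decimal digits of half so that coordinates can be padded to a fixed width by appendDimension. The same digit-counting idiom in isolation (sample value is arbitrary):

long n = 4096;
int digits = (n < 10) ? 1 : (int) Math.log10(n) + 1; // 4, because 4096 has four decimal digits
System.out.println(digits);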

From source file:de.tudarmstadt.ukp.similarity.experiments.coling2012.util.WordIdfValuesGenerator.java

@SuppressWarnings("unchecked")
public static void computeIdfScores(Dataset dataset) throws Exception {
    File outputFile = new File(UTILS_DIR + "/word-idf/" + dataset.toString() + ".txt");

    System.out.println("Computing word idf values");

    if (outputFile.exists()) {
        System.out.println(" - skipping, already exists");
    } else {
        System.out.println(" - this may take a while...");

        CollectionReader reader = ColingUtils.getCollectionReader(dataset);

        // Tokenization
        AnalysisEngineDescription seg = createPrimitiveDescription(BreakIteratorSegmenter.class);
        AggregateBuilder builder = new AggregateBuilder();
        builder.add(seg, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_1);
        builder.add(seg, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_2);
        AnalysisEngine aggr_seg = builder.createAggregate();

        // POS Tagging
        AnalysisEngineDescription pos = createPrimitiveDescription(OpenNlpPosTagger.class,
                OpenNlpPosTagger.PARAM_LANGUAGE, "en");
        builder = new AggregateBuilder();
        builder.add(pos, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_1);
        builder.add(pos, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_2);
        AnalysisEngine aggr_pos = builder.createAggregate();

        // Lemmatization
        AnalysisEngineDescription lem = createPrimitiveDescription(StanfordLemmatizer.class);
        builder = new AggregateBuilder();
        builder.add(lem, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_1);
        builder.add(lem, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_2);
        AnalysisEngine aggr_lem = builder.createAggregate();

        // Output Writer
        AnalysisEngine writer = createPrimitive(WordIdfValuesGeneratorWriter.class,
                WordIdfValuesGeneratorWriter.PARAM_OUTPUT_FILE, outputFile.getAbsolutePath());

        SimplePipeline.runPipeline(reader, aggr_seg, aggr_pos, aggr_lem, writer);

        // Now we have the text format lemma1###lemma2###...###lemman
        List<String> lines = FileUtils.readLines(outputFile);

        Map<String, Double> idfValues = new HashMap<String, Double>();

        // Build up token representations of texts
        Set<List<String>> docs = new HashSet<List<String>>();

        for (String line : lines) {
            List<String> doc = CollectionUtils.arrayToList(line.split("###"));

            docs.add(doc);
        }

        // Get the shared token list
        Set<String> tokens = new HashSet<String>();
        for (List<String> doc : docs)
            tokens.addAll(doc);

        // Get the idf numbers
        for (String token : tokens) {
            double count = 0;
            for (List<String> doc : docs) {
                if (doc.contains(token))
                    count++;
            }
            idfValues.put(token, count);
        }

        // Compute the idf
        for (String lemma : idfValues.keySet()) {
            double idf = Math.log10(lines.size() / idfValues.get(lemma));
            idfValues.put(lemma, idf);
        }

        // Store persistently
        StringBuilder sb = new StringBuilder();
        for (String key : idfValues.keySet()) {
            sb.append(key + "\t" + idfValues.get(key) + LF);
        }
        FileUtils.writeStringToFile(outputFile, sb.toString());

        System.out.println(" - done");
    }
}
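
The Math.log10 call implements the standard inverse document frequency, idf(t) = log10(N / df(t)), where N is the number of documents and df(t) is the number of documents containing the lemma t. A self-contained sketch with made-up counts:

int numDocuments = 1000;
int documentsContainingTerm = 10;
double idf = Math.log10((double) numDocuments / documentsContainingTerm); // 2.0
System.out.println(idf);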

From source file:uk.ac.diamond.scisoft.ncd.core.data.plots.LogLogPlotData.java

@Override
public double getDataValue(int idx, IDataset axis, IDataset data) {
    return Math.log10(data.getDouble(idx));
}

From source file:gate.termraider.util.Utilities.java

public static double log2(double input) {
    /*  log_a x = log_b x * log_a b
     *  log_b x = log_a x / log_a b
     */
    return Math.log10(input) / log10of2;
}
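
Since java.lang.Math has no log2 method, this helper relies on the change-of-base identity log_2 x = log_10 x / log_10 2; log10of2 is presumably a precomputed Math.log10(2). A quick illustration (value chosen arbitrarily):

double log10of2 = Math.log10(2);            // ≈ 0.30103
double log2of8 = Math.log10(8) / log10of2;  // ≈ 3.0
System.out.println(log2of8);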

From source file:dsp.unige.figures.ChannelHelper.java

/**
 * Returns the free-space loss for a given station and satellite.
 * @param station
 * @param satellite
 * @return The freespace loss, in dB
 */
public static double getFreeSpaceLoss(Station station, Satellite satellite) {

    return 20 * Math.log10(Orbits.getDistance(satellite.ORBIT_TYPE)) + 20 * Math.log10(station.frequency)
            + 92.44;

}
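
This is the free-space path loss formula in decibel form, FSPL = 20·log10(d) + 20·log10(f) + 92.44, where the constant 92.44 is consistent with distance in kilometres and frequency in gigahertz. A quick numeric check with hypothetical link parameters:

double distanceKm = 36000;  // roughly geostationary altitude
double frequencyGHz = 12;   // Ku-band downlink
double fsplDb = 20 * Math.log10(distanceKm) + 20 * Math.log10(frequencyGHz) + 92.44;
System.out.println(fsplDb); // ≈ 205 dB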

From source file:uk.ac.diamond.scisoft.ncd.core.data.plots.LogLogPlotData.java

@Override
public double getAxisValue(int idx, IDataset axis) {
    return Math.log10(axis.getDouble(idx));
}

From source file:org.apache.hadoop.mapred.TestSequenceFileInputFilter.java

public void testRegexFilter() throws Exception {
    // set the filter class
    LOG.info("Testing Regex Filter with pattern: \\A10*");
    SequenceFileInputFilter.setFilterClass(job, SequenceFileInputFilter.RegexFilter.class);
    SequenceFileInputFilter.RegexFilter.setPattern(job, "\\A10*");

    // clean input dir
    fs.delete(inDir, true);

    // for a variety of lengths
    for (int length = 1; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        LOG.info("******Number of records: " + length);
        createSequenceFile(length);
        int count = countRecords(0);
        assertEquals(count, length == 0 ? 0 : (int) Math.log10(length) + 1);
    }

    // clean up
    fs.delete(inDir, true);
}
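
The expected count works because the pattern \A10* matches exactly the powers of ten (1, 10, 100, ...), and the number of powers of ten not exceeding length is floor(log10(length)) + 1. A standalone check of that identity (sample value is arbitrary):

int length = 2500;
int expected = (int) Math.log10(length) + 1; // 4: the matching keys are 1, 10, 100 and 1000
System.out.println(expected);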

From source file:picard.analysis.TheoreticalSensitivity.java

/**
 * @param depthDistribution   the probability of depth n is depthDistribution[n] for n = 0, 1, ..., N - 1
 * @param qualityDistribution the probability of quality q is qualityDistribution[q] for q = 0, 1, ..., Q
 * @param sampleSize          sample size is the number of random sums of quality scores for each m
 * @param logOddsThreshold    is the log_10 of the likelihood ratio required to call a SNP,
 *                            for example 5 if the variant likelihood must be 10^5 times greater.
 * @param withLogging         true to output log messages, false otherwise.
 */
public static double hetSNPSensitivity(final double[] depthDistribution, final double[] qualityDistribution,
        final int sampleSize, final double logOddsThreshold, final boolean withLogging) {
    final int N = Math.min(depthDistribution.length, MAX_CONSIDERED_DEPTH_HET_SENS + 1);

    if (withLogging)
        log.info("Creating Roulette Wheel");
    final RouletteWheel qualitySampler = new RouletteWheel(qualityDistribution);

    //qualitySums[m] is a random sample of sums of m quality scores, for m = 0, 1, N - 1
    if (withLogging)
        log.info("Calculating quality sums from quality sampler");
    final List<ArrayList<Integer>> qualitySums = qualitySampler.sampleCumulativeSums(N, sampleSize,
            withLogging);

    //if a quality sum of m qualities exceeds the quality sum threshold for n total reads, a SNP is called
    final ArrayList<Double> qualitySumThresholds = new ArrayList<>(N);
    final double LOG_10 = Math.log10(2);

    for (int n = 0; n < N; n++)
        qualitySumThresholds.add(10 * (n * LOG_10 + logOddsThreshold));

    //probabilityToExceedThreshold[m][n] is the probability that the sum of m quality score
    //exceeds the nth quality sum threshold
    if (withLogging)
        log.info("Calculating theoretical het sensitivity");
    final List<ArrayList<Double>> probabilityToExceedThreshold = proportionsAboveThresholds(qualitySums,
            qualitySumThresholds);
    final List<ArrayList<Double>> altDepthDistribution = hetAltDepthDistribution(N);
    double result = 0.0;
    for (int n = 0; n < N; n++) {
        for (int m = 0; m <= n; m++) {
            result += depthDistribution[n] * altDepthDistribution.get(n).get(m)
                    * probabilityToExceedThreshold.get(m).get(n);
        }
    }
    return result;
}
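
The log10 call converts the likelihood-ratio threshold into a Phred-scaled quality-sum threshold: for n total reads, the required quality sum is 10 * (n * log10(2) + logOddsThreshold). As a rough numeric illustration, with logOddsThreshold = 5 and n = 30 the threshold is 10 * (30 * 0.30103 + 5) ≈ 140.3.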

From source file:javatranslation.matlab.dfa.java

public static void dfafunction(double[] tau, double MinBox, double MaxBox, int DFAorder) {

    // Generate 50 logarithmically spaced box sizes and round them to integer window lengths
    double[] l = logspace(MinBox, MaxBox);
    double[] incoef = new double[50];
    for (int i = 0; i < 50; i++) {
        incoef[i] = Math.round(l[i]);
    }
    double xx = mean(tau);
    for (int i = 0; i < tau.length; i++) {
        tau[i] = tau[i] - xx;
    }
    double[] Y = cumsum(tau, dfa.mean(tau));
    double maxnumseg = incoef.length;
    double[] winlen = null;
    winlen = new double[50];
    for (int truta = 0; truta < 50; truta++) {
        winlen[truta] = incoef[truta];
    }
    Arrays.sort(winlen);
    ArrayUtils.reverse(winlen);

    double Ylength = Y.length;
    double F[][] = new double[(int) maxnumseg][1];
    for (int k = 0; k < maxnumseg; k++) {
        F[k][0] = 0;
    }
    double[] timevec = new double[50];
    for (int kk = 0; kk < maxnumseg; kk++) {
        timevec[kk] = winlen[kk];
        double numsegm = Math.floor(Ylength / winlen[kk]);
        double tempY[][] = new double[1][(int) numsegm];
        for (int k = 0; k < numsegm; k++) {
            tempY[0][k] = 0;
        }
        // Process each complete window of length winlen[kk]
        final int segLen = (int) winlen[kk];
        for (int zz = 0; zz < numsegm; zz++) {
            // Skip windows that would run past the end of the integrated profile Y
            if ((zz + 1) * segLen <= Ylength) {
                // Copy the zz-th window of the profile
                double[] tempvec = new double[segLen];
                for (int p = 0; p < segLen; p++) {
                    tempvec[p] = Y[zz * segLen + p];
                }
                // Local detrending: fit a polynomial of order DFAorder to the window
                // (commons-math3 PolynomialCurveFitter stands in for MATLAB's polyfit
                // and for the broken PolynomialFitter call in the original translation)
                WeightedObservedPoints obs = new WeightedObservedPoints();
                for (int p = 0; p < segLen; p++) {
                    obs.add(p, tempvec[p]);
                }
                double[] temppol = PolynomialCurveFitter.create(DFAorder).fit(obs.toList());
                PolynomialFunction trend = new PolynomialFunction(temppol);
                // Mean squared residual of the window around the fitted trend
                double ret = 0;
                for (int p = 0; p < segLen; p++) {
                    double residual = tempvec[p] - trend.value(p);
                    ret += residual * residual;
                }
                tempY[0][zz] = ret / winlen[kk];
            }
        }
        // Root-mean-square fluctuation for this window length, averaged over the complete windows
        double[] kopp = new double[(int) numsegm];
        for (int k = 0; k < numsegm; k++) {
            kopp[k] = tempY[0][k];
        }
        double nonzerotempY = dfa.sum2(kopp);
        F[kk][0] = Math.sqrt(dfa.sum(kopp) / nonzerotempY);

    }
    double[] LF;
    LF = new double[(int) maxnumseg];
    double[] LN;
    LN = new double[(int) maxnumseg];
    for (int i = 0; i < maxnumseg; i++) {
        LF[i] = Math.log10(F[i][0]);
        LN[i] = Math.log10(winlen[i]);
    }
    // Regress log10(F) on log10(n); SimpleRegression fits the intercept itself,
    // so each observation row only needs the single predictor log10(winlen)
    double[][] XLN = new double[LN.length][1];
    for (int i = 0; i < LN.length; i++) {
        XLN[i][0] = LN[i];
    }
    SimpleRegression x = new SimpleRegression();
    x.addObservations(XLN, LF);
    RegressionResults b = x.regress();
    // System.out.println(b.getParameterEstimates());

    //double LF=Math.log10(F); % log fluctuations
    //double LN=Math.log10(winlen);
}
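
After the regression, the slope of the fitted log10(F) versus log10(n) line is the DFA scaling exponent. A minimal way to read it out with the same commons-math class, assuming the LN and LF arrays computed above:

SimpleRegression regression = new SimpleRegression();
for (int i = 0; i < LN.length; i++) {
    regression.addData(LN[i], LF[i]);
}
System.out.println(regression.getSlope()); // DFA scaling exponent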