Example usage for java.lang Double doubleValue

List of usage examples for java.lang Double doubleValue

Introduction

On this page you can find example usages of java.lang.Double.doubleValue.

Prototype

@HotSpotIntrinsicCandidate
public double doubleValue() 

Source Link

Document

Returns the double value of this Double object.

Usage

From source file:org.openqa.selenium.ExecutingJavascriptTest.java

@JavascriptEnabled
public void testPassingAndReturningADoubleShouldReturnADecimal() {
    // Skip silently when the current driver cannot execute JavaScript.
    if (!(driver instanceof JavascriptExecutor)) {
        return;
    }

    driver.get(javascriptPage);

    Double expectedResult = 1.2;
    Object result = executeScript("return arguments[0];", expectedResult);

    // The script echoes the argument back; it must round-trip as a decimal type.
    boolean isDecimalType = result instanceof Float || result instanceof Double;
    assertTrue("Expected result to be a Double or Float but was a " + result.getClass(), isDecimalType);
    assertEquals(expectedResult.doubleValue(), result);
}

From source file:edu.cudenver.bios.power.OneSampleStudentsTPowerCalculator.java

/**
 * Calculate power for the one sample Student's T test
 *
 * @see OneSampleStudentsTPowerParameters
 * @see OneSampleStudentsTPower/*from   w w  w  .  ja  va2  s  .  c  o m*/
 * @param params one sample student's t input parameters
 * @return list of power objects containing detectable difference results
 */
@Override
public List<Power> getPower(PowerParameters params) {
    OneSampleStudentsTPowerParameters studentsTParams = (OneSampleStudentsTPowerParameters) params;

    ArrayList<Power> results = new ArrayList<Power>();

    // calculate the power for either one or two tails
    for (Double alpha : studentsTParams.getAlphaList()) {
        for (Double sigma : studentsTParams.getSigmaList()) {
            for (MeanPair means : studentsTParams.getMeansList()) {
                for (Integer sampleSize : studentsTParams.getSampleSizeList()) {
                    try {
                        results.add(calculatePower(alpha.doubleValue(), means.mu0, means.muA,
                                sigma.doubleValue(), sampleSize.intValue(), studentsTParams.isTwoTailed()));
                    } catch (Exception me) {
                        // TODO: what to do?
                    }
                }
            }
        }
    }

    return results;
}

From source file:org.apache.ctakes.ytex.kernel.pagerank.PageRankServiceImpl.java

/**
 * perform one iteration of pagerank/*from www . j  ava2  s  .c  om*/
 * 
 * @param currentScores
 * @param cg
 * @return
 */
public Map<Integer, Double> pagerankIter(Map<Integer, Double> currentScores, Map<Integer, Double> dampingVector,
        ConceptGraph cg, double dampingFactor, double N) {
    Map<Integer, Double> newScores = new HashMap<Integer, Double>();
    if (dampingVector == null) {
        // the constant probability of randomly surfing into this node,
        // adjusted by damping factor
        double jump = ((1 - dampingFactor) / N);
        double initialValue = 1 / N;
        // the basic pagerank iteration with uniform damping vector
        // iterate over all nodes
        for (ConcRel c : cg.getConceptList()) {
            double score = 0d;
            // get nodes pointing at node c
            for (ConcRel in : c.getParents()) {
                // get the pagerank for node p which is pointing at c
                // if this is the first iteration, currentScores is null so
                // use the initial pagerank
                double prIn = currentScores == null ? initialValue : currentScores.get(in.getNodeIndex());
                // add the pagerank divided by the number of nodes p is
                // pointing at
                score += (prIn / (double) in.getChildren().size());
            }
            // adjust for uniform damping
            double adjusted = (score * dampingFactor) + jump;
            newScores.put(c.getNodeIndex(), adjusted);
        }
        // for (ConcRel c : cg.getConceptMap().values()) {
        // double score = 0d;
        // // get nodes pointing at node c
        // for (ConcRel in : c.getParents()) {
        // // get the pagerank for node p which is pointing at c
        // // if this is the first iteration, currentScores is null so
        // // use the initial pagerank
        // double prIn = currentScores == null ? initialValue
        // : currentScores.get(in.getConceptID());
        // // add the pagerank divided by the number of nodes p is
        // // pointing at
        // score += (prIn / (double) in.getChildren().size());
        // }
        // // adjust for uniform damping
        // double adjusted = (score * dampingFactor) + jump;
        // newScores.put(c.getConceptID(), adjusted);
        // }
    } else {
        // pagerank with non-uniform damping vector (topic vector).
        // because of the non-uniform damping vector, few nodes will have a
        // non-zero pagerank.
        // optimized so that we only iterate over nodes with non-zero
        // pagerank.
        // propagate from non-zero nodes to linked nodes
        // we assume currentScores is non-null - it is initialized to the
        // damping vector.
        // iterate over nodes that have a pagerank, and propagate the
        // pagerank to out-links.
        for (Map.Entry<Integer, Double> scoreEntry : currentScores.entrySet()) {
            // page (concept id)
            Integer index = scoreEntry.getKey();
            // pagerank
            double score = scoreEntry.getValue();
            // get concept id
            ConcRel cr = cg.getConceptList().get(index);
            // get number of out-links
            double nOutlinks = (double) cr.getChildren().size();
            if (nOutlinks > 0) {
                // propagate pagerank to out-links (children)
                for (ConcRel crOut : cr.getChildren()) {
                    // get current pagerank value for target page
                    double childScore = 0d;
                    Double childScoreD = newScores.get(crOut.getNodeIndex());
                    if (childScoreD != null)
                        childScore = childScoreD.doubleValue();
                    // add the pagerank/|links|
                    childScore += (score / nOutlinks);
                    newScores.put(crOut.getNodeIndex(), childScore);
                }
            }
        }
        // we just added the contribution of pages to newScores sum(score).
        // adjust: convert to (d)*sum(score) + (1-d)*v_i
        for (Map.Entry<Integer, Double> scoreEntry : newScores.entrySet()) {
            // v_i
            Double v_i = dampingVector.get(scoreEntry.getKey());
            // 1-c * v_i
            double v_i_adj = v_i != null ? v_i * (1 - dampingFactor) : 0d;
            double adjusted = (scoreEntry.getValue() * dampingFactor) + v_i_adj;
            scoreEntry.setValue(adjusted);
        }
        //
        //
        // for (Map.Entry<String, Double> scoreEntry : currentScores
        // .entrySet()) {
        // // page (concept id)
        // String page = scoreEntry.getKey();
        // // pagerank
        // double score = scoreEntry.getValue();
        // // get concept id
        // ConcRel cr = cg.getConceptMap().get(page);
        // // get number of out-links
        // double nOutlinks = (double) cr.getChildren().size();
        // if (nOutlinks > 0) {
        // // propagate pagerank to out-links (children)
        // for (ConcRel crOut : cr.getChildren()) {
        // // get current pagerank value for target page
        // double childScore = 0d;
        // Double childScoreD = newScores
        // .get(crOut.getConceptID());
        // if (childScoreD != null)
        // childScore = childScoreD.doubleValue();
        // // add the pagerank/|links|
        // childScore += (score / nOutlinks);
        // newScores.put(crOut.getConceptID(), childScore);
        // }
        // }
        // }
        // // we just added the contribution of pages to newScores
        // sum(score).
        // // adjust: convert to (d)*sum(score) + (1-d)*v_i
        // for (Map.Entry<String, Double> scoreEntry : newScores.entrySet())
        // {
        // // v_i
        // Double v_i = dampingVector.get(scoreEntry.getKey());
        // // 1-c * v_i
        // double v_i_adj = v_i != null ? v_i * (1 - dampingFactor) : 0d;
        // double adjusted = (scoreEntry.getValue() * dampingFactor)
        // + v_i_adj;
        // scoreEntry.setValue(adjusted);
        // }
    }
    return newScores;
}

From source file:com.thesmartweb.swebrank.TFIDF.java

/**
 * Method to compute the TFIDF score/*from  w w  w .  ja  v a2  s .c  o m*/
 * @param allDocs all the documents to analyze
 * @param topWords the amount of top words to get
 * @param directory the directory to save the output
 * @return a list with the top words
 */
public List<String> compute(String[] allDocs, int topWords, String directory) {
    try {
        List<List<String>> allwordsList = new ArrayList<>();
        int counterwords = 0;
        int negtfidf = 0;
        for (int i = 0; i < allDocs.length; i++) {
            List<String> allwordsList_single = new ArrayList<>();
            if (!(allDocs[i] == null)) {
                String stringtosplit = allDocs[i];
                if (!(stringtosplit == null) && (!(stringtosplit.equalsIgnoreCase("")))) {
                    stringtosplit = stringtosplit.replaceAll("[\\W&&[^\\s]]", "");
                    if (!(stringtosplit == null) && (!(stringtosplit.equalsIgnoreCase("")))) {
                        String[] tokenizedTerms = stringtosplit.split("\\W+");
                        for (int j = 0; j < tokenizedTerms.length; j++) {
                            if (!(tokenizedTerms[j] == null) && (!(tokenizedTerms[j].equalsIgnoreCase("")))) {
                                allwordsList_single.add(tokenizedTerms[j]);
                                counterwords++;
                            }
                        }
                    }
                }
            }
            allwordsList.add(i, allwordsList_single);
        }

        HashMap<String, Double> wordTFIDFscores = new HashMap<>();
        List<String> topwordsTFIDF;
        topwordsTFIDF = new ArrayList<>();
        List<String> wordsTFIDF = new ArrayList<>();
        List<Double> TFIDFscoreslist;
        List<Double> TFIDFscoreslistcopy = new ArrayList<>();
        TFIDFscoreslist = new ArrayList<>();
        for (int i = 0; i < allDocs.length; i++) {
            if (!(allDocs[i] == null)) {
                String stringtosplit = allDocs[i];
                if (!(stringtosplit == null) && (!(stringtosplit.equalsIgnoreCase("")))) {
                    stringtosplit = stringtosplit.replaceAll("[\\W&&[^\\s]]", "");
                    if (!(stringtosplit == null) && (!(stringtosplit.equalsIgnoreCase("")))) {
                        String[] tokenizedTerms = stringtosplit.split("\\W+");
                        for (int j = 0; j < tokenizedTerms.length; j++) {
                            if (!(tokenizedTerms[j] == null) && (!(tokenizedTerms[j].equalsIgnoreCase("")))) {
                                Double tfvalue = tfCalculator(allDocs[i], tokenizedTerms[j]);
                                Double idfvalue = idfCalculator(allwordsList, tokenizedTerms[j],
                                        allDocs.length);
                                Double tfidfvalue = tfvalue * idfvalue;
                                if (tfidfvalue < 0) {
                                    negtfidf++;
                                }
                                TFIDFscoreslist.add(tfvalue.doubleValue());
                                TFIDFscoreslistcopy.add(tfvalue.doubleValue());
                                wordsTFIDF.add(tokenizedTerms[j]);
                                if (wordTFIDFscores.get(tokenizedTerms[j]) == null
                                        || wordTFIDFscores.get(tokenizedTerms[j]).doubleValue() > tfidfvalue) {
                                    wordTFIDFscores.put(tokenizedTerms[j], tfidfvalue);
                                }
                            }
                        }
                    }
                }
            }
        }
        DataManipulation shmap = new DataManipulation();
        topwordsTFIDF = shmap.sortHashmap(wordTFIDFscores).subList(0, topWords);
        topWordsList = topwordsTFIDF;
        File file_words = new File(directory + "words.txt");
        FileUtils.writeLines(file_words, topWordsList);
        return topWordsList;
    } catch (IOException ex) {
        Logger.getLogger(TFIDF.class.getName()).log(Level.SEVERE, null, ex);

        return topWordsList;
    }

}

From source file:org.deegree.ogcwebservices.wms.dataaccess.ID2PInterpolation.java

/**
 * Insert every feature of the collection that has a non-null z value into
 * the quadtree, tracking the observed z range.
 *
 * @param fc feature collection of points carrying the configured z property
 * @return the number of features actually inserted
 * @throws IndexException if quadtree insertion fails
 */
private int buildQuadtree(FeatureCollection fc) throws IndexException {

    Iterator<Feature> iterator = fc.iterator();
    double min = Double.MAX_VALUE;
    // BUG FIX: Double.MIN_VALUE is the smallest POSITIVE double, so the old
    // initialization reported a wrong maximum whenever all z values were
    // negative; -Double.MAX_VALUE is the true lower bound.
    double max = -Double.MAX_VALUE;
    // the z-property name is loop-invariant, so resolve it once
    String tmp = prop.getProperty("z_value");
    QualifiedName qn = new QualifiedName(tmp);
    int count = 0;
    while (iterator.hasNext()) {
        Feature feat = iterator.next();
        Point point = (Point) feat.getDefaultGeometryPropertyValue();
        Object o = feat.getDefaultProperty(qn).getValue();
        if (o != null) {
            double zValue = Double.parseDouble(o.toString());
            // rebuild the point without a z coordinate; z is stored in the tuple
            point = GeometryFactory.createPoint(point.getX(), point.getY(), null);
            quadtree.insert(new DataTuple(point.getX(), point.getY(), zValue), point);
            if (zValue < min) {
                min = zValue;
            }
            if (zValue > max) {
                max = zValue;
            }
            count++;
        }
    }
    System.out.println("min value : " + min);
    System.out.println("max value : " + max);
    return count;
}

From source file:com.xavax.json.JSON.java

/**
 * Get the double field with the specified name, falling back to a default
 * when the field is missing or null.
 *
 * @param key  the name of the field.
 * @param defaultValue  the value to return if the field is null.
 * @return the double field with the specified name.
 */
public double getDouble(final String key, final double defaultValue) {
    final Double value = getDouble(key);
    if (value == null) {
        return defaultValue;
    }
    return value.doubleValue();
}

From source file:gr.abiss.calipso.wicket.components.formfields.FieldSummaryHelper.java

/**
 * Format the running summary (total or average) for numeric field types.
 * Returns an empty string when no summary mode is configured or the field
 * type is not numeric.
 */
private String getCalculatedSummary() {
    String calculatedSummary = "";
    if (StringUtils.isNotBlank(summary)) {
        if (TYPE_DECIMAL.equalsIgnoreCase(type) || TYPE_INTEGER.equalsIgnoreCase(type)) {
            Double total = (Double) summaryObject;
            if (ZERO.equals(total)) {
                // zero total: total and average are both zero
                calculatedSummary = this.format(ZERO);
            } else if (SUMMARY_TOTAL.equalsIgnoreCase(summary)) {
                calculatedSummary = this.format(total);
            } else if (SUMMARY_AVERAGE.equalsIgnoreCase(summary)) {
                // Double.valueOf replaces the deprecated new Double(...) constructor
                calculatedSummary = this.format(Double.valueOf(total.doubleValue() / summaryEntriesCount));
            }
        }
    }
    return calculatedSummary;
}

From source file:edu.harvard.med.screensaver.io.screenresults.ScreenResultParserTest.java

public void testParseNumericFormulaCellValue() throws Exception {

    // Read the reference formula cell directly through the jxl API.
    jxl.Workbook jxlWorkbook = jxl.Workbook.getWorkbook(FORMULA_VALUE_TEST_WORKBOOK_FILE.getFile());
    Sheet jxlSheet = jxlWorkbook.getSheet(0);
    jxl.Cell formulaCell = jxlSheet.getCell(3, 1);
    assertEquals("cell type", CellType.NUMBER_FORMULA, formulaCell.getType());
    double referenceValue = ((NumberFormulaCell) formulaCell).getValue();
    assertEquals("numeric value", 2.133, referenceValue, 0.0001);
    String referenceFormula = ((NumberFormulaCell) formulaCell).getFormula();
    assertEquals("formula", "B2+C2", referenceFormula);
    assertEquals("numeric decimal places", 4,
            ((NumberFormulaCell) formulaCell).getNumberFormat().getMaximumFractionDigits());

    // Parse the same cell through our own Workbook wrapper and compare.
    Workbook workbook = new Workbook(FORMULA_VALUE_TEST_WORKBOOK_FILE.getFile());
    Worksheet worksheet = workbook.getWorksheet(0);
    Cell parsedCell = worksheet.getCell(3, 1, false);
    assertTrue(!parsedCell.isEmpty());
    Double parsedValue = parsedCell.getDouble();
    assertEquals("parse numeric value", referenceValue, parsedValue.doubleValue(), 0.0001);

    // Decimal places reported for various number formats
    // (TODO: should probably be a separate unit test).
    Cell numericFormatFormulaCell = worksheet.getCell(3, 1, false);
    assertEquals("decimal places of numeric format on formula cell", 4,
            numericFormatFormulaCell.getDoublePrecision());
    Cell numericFormatNumericCell = worksheet.getCell(2, 1);
    assertEquals("decimal places of numeric format on numeric cell", 3,
            numericFormatNumericCell.getDoublePrecision());
    Cell integerNumericFormatNumericCell = worksheet.getCell(5, 1);
    assertEquals("decimal places of integer number format on numeric cell", 0,
            integerNumericFormatNumericCell.getDoublePrecision());
    Cell percentageNumericCell = worksheet.getCell(6, 1);
    assertEquals("decimal places of percentage number format on numeric cell", 3,
            percentageNumericCell.getDoublePrecision());
}

From source file:geogebra.common.kernel.statistics.AlgoFrequency.java

// Computes the frequency table for the data list: validates inputs, tallies
// raw/cumulative counts (optionally per class interval, optionally scaled by
// a density factor), and fills the `frequency` and `value` output lists.
@Override
public final void compute() {

    // Contingency-table mode is handled entirely by its own routine.
    if (isContingencyTable) {
        computeContingencyTable();
        return;
    }

    // Validate input arguments
    // =======================================================

    // empty or undefined data -> undefined result
    if (!dataList.isDefined() || dataList.size() == 0) {
        frequency.setUndefined();
        return;
    }

    // only text or numeric data lists are supported
    if (!(dataList.getElementType().equals(GeoClass.TEXT)
            || dataList.getElementType().equals(GeoClass.NUMERIC))) {
        frequency.setUndefined();
        return;
    }

    // a class list must be numeric and define at least one interval (2 bounds)
    if (classList != null) {
        if (!classList.getElementType().equals(GeoClass.NUMERIC) || classList.size() < 2) {
            frequency.setUndefined();
            return;
        }
    }

    // density scale factor must be strictly positive when supplied
    if (density != null) {
        if (density.getDouble() <= 0) {
            frequency.setUndefined();
            return;
        }
    }

    if (scale != null) {
        if (!scale.isDefined()) {
            frequency.setUndefined();
            return;
        }
        scaleFactor = scale.getValue();
    }

    // reset outputs before recomputing
    frequency.setDefined(true);
    frequency.clear();
    if (value != null)
        value.clear();

    double numMax = 0, numMin = 0;
    boolean doCumulative = isCumulative != null && isCumulative.getBoolean();

    // Load the data into f, an instance of Frequency class
    // =======================================================

    Frequency f = new FrequencyGgb();
    for (int i = 0; i < dataList.size(); i++) {
        if (dataList.getElementType().equals(GeoClass.TEXT))
            f.addValue(((GeoText) dataList.get(i)).toValueString(StringTemplate.defaultTemplate));
        if (dataList.getElementType().equals(GeoClass.NUMERIC))
            f.addValue(((GeoNumeric) dataList.get(i)).getDouble());
    }

    // If classList does not exist,
    // get the unique value list and compute frequencies for this list
    // =======================================================

    // handle string data
    if (dataList.getElementType().equals(GeoClass.TEXT)) {

        // first pass: seed the min/max strings with the first unique value
        Iterator<Comparable<?>> itr = f.valuesIterator();
        String strMax = (String) itr.next();
        String strMin = strMax;
        itr = f.valuesIterator();

        // second pass: track lexicographic min/max and emit one value/count
        // pair per unique string
        while (itr.hasNext()) {
            String s = (String) itr.next();
            if (s.compareTo(strMax) > 0)
                strMax = s;
            if (s.compareTo(strMin) < 0)
                strMin = s;
            GeoText text = new GeoText(cons);
            text.setTextString(s);
            value.add(text);
            if (classList == null) {
                if (doCumulative) {
                    addValue(f.getCumFreq(s));
                } else {
                    addValue(f.getCount(s));
                }
            }
        }
    }

    // handle numeric data
    else {
        // same two-pass scheme as above, but with numeric min/max
        Iterator<Comparable<?>> itr = f.valuesIterator();
        numMax = (Double) itr.next();
        numMin = numMax;
        itr = f.valuesIterator();

        while (itr.hasNext()) {
            Double n = (Double) itr.next();
            if (n > numMax)
                numMax = n.doubleValue();
            if (n < numMin)
                numMin = n.doubleValue();
            value.add(new GeoNumeric(cons, n));

            if (classList == null)
                if (doCumulative)
                    addValue(f.getCumFreq(n));
                else
                    addValue(f.getCount(n));
        }
    }

    // If classList exists, compute frequencies using the classList
    // =======================================================

    if (classList != null) {

        double lowerClassBound = 0;
        double upperClassBound = 0;
        double classFreq = 0;

        // set density conditions
        boolean hasDensity = false;
        if (useDensity != null)
            hasDensity = useDensity.getBoolean();

        double densityValue = 1; // default density
        if (density != null) {
            densityValue = density.getDouble();
        }

        double cumulativeClassFreq = 0;
        double swap;
        int length = classList.size();
        // each consecutive pair of class-list entries defines one interval
        for (int i = 1; i < length; i++) {

            lowerClassBound = ((GeoNumeric) classList.get(i - 1)).getDouble();
            upperClassBound = ((GeoNumeric) classList.get(i)).getDouble();

            // handle roundoff errror in class list values (this is possible
            // if auto-generated by another cmd)
            lowerClassBound = Kernel.checkDecimalFraction(lowerClassBound);
            upperClassBound = Kernel.checkDecimalFraction(upperClassBound);

            // normalize a descending interval, remembering the direction
            boolean increasing = true;
            if (lowerClassBound > upperClassBound) {
                swap = upperClassBound;
                upperClassBound = lowerClassBound;
                lowerClassBound = swap;
                increasing = false;
            }
            // count of values in [lower, upper): cumulative difference plus
            // the values sitting exactly on the lower bound
            classFreq = f.getCumFreq(upperClassBound) - f.getCumFreq(lowerClassBound)
                    + f.getCount(lowerClassBound);
            // intervals are half-open except the outermost one, so exclude
            // values equal to the upper bound for all interior intervals
            if ((i != length - 1 && increasing) || (i != 1 && !increasing))
                classFreq -= f.getCount(upperClassBound);

            if (doCumulative)
                cumulativeClassFreq += classFreq;

            // adjust the frequency and add to the output GeoList
            double v = doCumulative ? cumulativeClassFreq : classFreq;
            if (hasDensity) {
                // density mode: report frequency per unit of class width
                v = densityValue * v / (upperClassBound - lowerClassBound);
            }
            addValue(v);
        }

        // handle the last (highest) class frequency specially
        // it must also count values equal to the highest class bound
        // NOTE(review): no code follows this comment — the special-case
        // handling appears to be folded into the boundary test above; verify.

    }
}

From source file:edu.cudenver.bios.power.OneSampleStudentsTPowerCalculator.java

/**
 * Run a power simulation for the one sample student's t test
 *
 * @see OneSampleStudentsTPowerParameters
 * @see OneSampleStudentsTPower
 * @param params one sample student's t input parameters
 * @param iterations number of iterations to use for the simulation
 * @return list of power objects containing detectable difference results
 */
@Override
public List<Power> getSimulatedPower(PowerParameters params, int iterations) {
    OneSampleStudentsTPowerParameters tParams = (OneSampleStudentsTPowerParameters) params;

    ArrayList<Power> powerList = new ArrayList<Power>();

    // Simulate over the full cross-product of inputs:
    // alpha x sigma x means x sample size.
    for (Double alpha : tParams.getAlphaList()) {
        for (Double sigma : tParams.getSigmaList()) {
            for (MeanPair means : tParams.getMeansList()) {
                for (Integer sampleSize : tParams.getSampleSizeList()) {
                    try {
                        Power power = simulatePower(alpha.doubleValue(), means.mu0, means.muA,
                                sigma.doubleValue(), sampleSize.intValue(), tParams.isTwoTailed(), iterations);
                        powerList.add(power);
                    } catch (Exception me) {
                        // Best-effort: skip combinations whose simulation fails.
                        // TODO: how to handle this?
                    }
                }
            }
        }
    }

    return powerList;
}