Example usage for java.lang Double intValue

List of usage examples for java.lang Double intValue

Introduction

On this page you can find example usages of java.lang.Double.intValue().

Prototype

public int intValue() 

Source Link

Document

Returns the value of this Double as an int after a narrowing primitive conversion.

Usage

From source file:com.efficio.fieldbook.web.nursery.service.impl.ExcelImportStudyServiceImpl.java

/**
 * Copies observation values from the XLS observation sheet back into the
 * workbook's measurement rows. Numeric cells that hold a whole number are
 * rendered without a decimal point; all other cells are read as strings.
 * Missing cells become the empty string.
 *
 * @param xlsBook  the Excel workbook being imported (sheet 1 = observations)
 * @param workbook the domain workbook whose observations are updated in place
 */
private void importDataToWorkbook(HSSFWorkbook xlsBook, Workbook workbook) {
    if (workbook.getObservations() == null) {
        return;
    }
    HSSFSheet observationSheet = xlsBook.getSheetAt(1);
    int xlsRowIndex = 1; // data starts after the header row (row 0)
    for (MeasurementRow wRow : workbook.getObservations()) {
        HSSFRow xlsRow = observationSheet.getRow(xlsRowIndex);
        for (MeasurementData wData : wRow.getDataList()) {
            int xlsColIndex = findColumn(observationSheet, wData.getLabel());
            Cell cell = xlsRow.getCell(xlsColIndex);
            String xlsValue = "";

            if (cell != null) {
                if (cell.getCellType() == Cell.CELL_TYPE_NUMERIC) {
                    double numeric = cell.getNumericCellValue();
                    Integer truncated = Integer.valueOf((int) numeric);
                    // Whole numbers are written without a trailing ".0".
                    if (Double.parseDouble(truncated.toString()) == numeric) {
                        xlsValue = truncated.toString();
                    } else {
                        xlsValue = Double.toString(numeric);
                    }
                } else {
                    xlsValue = cell.getStringCellValue();
                }
            }
            wData.setValue(xlsValue);
        }
        xlsRowIndex++;
    }
}

From source file:de.unidue.langtech.grading.tc.ClusteringTask.java

/**
 * Trains the configured Weka clusterer on the task's training data (with the
 * outcome attribute removed) and prints diagnostics: for each cluster, the
 * gold label and text of every member instance, followed by a per-cluster
 * table of size, purity and RMSE.
 *
 * @param aContext the task context used to locate the stored ARFF training data
 * @throws Exception if the data cannot be read or the clusterer fails
 */
@Override
public void execute(TaskContext aContext) throws Exception {
    // Clustering is only defined for single-label setups here.
    if (learningMode.equals(Constants.LM_MULTI_LABEL)) {
        throw new IllegalArgumentException("Cannot use multi-label setup in clustering.");
    }
    boolean multiLabel = false;

    File arffFileTrain = new File(
            aContext.getStorageLocation(TEST_TASK_INPUT_KEY_TRAINING_DATA, AccessMode.READONLY).getPath() + "/"
                    + TRAINING_DATA_FILENAME);

    Instances trainData = TaskUtils.getInstances(arffFileTrain, multiLabel);

    // get number of outcomes
    List<String> trainOutcomeValues = TaskUtils.getClassLabels(trainData, multiLabel);

    // First clustering argument is the clusterer class name; the remaining
    // arguments are passed through as that clusterer's options.
    Clusterer clusterer = AbstractClusterer.forName(clusteringArguments.get(0),
            clusteringArguments.subList(1, clusteringArguments.size()).toArray(new String[0]));

    // Keep an untouched copy so instance IDs and class values can still be
    // read back after the working copy has been filtered.
    Instances copyTrainData = new Instances(trainData);
    trainData = WekaUtils.removeOutcomeId(trainData, multiLabel);

    // generate data for clusterer (w/o class)
    Remove filter = new Remove();
    filter.setAttributeIndices("" + (trainData.classIndex() + 1));
    filter.setInputFormat(trainData);
    Instances clusterTrainData = Filter.useFilter(trainData, filter);

    clusterer.buildClusterer(clusterTrainData);

    // get a mapping from clusterIDs to instance offsets in the ARFF
    Map<Integer, Set<Integer>> clusterMap = getClusterMap(clusterTrainData, clusterer);

    Map<String, String> instanceId2TextMap = getInstanceId2TextMap(aContext);

    // clusterId -> frequency of each gold label observed inside that cluster
    ConditionalFrequencyDistribution<Integer, String> clusterAssignments = new ConditionalFrequencyDistribution<Integer, String>();
    for (Integer clusterId : clusterMap.keySet()) {
        System.out.println("CLUSTER: " + clusterId);
        for (Integer offset : clusterMap.get(clusterId)) {

            // get instance ID from instance
            Instance instance = copyTrainData.get(offset);

            // The class value is used as a numeric index into the label list.
            // NOTE(review): 'new Double(...)' is deprecated boxing; a plain
            // (int) cast of instance.value(...) would be equivalent.
            Double classOffset = new Double(instance.value(copyTrainData.classAttribute()));
            String label = (String) trainOutcomeValues.get(classOffset.intValue());

            clusterAssignments.addSample(clusterId, label);

            String instanceId = instance
                    .stringValue(copyTrainData.attribute(AddIdFeatureExtractor.ID_FEATURE_NAME).index());
            System.out.println(label + "\t" + instanceId2TextMap.get(instanceId));
        }
        System.out.println();
    }

    // Purity = share of the majority gold label within the cluster.
    System.out.println("ID\tSIZE\tPURITY\tRMSE");
    for (Integer clusterId : clusterMap.keySet()) {
        FrequencyDistribution<String> fd = clusterAssignments.getFrequencyDistribution(clusterId);
        double purity = (double) fd.getCount(fd.getSampleWithMaxFreq()) / fd.getN();
        String purityString = String.format("%.2f", purity);
        double rmse = getRMSE(fd, trainOutcomeValues);
        String rmseString = String.format("%.2f", rmse);
        System.out.println(
                clusterId + "\t" + clusterMap.get(clusterId).size() + "\t" + purityString + "\t" + rmseString);
    }
    System.out.println();
}

From source file:org.dspace.app.statistics.LogAnalyser.java

/**
 * using the pre-configuration information passed here, analyse the logs
 * and produce the aggregation file/*from   w w w.j  a va 2  s  .c  o  m*/
 *
 * @param   context     the DSpace context object this occurs under
 * @param   myLogDir    the passed log directory.  Uses default if null
 * @param   myFileTemplate  the passed file name regex.  Uses default if null
 * @param   myConfigFile    the DStat config file.  Uses default if null
 * @param   myOutFile    the file to which to output aggregation data.  Uses default if null
 * @param   myStartDate     the desired start of the analysis.  Starts from the beginning otherwise
 * @param   myEndDate       the desired end of the analysis.  Goes to the end otherwise
 * @param   myLookUp        force a lookup of the database
 * @return aggregate output
 * @throws IOException if IO error
 * @throws SQLException if database error
 * @throws SearchServiceException if search error
 */
public static String processLogs(Context context, String myLogDir, String myFileTemplate, String myConfigFile,
        String myOutFile, Date myStartDate, Date myEndDate, boolean myLookUp)
        throws IOException, SQLException, SearchServiceException {
    // FIXME: perhaps we should have all parameters and aggregators put 
    // together in a single aggregating object

    // if the timer has not yet been started, then start it
    startTime = new GregorianCalendar();

    //instantiate aggregators
    actionAggregator = new HashMap<String, Integer>();
    searchAggregator = new HashMap<String, Integer>();
    userAggregator = new HashMap<String, Integer>();
    itemAggregator = new HashMap<String, Integer>();
    archiveStats = new HashMap<String, Integer>();

    //instantiate lists
    generalSummary = new ArrayList<String>();
    excludeWords = new ArrayList<String>();
    excludeTypes = new ArrayList<String>();
    excludeChars = new ArrayList<String>();
    itemTypes = new ArrayList<String>();

    // set the parameters for this analysis
    setParameters(myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp);

    // pre prepare our standard file readers and buffered readers
    FileReader fr = null;
    BufferedReader br = null;

    // read in the config information, throwing an error if we fail to open
    // the given config file
    readConfig(configFile);

    // assemble the regular expressions for later use (requires the file
    // template to build the regex to match it
    setRegex(fileTemplate);

    // get the log files
    File[] logFiles = getLogFiles(logDir);

    // standard loop counter
    int i = 0;

    // for every log file do analysis
    // FIXME: it is easy to implement not processing log files after the
    // dates exceed the end boundary, but is there an easy way to do it
    // for the start of the file?  Note that we can assume that the contents
    // of the log file are sequential, but can we assume the files are
    // provided in a data sequence?
    for (i = 0; i < logFiles.length; i++) {
        // check to see if this file is a log file agains the global regex
        Matcher matchRegex = logRegex.matcher(logFiles[i].getName());
        if (matchRegex.matches()) {
            // if it is a log file, open it up and lets have a look at the
            // contents.
            try {
                fr = new FileReader(logFiles[i].toString());
                br = new BufferedReader(fr);
            } catch (IOException e) {
                System.out.println("Failed to read log file " + logFiles[i].toString());
                System.exit(0);
            }

            // for each line in the file do the analysis
            // FIXME: perhaps each section needs to be dolled out to an
            // analysing class to allow pluggability of other methods of
            // analysis, and ease of code reading too - Pending further thought
            String line = null;
            while ((line = br.readLine()) != null) {
                // get the log line object
                LogLine logLine = getLogLine(line);

                // if there are line segments get on with the analysis
                if (logLine != null) {
                    // first find out if we are constraining by date and 
                    // if so apply the restrictions
                    if ((startDate != null) && (!logLine.afterDate(startDate))) {
                        continue;
                    }

                    if ((endDate != null) && (!logLine.beforeDate(endDate))) {
                        break;
                    }

                    // count the number of lines parsed
                    lineCount++;

                    // if we are not constrained by date, register the date
                    // as the start/end date if it is the earliest/latest so far
                    // FIXME: this should probably have a method of its own
                    if (startDate == null) {
                        if (logStartDate != null) {
                            if (logLine.beforeDate(logStartDate)) {
                                logStartDate = logLine.getDate();
                            }
                        } else {
                            logStartDate = logLine.getDate();
                        }
                    }

                    if (endDate == null) {
                        if (logEndDate != null) {
                            if (logLine.afterDate(logEndDate)) {
                                logEndDate = logLine.getDate();
                            }
                        } else {
                            logEndDate = logLine.getDate();
                        }
                    }

                    // count the warnings
                    if (logLine.isLevel("WARN")) {
                        // FIXME: really, this ought to be some kind of level
                        // aggregator
                        warnCount++;
                    }
                    // count the exceptions
                    if (logLine.isLevel("ERROR")) {
                        excCount++;
                    }

                    if (null == logLine.getAction()) {
                        continue;
                    }

                    // is the action a search?
                    if (logLine.isAction("search")) {
                        // get back all the valid search words from the query
                        String[] words = analyseQuery(logLine.getParams());

                        // for each search word add to the aggregator or
                        // increment the aggregator's counter
                        for (int j = 0; j < words.length; j++) {
                            // FIXME: perhaps aggregators ought to be objects
                            // themselves
                            searchAggregator.put(words[j], increment(searchAggregator, words[j]));
                        }
                    }

                    // is the action a login, and are we counting user logins?
                    if (logLine.isAction("login") && !userEmail.equals("off")) {
                        userAggregator.put(logLine.getUser(), increment(userAggregator, logLine.getUser()));
                    }

                    // is the action an item view?
                    if (logLine.isAction("view_item")) {
                        String handle = logLine.getParams();

                        // strip the handle string
                        Matcher matchHandle = handleRX.matcher(handle);
                        handle = matchHandle.replaceAll("");

                        // strip the item id string
                        Matcher matchItem = itemRX.matcher(handle);
                        handle = matchItem.replaceAll("").trim();

                        // either add the handle to the aggregator or
                        // increment its counter
                        itemAggregator.put(handle, increment(itemAggregator, handle));
                    }

                    // log all the activity
                    actionAggregator.put(logLine.getAction(), increment(actionAggregator, logLine.getAction()));
                }
            }

            // close the file reading buffers
            br.close();
            fr.close();

        }
    }

    // do we want to do a database lookup?  Do so only if the start and
    // end dates are null or lookUp is true
    // FIXME: this is a kind of separate section.  Would it be worth building
    // the summary string separately and then inserting it into the real
    // summary later?  Especially if we make the archive analysis more complex
    archiveStats.put("All Items", getNumItems(context));
    for (i = 0; i < itemTypes.size(); i++) {
        archiveStats.put(itemTypes.get(i), getNumItems(context, itemTypes.get(i)));
    }

    // now do the host name and url lookup
    hostName = ConfigurationManager.getProperty("dspace.hostname").trim();
    name = ConfigurationManager.getProperty("dspace.name").trim();
    url = ConfigurationManager.getProperty("dspace.url").trim();
    if ((url != null) && (!url.endsWith("/"))) {
        url = url + "/";
    }

    // do the average views analysis
    if ((archiveStats.get("All Items")).intValue() != 0) {
        // FIXME: this is dependent on their being a query on the db, which
        // there might not always be if it becomes configurable
        Double avg = Math.ceil((actionAggregator.get("view_item")).doubleValue()
                / (archiveStats.get("All Items")).doubleValue());
        views = avg.intValue();
    }

    // finally, write the output
    return createOutput();
}

From source file:org.openscience.cdk.applications.taverna.qsar.GetMolecularWeightDistributionFromQSARVectorActivity.java

/**
 * Reads a QSAR vector map (UUID -> descriptor values) from the first input
 * port, extracts each molecule's "weight" entry, emits a per-molecule
 * ID/weight CSV and an integer-binned weight-distribution CSV on the output
 * ports, and writes an XY bar chart of the distribution as a PDF next to
 * the target file from the second input port.
 *
 * @throws Exception rethrown as CDKTavernaException on bad input or chart errors
 */
@Override
@SuppressWarnings("unchecked")
public void work() throws Exception {
    // Get input
    Map<UUID, Map<String, Object>> vectorMap;
    try {
        vectorMap = (Map<UUID, Map<String, Object>>) this.getInputAsObject(this.INPUT_PORTS[0]);
    } catch (Exception e) {
        ErrorLogger.getInstance().writeError(CDKTavernaException.WRONG_INPUT_PORT_TYPE, this.getActivityName(),
                e);
        throw new CDKTavernaException(this.getConfiguration().getActivityName(), e.getMessage());
    }
    File targetFile = this.getInputAsFile(this.INPUT_PORTS[1]);
    String directory = Tools.getDirectory(targetFile);
    String name = Tools.getFileName(targetFile);
    // Do work
    ChartTool chartTool = new ChartTool();
    ArrayList<String> molIdSWeightCSV = new ArrayList<String>();
    ArrayList<String> weightDistributionCSV = new ArrayList<String>();
    try {
        QSARVectorUtility util = new QSARVectorUtility();
        List<UUID> uuids = util.getUUIDs(vectorMap);
        LinkedList<Double> weigths = new LinkedList<Double>();
        int maxWeight = 0;
        molIdSWeightCSV.add("ID;Molecular Weight (g/mol);");
        for (int i = 0; i < uuids.size(); i++) {
            UUID uuid = uuids.get(i);
            Map<String, Object> values = vectorMap.get(uuid);
            Double weight = (Double) values.get("weight");
            // Skip molecules whose weight could not be computed.
            if (weight.isInfinite() || weight.isNaN()) {
                continue;
            }
            weigths.add(weight);
            // Track the largest truncated weight to size the histogram array.
            if (weight.intValue() > maxWeight) {
                maxWeight = weight.intValue();
            }
            molIdSWeightCSV.add(uuid.toString() + ";" + String.format("%.2f", weight) + ";");
        }
        // Histogram bins are 1 g/mol wide; bin index = weight truncated to int.
        int[] weightDistribution = new int[maxWeight + 1];
        for (Double weight : weigths) {
            int value = weight.intValue();
            weightDistribution[value]++;
        }
        weightDistributionCSV.add("Molecular Weight (g/mol);Number Of Molecules;");
        // NOTE(review): the CSV starts at bin 1 while the chart below starts
        // at bin 0 -- confirm this asymmetry is intentional.
        for (int i = 1; i < weightDistribution.length; i++) {
            weightDistributionCSV.add(i + ";" + weightDistribution[i] + ";");
        }
        // Create chart
        XYSeries series = new XYSeries("Weight");
        for (int i = 0; i < weightDistribution.length; i++) {
            series.add(i, weightDistribution[i]);
        }
        XYSeriesCollection dataset = new XYSeriesCollection(series);
        JFreeChart chart = chartTool.createXYBarChart("Weight Distribution", "Weight (g/mol)",
                "Number of Compounds", dataset, true, false);
        File file = FileNameGenerator.getNewFile(directory, ".pdf", name);
        chartTool.writeChartAsPDF(file, Collections.singletonList((Object) chart));
    } catch (Exception e) {
        ErrorLogger.getInstance().writeError("Error during extraction of molecular weight from QSAR vector!",
                this.getActivityName(), e);
        throw new CDKTavernaException(this.getConfiguration().getActivityName(), e.getMessage());
    }
    // Set output
    this.setOutputAsStringList(weightDistributionCSV, this.OUTPUT_PORTS[0]);
    this.setOutputAsStringList(molIdSWeightCSV, this.OUTPUT_PORTS[1]);
}

From source file:Coordinate.java

/**
 * The compareTo method compares the receiving object with the specified object and returns a
 * negative integer, 0, or a positive integer depending on whether the receiving object is
 * less than, equal to, or greater than the specified object.
 *
 * @param c the event to compare this one to
 *
 * @return  an integer indicating comparison result
 */
public int compareTo(Coordinate c) {

    // NOTE(review): concatenating latitude and longitude strings and parsing
    // the result as one double is fragile (e.g. a negative longitude yields
    // an unparseable string) -- preserved from the original; confirm inputs.
    String me = this.getLatitudeAsString() + this.getLongitudeAsString();
    String you = c.getLatitudeAsString() + c.getLongitudeAsString();

    // Double.compare fixes two bugs in the previous implementation:
    //  - 'meDbl == youDbl' compared boxed Double *references*, so equal
    //    coordinates were almost never reported as equal;
    //  - Math.floor(me - you).intValue() returned 0 ("equal") for any
    //    positive difference smaller than 1, and could overflow int for
    //    very large differences.
    return Double.compare(Double.parseDouble(me), Double.parseDouble(you));
}

From source file:com.mchp.android.PIC32_BTSK.TemperatureFragment.java

/**
 * Restores the temperature readout after the view state is restored: shows
 * the newest graph sample, truncated to an int, with a degree-sign suffix.
 */
@Override
public void onViewStateRestored(Bundle savedInstanceState) {
    super.onViewStateRestored(savedInstanceState);

    // The most recent sample is the last element of the series.
    Double latest = graphViewData[graphViewData.length - 1].getY();
    mLastTemp.setText(latest.intValue() + "\u00b0");
}

From source file:org.protelis.lang.datatype.impl.ArrayTupleImpl.java

/**
 * Compatibility method to speed up calls made using doubles.
 *
 * @param i
 *            the element position (truncated toward zero to an int, per
 *            {@link Double#intValue()}'s narrowing primitive conversion --
 *            not floored, so negative fractional positions round up)
 * @return the i-th element
 */
public Object get(final Double i) {
    return get(i.intValue());
}

From source file:com.aspose.showcase.qrcodegen.web.api.controller.QRCodeManagementController.java

/**
 * Parses a custom image size of the form "&lt;width&gt;x&lt;height&gt;"
 * (e.g. "200x300") into a Dimension, scaling both sides by PIXEL_MULTIPLIER.
 *
 * @param imgSize the raw size string; may be null or blank
 * @return the scaled Dimension, or null when the input is blank, has too few
 *         parts, or any parsing error occurs
 */
private Dimension geCustomImageSizeDimention(String imgSize) {

    try {
        if (StringUtils.isBlank(imgSize)) {
            return null;
        }

        String[] parts = imgSize.trim().split("x");
        if (parts.length < SIZE_LENGTH) {
            return null;
        }

        Double scaledWidth = Integer.parseInt(parts[0]) * PIXEL_MULTIPLIER;
        Double scaledHeight = Integer.parseInt(parts[1]) * PIXEL_MULTIPLIER;
        return new Dimension(scaledWidth.intValue(), scaledHeight.intValue());

    } catch (Exception e) {
        // Deliberately best-effort: any failure (bad number, etc.) yields null.
        LOGGER.info(e);
        return null;
    }

}

From source file:net.starschema.clouddb.jdbc.list.TreeBuilder.java

/**
 * Makes 26 ^ seqWidth UniqueIds and stores them in
 * UniqueIds as UNIQ_ID_ + the generated chars
 *
 * currently seqWidth is 4 so the ID-s start at
 * UNIQ_ID_AAAA and end at UNIQ_ID_ZZZZ
 *
 * This function runs only one time, to generate all the UNIQ_ID_
 */
private void makeUniqueId() {
    this.nextIdPosition = 0;
    // This is the configurable param
    final int seqWidth = 4;
    final int charSetSize = 26; // 'A' .. 'Z'

    // Total number of IDs: 26 ^ seqWidth (previously computed via boxed
    // Double/Integer; a plain int avoids the deprecated 'new Integer(...)').
    final int total = (int) Math.pow(charSetSize, seqWidth);

    this.UniqueIds = new ArrayList<String>(total);

    // Render each index as a fixed-width base-26 number, most significant
    // letter first. This yields the same lexicographic order
    // (AAAA, AAAB, ..., ZZZZ) as the previous StringBuilder-array
    // implementation without allocating 26^seqWidth builders up front.
    char[] suffix = new char[seqWidth];
    for (int n = 0; n < total; n++) {
        int remainder = n;
        for (int pos = seqWidth - 1; pos >= 0; pos--) {
            suffix[pos] = (char) ('A' + (remainder % charSetSize));
            remainder /= charSetSize;
        }
        this.UniqueIds.add("UNIQ_ID_" + new String(suffix));
    }
}

From source file:dpfmanager.shell.modules.report.util.ReportHtml.java

/**
 * Parse a global report to HTML format.
 *
 * @param outputfile the output file.
 * @param gr         the global report.
 * @param generator  supplies templates and writes the final HTML file.
 */
public void parseGlobal(String outputfile, GlobalReport gr, ReportGenerator generator) {
    String templatePath = "templates/global.html";
    String imagePath = "templates/image.html";
    String newHtmlFolder = outputfile.substring(0, outputfile.lastIndexOf("/"));

    String imagesBody = "";
    String pieFunctions = "";

    // Parse individual Reports
    int index = 0;
    for (IndividualReport ir : gr.getIndividualReports()) {
        if (!ir.containsData())
            continue;
        String imageBody;
        imageBody = generator.readFilefromResources(imagePath);
        // Image: thumbnail of the checked TIFF, or a fallback noise image
        // when conversion fails.
        String imgPath = "html/img/" + new File(ir.getReportPath()).getName() + ".jpg";
        boolean check = tiff2Jpg(ir.getFilePath(), newHtmlFolder + "/" + imgPath);
        if (!check) {
            imgPath = "html/img/noise.jpg";
        }
        imageBody = StringUtils.replace(imageBody, "##IMG_PATH##", encodeUrl(imgPath));

        // Basic
        int percent = ir.calculatePercent();
        imageBody = StringUtils.replace(imageBody, "##PERCENT##", "" + percent);
        imageBody = StringUtils.replace(imageBody, "##INDEX##", "" + index);
        imageBody = StringUtils.replace(imageBody, "##IMG_NAME##", "" + ir.getFileName());

        /**
         * Errors / warnings resume (individual)
         */
        String rowTmpl = "<tr>\n" + "\t\t\t\t\t\t        <td class=\"c1\">##NAME##</td>\n"
                + "\t\t\t\t\t\t        <td class=\"c2 ##ERR_C##\">##ERR_N## errors</td>\n"
                + "\t\t\t\t\t\t        <td class=\"c2 ##WAR_C##\">##WAR_N## warnings</td>\n"
                + "\t\t\t\t\t\t        <td></td>\n" + "\t\t\t\t\t\t    </tr>";
        String rows = "";
        int totalWarnings = 0;
        // One table row per ISO implementation check the report validated.
        for (String iso : ir.getCheckedIsos()) {
            if (ir.hasValidation(iso)) {
                String name = ImplementationCheckerLoader.getIsoName(iso);
                String row = rowTmpl;
                int errorsCount = ir.getNErrors(iso);
                int warningsCount = ir.getNWarnings(iso);
                totalWarnings += warningsCount;
                row = StringUtils.replace(row, "##NAME##", name);
                row = StringUtils.replace(row, "##ERR_N##", "" + errorsCount);
                row = StringUtils.replace(row, "##WAR_N##", "" + warningsCount);
                if (errorsCount > 0) {
                    row = StringUtils.replace(row, "##ERR_C##", "error");
                } else {
                    row = StringUtils.replace(row, "##ERR_C##", "");
                }
                if (warningsCount > 0) {
                    row = StringUtils.replace(row, "##WAR_C##", "warning");
                } else {
                    row = StringUtils.replace(row, "##WAR_C##", "");
                }
                rows += row;
            }
        }
        imageBody = StringUtils.replace(imageBody, "##TABLE_RESUME_IMAGE##", rows);
        imageBody = StringUtils.replace(imageBody, "##HREF##",
                "html/" + encodeUrl(new File(ir.getReportPath()).getName() + ".html"));

        /**
         * Percent info
         */
        if (percent == 100) {
            imageBody = StringUtils.replace(imageBody, "##CLASS##", "success");
            imageBody = StringUtils.replace(imageBody, "##RESULT##", "Passed");
            if (totalWarnings > 0) {
                imageBody = StringUtils.replace(imageBody, "##DISPLAY_WAR##", "inline-block");
            } else {
                imageBody = StringUtils.replace(imageBody, "##DISPLAY_WAR##", "none");
            }
        } else {
            imageBody = StringUtils.replace(imageBody, "##CLASS##", "error");
            imageBody = StringUtils.replace(imageBody, "##RESULT##", "Failed");
            imageBody = StringUtils.replace(imageBody, "##DISPLAY_WAR##", "none");
        }

        /**
         * Percent chart
         */
        int angle = percent * 360 / 100;
        int reverseAngle = 360 - angle;
        String functionPie = "plotPie('pie-" + index + "', " + angle + ", " + reverseAngle;
        if (percent < 100) {
            functionPie += ", '#CCCCCC', 'red'); ";
        } else {
            functionPie += ", '#66CC66', '#66CC66'); ";
        }
        pieFunctions += functionPie;

        imagesBody += imageBody;
        index++;
    }

    // Parse the sumary report numbers
    String htmlBody;
    htmlBody = generator.readFilefromResources(templatePath);
    // Global pass percentage, truncated to an int for display.
    // NOTE(review): if getReportsCount() is 0 this is NaN and intValue()
    // yields 0 -- confirm an empty report set is impossible here.
    Double doub = 1.0 * gr.getAllReportsOk() / gr.getReportsCount() * 100.0;
    int globalPercent = doub.intValue();
    htmlBody = StringUtils.replace(htmlBody, "##IMAGES_LIST##", imagesBody);
    htmlBody = StringUtils.replace(htmlBody, "##PERCENT##", "" + globalPercent);
    String scount = gr.getReportsCount() + " ";
    if (gr.getReportsCount() == 1)
        scount += "file";
    else
        scount += "files";
    htmlBody = StringUtils.replace(htmlBody, "##COUNT##", "" + scount);
    htmlBody = StringUtils.replace(htmlBody, "##OK##", "" + gr.getAllReportsOk());

    /**
     * Conforms table (all)
     */
    String rows = "";
    for (String iso : gr.getCheckedIsos()) {
        if (gr.getIsos().contains(iso) || gr.getReportsOk(iso) == gr.getReportsCount()) {
            rows += makeConformsRow(gr, iso, true);
        }
    }
    htmlBody = StringUtils.replace(htmlBody, "##TABLE_RESUME##", rows);

    htmlBody = StringUtils.replace(htmlBody, "##KO##", "" + gr.getAllReportsKo());
    if (gr.getAllReportsOk() >= gr.getAllReportsKo()) {
        htmlBody = StringUtils.replace(htmlBody, "##OK_C##", "success");
        htmlBody = StringUtils.replace(htmlBody, "##KO_C##", "info-white");
    } else {
        htmlBody = StringUtils.replace(htmlBody, "##OK_C##", "info-white");
        htmlBody = StringUtils.replace(htmlBody, "##KO_C##", "error");
    }

    // Chart: the pie helper is always called with the larger slice first.
    int angleG = globalPercent * 360 / 100;
    int reverseAngleG = 360 - angleG;
    String functionPie = "";
    if (angleG > reverseAngleG) {
        functionPie = "plotPie('pie-global', " + angleG + ", " + reverseAngleG;
        if (gr.getAllReportsOk() >= gr.getAllReportsKo()) {
            functionPie += ", '#66CC66', '#F2F2F2'); ";
        } else {
            functionPie += ", '#F2F2F2', 'red'); ";
        }
    } else {
        functionPie = "plotPie('pie-global', " + reverseAngleG + ", " + angleG;
        if (gr.getAllReportsOk() >= gr.getAllReportsKo()) {
            functionPie += ", '#F2F2F2', '#66CC66'); ";
        } else {
            functionPie += ", 'red', '#F2F2F2'); ";
        }
    }
    pieFunctions += functionPie;

    // All charts calls
    htmlBody = StringUtils.replace(htmlBody, "##PLOT##", pieFunctions);

    // TO-DO
    htmlBody = StringUtils.replace(htmlBody, "##OK_PC##", "0");
    htmlBody = StringUtils.replace(htmlBody, "##OK_EP##", "0");
    // END TO-DO

    htmlBody = htmlBody.replaceAll("\\.\\./", "");
    generator.writeToFile(outputfile, htmlBody);
}
}