Example usage for weka.core Utils roundDouble

List of usage examples for weka.core Utils roundDouble

Introduction

In this page you can find the example usage for weka.core Utils roundDouble.

Prototype

public static double roundDouble(double value, int afterDecimalPoint) 

Source Link

Document

Rounds a double to the given number of decimal places.

Usage

From source file:Pair.java

License:Open Source License

/**
 * Returns description of the boosted classifier.
 *
 * @return description of the boosted classifier as a string
 *//*from   w w  w . j  a  v  a2  s  .  c  om*/
public String toString() {

    StringBuffer text = new StringBuffer();

    if (m_NumIterationsPerformed == 0) {
        text.append("TrAdaBoost: No model built yet.\n");
    } else if (m_NumIterationsPerformed == 1) {
        text.append("TrAdaBoost: No boosting possible, one classifier used!\n");
        text.append(m_Classifiers[0].toString() + "\n");
    } else {
        text.append("TrAdaBoost: Base classifiers and their weights: \n\n");
        for (int i = 0; i < m_NumIterationsPerformed; i++) {
            text.append(m_Classifiers[i].toString() + "\n\n");
            text.append("Weight: " + Utils.roundDouble(m_Betas[i], 2) + "\n\n");
        }
        text.append("Number of performed Iterations: " + m_NumIterationsPerformed + "\n");
    }

    return text.toString();
}

From source file:gyc.OverBoostM1.java

License:Open Source License

/**
 * Returns description of the boosted classifier. Falls back to describing
 * the ZeroR model when boosting could not build anything.
 *
 * @return description of the boosted classifier as a string
 */
public String toString() {

    // Fallback: boosting failed entirely, so only a ZeroR model exists.
    if (m_ZeroR != null) {
        // Strip the package prefix to get the simple class name for the header.
        String title = this.getClass().getName().replaceAll(".*\\.", "");
        StringBuilder buf = new StringBuilder();
        buf.append(title + "\n");
        // The regex "." matches every character, producing an "=" underline
        // exactly as long as the title.
        buf.append(title.replaceAll(".", "=") + "\n\n");
        buf.append("Warning: No model could be built, hence ZeroR model is used:\n\n");
        buf.append(m_ZeroR.toString());
        return buf.toString();
    }

    // Early returns for the degenerate iteration counts.
    if (m_NumIterationsPerformed == 0) {
        return "AdaBoostM1: No model built yet.\n";
    }
    if (m_NumIterationsPerformed == 1) {
        return "AdaBoostM1: No boosting possible, one classifier used!\n"
                + m_Classifiers[0].toString() + "\n";
    }

    // General case: every base classifier with its weight (2 decimals).
    StringBuilder text = new StringBuilder("AdaBoostM1: Base classifiers and their weights: \n\n");
    for (int i = 0; i < m_NumIterationsPerformed; i++) {
        text.append(m_Classifiers[i].toString()).append("\n\n");
        text.append("Weight: ").append(Utils.roundDouble(m_Betas[i], 2)).append("\n\n");
    }
    text.append("Number of performed Iterations: ").append(m_NumIterationsPerformed).append("\n");
    return text.toString();
}

From source file:j48.ClassifierSplitModel.java

License:Open Source License

/**
 * Prints label for subset index of instances (eg class): the bag's majority
 * class followed by "(weight[/misclassified])".
 *
 * @exception Exception if something goes wrong
 */
public final String dumpLabel(int index, Instances data) throws Exception {

    StringBuffer label = new StringBuffer();
    label.append(((Instances) data).classAttribute().value(m_distribution.maxClass(index)))
            .append(" (")
            .append(Utils.roundDouble(m_distribution.perBag(index), 2));
    if (Utils.gr(m_distribution.numIncorrect(index), 0)) {
        // Only show the misclassified weight when it is nonzero.
        label.append("/").append(Utils.roundDouble(m_distribution.numIncorrect(index), 2));
    }
    label.append(")");

    return label.toString();
}

From source file:j48.GraftSplit.java

License:Open Source License

/**
 * Prints label for subset index of instances (eg class), using the graft
 * distribution, and — for the subset of interest only — appending the
 * ordinary distribution values after a "|" separator.
 *
 * @param index the bag to dump label for
 * @param data to get attribute names and such
 * @return the label as a string
 * @exception Exception if something goes wrong
 */
public final String dumpLabelG(int index, Instances data) throws Exception {

    StringBuffer label = new StringBuffer();

    // The majority class depends on whether this bag is the subset of interest.
    int maxClass = (index == subsetOfInterest()) ? m_maxClass : m_otherLeafMaxClass;
    label.append(((Instances) data).classAttribute().value(maxClass));

    // NOTE(review): perBag is rounded to 1 decimal place here, unlike the 2
    // used elsewhere in this family of methods — preserved as-is.
    label.append(" (").append(Utils.roundDouble(m_graftdistro.perBag(index), 1));
    if (Utils.gr(m_graftdistro.numIncorrect(index), 0)) {
        label.append("/").append(Utils.roundDouble(m_graftdistro.numIncorrect(index), 2));
    }

    // Show the graft values, only if this is subsetOfInterest().
    if (index == subsetOfInterest()) {
        label.append("|").append(Utils.roundDouble(m_distribution.perBag(index), 2));
        if (Utils.gr(m_distribution.numIncorrect(index), 0)) {
            label.append("/").append(Utils.roundDouble(m_distribution.numIncorrect(index), 2));
        }
    }
    label.append(")");
    return label.toString();
}

From source file:meka.classifiers.multilabel.Evaluation.java

License:Open Source License

/**
 * RunExperiment - Build and evaluate a model with command-line options.
 *
 * Parses the option array (note: weka's Utils.getOption/getFlag consume
 * options destructively, so parse order matters), loads the dataset,
 * then either cross-validates (-x) or does a train/test split, prints the
 * results, and terminates the JVM via System.exit on both the success and
 * failure paths.
 *
 * @param   h      multi-label classifier
 * @param   options   command line options
 */
public static void runExperiment(MultiLabelClassifier h, String options[]) throws Exception {

    // Help requested? Print the options and return without exiting.
    if (Utils.getOptionPos('h', options) >= 0) {
        System.out.println("\nHelp requested");
        Evaluation.printOptions(h.listOptions());
        return;
    }

    h.setOptions(options);

    if (h.getDebug())
        System.out.println("Loading and preparing dataset ...");

    // Load Instances from a file
    Instances D_train = loadDataset(options);

    // Keep a reference to the full dataset in case no evaluation split is possible.
    Instances D_full = D_train;

    // Try extract and set a class index from the @relation name
    MLUtils.prepareData(D_train);

    // Override the number of classes with command-line option (optional)
    if (Utils.getOptionPos('C', options) >= 0) {
        int L = Integer.parseInt(Utils.getOption('C', options));
        D_train.setClassIndex(L);
    }

    // If we still haven't found a -C option, we can't continue (we don't know how many labels there are)
    int L = D_train.classIndex();
    if (L <= 0) {
        throw new Exception(
                "[Error] Number of labels not specified.\n\tYou must set the number of labels with the -C option, either inside the @relation tag of the Instances file, or on the command line.");
        // apparently the dataset didn't contain the '-C' flag, check in the command line options ...
    }

    // Randomize (Instances); -s sets the seed, -R enables shuffling.
    int seed = (Utils.getOptionPos('s', options) >= 0) ? Integer.parseInt(Utils.getOption('s', options)) : 0;
    if (Utils.getFlag('R', options)) {
        D_train.randomize(new Random(seed));
    }
    // Threaded evaluation option.
    boolean Threaded = false;
    if (Utils.getOptionPos("Thr", options) >= 0) {
        Threaded = Utils.getFlag("Thr", options);
    }

    // Verbosity option (which measures to calculate/output); default "1".
    String voption = "1";
    if (Utils.getOptionPos("verbosity", options) >= 0) {
        voption = Utils.getOption("verbosity", options);
    }

    // Save for later?
    //String fname = null;
    //if (Utils.getOptionPos('f',options) >= 0) {
    //   fname = Utils.getOption('f',options);
    //}
    // Dump the trained model for later? (-d filename)
    String dname = null;
    if (Utils.getOptionPos('d', options) >= 0) {
        dname = Utils.getOption('d', options);
    }
    // Load a previously trained model from file? (-l filename)
    String lname = null;
    Instances dataHeader = null;
    if (Utils.getOptionPos('l', options) >= 0) {
        lname = Utils.getOption('l', options);
        // The serialized file may contain just the classifier, or the
        // classifier followed by the data header it was trained with.
        Object[] data = SerializationHelper.readAll(lname);
        h = (MultiLabelClassifier) data[0];
        if (data.length > 1)
            dataHeader = (Instances) data[1];
        //Object o[] = SerializationHelper.readAll(lname);
        //h = (MultilabelClassifier)o[0];
    }

    try {

        Result r = null;

        // Threshold option (how predicted confidences are turned into a labelling); default PCut1.
        String top = "PCut1"; // default
        if (Utils.getOptionPos("threshold", options) >= 0)
            top = Utils.getOption("threshold", options);

        if (Utils.getOptionPos('x', options) >= 0) {
            // CROSS-FOLD-VALIDATION

            int numFolds = MLUtils.getIntegerOption(Utils.getOption('x', options), 10); // default 10
            // Check for remaining options
            Utils.checkForRemainingOptions(options);
            r = Evaluation.cvModel(h, D_train, numFolds, top, voption);
            System.out.println(r.toString());
        } else {
            // TRAIN-TEST SPLIT

            Instances D_test = null;

            if (Utils.getOptionPos('T', options) >= 0) {
                // load separate test set
                try {
                    D_test = loadDataset(options, 'T');
                    MLUtils.prepareData(D_test);
                } catch (Exception e) {
                    throw new Exception("[Error] Failed to Load Test Instances from file.", e);
                }
            } else {
                // split training set into train and test sets
                // default split: 60% train / 40% test
                int N_T = (int) (D_train.numInstances() * 0.60);
                if (Utils.getOptionPos("split-percentage", options) >= 0) {
                    // split by percentage
                    double percentTrain = Double.parseDouble(Utils.getOption("split-percentage", options));
                    N_T = (int) Math.round((D_train.numInstances() * (percentTrain / 100.0)));
                } else if (Utils.getOptionPos("split-number", options) >= 0) {
                    // split by number of training instances
                    N_T = Integer.parseInt(Utils.getOption("split-number", options));
                }

                int N_t = D_train.numInstances() - N_T;
                D_test = new Instances(D_train, N_T, N_t);
                D_train = new Instances(D_train, 0, N_T);

            }

            // Invert the split?
            if (Utils.getFlag('i', options)) { //boolean INVERT          = Utils.getFlag('i',options);
                Instances temp = D_test;
                D_test = D_train;
                D_train = temp;
            }

            // Check for remaining options
            Utils.checkForRemainingOptions(options);

            if (h.getDebug())
                System.out.println(":- Dataset -: " + MLUtils.getDatasetName(D_train) + "\tL=" + L
                        + "\tD(t:T)=(" + D_train.numInstances() + ":" + D_test.numInstances() + ")\tLC(t:T)="
                        + Utils.roundDouble(MLUtils.labelCardinality(D_train, L), 2) + ":"
                        + Utils.roundDouble(MLUtils.labelCardinality(D_test, L), 2) + ")");

            if (lname != null) {
                // h is already built, and loaded from a file, test it!
                r = testClassifier(h, D_test);

                String t = top;

                if (top.startsWith("PCut")) {
                    // if PCut is specified we need the training data,
                    // so that we can calibrate the threshold!
                    t = MLEvalUtils.getThreshold(r.predictions, D_train, top);
                }
                r = evaluateModel(h, D_test, t, voption);
            } else {
                // check that both the train and test set are non-empty
                if (D_train.numInstances() > 0 && D_test.numInstances() > 0) {
                    if (Threaded) {
                        r = evaluateModelM(h, D_train, D_test, top, voption);
                    } else {

                        r = evaluateModel(h, D_train, D_test, top, voption);
                    }
                } else {
                    // otherwise just train on full set. Maybe better throw an exception.
                    h.buildClassifier(D_full);

                }
            }

            // @todo, if D_train==null, assume h is already trained
            if (D_train.numInstances() > 0 && D_test.numInstances() > 0) {
                System.out.println(r.toString());
            }
        }

        // Save model to file? (serialized together with a data header)
        if (dname != null) {
            dataHeader = new Instances(D_train, 0);
            SerializationHelper.writeAll(dname, new Object[] { h, dataHeader });
        }

    } catch (Exception e) {
        // NOTE(review): failures print the stack trace and exit(1) — this method
        // is designed for command-line use and kills the JVM.
        e.printStackTrace();
        Evaluation.printOptions(h.listOptions());
        System.exit(1);
    }

    // Terminates the JVM even on success.
    System.exit(0);
}

From source file:meka.classifiers.multilabel.Evaluation.java

License:Open Source License

/**
 * CVModel - Split D into train/test folds, and then train and evaluate on each one.
 * @param   h       a multi-output classifier
 * @param   D          test data Instances
 * @param   numFolds number of folds of CV
 * @param   top        Threshold Option (pertains to multi-label data only)
 * @param   vop       Verbosity Option (which measures do we want to calculate/output)
 * @return   Result   raw prediction data with evaluation statistics included.
 */
public static Result cvModel(MultiLabelClassifier h, Instances D, int numFolds, String top, String vop)
        throws Exception {
    // One raw Result per fold; statistics are computed only after combining.
    Result r_[] = new Result[numFolds];
    for (int i = 0; i < numFolds; i++) {
        Instances D_train = D.trainCV(numFolds, i);
        Instances D_test = D.testCV(numFolds, i);
        if (h.getDebug())
            System.out.println(":- Fold [" + i + "/" + numFolds + "] -: " + MLUtils.getDatasetName(D) + "\tL="
                    + D.classIndex() + "\tD(t:T)=(" + D_train.numInstances() + ":" + D_test.numInstances()
                    + ")\tLC(t:T)=" + Utils.roundDouble(MLUtils.labelCardinality(D_train, D.classIndex()), 2)
                    + ":" + Utils.roundDouble(MLUtils.labelCardinality(D_test, D.classIndex()), 2) + ")");
        r_[i] = evaluateModel(h, D_train, D_test); // <-- should not run stats yet!
    }
    // Merge the per-fold predictions into a single Result.
    Result r = MLEvalUtils.combinePredictions(r_);
    if (h instanceof MultiTargetClassifier || isMT(D)) {
        r.setInfo("Type", "MT-CV");
    } else if (h instanceof MultiLabelClassifier) {
        r.setInfo("Type", "ML-CV");
        try {
            // If 'top' is a plain number, use it directly as the threshold.
            r.setInfo("Threshold", String.valueOf(Double.parseDouble(top)));
        } catch (Exception e) {
            // Non-numeric threshold options (e.g. PCut) are not supported under CV.
            System.err.println(
                    "[WARNING] Automatic threshold calibration not currently enabled for cross-fold validation, setting threshold = 0.5.\n");
            r.setInfo("Threshold", String.valueOf(0.5));
        }
    }
    r.setInfo("Verbosity", vop);
    r.output = Result.getStats(r, vop);
    // Need to reset this because of CV
    // NOTE(review): both counts are set to the full dataset size — under CV every
    // instance appears in both roles across folds; confirm this is intended.
    r.setValue("Number of training instances", D.numInstances());
    r.setValue("Number of test instances", D.numInstances());
    return r;
}

From source file:milk.classifiers.MIBoost.java

License:Open Source License

/**
  * Gets a string describing the classifier.
  */*from   www  . j av  a2 s. c o m*/
  * @return a string describing the classifer built.
  */
 public String toString() {

     if (m_Models == null) {
         return "No model built yet!";
     }
     StringBuffer text = new StringBuffer();
     text.append("MIBoost: number of bins in discretization = " + m_DiscretizeBin + "\n");
     if (m_NumIterations == 0) {
         text.append("No model built yet.\n");
     } else if (m_NumIterations == 1) {
         text.append("No boosting possible, one classifier used: Weight = " + Utils.roundDouble(m_Beta[0], 2)
                 + "\n");
         text.append("Base classifiers:\n" + m_Models[0].toString());
     } else {
         text.append("Base classifiers and their weights: \n");
         for (int i = 0; i < m_NumIterations; i++) {
             text.append("\n\n" + i + ": Weight = " + Utils.roundDouble(m_Beta[i], 2) + "\nBase classifier:\n"
                     + m_Models[i].toString());
         }
     }

     text.append("\n\nNumber of performed Iterations: " + m_NumIterations + "\n");

     return text.toString();
 }

From source file:myclassifier.myC45Pack.C45ClassifierSplitModel.java

/**
 * Prints label for subset index of instances (eg class), formatted as
 * "class (weight[/misclassified])".
 *
 * @exception Exception if something goes wrong
 */
public String printLabel(int index, Instances data) throws Exception {

    // Majority class of this subdataset and its (rounded) total weight.
    String majorityClass = ((Instances) data).classAttribute().value(classDist.maxClass(index));
    double bagWeight = Utils.roundDouble(classDist.w_perSubdataset[index], 2);

    StringBuffer label = new StringBuffer(majorityClass);
    label.append(" (").append(bagWeight);
    if (Utils.gr(classDist.numIncorrect(index), 0)) {
        // Only show the misclassified weight when it is nonzero.
        label.append("/").append(Utils.roundDouble(classDist.numIncorrect(index), 2));
    }
    label.append(")");

    return label.toString();
}

From source file:org.jomics.isoelectricpoint.app.IsoelectricPointTool.java

/**
 * Recomputes the per-pI-bin statistics table from the experimental/theoretical
 * isoelectric points currently shown in table2, then enables plotting.
 *
 * Fix: the original loop accumulated {@code start_pi += step_pi} in doubles;
 * repeated 0.2 additions drift, so the condition {@code start_pi <= end_pi}
 * could silently skip the final bin. Bins are now derived from an integer
 * step index, which is exact.
 *
 * @param e the button ActionEvent (unused)
 */
private void computeStatisticalButtonActionPerformed(ActionEvent e) {
    ArrayList<Double> xData = new ArrayList<Double>();
    ArrayList<Double> yData = new ArrayList<Double>();

    // NOTE(review): rows 0 and rowCount-1 are skipped — presumably header/summary
    // rows; confirm against the table model.
    for (int i = 1; i < table2.getRowCount() - 1; i++) {

        Double pi_experimental = (Double) table2.getValueAt(i, 1);
        Double pi_theoretical = (Double) table2.getValueAt(i, 2);

        xData.add(pi_experimental.doubleValue());
        yData.add(pi_theoretical.doubleValue());

    }

    final double start_pi = 3.0;
    final double end_pi = 10;
    final double step_pi = 0.2;

    DefaultTableModel dm = (DefaultTableModel) table1.getModel();
    dm.getDataVector().removeAllElements();
    ElectrophoreticFraction fraction;

    // Derive each bin's lower bound from an integer index so no bin is lost
    // to floating-point drift; the epsilon absorbs representation error of 0.2.
    int numSteps = (int) Math.floor((end_pi - start_pi) / step_pi + 1e-9);
    for (int k = 0; k <= numSteps; k++) {
        double lo = start_pi + k * step_pi;
        fraction = new ElectrophoreticFraction(xData, yData, lo, lo + step_pi);
        if (fraction.getCountElements() >= 2) {
            // Bin label "lo-hi" plus the fraction's summary statistics.
            Object[] tmpData = new Object[] {
                    (String.valueOf(Utils.roundDouble(lo, 3)).concat("-")
                            .concat(String.valueOf(Utils.roundDouble(lo + step_pi, 3)))),
                    fraction.getCountElements(), fraction.getPiExpFractionMean(),
                    fraction.getPiTeoFractionMean(), fraction.getStandardDev(), fraction.getOutliersPercent() };
            dm.addRow(tmpData);
        }
    }

    // to enable operation-dependent buttons
    this.plotFractionButton.setEnabled(true);
}

From source file:tr.gov.ulakbim.jDenetX.streams.generators.multilabel.MetaMultilabelGenerator.java

License:Open Source License

/**
 * genMatrix.
 * P(i) = matrix[i][i]
 * P(i|j) = matrix[i][j]
 *
 * @param skew the matrix with skew stored along the diagonal
 * @param Z    goal label cardinality
 * @param r    random seed
 */
protected double[][] fillMatrix(double skew[], double Z, Random r) {

    int numLabels = skew.length;
    this.matrix = new double[numLabels][numLabels];

    // Diagonal first: the prior P(i) for each label, rounded to 3 decimals.
    // (All diagonal entries must exist before the off-diagonal pass reads them.)
    for (int i = 0; i < numLabels; i++) {
        matrix[i][i] = Utils.roundDouble(skew[i], 3);
    }

    // Off-diagonal conditionals P(i|j); RNG call order matches the original
    // exactly (one nextDouble per (row,col) pair, then possibly randFromRange).
    for (int row = 0; row < matrix.length; row++) {
        for (int col = row + 1; col < matrix[row].length; col++) {
            if (r.nextDouble() <= (Z * 2.0)) {
                // Label-dependence factor: draw between the two priors.
                matrix[row][col] = randFromRange(min(P(row), P(col)), max(P(row), P(col)));
                matrix[col][row] = (matrix[row][col] * matrix[row][row]) / matrix[col][col]; // Bayes Rule
            } else {
                // Label-exclusivity factor: take the smaller prior.
                matrix[row][col] = min(P(row), P(col));
                matrix[col][row] = (matrix[row][col] * matrix[col][col]) / matrix[row][row]; // Bayes Rule
            }
            // Rounding happens after the Bayes step, as in the original.
            matrix[row][col] = Utils.roundDouble(matrix[row][col], 3);
            matrix[col][row] = Utils.roundDouble(matrix[col][row], 3);
        }
    }

    return matrix;
}