Example usage for weka.core Range Range

List of usage examples for weka.core Range Range

Introduction

On this page you can find example usages of the weka.core Range default constructor.

Prototype


public Range() 

Source Link

Document

Default constructor.

Usage

From source file:FlexDMThread.java

License:Open Source License

/**
 * Trains and evaluates one classifier/parameter combination on one dataset,
 * writes a per-run results file, then appends a line to the shared summary
 * and log files (both semaphore-guarded).  Any failure aborts the run,
 * reports it to stderr and releases the thread-pool permit.
 */
public void run() {
    try {
        // Get the data from the source; the shared reader is semaphore-guarded
        // so only one worker thread loads it at a time.
        FlexDM.getMainData.acquire();
        Instances data = dataset.getSource().getDataSet();
        FlexDM.getMainData.release();

        // Set class attribute if undefined: default to the last attribute.
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        // Assemble the classifier's hyperparameters into a single
        // "name value name value ..." option string.
        StringBuilder paramBuilder = new StringBuilder();
        for (int i = 0; i < classifier.getNumParams(); i++) {
            paramBuilder.append(classifier.getParameter(i).getName()).append(" ");
            if (classifier.getParameter(i).getValue() != null) {
                paramBuilder.append(classifier.getParameter(i).getValue()).append(" ");
            }
        }
        String temp = paramBuilder.toString();

        String[] options = weka.core.Utils.splitOptions(temp);

        // Print to console - experiment is starting.  The inner try/catch
        // falls back to the raw dataset name if getName() misbehaves
        // (e.g. returns null and the substring call NPEs).
        if (temp.equals("")) { //no parameters
            temp = "results_no_parameters";
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with no parameters");
            }
        } else { //parameters
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with parameters " + temp);
            }
        }

        // Create the classifier, apply the options and train it once on the
        // full data.  (The original rebuilt it redundantly inside the
        // testset/xval/leavexval branches below.)
        weka.classifiers.Classifier x = createObject(classifier.getName());
        x.setOptions(options);
        x.buildClassifier(data);

        // Process the test selection, e.g. "testset", "xval 10", "percent 0.66".
        String[] tempTest = dataset.getTest().split("\\s");

        // Evaluation object for training and testing classifiers; predictions
        // are accumulated into the StringBuffer by Evaluation itself.
        Evaluation eval = new Evaluation(data);
        StringBuffer predictions = new StringBuffer();

        // Train and evaluate classifier.
        if (tempTest[0].equals("testset")) { //specified test file
            // Open test file, load data (semaphore-guarded shared reader).
            FlexDM.getTestData.acquire();
            Instances testSet = dataset.getTestFile().getDataSet();
            FlexDM.getTestData.release();

            // Set class attribute if undefined.
            if (testSet.classIndex() == -1) {
                testSet.setClassIndex(testSet.numAttributes() - 1);
            }

            // Evaluate model; {buffer, range, flag} asks Evaluation to record
            // per-instance predictions.
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else if (tempTest[0].equals("xval")) { //Cross validation
            // crossValidateModel trains its own copies of the classifier.
            eval.crossValidateModel(x, data, Integer.parseInt(tempTest[1]), new Random(1), predictions,
                    new Range(), true);
        } else if (tempTest[0].equals("leavexval")) { //Leave one out cross validation
            eval.crossValidateModel(x, data, data.numInstances() - 1, new Random(1), predictions, new Range(),
                    true);
        } else if (tempTest[0].equals("percent")) { //Percentage split of single data set
            // Set training and test sizes from percentage.
            int trainSize = (int) Math.round(data.numInstances() * Double.parseDouble(tempTest[1]));
            int testSize = data.numInstances() - trainSize;

            // Load specified data.
            Instances train = new Instances(data, 0, trainSize);
            Instances testSet = new Instances(data, trainSize, testSize);

            // Retrain on the training split only.
            x.buildClassifier(train);

            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else { //Evaluate on training data
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, data, array);
        }

        // Create datafile for results; try-with-resources guarantees the file
        // is closed even if one of the result printers throws.
        String filename = dataset.getDir() + "/" + classifier.getDirName() + "/" + temp + ".txt";
        try (PrintWriter writer = new PrintWriter(filename, "UTF-8")) {
            // Print classifier, dataset, parameters info to file.
            writer.println("CLASSIFIER: " + classifier.getName() + "\n DATASET: " + dataset.getName()
                    + "\n PARAMETERS: " + temp);

            // Add evaluation string to file.
            writer.println(eval.toSummaryString());
            // Process result options.
            if (checkResults("stats")) { //Classifier statistics
                writer.println(eval.toClassDetailsString());
            }
            if (checkResults("model")) { //The model
                writer.println(x.toString());
            }
            if (checkResults("matrix")) { //Confusion matrix
                writer.println(eval.toMatrixString());
            }
            if (checkResults("entropy")) { //Entropy statistics
                // Options required to obtain entropy stats from a fresh
                // command-line style evaluation run.
                String[] opt = new String[4];
                opt[0] = "-t";
                opt[1] = dataset.getName();
                opt[2] = "-k";
                opt[3] = "-v";

                // Evaluate model.
                String entropy = Evaluation.evaluateModel(x, opt);

                // Grab the relevant section from the textual results (35 is
                // the length of the cross-validation header marker).
                entropy = entropy.substring(entropy.indexOf("=== Stratified cross-validation ===") + 35,
                        entropy.indexOf("=== Confusion Matrix ==="));
                writer.println("=== Entropy Statistics ===");
                writer.println(entropy);
            }
            if (checkResults("predictions")) { //The models predictions
                writer.println("=== Predictions ===\n");
                if (!dataset.getTest().contains("xval")) { //print header of predictions table if req'd
                    writer.println(" inst#     actual  predicted error distribution ()");
                }
                writer.println(predictions.toString()); //print predictions to file
            }
        }

        // Summary file is semaphore controlled to ensure quality.
        // NOTE(review): if writing throws after acquire(), the permit is never
        // released — preserved from the original; confirm intended policy.
        try { //get a permit
              //grab the summary file, write the classifiers details to it
            FlexDM.writeFile.acquire();
            try (PrintWriter p = new PrintWriter(new FileWriter(summary, true))) {
                if (temp.equals("results_no_parameters")) { //change output based on parameters
                    temp = temp.substring(8);
                }

                // Write percent correct, classifier name, dataset name and the
                // full metric suite as one CSV line.
                p.write(dataset.getName() + ", " + classifier.getName() + ", " + temp + ", " + eval.correct() + ", "
                        + eval.incorrect() + ", " + eval.unclassified() + ", " + eval.pctCorrect() + ", "
                        + eval.pctIncorrect() + ", " + eval.pctUnclassified() + ", " + eval.kappa() + ", "
                        + eval.meanAbsoluteError() + ", " + eval.rootMeanSquaredError() + ", "
                        + eval.relativeAbsoluteError() + ", " + eval.rootRelativeSquaredError() + ", "
                        + eval.SFPriorEntropy() + ", " + eval.SFSchemeEntropy() + ", " + eval.SFEntropyGain() + ", "
                        + eval.SFMeanPriorEntropy() + ", " + eval.SFMeanSchemeEntropy() + ", "
                        + eval.SFMeanEntropyGain() + ", " + eval.KBInformation() + ", " + eval.KBMeanInformation()
                        + ", " + eval.KBRelativeInformation() + ", " + eval.weightedTruePositiveRate() + ", "
                        + eval.weightedFalsePositiveRate() + ", " + eval.weightedTrueNegativeRate() + ", "
                        + eval.weightedFalseNegativeRate() + ", " + eval.weightedPrecision() + ", "
                        + eval.weightedRecall() + ", " + eval.weightedFMeasure() + ", "
                        + eval.weightedAreaUnderROC() + "\n");
            }

            //release semaphore
            FlexDM.writeFile.release();
        } catch (InterruptedException e) { //interrupted while waiting for the permit
            Thread.currentThread().interrupt(); // restore the interrupt flag
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        // Output we have successfully finished processing classifier.
        if (temp.equals("no_parameters")) { //no parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with no parameters");
            }
        } else { //with parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with parameters " + temp);
            }
        }

        try { //get a permit
              //grab the log file, write the classifiers details to it
            FlexDM.writeLog.acquire();

            Date date = new Date();
            // "yyyy" is the calendar year; the original "YYYY" is the
            // week-based year, which is wrong around New Year.
            Format formatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");

            try (PrintWriter p = new PrintWriter(new FileWriter(log, true))) {
                if (temp.equals("results_no_parameters")) { //change output based on parameters
                    temp = temp.substring(8);
                }

                //write details to log file
                p.write(dataset.getName() + ", " + dataset.getTest() + ", \"" + dataset.getResult_string() + "\", "
                        + classifier.getName() + ", " + temp + ", " + formatter.format(date) + "\n");
            }

            //release semaphore
            FlexDM.writeLog.release();
        } catch (InterruptedException e) { //interrupted while waiting for the permit
            Thread.currentThread().interrupt(); // restore the interrupt flag
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        s.release();

    } catch (Exception e) {
        // Any failure aborts this run; report and free the thread-pool permit.
        System.err.println("FATAL ERROR OCCURRED: " + e.toString() + "\nClassifier: " + cNum + " - "
                + classifier.getName() + " on dataset " + dataset.getName());
        s.release();
    }

}

From source file:adams.flow.transformer.WekaExperimentEvaluation.java

License:Open Source License

/**
 * Sets up the testing algorithm and returns it.
 *
 * @param data   the experimental data/*from  w w  w  .j a v  a 2s  . c o m*/
 * @return      the configured testing algorithm
 * @throws Exception    If something goes wrong, like testing algorithm of
 *          result matrix cannot be instantiated
 */
protected Tester getTester(Instances data) throws Exception {
    Tester ttester;
    ResultMatrix matrix;
    String tmpStr;
    weka.core.Attribute att;
    List<String> rows;
    List<String> cols;
    String selectedList;
    String selectedListDataset;
    boolean comparisonFieldSet;
    int i;
    String name;
    Range generatorRange;

    ttester = (Tester) Utils.deepCopy(m_Tester);
    matrix = (ResultMatrix) Utils.deepCopy(m_OutputFormat);
    ttester.setInstances(data);
    ttester.setSignificanceLevel(m_Significance);
    ttester.setShowStdDevs(matrix.getShowStdDev());
    ttester.setSortColumn(-1);

    if (!m_SwapRowsAndColumns) {
        rows = determineColumnNames(m_Row, ExperimenterDefaults.getRow());
        cols = determineColumnNames(m_Column, ExperimenterDefaults.getColumn());
    } else {
        cols = determineColumnNames(m_Row, ExperimenterDefaults.getRow());
        rows = determineColumnNames(m_Column, ExperimenterDefaults.getColumn());
    }
    selectedList = "";
    selectedListDataset = "";
    comparisonFieldSet = false;
    for (i = 0; i < data.numAttributes(); i++) {
        name = data.attribute(i).name();

        if (rows.contains(name.toLowerCase())) {
            selectedListDataset += "," + (i + 1);
        } else if (name.toLowerCase().equals("key_run")) {
            ttester.setRunColumn(i);
        } else if (name.toLowerCase().equals("key_fold")) {
            ttester.setFoldColumn(i);
        } else if (cols.contains(name.toLowerCase())) {
            selectedList += "," + (i + 1);
        } else if (name.toLowerCase().contains(ExperimenterDefaults.getComparisonField())) {
            comparisonFieldSet = true;
        } else if ((name.toLowerCase().contains("root_relative_squared_error")) && (!comparisonFieldSet)) {
            comparisonFieldSet = true;
        }
    }
    generatorRange = new Range();
    if (selectedList.length() != 0) {
        try {
            generatorRange.setRanges(selectedList);
        } catch (Exception ex) {
            handleException("Failed to set ranges: " + selectedList, ex);
        }
    }
    ttester.setResultsetKeyColumns(generatorRange);

    generatorRange = new Range();
    if (selectedListDataset.length() != 0) {
        try {
            generatorRange.setRanges(selectedListDataset);
        } catch (Exception ex) {
            handleException("Failed to set dataset ranges: " + selectedListDataset, ex);
        }
    }
    ttester.setDatasetKeyColumns(generatorRange);

    tmpStr = m_ComparisonField.getField();
    att = data.attribute(tmpStr);
    if (att == null)
        throw new Exception("Cannot find comparison field '" + tmpStr + "' in data!");
    ttester.setDisplayedResultsets(null); // all
    ttester.setResultMatrix(matrix);

    return ttester;
}

From source file:milk.gui.experiment.MIResultsPanel.java

License:Open Source License

/**
 * Sets up the panel with a new set of instances, attempting
 * to guess the correct settings for various columns.
 *
 * @param newInstances the new set of results.
 */
public void setInstances(Instances newInstances) {

    m_Instances = newInstances;
    m_TTester.setInstances(m_Instances);
    m_FromLab.setText("Got " + m_Instances.numInstances() + " results");

    // Temporarily remove the configuration listener so that repopulating
    // m_RunCombo below does not fire configuration callbacks mid-update.
    m_RunCombo.removeActionListener(m_ConfigureListener);

    // Clear all column-selection models before repopulating them.
    m_DatasetKeyModel.removeAllElements();
    m_RunModel.removeAllElements();
    m_ResultKeyModel.removeAllElements();
    m_CompareModel.removeAllElements();
    int datasetCol = -1; // NOTE(review): never assigned below — appears unused
    int runCol = -1;
    // Comma-prefixed 1-based column lists, later fed to Range.setRanges.
    String selectedList = "";
    String selectedListDataset = "";
    for (int i = 0; i < m_Instances.numAttributes(); i++) {
        String name = m_Instances.attribute(i).name();
        m_DatasetKeyModel.addElement(name);
        m_RunModel.addElement(name);
        m_ResultKeyModel.addElement(name);
        m_CompareModel.addElement(name);

        // Guess sensible defaults from well-known Experimenter column names.
        if (name.toLowerCase().equals("key_dataset")) {
            m_DatasetKeyList.addSelectionInterval(i, i);
            selectedListDataset += "," + (i + 1);
        } else if ((runCol == -1) && (name.toLowerCase().equals("key_run"))) {
            m_RunCombo.setSelectedIndex(i);
            runCol = i;
        } else if (name.toLowerCase().equals("key_fold")) {
            m_TTester.setFoldColumn(i);
        } else if (name.toLowerCase().equals("key_scheme") || name.toLowerCase().equals("key_scheme_options")
                || name.toLowerCase().equals("key_scheme_version_id")) {
            m_ResultKeyList.addSelectionInterval(i, i);
            selectedList += "," + (i + 1);
        } else if (name.toLowerCase().indexOf("percent_correct") != -1) {
            // Preferred comparison field.
            m_CompareCombo.setSelectedIndex(i);
            //   break;
        } else if ((name.toLowerCase().indexOf("root_relative_squared_error") != -1)
                && (m_CompareCombo.getSelectedIndex() < 0)) {
            // Fallback comparison field when percent_correct was not found yet.
            m_CompareCombo.setSelectedIndex(i);
        }
    }
    // Default the run column to the first attribute when none matched.
    if (runCol == -1) {
        runCol = 0;
    }
    m_DatasetKeyBut.setEnabled(true);
    m_RunCombo.setEnabled(true);
    m_ResultKeyBut.setEnabled(true);
    m_CompareCombo.setEnabled(true);

    // Reconnect the configuration listener now that the models are stable.
    m_RunCombo.addActionListener(m_ConfigureListener);

    // Set up the TTester with the new data.
    m_TTester.setRunColumn(runCol);
    Range generatorRange = new Range();
    if (selectedList.length() != 0) {
        try {
            generatorRange.setRanges(selectedList);
        } catch (Exception ex) {
            ex.printStackTrace();
            System.err.println(ex.getMessage());
        }
    }
    m_TTester.setResultsetKeyColumns(generatorRange);

    // Reuse the variable for the dataset key columns.
    generatorRange = new Range();
    if (selectedListDataset.length() != 0) {
        try {
            generatorRange.setRanges(selectedListDataset);
        } catch (Exception ex) {
            ex.printStackTrace();
            System.err.println(ex.getMessage());
        }
    }
    m_TTester.setDatasetKeyColumns(generatorRange);

    m_SigTex.setEnabled(true);

    setTTester();
}

From source file:milk.gui.experiment.MIResultsPanel.java

License:Open Source License

/**
 * Lets the user pick the resultset key columns via a list-selector dialog
 * and applies the selection to the TTester.
 */
public void setResultKeyFromDialog() {

    ListSelectorDialog dialog = new ListSelectorDialog(null, m_ResultKeyList);

    // Show the modal selector; do nothing unless the user accepted.
    if (dialog.showDialog() != ListSelectorDialog.APPROVE_OPTION) {
        return;
    }

    // Convert the selected 0-based indices into a comma-prefixed 1-based
    // list, e.g. ",1,3,4", as expected by Range.setRanges.
    String rangeList = "";
    for (int sel : m_ResultKeyList.getSelectedIndices()) {
        rangeList += "," + (sel + 1);
    }

    Range keyRange = new Range();
    if (rangeList.length() != 0) {
        try {
            keyRange.setRanges(rangeList);
        } catch (Exception ex) {
            ex.printStackTrace();
            System.err.println(ex.getMessage());
        }
    }
    m_TTester.setResultsetKeyColumns(keyRange);
    setTTester();
}

From source file:milk.gui.experiment.MIResultsPanel.java

License:Open Source License

/**
 * Lets the user pick the dataset key columns via a list-selector dialog
 * and applies the selection to the TTester.
 */
public void setDatasetKeyFromDialog() {

    ListSelectorDialog dialog = new ListSelectorDialog(null, m_DatasetKeyList);

    // Show the modal selector; do nothing unless the user accepted.
    if (dialog.showDialog() != ListSelectorDialog.APPROVE_OPTION) {
        return;
    }

    // Convert the selected 0-based indices into a comma-prefixed 1-based
    // list, e.g. ",1,3,4", as expected by Range.setRanges.
    String rangeList = "";
    for (int sel : m_DatasetKeyList.getSelectedIndices()) {
        rangeList += "," + (sel + 1);
    }

    Range datasetRange = new Range();
    if (rangeList.length() != 0) {
        try {
            datasetRange.setRanges(rangeList);
        } catch (Exception ex) {
            ex.printStackTrace();
            System.err.println(ex.getMessage());
        }
    }
    m_TTester.setDatasetKeyColumns(datasetRange);
    setTTester();
}

From source file:moa.reduction.bayes.OCdiscretize.java

License:Open Source License

/** Constructor - initializes the filter with its default state. */
public OCdiscretize() {
    super();
    // Fresh state: nothing counted, no discretization trees built yet.
    totalCount = 0;
    trees = null;
    this.provideProb = false;
    // Discretize every attribute by default ("first-last" range).
    m_DiscretizeCols = new Range();
    setAttributeIndices("first-last");
}

From source file:SEE.NSGAII.java

License:Open Source License

/**
 * Resets all search options to their default values.
 */
private void resetOptions() {
    // Genetic-search parameter defaults.
    m_popSize = 80;
    m_pCrossover = 0.6;
    m_pMutation = 0.033;
    m_maxGenerations = 30;
    m_reportFrequency = m_maxGenerations; // report once, at the final generation
    m_lookupTableSize = 1001;
    m_seed = 1;
    // Discard any previous population and user-supplied starting set.
    m_population = null;
    m_starting = null;
    m_startRange = new Range();
}

From source file:src.BestFirst.java

License:Open Source License

/**
 * Resets the search options to their default values.
 */
protected void resetOptions() {
    // Search behaviour: forward selection, give up after 2 stale expansions.
    m_searchDirection = SELECTION_FORWARD;
    m_maxStale = 2;
    // No user-supplied starting set.
    m_starting = null;
    m_startRange = new Range();
    // Bookkeeping defaults.
    m_classIndex = -1;
    m_totalEvals = 0;
    m_cacheSize = 1;
    m_debug = false;
}

From source file:src.RandomSearch.java

License:Open Source License

/**
 * Reset options to default values/*from   www .  j a  va 2  s . com*/
 */
protected void resetOptions() {
    m_startRange = new Range();
    m_classIndex = -1;
    m_totalEvals = 0;
    m_cacheSize = 1;
    m_debug = false;
}