Example usage for weka.classifiers.evaluation.output.prediction PlainText printClassifications

List of usage examples for weka.classifiers.evaluation.output.prediction PlainText printClassifications

Introduction

On this page you can find an example usage for weka.classifiers.evaluation.output.prediction PlainText printClassifications.

Prototype

public void printClassifications(Classifier classifier, DataSource testset) throws Exception 

Source Link

Document

Prints the classifications to the buffer.

Usage

From source file:org.scify.NewSumServer.Server.MachineLearning.labelTagging.java

License:Apache License

/**
 * Find the recommend labels from classifier
 *
 * @return the recommend labels/*from  w  w  w  .  j a  v a2 s  .  c o m*/
 */
public static String recommendation(INSECTDB file, String text) {

    String labelList = "-none-";
    //create IVector
    String Ivector = vector.labellingVector(text, file); // take the similarity vectors for each class graph

    try {

        Instances dataTrainSet = dataSets.trainingSet(file); //take the train  dataset 
        Instances dataLabelSet = dataSets.labelingSet(file, Ivector);//take tha labe  dataset
        ArffSaver saver = new ArffSaver();
        saver.setInstances(dataTrainSet);
        saver.setFile(new File("./data/dataTrainSet.arff"));
        saver.writeBatch();

        ArffSaver saver2 = new ArffSaver();
        saver2.setInstances(dataLabelSet);
        saver2.setFile(new File("./data/dataLabelSet.arff"));
        saver2.writeBatch();

        File temp = File.createTempFile("exportFile", null);
        //TODO: creat classifier

        //            String option = "-S 2 -K 2 -D 3 -G 0.0 -R 0.0 -N 0.5 -M 40.0 -C 1.0 -E 0.001 -P 0.1"; // classifier options
        //            String[] options = option.split("\\s+");

        if (dataTrainSet.classIndex() == -1) {
            dataTrainSet.setClassIndex(dataTrainSet.numAttributes() - 1);
        }

        // Create a  classifier LibSVM

        //            NaiveBayes nb = new NaiveBayes();
        //            RandomForest nb = new RandomForest();
        J48 nb = new J48();
        //            nb.setOptions(options);
        nb.buildClassifier(dataTrainSet);

        // End train method

        if (dataLabelSet.classIndex() == -1) {
            dataLabelSet.setClassIndex(dataLabelSet.numAttributes() - 1);
        }

        StringBuffer writer = new StringBuffer();

        PlainText output = new PlainText();
        output.setBuffer(writer);
        output.setHeader(dataLabelSet);
        output.printClassifications(nb, dataLabelSet);

        //            PrintStream ps2 = new PrintStream(classGname);
        //            ps2.print(writer.toString());
        //            ps2.close();
        PrintStream ps = new PrintStream(temp); //Add to temp file the results of classifying
        ps.print(writer.toString());
        ps.close();

        //TODO: export result
        //            labelList = result(temp);                                                    //if result is true adds the current class graph name in label list
        labelList = result(temp) + " --------->> " + text; //if result is true adds the current class graph name in label list
        Utilities.appendToFile(labelList);

    } catch (Exception ex) {
        Logger.getLogger(labelTagging.class.getName()).log(Level.SEVERE, null, ex);
    }

    return labelList;
}