Example usage for weka.classifiers.evaluation Evaluation Evaluation

List of usage examples for weka.classifiers.evaluation Evaluation Evaluation

Introduction

On this page you can find example usage for weka.classifiers.evaluation Evaluation Evaluation.

Prototype

public Evaluation(Instances data) throws Exception 

Source Link

Document

Initializes all the counters for the evaluation.

Usage

From source file:clasificador.RedNeuronal.java

public void Entrenamiento(String paramNN) {
    // Trains the multilayer-perceptron network using the given Weka option string,
    // persists the trained model to "TrainMLP.train", and prints a training-set
    // evaluation summary plus confusion matrix.
    //
    // Fixes vs. original: try-with-resources guarantees the reader is closed even
    // when an exception is thrown (original closed it only on the success path);
    // removed a dead SerializedClassifier that was configured but never used;
    // collapsed three identical catch blocks into one.
    try (FileReader trainReader = new FileReader(
            new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\libro.arff"))) {
        // Load the training instances; the class attribute is the last column
        // (controlled environment: labels are always at the end).
        Instances trainInstance = new Instances(trainReader);
        trainInstance.setClassIndex(trainInstance.numAttributes() - 1);

        // Build the multilayer perceptron configured from the Weka option string.
        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions(paramNN));
        mlp.buildClassifier(trainInstance);

        // Persist the trained model so testing()/prediccion() can reload it later.
        Debug.saveToFile("TrainMLP.train", mlp);

        // Evaluate on the training data itself (optimistic estimate; no holdout here).
        Evaluation evaluarEntrenamiento = new Evaluation(trainInstance);
        evaluarEntrenamiento.evaluateModel(mlp, trainInstance);
        System.out.println(evaluarEntrenamiento.toSummaryString("resultado", false));
        System.out.println(evaluarEntrenamiento.toMatrixString("*****************Matriz de confusion*******"));
    } catch (Exception ex) {
        // FileNotFoundException / IOException / Exception were all handled
        // identically in the original, so a single catch suffices.
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:clasificador.RedNeuronal.java

public void testing() {
    // Reloads the MLP model serialized by Entrenamiento() and evaluates it on the
    // held-out test set (librotest.arff), printing a summary and confusion matrix.
    //
    // Fixes vs. original: the FileReader was never closed (resource leak) — now
    // managed by try-with-resources; IOException was handled with printStackTrace
    // while other exceptions used the logger — now logged consistently.
    try (FileReader testReader = new FileReader(
            new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\librotest.arff"))) {
        // Class attribute is the last column, matching the training data layout.
        Instances testInstance = new Instances(testReader);
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Load the classifier previously saved by Entrenamiento().
        SerializedClassifier clasificador = new SerializedClassifier();
        clasificador.setModelFile(new File("TrainMLP.train"));
        Classifier clasificadorEstandar = clasificador.getCurrentModel();

        Evaluation evalTest = new Evaluation(testInstance);
        evalTest.evaluateModel(clasificadorEstandar, testInstance);

        System.out.println(evalTest.toSummaryString("resultado:", false));
        System.out.println(evalTest.toMatrixString("*****************Matriz de confusion*******"));
    } catch (Exception ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:clasificador.RedNeuronal.java

public void prediccion() {
    // Reloads the serialized MLP and prints the predicted class value for every
    // instance in libro1.arff.
    //
    // Fixes vs. original: evaluateModel() was called twice back to back (a full
    // duplicate evaluation pass) — now called once and its return value used;
    // the finally block would NPE on testReader.close() if the FileReader
    // constructor threw — try-with-resources handles both problems.
    try (FileReader testReader = new FileReader(
            new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\libro1.arff"))) {
        Instances testInstance = new Instances(testReader);
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Load the classifier previously saved by Entrenamiento().
        SerializedClassifier clasificador = new SerializedClassifier();
        clasificador.setModelFile(new File("TrainMLP.train"));
        Classifier clasificadorEstandar = clasificador.getCurrentModel();

        // evaluateModel returns one predicted class value per instance.
        Evaluation evalTest = new Evaluation(testInstance);
        double[] valores = evalTest.evaluateModel(clasificadorEstandar, testInstance);

        for (double valor : valores) {
            System.out.println("se predice:     " + valor + "\n");
        }
    } catch (Exception ex) {
        // FileNotFoundException / IOException / Exception were all handled
        // identically in the original, so a single catch suffices.
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:cn.ict.zyq.bestConf.bestConf.BestConf.java

License:Open Source License

public static void testCOMT2() throws Exception {
    // Smoke test for COMT2: train on trainingBestConf0.arff, report training-set
    // performance, save the best sampled configuration, and write out the training
    // set with class values replaced by the model's predictions.
    BestConf bestconf = new BestConf();
    Instances trainingSet = DataIOFile.loadDataFromArffFile("data/trainingBestConf0.arff");
    trainingSet.setClassIndex(trainingSet.numAttributes() - 1);

    // Sample configuration points via Latin-hypercube initialization, then append
    // the class attribute so the sample set's structure matches the training set.
    Instances samplePoints = LHSInitializer.getMultiDimContinuous(bestconf.getAttributes(),
            InitialSampleSetSize, false);
    samplePoints.insertAttributeAt(trainingSet.classAttribute(), samplePoints.numAttributes());
    samplePoints.setClassIndex(samplePoints.numAttributes() - 1);

    COMT2 comt = new COMT2(samplePoints, COMT2Iteration);

    comt.buildClassifier(trainingSet);

    // Evaluate on the training data itself — an optimistic estimate, fine for a
    // smoke test. Summary goes to stderr, separate from regular output.
    Evaluation eval = new Evaluation(trainingSet);
    eval.evaluateModel(comt, trainingSet);
    System.err.println(eval.toSummaryString());

    // Persist the sampled instance with the (possibly) maximal predicted Y.
    // NOTE(review): file has .arff extension but is written in XRFF format.
    Instance best = comt.getInstanceWithPossibleMaxY(samplePoints.firstInstance());
    Instances bestInstances = new Instances(trainingSet, 2);
    bestInstances.add(best);
    DataIOFile.saveDataToXrffFile("data/trainingBestConf_COMT2.arff", bestInstances);

    // Now we output the training set with the class value updated as the
    // predicted value (last array slot is the class, per setClassIndex above).
    Instances output = new Instances(trainingSet, trainingSet.numInstances());
    Enumeration<Instance> enu = trainingSet.enumerateInstances();
    while (enu.hasMoreElements()) {
        Instance ins = enu.nextElement();
        double[] values = ins.toDoubleArray();
        values[values.length - 1] = comt.classifyInstance(ins);
        output.add(ins.copy(values));
    }
    DataIOFile.saveDataToXrffFile("data/trainingBestConf0_predict.xrff", output);
}

From source file:cyber009.udal.functions.StatisticalAnalysis.java

/**
 * /*from  w ww.ja  v  a 2s .  co  m*/
 * @param classifier
 * @param trainingDataSet
 * @param unLabelSet
 * @param classTarget
 * @return 
 */
public double posteriorDistribution(Classifier classifier, Instances trainingDataSet, Instance unLabelSet,
        double classTarget) {
    double prDistribution = 0.0D;
    double classPradic = 0.0D;
    Evaluation evaluation = null;
    try {
        evaluation = new Evaluation(trainingDataSet);
        evaluation.evaluateModelOnceAndRecordPrediction(classifier, unLabelSet);
        //evaluation.pctCorrect(); // must be show for correctness  ----------------------
        classPradic = evaluation.meanAbsoluteError();
        prDistribution = classPradic * probabilityOfTargerClass(trainingDataSet, classTarget);
        //            System.out.println(classPradic);
    } catch (Exception ex) {
        Logger.getLogger(StatisticalAnalysis.class.getName()).log(Level.SEVERE, null, ex);
    }
    return prDistribution;
}

From source file:gate.plugin.learningframework.engines.EngineWeka.java

@Override
public Object evaluateHoldout(InstanceList instances, double portion, int repeats, String parms) {
    // Get the parameters 
    // -s/-seed <int> : seed, default 0
    // -S/-nostratify : switch off stratification if we evaluate classification
    Parms opts = new Parms(parms, "s:seed:i", "S:nostratify:b");
    int seed = (int) opts.getValueOrElse("seed", 0);
    if (algorithm instanceof AlgorithmRegression) {
        throw new UnsupportedOperationException("Weka holdout eval for regression not supported yet.");
    } else {/*from  www .j a  v a  2s  .com*/
        // must be classification algorithm then!
        weka.core.Instances all = new CorpusRepresentationWeka(corpusRepresentationMallet)
                .getRepresentationWeka();
        boolean noStratify = (boolean) opts.getValueOrElse("nostratify", 0);
        Random rand = new Random(seed);
        all.randomize(rand);
        boolean stratified = !noStratify;
        // TODO: not sure if/how we can do stratification for holdout evaluation
        // TODO: there must be a better way to do the splitting too!
        // TODO: if there is no better way to split, maybe do out outside for
        // TODO: how to implement repeats?
        if (repeats != 1) {
            throw new GateRuntimeException("Only repeats == 1 supported yet");
        }
        // both regression and classification?
        int trainSize = (int) Math.round(all.numInstances() * portion);
        int testSize = all.numInstances() - trainSize;
        Instances train = new Instances(all, 0, trainSize);
        Instances test = new Instances(all, trainSize, testSize);
        Classifier classifier = (Classifier) trainer;
        try {
            classifier.buildClassifier(train);
        } catch (Exception ex) {
            throw new GateRuntimeException("Error during training of Weka classifier", ex);
        }
        Evaluation eval = null;
        try {
            eval = new Evaluation(train);
        } catch (Exception ex) {
            throw new GateRuntimeException("Could not create Evaluation object", ex);
        }
        try {
            eval.evaluateModel(classifier, test);
        } catch (Exception ex) {
            throw new GateRuntimeException("Error evaluating the classifier", ex);
        }
        System.out.println("Evaluation result:\n" + eval);
        return eval;
    }
}

From source file:gate.plugin.learningframework.engines.EngineWeka.java

@Override
public Object evaluateXVal(InstanceList instances, int k, String parms) {
    // k-fold cross-validation of the configured Weka classifier over the whole
    // corpus; prints the result and returns the Weka Evaluation object.
    //
    // Recognized parms:
    //   -s/-seed <int> : random seed, default 0
    //   -S/-nostratify : parsed but not applied here (crossValidateModel does
    //                    its own stratification internally)
    Parms opts = new Parms(parms, "s:seed:i", "S:nostratify:b");
    int seed = (int) opts.getValueOrElse("seed", 0);
    boolean noStratify = (boolean) opts.getValueOrElse("nostratify", 0);
    Random rand = new Random(seed);
    Instances all = new CorpusRepresentationWeka(corpusRepresentationMallet).getRepresentationWeka();
    Evaluation eval = null;
    try {
        eval = new Evaluation(all);
    } catch (Exception ex) {
        throw new GateRuntimeException("Could not create evaluation object", ex);
    }
    Classifier classifier = (Classifier) trainer;
    try {
        eval.crossValidateModel(classifier, all, k, rand);
    } catch (Exception ex) {
        throw new GateRuntimeException("Error running cross validation", ex);
    }
    // Fixed typo in user-facing output ("Crossvaliation").
    System.out.println("Crossvalidation evaluation result:\n" + eval);
    return eval;
}

From source file:gov.va.chir.tagline.TagLineEvaluator.java

License:Open Source License

public void evaluate(final ClassifierType type, final String... options) throws Exception {
    // Runs document-grouped cross-validation with the requested classifier type:
    // folds are built from document IDs (so lines of one document never span
    // train and test) and the combined results are stored in evaluationSummary.
    //
    // @param type    which base classifier to use (required)
    // @param options optional Weka option strings passed to the classifier
    // @throws Exception on any Weka filter/training/evaluation failure
    Classifier model = null;

    if (type == null) {
        throw new IllegalArgumentException("Classifier type must be specified");
    }

    // Map the enum to a concrete Weka classifier instance.
    if (type.equals(ClassifierType.J48)) {
        model = new J48();
    } else if (type.equals(ClassifierType.LMT)) {
        model = new LMT();
    } else if (type.equals(ClassifierType.RandomForest)) {
        model = new RandomForest();
    } else if (type.equals(ClassifierType.SVM)) {
        model = new LibSVM();
    } else {
        throw new IllegalArgumentException(String.format("Classifier type not supported (%s)", type));
    }

    if (model != null) {
        // Set classifier options (only AbstractClassifier exposes setOptions).
        if (options != null && options.length > 0) {
            if (model instanceof AbstractClassifier) {
                ((AbstractClassifier) model).setOptions(options);
            }
        }

        fc.setClassifier(model);

        // Folds are keyed on the document-ID attribute so all lines of a
        // document land in the same fold.
        final Attribute attrDocId = instances.attribute(DatasetUtil.DOC_ID);

        if (attrDocId == null) {
            throw new IllegalStateException(String.format("%s attribute must exist", DatasetUtil.DOC_ID));
        }
        final List<Set<Object>> foldDocIds = getFoldDocIds(attrDocId);

        final RemoveWithValues rmv = new RemoveWithValues();

        // RemoveWithValues filter is not zero-based!
        rmv.setAttributeIndex(String.valueOf(attrDocId.index() + 1));
        rmv.setModifyHeader(false);

        // A single Evaluation accumulates statistics across all folds.
        final Evaluation eval = new Evaluation(instances);

        // Perform cross-validation
        for (int i = 0; i < numFolds; i++) {
            rmv.setNominalIndicesArr(getAttributeIndexValues(attrDocId, foldDocIds.get(i)));

            // Train split: remove the instances belonging to fold i's documents.
            rmv.setInvertSelection(false);
            rmv.setInputFormat(instances); // Must be called AFTER all options
            final Instances train = Filter.useFilter(instances, rmv);

            // Test split: keep only fold i's documents (inverted selection).
            rmv.setInvertSelection(true);
            rmv.setInputFormat(instances); // Must be called AFTER all options
            final Instances test = Filter.useFilter(instances, rmv);

            fc.buildClassifier(train);
            eval.evaluateModel(fc, test);
        }

        // Combined summary, confusion matrix, and per-class details.
        evaluationSummary = String.format("%s%s%s%s%s", eval.toSummaryString(),
                System.getProperty("line.separator"), eval.toMatrixString(),
                System.getProperty("line.separator"), eval.toClassDetailsString());
    }
}