Example usage for edu.stanford.nlp.stats Scorer score

List of usage examples for edu.stanford.nlp.stats Scorer score

Introduction

In this page you can find the example usage for edu.stanford.nlp.stats Scorer score.

Prototype

public <F> double score(ProbabilisticClassifier<L, F> classifier, GeneralDataset<L, F> data);

Source Link

Usage

From source file: gr.aueb.cs.nlp.wordtagger.classifier.SVMWindows64Factory.java

License: Open Source License

/**
 * Cross-validates on the given dataset over {@code numFolds} folds to find the
 * optimal C via the supplied line searcher. The scorer determines what is
 * optimized for (F-score, accuracy, etc). The resulting C is stored in the
 * {@code C} field, so a classifier trained after calling this method uses it.
 *
 * @param dataset   the full dataset to be split into folds
 * @param numFolds  number of cross-validation folds
 * @param scorer    metric used to evaluate the classifier trained on each fold
 * @param minimizer line searcher that picks the C minimizing the negated score
 */
public void crossValidateSetC(GeneralDataset<L, F> dataset, int numFolds, final Scorer<L> scorer,
        LineSearcher minimizer) {
    if (verbose) {
        System.out.println("in Cross Validate");
    }

    useAlphaFile = true;
    boolean oldUseSigmoid = useSigmoid;
    useSigmoid = true;

    try {
        final CrossValidator<L, F> crossValidator = new CrossValidator<L, F>(dataset, numFolds);
        // Score one fold: train on the fold's training split, evaluate on its dev
        // split. The alpha file is threaded through the fold's saved state so the
        // next evaluation of the same fold can reuse it.
        final Function<Triple<GeneralDataset<L, F>, GeneralDataset<L, F>, CrossValidator.SavedState>, Double> score = fold -> {
            GeneralDataset<L, F> trainSet = fold.first();
            GeneralDataset<L, F> devSet = fold.second();
            alphaFile = (File) fold.third().state;
            SVMLightClassifier<L, F> classifier = trainClassifierBasic(trainSet);
            fold.third().state = alphaFile;
            return scorer.score(classifier, devSet);
        };

        // Negate the averaged cross-validation score so that the line searcher's
        // minimization maximizes the scorer's metric.
        Function<Double, Double> negativeScorer = cToTry -> {
            C = cToTry;
            if (verbose) {
                System.out.print("C = " + cToTry + " ");
            }
            double averageScore = crossValidator.computeAverage(score);
            if (verbose) {
                System.out.println(" -> average Score: " + averageScore);
            }
            return -averageScore;
        };

        C = minimizer.minimize(negativeScorer);
    } finally {
        // Restore the factory flags even if training or the line search throws.
        useAlphaFile = false;
        useSigmoid = oldUseSigmoid;
    }
}

From source file: gr.aueb.cs.nlp.wordtagger.classifier.SVMWindows64Factory.java

License: Open Source License

/**
 * Tunes C on an explicit held-out set: each candidate C trains a classifier on
 * {@code trainSet} and is scored on {@code devSet}. The scorer determines what
 * is optimized for (F-score, accuracy, etc). The resulting C is stored in the
 * {@code C} field, so a classifier trained after calling this method uses it.
 *
 * @param trainSet  data used to train each candidate classifier
 * @param devSet    held-out data used to score each candidate C
 * @param scorer    metric used to evaluate each candidate classifier
 * @param minimizer line searcher that picks the C minimizing the negated score
 */
public void heldOutSetC(final GeneralDataset<L, F> trainSet, final GeneralDataset<L, F> devSet,
        final Scorer<L> scorer, LineSearcher minimizer) {

    useAlphaFile = true;
    boolean oldUseSigmoid = useSigmoid;
    useSigmoid = true;

    try {
        // Negate the held-out score so that the line searcher's minimization
        // maximizes the scorer's metric.
        Function<Double, Double> negativeScorer = cToTry -> {
            C = cToTry;
            SVMLightClassifier<L, F> classifier = trainClassifierBasic(trainSet);
            double score = scorer.score(classifier, devSet);
            return -score;
        };

        C = minimizer.minimize(negativeScorer);
    } finally {
        // Restore the factory flags even if training or the line search throws.
        useAlphaFile = false;
        useSigmoid = oldUseSigmoid;
    }
}