List of usage examples for edu.stanford.nlp.classify CrossValidator CrossValidator
public CrossValidator(GeneralDataset<L, F> trainData, int kFold)
From source file: gr.aueb.cs.nlp.wordtagger.classifier.SVMWindows64Factory.java
License: Open Source License
/** * This method will cross validate on the given data and number of folds * to find the optimal C. The scorer is how you determine what to * optimize for (F-score, accuracy, etc). The C is then saved, so that * if you train a classifier after calling this method, that C will be used. *//*from w w w. j a va 2 s . com*/ public void crossValidateSetC(GeneralDataset<L, F> dataset, int numFolds, final Scorer<L> scorer, LineSearcher minimizer) { System.out.println("in Cross Validate"); useAlphaFile = true; boolean oldUseSigmoid = useSigmoid; useSigmoid = true; final CrossValidator<L, F> crossValidator = new CrossValidator<L, F>(dataset, numFolds); final Function<Triple<GeneralDataset<L, F>, GeneralDataset<L, F>, CrossValidator.SavedState>, Double> score = fold -> { GeneralDataset<L, F> trainSet = fold.first(); GeneralDataset<L, F> devSet = fold.second(); alphaFile = (File) fold.third().state; //train(trainSet,true,true); SVMLightClassifier<L, F> classifier = trainClassifierBasic(trainSet); fold.third().state = alphaFile; return scorer.score(classifier, devSet); }; Function<Double, Double> negativeScorer = cToTry -> { C = cToTry; if (verbose) { System.out.print("C = " + cToTry + " "); } Double averageScore = crossValidator.computeAverage(score); if (verbose) { System.out.println(" -> average Score: " + averageScore); } return -averageScore; }; C = minimizer.minimize(negativeScorer); useAlphaFile = false; useSigmoid = oldUseSigmoid; }