Example usage for edu.stanford.nlp.optimization QNMinimizer useOWLQN

List of usage examples for edu.stanford.nlp.optimization QNMinimizer useOWLQN

Introduction

In this page you can find the example usage for edu.stanford.nlp.optimization QNMinimizer useOWLQN.

Prototype

public void useOWLQN(boolean use, double lambda)

To view the source code for edu.stanford.nlp.optimization QNMinimizer useOWLQN, click the Source Link below.

Click Source Link

Usage

From source file:hofastcrf.HighOrderFastCRF.java

License:Open Source License

/**
 * Executes the training: builds the internal data set from the raw observation
 * sequences, minimizes the objective with a quasi-Newton method (OWL-QN when L1
 * regularization is requested), prunes zero-weight features, and stores the
 * resulting model in {@code modelData} before dumping the features to disk.
 *
 * @param observationSequenceList the list of the observations
 * @param featureTemplateGenerator the feature template generator that generates feature templates from the observations
 * @param maxLabelLength the maximum length of the feature labels
 * @param maxIters the maximum iteration count passed to the minimizer
 * @param concurrency the concurrency level handed to the objective {@code Function}
 * @param useL1Regularization true if use the L1 regularization, false if use the L2 regularization
 * @param regularizationCoefficient regularization coefficient (either for L1 or L2)
 * @param epsilonForConvergence convergence tolerance passed to the minimizer
 * @throws IOException if writing the feature dump ("features.txt") fails
 */
public void train(List<ObservationSequence<T>> observationSequenceList,
        FeatureTemplateGenerator<T> featureTemplateGenerator, int maxLabelLength, int maxIters, int concurrency,
        boolean useL1Regularization, double regularizationCoefficient, double epsilonForConvergence)
        throws IOException {
    // Convert the raw observations into the internal data-set representation.
    ObservationSet<T> observationSet = new ObservationSet<T>(observationSequenceList);
    Map<String, Integer> labelMap = observationSet.generateLabelMap();
    DataSet dataSet = observationSet.generateDataSet(featureTemplateGenerator, labelMap, maxLabelLength);
    Map<Feature, Integer> featureCountMap = dataSet.generateFeatureCountMap();
    Map<FeatureTemplate, List<Feature>> featureTemplateToFeatureMap = new HashMap<FeatureTemplate, List<Feature>>();

    List<Feature> featureList = new ArrayList<Feature>();

    if (DebugInfoManager.getInstance().getDebugMode()) {
        // NOTE(review): featureList is still empty at this point; the debug
        // entry appears to use only the (reversed) labelMap — confirm.
        HighOrderCRFData data = new HighOrderCRFData(featureList, labelMap);
        DebugInfoManager.getInstance().setDebugData("ReversedLabelMap", data.getReversedLabelMap());
    }
    int[] featureCountArray = new int[featureCountMap.size()];

    // Flatten the feature-count map into a parallel list/array and group the
    // features by the template that generated them.
    int count = 0;
    for (Map.Entry<Feature, Integer> entry : featureCountMap.entrySet()) {
        Feature f = entry.getKey();
        featureList.add(f);
        featureCountArray[count] = entry.getValue();

        FeatureTemplate ft = f.createFeatureTemplate();
        // Single lookup instead of containsKey() + put() + get().
        List<Feature> featuresForTemplate = featureTemplateToFeatureMap.get(ft);
        if (featuresForTemplate == null) {
            featuresForTemplate = new ArrayList<Feature>();
            featureTemplateToFeatureMap.put(ft, featuresForTemplate);
        }
        featuresForTemplate.add(f);
        ++count;
    }

    List<PatternSetSequence> patternSetSequenceList = dataSet
            .generatePatternSetSequenceList(featureTemplateToFeatureMap);

    QNMinimizer qn = new QNMinimizer();
    if (useL1Regularization) {
        // OWL-QN applies the L1 penalty inside the minimizer itself, so the
        // coefficient handed to the objective Function is zeroed to avoid
        // regularizing twice.
        qn.useOWLQN(true, regularizationCoefficient);
        regularizationCoefficient = 0.0;
    }

    Function df = new Function(patternSetSequenceList, featureList, featureCountArray, concurrency,
            regularizationCoefficient);
    double[] lambda = new double[featureList.size()];
    lambda = qn.minimize(df, epsilonForConvergence, lambda, maxIters);

    // Write the learned weight back into every feature, then keep only the
    // features with a non-zero weight — plus features with an empty
    // observation and a pattern of length 1, which are retained regardless
    // of weight.
    List<Feature> featureListToSave = new ArrayList<Feature>();
    for (int i = 0; i < lambda.length; ++i) {
        Feature feature = featureList.get(i);
        feature.reset(lambda[i]); // hoisted: both branches performed this call
        if (lambda[i] != 0.0 || (feature.obs.isEmpty() && feature.pat.getLength() == 1)) {
            featureListToSave.add(feature);
        }
    }
    modelData = new HighOrderCRFData(featureListToSave, labelMap);
    dumpFeatures("features.txt");
}