Example usage for org.apache.lucene.analysis.standard ClassicTokenizer setMaxTokenLength

List of usage examples for org.apache.lucene.analysis.standard ClassicTokenizer setMaxTokenLength

Introduction

On this page you can find example usage for org.apache.lucene.analysis.standard ClassicTokenizer setMaxTokenLength.

Prototype

public void setMaxTokenLength(int length) 

Source Link

Document

Set the max allowed token length.

Usage

From source file: org.apache.solr.analysis.ClassicTokenizerFactory.java

License: Apache License

/**
 * Builds a {@link ClassicTokenizer} over the supplied reader and applies the
 * factory's configured maximum token length before returning it.
 *
 * @param input the character stream to tokenize
 * @return a configured {@link ClassicTokenizer}
 */
public Tokenizer create(Reader input) {
    final ClassicTokenizer result = new ClassicTokenizer(luceneMatchVersion, input);
    result.setMaxTokenLength(maxTokenLength);
    return result;
}

From source file: org.elasticsearch.analysis.common.ClassicTokenizerFactory.java

License: Apache License

/**
 * Creates a fresh {@link ClassicTokenizer} configured with this factory's
 * maximum token length.
 *
 * @return a new, configured {@link ClassicTokenizer}
 */
@Override
public Tokenizer create() {
    final ClassicTokenizer classicTokenizer = new ClassicTokenizer();
    classicTokenizer.setMaxTokenLength(maxTokenLength);
    return classicTokenizer;
}

From source file: org.geotoolkit.lucene.analysis.standard.ClassicAnalyzer.java

License: Apache License

/**
 * Assembles the analysis chain for a field: a {@link ClassicTokenizer}
 * followed by {@link ClassicFilter}, lower-casing, and stop-word removal.
 * The returned components re-apply the analyzer's current maximum token
 * length each time a new reader is attached.
 *
 * @param fieldName the field being analyzed (not used by this implementation)
 * @return the tokenizer/filter chain wrapped as {@code TokenStreamComponents}
 */
@Override
protected TokenStreamComponents createComponents(final String fieldName) {
    final ClassicTokenizer tokenizer = new ClassicTokenizer();
    tokenizer.setMaxTokenLength(maxTokenLength);
    final TokenStream chain =
            new StopFilter(new LowerCaseFilter(new ClassicFilter(tokenizer)), stopwords);
    return new TokenStreamComponents(tokenizer, chain) {
        @Override
        protected void setReader(final Reader reader) {
            // Pick up any max-token-length change made since construction.
            tokenizer.setMaxTokenLength(ClassicAnalyzer.this.maxTokenLength);
            super.setReader(reader);
        }
    };
}