Example usage for org.apache.lucene.analysis TokenStream reset

List of usage examples for org.apache.lucene.analysis TokenStream reset

Introduction

On this page you can find an example usage for org.apache.lucene.analysis TokenStream reset.

Prototype

public void reset() throws IOException 

Source Link

Document

This method is called by a consumer before it begins consumption using #incrementToken().

Usage

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=2, expand=true, synonym set "a,aa": inputs beginning with the synonym "a".
 * The expected-stream string appears to encode
 * "term,startOffset,endOffset,positionIncrement" entries joined by '/'
 * (the expanded synonym "aa" is emitted at posInc=0) — format defined by
 * assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testAfterStrSingleSynonymExpand2() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("ab"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1");

    a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abb"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bb,1,3,0");

    a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcd"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/cd,2,4,1");

    a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcde"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/cd,2,4,1/de,3,5,1");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=3, expand=true, synonym set "a,aa": inputs beginning with the synonym "a".
 * Expected entries appear to be "term,startOffset,endOffset,positionIncrement"
 * joined by '/' (synonym "aa" at posInc=0) — format defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testAfterStrSingleSynonymExpand3() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("ab"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1");

    a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abb"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bb,1,3,0");

    a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcd"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0");

    a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcde"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0/cde,2,5,1");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=4, expand=true, synonym set "a,aa": inputs beginning with the synonym "a".
 * Expected entries appear to be "term,startOffset,endOffset,positionIncrement"
 * joined by '/' (synonym "aa" at posInc=0) — format defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testAfterStrSingleSynonymExpand4() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(4, true, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("ab"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1");

    a = new NGramSynonymTokenizerTestAnalyzer(4, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abb"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bb,1,3,0");

    a = new NGramSynonymTokenizerTestAnalyzer(4, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcd"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0");

    a = new NGramSynonymTokenizerTestAnalyzer(4, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcde"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0/bcde,1,5,0");

    a = new NGramSynonymTokenizerTestAnalyzer(4, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdef"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0/bcde,1,5,0/cdef,2,6,1");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=1, expand=false, synonym set "a,aa": non-synonym text sandwiched between
 * two occurrences of the synonym "a". Expected entries appear to be
 * "term,startOffset,endOffset,positionIncrement" joined by '/' — format
 * defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testSandwichStr1() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(1, false, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("aba"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/b,1,2,1/a,2,3,1");

    a = new NGramSynonymTokenizerTestAnalyzer(1, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abba"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/b,1,2,1/b,2,3,1/a,3,4,1");

    a = new NGramSynonymTokenizerTestAnalyzer(1, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcda"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/b,1,2,1/c,2,3,1/d,3,4,1/a,4,5,1");

    a = new NGramSynonymTokenizerTestAnalyzer(1, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdea"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/b,1,2,1/c,2,3,1/d,3,4,1/e,4,5,1/a,5,6,1");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=2, expand=false, synonym set "a,aa": non-synonym text sandwiched between
 * two occurrences of the synonym "a". Expected entries appear to be
 * "term,startOffset,endOffset,positionIncrement" joined by '/' — format
 * defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testSandwichStr2() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(2, false, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("aba"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/b,1,2,1/a,2,3,1");

    a = new NGramSynonymTokenizerTestAnalyzer(2, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abba"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bb,1,3,1/a,3,4,1");

    a = new NGramSynonymTokenizerTestAnalyzer(2, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcda"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bc,1,3,1/cd,2,4,1/a,4,5,1");

    a = new NGramSynonymTokenizerTestAnalyzer(2, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdea"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bc,1,3,1/cd,2,4,1/de,3,5,1/a,5,6,1");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=3, expand=false, synonym set "a,aa": non-synonym text sandwiched between
 * two occurrences of the synonym "a". Expected entries appear to be
 * "term,startOffset,endOffset,positionIncrement" joined by '/' — format
 * defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testSandwichStr3() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(3, false, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("aba"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/b,1,2,1/a,2,3,1");

    a = new NGramSynonymTokenizerTestAnalyzer(3, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abba"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bb,1,3,1/a,3,4,1");

    a = new NGramSynonymTokenizerTestAnalyzer(3, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcda"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bcd,1,4,1/a,4,5,1");

    a = new NGramSynonymTokenizerTestAnalyzer(3, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdea"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bcd,1,4,1/cde,2,5,1/a,5,6,1");

    a = new NGramSynonymTokenizerTestAnalyzer(3, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdefa"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bcd,1,4,1/cde,2,5,1/def,3,6,1/a,6,7,1");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=4, expand=false, synonym set "a,aa": non-synonym text sandwiched between
 * two occurrences of the synonym "a". Expected entries appear to be
 * "term,startOffset,endOffset,positionIncrement" joined by '/' — format
 * defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testSandwichStr4() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(4, false, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("aba"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/b,1,2,1/a,2,3,1");

    a = new NGramSynonymTokenizerTestAnalyzer(4, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abba"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bb,1,3,1/a,3,4,1");

    a = new NGramSynonymTokenizerTestAnalyzer(4, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcda"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bcd,1,4,1/a,4,5,1");

    a = new NGramSynonymTokenizerTestAnalyzer(4, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdea"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bcde,1,5,1/a,5,6,1");

    a = new NGramSynonymTokenizerTestAnalyzer(4, false, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdefa"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/bcde,1,5,1/cdef,2,6,1/a,6,7,1");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=1, expand=true, synonym set "a,aa": non-synonym text sandwiched between two
 * occurrences of the synonym "a"; each occurrence is followed by the expanded
 * synonym "aa" at posInc=0. Entries appear to be
 * "term,startOffset,endOffset,positionIncrement" joined by '/' — format
 * defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testSandwichStrExpand1() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(1, true, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("aba"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/a,2,3,1/aa,2,3,0");

    a = new NGramSynonymTokenizerTestAnalyzer(1, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abba"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/b,2,3,1/a,3,4,1/aa,3,4,0");

    a = new NGramSynonymTokenizerTestAnalyzer(1, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcda"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/c,2,3,1/d,3,4,1/a,4,5,1/aa,4,5,0");

    a = new NGramSynonymTokenizerTestAnalyzer(1, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdea"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/c,2,3,1/d,3,4,1/e,4,5,1/a,5,6,1/aa,5,6,0");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=2, expand=true, synonym set "a,aa": non-synonym text sandwiched between two
 * occurrences of the synonym "a"; each occurrence is followed by the expanded
 * synonym "aa" at posInc=0. Entries appear to be
 * "term,startOffset,endOffset,positionIncrement" joined by '/' — format
 * defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testSandwichStrExpand2() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("aba"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/a,2,3,1/aa,2,3,0");

    a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abba"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bb,1,3,0/b,2,3,0/a,3,4,1/aa,3,4,0");

    a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcda"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/cd,2,4,1/d,3,4,0/a,4,5,1/aa,4,5,0");

    a = new NGramSynonymTokenizerTestAnalyzer(2, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdea"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/cd,2,4,1/de,3,5,1/e,4,5,0/a,5,6,1/aa,5,6,0");
}

From source file:jp.sf.fess.solr.plugin.analysis.synonym.NGramSynonymTokenizerTest.java

License:Apache License

/**
 * n=3, expand=true, synonym set "a,aa": non-synonym text sandwiched between two
 * occurrences of the synonym "a"; each occurrence is followed by the expanded
 * synonym "aa" at posInc=0. Entries appear to be
 * "term,startOffset,endOffset,positionIncrement" joined by '/' — format
 * defined by assertTokenStream.
 * NOTE(review): streams are never end()ed/close()d here; presumably
 * assertTokenStream consumes and releases them — confirm.
 */
@Test
public void testSandwichStrExpand3() throws Exception {
    Analyzer a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    TokenStream stream = a.tokenStream("f", new StringReader("aba"));
    stream.reset(); // TokenStream contract: reset() must precede consumption
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/a,2,3,1/aa,2,3,0");

    a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abba"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bb,1,3,0/b,2,3,0/a,3,4,1/aa,3,4,0");

    a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcda"));
    stream.reset();
    assertTokenStream(stream, "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0/cd,2,4,0/d,3,4,0/a,4,5,1/aa,4,5,0");

    a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdea"));
    stream.reset();
    assertTokenStream(stream,
            "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0/cde,2,5,1/de,3,5,0/e,4,5,0/a,5,6,1/aa,5,6,0");

    a = new NGramSynonymTokenizerTestAnalyzer(3, true, "a,aa");
    stream = a.tokenStream("f", new StringReader("abcdefa"));
    stream.reset();
    assertTokenStream(stream,
            "a,0,1,1/aa,0,1,0/b,1,2,1/bc,1,3,0/bcd,1,4,0/cde,2,5,1/def,3,6,1/ef,4,6,0/f,5,6,0/a,6,7,1/aa,6,7,0");
}