Example usage for org.apache.lucene.queryparser.flexible.core.nodes FieldQueryNode FieldQueryNode

List of usage examples for org.apache.lucene.queryparser.flexible.core.nodes FieldQueryNode FieldQueryNode

Introduction

On this page you can find example usages of the four-argument constructor FieldQueryNode(field, text, begin, end) from org.apache.lucene.queryparser.flexible.core.nodes.

Prototype

public FieldQueryNode(CharSequence field, CharSequence text, int begin, int end) 

Source Link

Usage

From source file:at.ac.univie.mminf.luceneSKOS.queryparser.flexible.standard.processors.SKOSQueryNodeProcessor.java

License:Apache License

/**
 * Runs the text of a textable query node through the configured analyzer and
 * rebuilds the node from the produced tokens:
 * no tokens yields a NoTokenFoundQueryNode; one token keeps the original field
 * node with its text replaced; several tokens become either a grouped boolean
 * of per-token field nodes (with SKOS-type boosts where present), a
 * MultiPhraseQueryNode, or a TokenizedPhraseQueryNode, depending on token
 * positions and whether the node was a quoted phrase.
 *
 * @param node the node produced by the parser
 * @return the possibly replaced node
 * @throws QueryNodeException declared by the superclass contract; analyzer
 *         I/O failures during stream setup are wrapped in a RuntimeException
 */
@Override
protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {

    // Only plain textable terms are analyzed; wildcard/fuzzy/regexp nodes and
    // the bounds of a range query must keep their raw text.
    if (node instanceof TextableQueryNode && !(node instanceof WildcardQueryNode)
            && !(node instanceof FuzzyQueryNode) && !(node instanceof RegexpQueryNode)
            && !(node.getParent() instanceof RangeQueryNode)) {

        FieldQueryNode fieldNode = ((FieldQueryNode) node);
        String text = fieldNode.getTextAsString();
        String field = fieldNode.getFieldAsString();

        // Tokenize the node's text with the analyzer for its field.
        TokenStream source;
        try {
            source = this.analyzer.tokenStream(field, text);
            source.reset();
        } catch (IOException e1) {
            throw new RuntimeException(e1);
        }
        CachingTokenFilter buffer = new CachingTokenFilter(source);

        PositionIncrementAttribute posIncrAtt = null;
        int numTokens = 0;
        int positionCount = 0;
        boolean severalTokensAtSamePosition = false;

        if (buffer.hasAttribute(PositionIncrementAttribute.class)) {
            posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class);
        }

        // First pass: count tokens and positions while the filter caches them.
        try {

            while (buffer.incrementToken()) {
                numTokens++;
                int positionIncrement = (posIncrAtt != null) ? posIncrAtt.getPositionIncrement() : 1;
                if (positionIncrement != 0) {
                    positionCount += positionIncrement;

                } else {
                    // a zero increment means this token overlaps the previous one
                    severalTokensAtSamePosition = true;
                }

            }

        } catch (IOException e) {
            // ignore: tokens are re-read from the cache below
        }

        try {
            // rewind the buffer stream
            buffer.reset();

            // close original stream - all tokens buffered
            source.close();
        } catch (IOException e) {
            // ignore
        }

        if (!buffer.hasAttribute(CharTermAttribute.class)) {
            return new NoTokenFoundQueryNode();
        }

        CharTermAttribute termAtt = buffer.getAttribute(CharTermAttribute.class);

        if (numTokens == 0) {
            return new NoTokenFoundQueryNode();

        } else if (numTokens == 1) {
            // Single token: keep the original node, just swap in the analyzed text.
            String term = null;
            try {
                boolean hasNext;
                hasNext = buffer.incrementToken();
                assert hasNext == true;
                term = termAtt.toString();

            } catch (IOException e) {
                // safe to ignore, because we know the number of tokens
            }

            fieldNode.setText(term);

            return fieldNode;

        } else if (severalTokensAtSamePosition || !(node instanceof QuotedFieldQueryNode)) {
            if (positionCount == 1 || !(node instanceof QuotedFieldQueryNode)) {
                // no phrase query: emit one field node per token, boosted when a
                // SKOS type attribute is attached to the token.
                LinkedList<QueryNode> children = new LinkedList<QueryNode>();

                for (int i = 0; i < numTokens; i++) {
                    String term = null;
                    try {
                        boolean hasNext = buffer.incrementToken();
                        assert hasNext == true;
                        term = termAtt.toString();

                    } catch (IOException e) {
                        // safe to ignore, because we know the number of tokens
                    }

                    if (buffer.hasAttribute(SKOSTypeAttribute.class) && boosts != null) {

                        // Expansion term produced by the SKOS filter: weight it by
                        // the boost configured for its SKOS relation type.
                        SKOSTypeAttribute skosAttr = buffer.getAttribute(SKOSTypeAttribute.class);
                        children.add(new BoostQueryNode(new FieldQueryNode(field, term, -1, -1),
                                getBoost(skosAttr.getSkosType())));

                    } else {

                        children.add(new FieldQueryNode(field, term, -1, -1));

                    }

                }
                return new GroupQueryNode(new StandardBooleanQueryNode(children, positionCount == 1));
            } else {
                // phrase query with overlapping tokens: build a multi-phrase node,
                // grouping tokens that share a position.
                MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();

                List<FieldQueryNode> multiTerms = new ArrayList<FieldQueryNode>();
                int position = -1;
                int i = 0;
                int termGroupCount = 0;
                for (; i < numTokens; i++) {
                    String term = null;
                    int positionIncrement = 1;
                    try {
                        boolean hasNext = buffer.incrementToken();
                        assert hasNext == true;
                        term = termAtt.toString();
                        if (posIncrAtt != null) {
                            positionIncrement = posIncrAtt.getPositionIncrement();
                        }

                    } catch (IOException e) {
                        // safe to ignore, because we know the number of tokens
                    }

                    // A positive increment starts a new position group: flush the
                    // previously collected same-position terms first.
                    if (positionIncrement > 0 && multiTerms.size() > 0) {

                        for (FieldQueryNode termNode : multiTerms) {

                            // NOTE(review): when increments are enabled this stores
                            // the absolute position in the increment slot, mirroring
                            // Lucene's AnalyzerQueryNodeProcessor — confirm downstream
                            // builders expect this.
                            if (this.positionIncrementsEnabled) {
                                termNode.setPositionIncrement(position);
                            } else {
                                termNode.setPositionIncrement(termGroupCount);
                            }

                            mpq.add(termNode);

                        }

                        // Only increment once for each "group" of
                        // terms that were in the same position:
                        termGroupCount++;

                        multiTerms.clear();

                    }

                    position += positionIncrement;
                    multiTerms.add(new FieldQueryNode(field, term, -1, -1));

                }

                // Flush the last group of same-position terms.
                for (FieldQueryNode termNode : multiTerms) {

                    if (this.positionIncrementsEnabled) {
                        termNode.setPositionIncrement(position);

                    } else {
                        termNode.setPositionIncrement(termGroupCount);
                    }

                    mpq.add(termNode);

                }

                return mpq;

            }

        } else {

            // Quoted phrase without overlapping tokens: plain tokenized phrase.
            TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();

            int position = -1;

            for (int i = 0; i < numTokens; i++) {
                String term = null;
                int positionIncrement = 1;

                try {
                    boolean hasNext = buffer.incrementToken();
                    assert hasNext == true;
                    term = termAtt.toString();

                    if (posIncrAtt != null) {
                        positionIncrement = posIncrAtt.getPositionIncrement();
                    }

                } catch (IOException e) {
                    // safe to ignore, because we know the number of tokens
                }

                FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);

                if (this.positionIncrementsEnabled) {
                    position += positionIncrement;
                    newFieldNode.setPositionIncrement(position);

                } else {
                    newFieldNode.setPositionIncrement(i);
                }

                pq.add(newFieldNode);

            }

            return pq;

        }

    }

    return node;

}

From source file:com.sindicetech.siren.qparser.keyword.BasicSyntaxTest.java

License:Open Source License

@Test(expected = IllegalArgumentException.class)
public void testTwigQueryNodeWithMoreThan2Children() throws Exception {
    // Build a pipeline whose only processor injects an extra child into every
    // twig node; parsing must then fail with IllegalArgumentException.
    final QueryNodeProcessorPipeline pipeline = new QueryNodeProcessorPipeline();
    pipeline.add(new QueryNodeProcessorImpl() {
        @Override
        protected QueryNode preProcessNode(final QueryNode node) throws QueryNodeException {
            if (node instanceof TwigQueryNode) {
                node.add(new FieldQueryNode("field", "text", 0, 4));
            }
            return node;
        }

        @Override
        protected QueryNode postProcessNode(final QueryNode node) throws QueryNodeException {
            return node;
        }

        @Override
        protected List<QueryNode> setChildrenOrder(final List<QueryNode> children) throws QueryNodeException {
            return children;
        }
    });

    final StandardExtendedKeywordQueryParser parser = new StandardExtendedKeywordQueryParser();
    parser.setQueryNodeProcessor(pipeline);
    parser.parse("a : b", SirenTestCase.DEFAULT_TEST_FIELD);
}

From source file:com.sindicetech.siren.qparser.keyword.BasicSyntaxTest.java

License:Open Source License

@Test
public void testTwigQueryNodeParent() throws Exception {
    final TwigQueryNode twigNode = new TwigQueryNode(new WildcardNodeQueryNode(), new WildcardNodeQueryNode());
    final FieldQueryNode termNode = new FieldQueryNode("field", "term", 0, 4);

    // A freshly built field node is not attached to any parent yet.
    assertTrue(termNode.getParent() == null);
    // The wildcard root and child passed to the constructor are already
    // parented to the twig.
    assertEquals(twigNode, twigNode.getRoot().getParent());
    assertEquals(twigNode, twigNode.getChild().getParent());

    // Re-attaching a node as root and child re-parents it to the twig.
    twigNode.setRoot(termNode);
    twigNode.setChild(termNode);
    assertEquals(twigNode, twigNode.getRoot().getParent());
    assertEquals(twigNode, twigNode.getChild().getParent());
}

From source file:com.sindicetech.siren.qparser.keyword.processors.PhraseQueryNodeProcessor.java

License:Open Source License

/**
 * Replaces a multi-token textable node with a tokenized phrase query whose
 * term nodes carry the node's datatype tag; single-token nodes are returned
 * unchanged and token-less analyses yield a NoTokenFoundQueryNode.
 */
@Override
protected QueryNode postProcessNode(final QueryNode node) throws QueryNodeException {
    // Only plain textable terms are analyzed; wildcard/fuzzy/regexp/protected
    // nodes and the bounds of a range query keep their raw text.
    final boolean analyzable = node instanceof TextableQueryNode
            && !(node instanceof WildcardQueryNode)
            && !(node instanceof FuzzyQueryNode)
            && !(node instanceof RegexpQueryNode)
            && !(node instanceof ProtectedQueryNode)
            && !(node.getParent() instanceof RangeQueryNode);
    if (!analyzable) {
        return node;
    }

    final FieldQueryNode textNode = (FieldQueryNode) node;
    final String queryText = textNode.getTextAsString();
    final String fieldName = textNode.getFieldAsString();

    // Tokenize the node's text with the configured analyzer.
    final TokenStream stream;
    try {
        stream = this.analyzer.tokenStream(fieldName, new StringReader(queryText));
        stream.reset();
    } catch (final IOException cause) {
        throw new RuntimeException(cause);
    }
    final CachingTokenFilter cached = new CachingTokenFilter(stream);

    // First pass: count tokens while the filter caches them.
    int tokenCount = 0;
    try {
        while (cached.incrementToken()) {
            tokenCount++;
        }
    } catch (final IOException e) {
        // ignore
    }

    try {
        cached.reset();   // rewind the cached stream
        stream.close();   // all tokens buffered; the source is no longer needed
    } catch (final IOException e) {
        // ignore
    }

    if (!cached.hasAttribute(CharTermAttribute.class)) {
        return new NoTokenFoundQueryNode();
    }
    final CharTermAttribute termAttr = cached.getAttribute(CharTermAttribute.class);

    if (tokenCount == 0) {
        return new NoTokenFoundQueryNode();
    }
    if (tokenCount == 1) {
        // A single token needs no phrase: keep the original node untouched.
        return node;
    }

    // Two or more tokens: build a phrase query carrying the same datatype tag.
    final String datatype = (String) DatatypeProcessor.getDatatype(this.getQueryConfigHandler(), node);
    final TokenizedPhraseQueryNode phrase = new TokenizedPhraseQueryNode();
    phrase.setTag(DatatypeQueryNode.DATATYPE_TAGID, datatype);

    for (int pos = 0; pos < tokenCount; pos++) {
        String term = null;
        try {
            final boolean hasNext = cached.incrementToken();
            assert hasNext == true;
            term = termAttr.toString();
        } catch (final IOException e) {
            // safe to ignore, because we know the number of tokens
        }
        final FieldQueryNode phraseTerm = new FieldQueryNode(fieldName, term, -1, -1);
        phraseTerm.setPositionIncrement(pos);
        phraseTerm.setTag(DatatypeQueryNode.DATATYPE_TAGID, datatype);
        phrase.add(phraseTerm);
    }
    return phrase;
}

From source file:org.sindice.siren.qparser.keyword.KeywordQueryParserTest.java

License:Apache License

@Test(expected = IllegalArgumentException.class)
public void testTwigQueryNodeWithMoreThan2Children() throws Exception {
    // Build a pipeline whose only processor injects an extra child into every
    // twig node; parsing must then fail with IllegalArgumentException.
    final QueryNodeProcessorPipeline pipeline = new QueryNodeProcessorPipeline();
    pipeline.add(new QueryNodeProcessorImpl() {
        @Override
        protected QueryNode preProcessNode(final QueryNode node) throws QueryNodeException {
            if (node instanceof TwigQueryNode) {
                node.add(new FieldQueryNode("field", "text", 0, 4));
            }
            return node;
        }

        @Override
        protected QueryNode postProcessNode(final QueryNode node) throws QueryNodeException {
            return node;
        }

        @Override
        protected List<QueryNode> setChildrenOrder(final List<QueryNode> children) throws QueryNodeException {
            return children;
        }
    });

    final KeywordQueryParser parser = new KeywordQueryParser();
    parser.setQueryNodeProcessor(pipeline);
    parser.parse("a : b", SirenTestCase.DEFAULT_TEST_FIELD);
}

From source file:org.sindice.siren.qparser.keyword.processors.DatatypeAnalyzerProcessor.java

License:Apache License

/**
 * Re-analyzes a textable node's text with the analyzer registered for its
 * datatype tag and rebuilds the node from the tokens produced: no tokens
 * yields a NoTokenFoundQueryNode (or a WildcardNodeQueryNode inside a twig),
 * one token keeps the original node with its text replaced, and several
 * tokens become either an OR group (query expansion at a single position) or
 * a tokenized phrase. Twig nodes decrement the pending-twig counter.
 *
 * @param node the node produced by the parser
 * @return the possibly replaced node
 * @throws QueryNodeException if no analyzer is registered for the datatype,
 *         or when a multi-token expansion occurs inside a phrase
 */
@Override
protected QueryNode postProcessNode(final QueryNode node) throws QueryNodeException {
    if (node instanceof TextableQueryNode && !(node instanceof WildcardQueryNode)
            && !(node instanceof FuzzyQueryNode) && !(node instanceof RegexpQueryNode)
            && !(node.getParent() instanceof RangeQueryNode)) {

        // Refresh the position-increment flag from the config handler on each call.
        this.positionIncrementsEnabled = false;
        final Boolean positionIncrementsEnabled = this.getQueryConfigHandler()
                .get(ConfigurationKeys.ENABLE_POSITION_INCREMENTS);
        if (positionIncrementsEnabled != null) {
            this.positionIncrementsEnabled = positionIncrementsEnabled;
        }

        final FieldQueryNode fieldNode = ((FieldQueryNode) node);
        final String text = fieldNode.getTextAsString();
        final String field = fieldNode.getFieldAsString();
        final String datatype = (String) fieldNode.getTag(DatatypeQueryNode.DATATYPE_TAGID);

        // Nodes without a datatype tag are left untouched.
        if (datatype == null) {
            return node;
        }

        final Analyzer analyzer = this.getQueryConfigHandler().get(KeywordConfigurationKeys.DATATYPES_ANALYZERS)
                .get(datatype);
        if (analyzer == null) {
            throw new QueryNodeException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX,
                    "No analyzer associated with " + datatype));
        }

        PositionIncrementAttribute posIncrAtt = null;
        int numTokens = 0;
        int positionCount = 0;
        boolean severalTokensAtSamePosition = false;

        final TokenStream source;
        try {
            source = analyzer.tokenStream(field, new StringReader(text));
            source.reset();
        } catch (final IOException e1) {
            throw new RuntimeException(e1);
        }
        final CachingTokenFilter buffer = new CachingTokenFilter(source);

        if (buffer.hasAttribute(PositionIncrementAttribute.class)) {
            posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class);
        }

        // First pass: count tokens and positions while the filter caches them.
        try {
            while (buffer.incrementToken()) {
                numTokens++;
                final int positionIncrement = (posIncrAtt != null) ? posIncrAtt.getPositionIncrement() : 1;
                if (positionIncrement != 0) {
                    positionCount += positionIncrement;
                } else {
                    // a zero increment means this token overlaps the previous one
                    severalTokensAtSamePosition = true;
                }
            }
        } catch (final IOException e) {
            // ignore: tokens are re-read from the cache below
        }

        try {
            // rewind the buffer stream
            buffer.reset();
            // close original stream - all tokens buffered
            source.close();
        } catch (final IOException e) {
            // ignore
        }

        if (!buffer.hasAttribute(CharTermAttribute.class)) {
            return new NoTokenFoundQueryNode();
        }
        final CharTermAttribute termAtt = buffer.getAttribute(CharTermAttribute.class);

        if (numTokens == 0) {
            if (nbTwigs != 0) { // Twig special case
                return new WildcardNodeQueryNode();
            }
            return new NoTokenFoundQueryNode();
        } else if (numTokens == 1) {
            // Single token: keep the original node, just swap in the analyzed text.
            String term = null;
            try {
                boolean hasNext;
                hasNext = buffer.incrementToken();
                assert hasNext == true;
                term = termAtt.toString();
            } catch (final IOException e) {
                // safe to ignore, because we know the number of tokens
            }
            fieldNode.setText(term);
            return fieldNode;
        } else {
            // no phrase query: rebuild one field node per token.
            final LinkedList<QueryNode> children = new LinkedList<QueryNode>();

            int position = -1;

            for (int i = 0; i < numTokens; i++) {
                String term = null;
                // NOTE(review): the increment is fixed at 1 instead of being read
                // from posIncrAtt — confirm overlapping tokens are meant to
                // advance the position in this branch.
                final int positionIncrement = 1;

                try {
                    final boolean hasNext = buffer.incrementToken();
                    assert hasNext == true;
                    term = termAtt.toString();

                } catch (final IOException e) {
                    // safe to ignore, because we know the number of tokens
                }

                final FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);

                if (this.positionIncrementsEnabled) {
                    position += positionIncrement;
                    newFieldNode.setPositionIncrement(position);
                } else {
                    newFieldNode.setPositionIncrement(i);
                }

                // FIX: add the node whose position increment was just configured.
                // The original code added a fresh FieldQueryNode here, leaving
                // newFieldNode dead and silently discarding the increments.
                children.add(newFieldNode);
            }

            if (node.getParent() instanceof TokenizedPhraseQueryNode) {
                throw new QueryNodeException(new MessageImpl("Cannot build a MultiPhraseQuery"));
            }
            // If multiple terms at one single position, this must be a query
            // expansion. Perform a OR between the terms.
            if (severalTokensAtSamePosition && positionCount == 1) {
                return new GroupQueryNode(new OrQueryNode(children));
            }
            // if several tokens at same position && position count > 1, then
            // results can be unexpected
            else {
                final TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();
                for (int i = 0; i < children.size(); i++) {
                    pq.add(children.get(i));
                }
                return pq;
            }
        }
    } else if (node instanceof TwigQueryNode) {
        // Leaving a twig scope: one fewer pending twig.
        nbTwigs--;
        assert nbTwigs >= 0;
    }
    return node;
}

From source file:org.sindice.siren.qparser.keyword.processors.PhraseQueryNodeProcessor.java

License:Apache License

/**
 * Replaces a multi-token textable node with a tokenized phrase query; a
 * single-token node is returned unchanged and a token-less analysis yields a
 * NoTokenFoundQueryNode.
 */
@Override
protected QueryNode postProcessNode(final QueryNode node) throws QueryNodeException {
    // Skip anything that is not a plain textable term: wildcard, fuzzy and
    // regexp nodes, and the bounds of a range query, keep their raw text.
    if (!(node instanceof TextableQueryNode)
            || node instanceof WildcardQueryNode
            || node instanceof FuzzyQueryNode
            || node instanceof RegexpQueryNode
            || node.getParent() instanceof RangeQueryNode) {
        return node;
    }

    final FieldQueryNode textNode = (FieldQueryNode) node;
    final String queryText = textNode.getTextAsString();
    final String fieldName = textNode.getFieldAsString();

    // Tokenize the node's text with the configured analyzer.
    final TokenStream stream;
    try {
        stream = this.analyzer.tokenStream(fieldName, new StringReader(queryText));
        stream.reset();
    } catch (final IOException cause) {
        throw new RuntimeException(cause);
    }
    final CachingTokenFilter cached = new CachingTokenFilter(stream);

    // First pass: count tokens while the filter caches them.
    int tokenCount = 0;
    try {
        while (cached.incrementToken()) {
            tokenCount++;
        }
    } catch (final IOException e) {
        // ignore
    }

    try {
        cached.reset();   // rewind the cached stream
        stream.close();   // all tokens buffered; the source is no longer needed
    } catch (final IOException e) {
        // ignore
    }

    if (!cached.hasAttribute(CharTermAttribute.class)) {
        return new NoTokenFoundQueryNode();
    }
    final CharTermAttribute termAttr = cached.getAttribute(CharTermAttribute.class);

    if (tokenCount == 0) {
        return new NoTokenFoundQueryNode();
    }
    if (tokenCount == 1) {
        // A single token needs no phrase: keep the original node untouched.
        return node;
    }

    // Two or more tokens: turn the node into a tokenized phrase query.
    final TokenizedPhraseQueryNode phrase = new TokenizedPhraseQueryNode();
    for (int pos = 0; pos < tokenCount; pos++) {
        String term = null;
        try {
            final boolean hasNext = cached.incrementToken();
            assert hasNext == true;
            term = termAttr.toString();
        } catch (final IOException e) {
            // safe to ignore, because we know the number of tokens
        }
        final FieldQueryNode phraseTerm = new FieldQueryNode(fieldName, term, -1, -1);
        phraseTerm.setPositionIncrement(pos);
        phrase.add(phraseTerm);
    }
    return phrase;
}