Example usage for org.apache.lucene.search BooleanQuery setMaxClauseCount

List of usage examples for org.apache.lucene.search BooleanQuery setMaxClauseCount

Introduction

On this page you can find example usages of org.apache.lucene.search BooleanQuery setMaxClauseCount.

Prototype

public static void setMaxClauseCount(int maxClauseCount) 

Source Link

Document

Set the maximum number of clauses permitted per BooleanQuery. Attempting to add more clauses than this limit throws BooleanQuery.TooManyClauses; the default limit is 1024.
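
Because the limit is a JVM-wide static setting, code that raises it for a single clause-heavy query typically saves the current value first and restores it afterwards, as several of the usage examples below do. The following is a minimal sketch of that pattern, assuming the classic static BooleanQuery API used in these examples; the class name, method name, and the 100000 limit are placeholders, not recommendations.

import org.apache.lucene.search.BooleanQuery;

public class MaxClauseCountExample {

    /**
     * Runs the given search with a temporarily raised clause limit and
     * restores the previous limit afterwards.
     */
    public static void runWithRaisedLimit(Runnable clauseHeavySearch) {
        int previous = BooleanQuery.getMaxClauseCount();
        try {
            BooleanQuery.setMaxClauseCount(100000);   // JVM-wide static setting
            clauseHeavySearch.run();                  // build and execute the clause-heavy query here
        } finally {
            BooleanQuery.setMaxClauseCount(previous); // restore so other queries are unaffected
        }
    }
}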

Usage

From source file: org.rssowl.core.tests.model.ModelSearchTest4.java

License: Open Source License

/**
 * @throws Exception
 */
@Test
public void testMaxClauseCount() throws Exception {
    int maxClauseCount = BooleanQuery.getMaxClauseCount();

    IFeed feed = fFactory.createFeed(null, new URI("http://www.feed.com/feed.xml"));
    createNews(feed, "Foo", "http://www.news.com/news3.html", State.READ);
    DynamicDAO.save(feed);

    waitForIndexer();

    ISearchField field = fFactory.createSearchField(INews.TITLE, fNewsEntityName);

    List<ISearchCondition> conditions = new ArrayList<ISearchCondition>();
    for (int i = 0; i < 1030; i++) {
        ISearchCondition condition1 = fFactory.createSearchCondition(field, SearchSpecifier.CONTAINS,
                "foo" + i);
        conditions.add(condition1);
    }

    conditions.add(fFactory.createSearchCondition(field, SearchSpecifier.CONTAINS, "foo"));

    List<SearchHit<NewsReference>> result = fModelSearch.searchNews(conditions, false);
    assertEquals(1, result.size());
    assertEquals("Foo", result.get(0).getResult().resolve().getTitle());

    BooleanQuery.setMaxClauseCount(maxClauseCount);
}

From source file: org.rssowl.core.tests.model.ModelSearchTest4.java

License: Open Source License

/**
 * @throws Exception
 */
@Test
public void testMaxClauseCountForQuery() throws Exception {
    int maxClauseCount = BooleanQuery.getMaxClauseCount();
    BooleanQuery.setMaxClauseCount(3);

    IFolderChild root = fFactory.createFolder(null, null, "Root");
    IFeed feed1 = DynamicDAO.save(fFactory.createFeed(null, new URI("http://www.feed.com/feed1.xml")));
    IFeed feed2 = DynamicDAO.save(fFactory.createFeed(null, new URI("http://www.feed.com/feed2.xml")));
    IFeed feed3 = DynamicDAO.save(fFactory.createFeed(null, new URI("http://www.feed.com/feed3.xml")));
    IFeed feed4 = DynamicDAO.save(fFactory.createFeed(null, new URI("http://www.feed.com/feed4.xml")));

    DynamicDAO
            .save(fFactory.createBookMark(null, (IFolder) root, new FeedLinkReference(feed1.getLink()), "BM1"));
    DynamicDAO
            .save(fFactory.createBookMark(null, (IFolder) root, new FeedLinkReference(feed2.getLink()), "BM1"));
    DynamicDAO
            .save(fFactory.createBookMark(null, (IFolder) root, new FeedLinkReference(feed3.getLink()), "BM1"));
    DynamicDAO
            .save(fFactory.createBookMark(null, (IFolder) root, new FeedLinkReference(feed4.getLink()), "BM1"));

    ISearchField field = fFactory.createSearchField(INews.LOCATION, fNewsEntityName);
    List<ISearchCondition> conditions = new ArrayList<ISearchCondition>();
    conditions.add(fFactory.createSearchCondition(field, SearchSpecifier.IS,
            ModelUtils.toPrimitive(Collections.singletonList(root))));

    Query query = ModelSearchQueries.createQuery(conditions, null, false);
    assertNotNull(query);

    BooleanQuery.setMaxClauseCount(maxClauseCount);
}

From source file: org.sindice.siren.search.node.TestNodeNumericRangeQuery32.java

License: Apache License

private void testRandomTrieAndClassicRangeQuery(final int precisionStep) throws Exception {
    final String field = "field" + precisionStep;
    int totalTermCountT = 0, totalTermCountC = 0, termCountT, termCountC;
    final int num = _TestUtil.nextInt(random(), 10, 20);

    BooleanQuery.setMaxClauseCount(Integer.MAX_VALUE);
    for (int i = 0; i < num; i++) {
        int lower = (int) (random().nextDouble() * noDocs * distance) + startOffset;
        int upper = (int) (random().nextDouble() * noDocs * distance) + startOffset;
        if (lower > upper) {
            final int a = lower;
            lower = upper;
            upper = a;
        }
        /*
         * In SIREn, the numeric type and the precision step are prepended to the
         * indexed numeric terms.
         */
        final BytesRef lowerBytes = new BytesRef(NumericType.INT.toString() + precisionStep);
        final BytesRef upperBytes = new BytesRef(NumericType.INT.toString() + precisionStep);
        final BytesRef lBytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
        final BytesRef uBytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
        NumericUtils.intToPrefixCoded(lower, 0, lBytes);
        NumericUtils.intToPrefixCoded(upper, 0, uBytes);
        lowerBytes.append(lBytes);
        upperBytes.append(uBytes);

        // test inclusive range
        MultiNodeTermQuery tq = (MultiNodeTermQuery) nmqInt(field, precisionStep, lower, upper, true, true)
                .getNodeQuery();
        MultiNodeTermQuery cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, true, true);
        TopDocs tTopDocs = index.searcher.search(dq(tq), 1);
        TopDocs cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
        // test exclusive range
        tq = (MultiNodeTermQuery) nmqInt(field, precisionStep, lower, upper, false, false).getNodeQuery();
        cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, false, false);
        tTopDocs = index.searcher.search(dq(tq), 1);
        cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
        // test left exclusive range
        tq = (MultiNodeTermQuery) nmqInt(field, precisionStep, lower, upper, false, true).getNodeQuery();
        cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, false, true);
        tTopDocs = index.searcher.search(dq(tq), 1);
        cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
        // test right exclusive range
        tq = (MultiNodeTermQuery) nmqInt(field, precisionStep, lower, upper, true, false).getNodeQuery();
        cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, true, false);
        tTopDocs = index.searcher.search(dq(tq), 1);
        cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
    }

    this.checkTermCounts(precisionStep, totalTermCountT, totalTermCountC);
    if (VERBOSE && precisionStep != Integer.MAX_VALUE) {
        System.out.println("Average number of terms during random search on '" + field + "':");
        System.out.println(" Numeric query: " + (((double) totalTermCountT) / (num * 4)));
        System.out.println(" Classical query: " + (((double) totalTermCountC) / (num * 4)));
    }
}

From source file: org.sindice.siren.search.node.TestNodeNumericRangeQuery64.java

License: Apache License

private void testRandomTrieAndClassicRangeQuery(final int precisionStep) throws Exception {
    final String field = "field" + precisionStep;
    int totalTermCountT = 0, totalTermCountC = 0, termCountT, termCountC;
    final int num = _TestUtil.nextInt(random(), 10, 20);

    BooleanQuery.setMaxClauseCount(Integer.MAX_VALUE);
    for (int i = 0; i < num; i++) {
        long lower = (long) (random().nextDouble() * noDocs * distance) + startOffset;
        long upper = (long) (random().nextDouble() * noDocs * distance) + startOffset;
        if (lower > upper) {
            final long a = lower;
            lower = upper;
            upper = a;
        }
        /*
         * In SIREn, the numeric type and the precision step are prepended to the
         * indexed numeric terms.
         */
        final BytesRef lowerBytes = new BytesRef(NumericType.LONG.toString() + precisionStep);
        final BytesRef upperBytes = new BytesRef(NumericType.LONG.toString() + precisionStep);
        final BytesRef lBytes = new BytesRef(NumericUtils.BUF_SIZE_LONG);
        final BytesRef uBytes = new BytesRef(NumericUtils.BUF_SIZE_LONG);
        NumericUtils.longToPrefixCoded(lower, 0, lBytes);
        NumericUtils.longToPrefixCoded(upper, 0, uBytes);
        lowerBytes.append(lBytes);
        upperBytes.append(uBytes);

        // test inclusive range
        MultiNodeTermQuery tq = (MultiNodeTermQuery) nmqLong(field, precisionStep, lower, upper, true, true)
                .getNodeQuery();
        MultiNodeTermQuery cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, true, true);
        TopDocs tTopDocs = index.searcher.search(dq(tq), 1);
        TopDocs cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
        // test exclusive range
        tq = (MultiNodeTermQuery) nmqLong(field, precisionStep, lower, upper, false, false).getNodeQuery();
        cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, false, false);
        tTopDocs = index.searcher.search(dq(tq), 1);
        cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
        // test left exclusive range
        tq = (MultiNodeTermQuery) nmqLong(field, precisionStep, lower, upper, false, true).getNodeQuery();
        cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, false, true);
        tTopDocs = index.searcher.search(dq(tq), 1);
        cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
        // test right exclusive range
        tq = (MultiNodeTermQuery) nmqLong(field, precisionStep, lower, upper, true, false).getNodeQuery();
        cq = new NodeTermRangeQuery(field, lowerBytes, upperBytes, true, false);
        tTopDocs = index.searcher.search(dq(tq), 1);
        cTopDocs = index.searcher.search(dq(cq), 1);
        assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal",
                cTopDocs.totalHits, tTopDocs.totalHits);
        totalTermCountT += termCountT = this.countTerms(tq);
        totalTermCountC += termCountC = this.countTerms(cq);
        this.checkTermCounts(precisionStep, termCountT, termCountC);
    }

    this.checkTermCounts(precisionStep, totalTermCountT, totalTermCountC);
    if (VERBOSE && precisionStep != Integer.MAX_VALUE) {
        System.out.println("Average number of terms during random search on '" + field + "':");
        System.out.println(" Numeric query: " + (((double) totalTermCountT) / (num * 4)));
        System.out.println(" Classical query: " + (((double) totalTermCountC) / (num * 4)));
    }
}

From source file: org.splevo.vpm.analyzer.semantic.lucene.finder.SharedTermFinder.java

License: Open Source License

/**
 * This Method builds the {@link Query} the Finder uses to search similarities.
 *
 * @param fieldName
 *            The name of the field that should be searched.
 * @param termFrequencies
 *            A {@link Map} that contains all terms and their frequencies.
 * @return The {@link Query}.
 */
private Query buildQuery(String fieldName, Map<String, Integer> termFrequencies) {
    BooleanQuery.setMaxClauseCount(Integer.MAX_VALUE);
    BooleanQuery finalQuery = new BooleanQuery();

    // Add a TermQuery for each term in the document.
    for (String key : termFrequencies.keySet()) {
        Term t = new Term(fieldName, key);
        TermQuery termQuery = new TermQuery(t);
        finalQuery.add(termQuery, Occur.SHOULD);
    }

    finalQuery.setMinimumNumberShouldMatch(minSharedTerms);

    return finalQuery;
}

From source file: org.zenoss.zep.index.impl.EventIndexDaoImplIT.java

License: Open Source License

@Test
public void testPassMaxCountClauseParam() throws ZepException {

    // I really just want to see that an exception doesn't get thrown, so no asserts here
    BooleanQuery.setMaxClauseCount(4096);
    eventIndexDao.getEventTagSeverities(getEventFilterInst(2000));
}

From source file: org.zenoss.zep.index.impl.lucene.LuceneEventIndexBackend.java

License: Open Source License

public LuceneEventIndexBackend(String name, IndexWriter writer, EventSummaryBaseDao eventSummaryBaseDao,
        Integer maxClauseCount, LuceneFilterCacheManager filterCacheManager, int readerRefreshInterval,
        Messages messages, TaskScheduler scheduler, UUIDGenerator uuidGenerator) throws IOException {
    super(messages, scheduler, uuidGenerator);
    this.name = name;
    this.writer = writer;
    this.trackingIndexWriter = new TrackingIndexWriter(this.writer);
    this.searcherManager = new SearcherManager(this.writer, true, null);
    this.eventSummaryBaseDao = eventSummaryBaseDao;
    this.archive = "event_archive".equals(name);
    this.filterCacheManager = filterCacheManager;
    this.readerReopenInterval = readerRefreshInterval;
    BooleanQuery.setMaxClauseCount(maxClauseCount);

    // Deal with the reader reopen thread
    if (this.readerReopenInterval != 0) {
        startReopenThread();
    } else {
        this.nrtManagerReopenThread = null;
    }
}

From source file: psidev.psi.mi.search.Playground.java

License: Apache License

public static void main(String[] args) throws Exception {
    //InputStream is = new FileInputStream("/ebi/sp/pro6/intact/local/data/released/current/psimitab/intact.txt");
    //String indexDir = "/homes/baranda/tmp_pub/intact-20070731";
    String indexDir = "/ebi/sp/pro6/intact/public-tomcat/psimitab-index/current";

    //Directory ramDir = new RAMDirectory();

    //IndexWriter indexWriter = new IndexWriter(indexDir, new StandardAnalyzer(), false);
    //indexWriter.optimize();

    //Searcher.buildIndex(indexDir, is, true, true);

    BooleanQuery.setMaxClauseCount(1024 * 150);

    //SearchResult result = Searcher.search("detmethod:\"MI:0018\"", indexDir, 0, 50);

    SearchResult result = Searcher.search("Q08641 NOT species:human", indexDir, 0, 1000);

    System.out.println(result.getLuceneQuery());

    System.out.println(result.getTotalCount());

}

From source file: retriever.TermStats.java

DocStats(CrossLingualAligner aligner, int docId) {
    this.docId = docId;
    this.reader = aligner.enIndexReader;
    termStats = new ArrayList<>();
    BooleanQuery.setMaxClauseCount(8192);
    MAX_QUERY_TERMS = BooleanQuery.getMaxClauseCount();
    queryToDocRatio = Float.parseFloat(aligner.prop.getProperty("querysel.q_to_d_ratio", "0.4"));
    qSelLambda = Float.parseFloat(aligner.prop.getProperty("querysel.lambda", "0.4"));
}

From source file: reviewclassification.ReviewClassification.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args)
        throws IOException, ParseException, org.apache.lucene.queryparser.classic.ParseException {
    String base_dir = "processed_data";

    tlog(ft, "Reading reviews from disk.");

    ArrayList<Document> documents = readReviews(base_dir + "/train/neg/", -1);
    documents.addAll(readReviews(base_dir + "/train/pos/", -1));
    ArrayList<Document> query_set = readReviews(base_dir + "/test/", -1);

    tlog(ft, "Done reading from disk.");

    BooleanQuery.setMaxClauseCount(100000);

    tlog(ft, "Starting.");

    //accuracyTest(documents, 1, 200, "./accuracy_test_results.txt");
    predictTestSet(documents, query_set, 61, "./cos_score_results.txt");

    tlog(ft, "Done.");
}