List of usage examples for org.apache.lucene.search.BooleanQuery#getMaxClauseCount
public static int getMaxClauseCount()
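getMaxClauseCount() reads the static, JVM-wide limit on the number of clauses a single BooleanQuery may hold (1024 by default); exceeding it throws BooleanQuery.TooManyClauses. Before the project examples below, here is a minimal, hedged sketch of the typical guard around that limit. The class name, the field name "uuid", and the id list are illustrative only; the API style (Lucene 3.x/4.x era) matches the snippets on this page.

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

public class MaxClauseCountSketch {

    /**
     * Builds an OR query over the given ids, refusing when the static clause
     * limit would be exceeded; callers with more ids must batch or nest
     * (both patterns appear in the examples below).
     */
    public static BooleanQuery buildIdQuery(List<String> ids) {
        if (ids.size() > BooleanQuery.getMaxClauseCount()) {
            // Alternative: BooleanQuery.setMaxClauseCount(ids.size()),
            // but that mutates the limit for the whole JVM.
            throw new BooleanQuery.TooManyClauses();
        }
        BooleanQuery query = new BooleanQuery();
        for (String id : ids) {
            query.add(new TermQuery(new Term("uuid", id)), BooleanClause.Occur.SHOULD);
        }
        return query;
    }
}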
From source file: org.apache.jackrabbit.core.JahiaSearchManager.java
License: Open Source License
private void addJahiaDependencies(final Set<NodeId> removedIds, final Map<NodeId, EventImpl> addedStates,
        List<EventImpl> propEvents, final Set<NodeId> nodeEventRemovedIds)
        throws RepositoryException, IOException {
    Set<NodeId> hierarchyNodeIds = getReMovedOrRenamedHierarchicalNodes(nodeEventRemovedIds);
    if (!hierarchyNodeIds.isEmpty()) {
        // if a node which is referenced with a hierarchical faceting property is moved/renamed,
        // we need to re-index the nodes referring to it
        final IndexReader reader = ((SearchIndex) getQueryHandler()).getIndexReader();
        final Searcher searcher = new IndexSearcher(reader);
        try {
            int removeSubListStart = 0;
            List<NodeId> removeList = new ArrayList<NodeId>(hierarchyNodeIds);
            int removeSubListEnd = Math.min(removeList.size(), BooleanQuery.getMaxClauseCount());
            while (removeSubListStart < removeList.size()) {
                long timer = System.currentTimeMillis();
                BooleanQuery query = new BooleanQuery(true);
                for (final NodeId nodeId : new ArrayList<NodeId>(
                        removeList.subList(removeSubListStart, removeSubListEnd))) {
                    TermQuery termQuery = new TermQuery(
                            new Term(JahiaNodeIndexer.FACET_HIERARCHY, nodeId.toString()));
                    query.add(new BooleanClause(termQuery, BooleanClause.Occur.SHOULD));
                }
                searcher.search(query, new AbstractHitCollector() {
                    public void collect(int doc, float score) {
                        try {
                            String uuid = reader.document(doc, FieldSelectors.UUID).get("_:UUID");
                            addIdToBeIndexed(new NodeId(uuid), removedIds, addedStates);
                        } catch (Exception e) {
                            log.warn("Documents referencing moved/renamed hierarchy facet nodes may not be updated", e);
                        }
                    }
                });
                if (log.isDebugEnabled()) {
                    log.debug("Facet hierarchy search in {} ms",
                            new Object[] { (System.currentTimeMillis() - timer) });
                }
                removeSubListStart += BooleanQuery.getMaxClauseCount();
                removeSubListEnd = Math.min(removeList.size(),
                        removeSubListEnd + BooleanQuery.getMaxClauseCount());
            }
        } finally {
            searcher.close();
            Util.closeOrRelease(reader);
        }
    }

    // index also translation subnodes, unless only properties are changed, which are
    // excluded from copying down to translation nodes
    if (!addedStates.isEmpty() && !areAllPropertiesCopyExcluded(propEvents)) {
        for (final NodeId node : new HashSet<NodeId>(addedStates.keySet())) {
            if (itemMgr.hasItemState(node)) {
                try {
                    for (ChildNodeEntry childNodeEntry : ((NodeState) itemMgr.getItemState(node))
                            .getChildNodeEntries()) {
                        if (childNodeEntry.getName().getLocalName()
                                .startsWith(TRANSLATION_LOCALNODENAME_PREFIX)) {
                            try {
                                addIdToBeIndexed(childNodeEntry.getId(), removedIds, addedStates);
                            } catch (ItemStateException e) {
                                log.warn("Index of translation node may not be updated", e);
                            }
                        }
                    }
                } catch (ItemStateException e) {
                    log.warn("Index of translation node may not be updated", e);
                }
            }
        }
    }
}
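Distilled from the example above: when the number of SHOULD terms can exceed the clause limit, slice the term list into windows of at most getMaxClauseCount() and run one search per window. A minimal sketch under the same assumptions (Lucene 3.x-era API; the field name "facetHierarchy" and the caller-supplied Collector are illustrative stand-ins for the Jahia specifics):

import java.io.IOException;
import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;

public class BatchedTermSearch {

    /** Runs one OR query per window of at most maxClauseCount ids. */
    static void searchInBatches(IndexSearcher searcher, List<String> ids, Collector collector)
            throws IOException {
        int max = BooleanQuery.getMaxClauseCount();
        for (int start = 0; start < ids.size(); start += max) {
            int end = Math.min(ids.size(), start + max);
            BooleanQuery query = new BooleanQuery(true); // true = coord scoring disabled
            for (String id : ids.subList(start, end)) {
                query.add(new TermQuery(new Term("facetHierarchy", id)), BooleanClause.Occur.SHOULD);
            }
            searcher.search(query, collector);
        }
    }
}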
From source file: org.apache.jackrabbit.core.query.lucene.LuceneQueryFactory.java
License: Apache License
protected Query getDescendantNodeQuery(DescendantNode dn, JackrabbitIndexSearcher searcher)
        throws RepositoryException, IOException {
    BooleanQuery query = new BooleanQuery();
    int clauses = 0;
    try {
        LinkedList<String> ids = new LinkedList<String>();
        Node ancestor = session.getNode(dn.getAncestorPath());
        ids.add(ancestor.getIdentifier());
        while (!ids.isEmpty()) {
            String id = ids.removeFirst();
            Query q = new JackrabbitTermQuery(new Term(FieldNames.PARENT, id));
            QueryHits hits = searcher.evaluate(q);
            ScoreNode sn = hits.nextScoreNode();
            if (sn != null) {
                // reset the query so it does not overflow because of the max
                // clause count condition, see JCR-3108
                clauses++;
                if (clauses == BooleanQuery.getMaxClauseCount()) {
                    BooleanQuery wrapQ = new BooleanQuery();
                    wrapQ.add(query, SHOULD);
                    query = wrapQ;
                    clauses = 1;
                }
                query.add(q, SHOULD);
                do {
                    ids.add(sn.getNodeId().toString());
                    sn = hits.nextScoreNode();
                } while (sn != null);
            }
        }
    } catch (PathNotFoundException e) {
        query.add(new JackrabbitTermQuery(new Term(FieldNames.UUID, "invalid-node-id")), // never matches
                SHOULD);
    }
    return query;
}
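The JCR-3108 workaround above is worth isolating: once the clause count reaches the limit, the query built so far becomes a single SHOULD clause of a fresh BooleanQuery, so no individual BooleanQuery ever overflows (in the Lucene versions these examples target, the limit is enforced per BooleanQuery instance). A minimal sketch, assuming plain TermQuery clauses instead of Jackrabbit's wrappers:

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class NestedBooleanBuilder {

    /** Builds an OR over arbitrarily many terms without tripping TooManyClauses. */
    static Query orOverManyTerms(String field, List<String> values) {
        BooleanQuery query = new BooleanQuery();
        int clauses = 0;
        for (String value : values) {
            clauses++;
            if (clauses == BooleanQuery.getMaxClauseCount()) {
                // Demote the full query to one clause of a new outer query.
                BooleanQuery wrapped = new BooleanQuery();
                wrapped.add(query, Occur.SHOULD);
                query = wrapped;
                clauses = 1;
            }
            query.add(new TermQuery(new Term(field, value)), Occur.SHOULD);
        }
        return query;
    }
}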
From source file: org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProviderService.java
License: Apache License
private void configureBooleanClauseLimit(Map<String, ?> config) {
    int booleanClauseLimit = PropertiesUtil.toInteger(config.get(PROP_BOOLEAN_CLAUSE_LIMIT),
            PROP_BOOLEAN_CLAUSE_LIMIT_DEFAULT);
    if (booleanClauseLimit != BooleanQuery.getMaxClauseCount()) {
        BooleanQuery.setMaxClauseCount(booleanClauseLimit);
        log.info("Changed the Max boolean clause limit to {}", booleanClauseLimit);
    }
}
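A condensed, hedged version of that configure-from-properties pattern; the key "booleanClauseLimit" mirrors the test further down, while the 1024 fallback and the Number coercion are assumptions standing in for Oak's PropertiesUtil:

import java.util.Map;

import org.apache.lucene.search.BooleanQuery;

public class ClauseLimitConfigurer {

    /** Applies a configured clause limit only when it differs from the current one. */
    static void applyLimit(Map<String, ?> config) {
        Object raw = config.get("booleanClauseLimit"); // key assumed, see the test below
        int limit = (raw instanceof Number) ? ((Number) raw).intValue() : 1024;
        if (limit != BooleanQuery.getMaxClauseCount()) {
            // Static, JVM-wide setting: affects every BooleanQuery in this process.
            BooleanQuery.setMaxClauseCount(limit);
        }
    }
}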
From source file: org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProviderServiceTest.java
License: Apache License
@Test
public void defaultSetup() throws Exception {
    MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());

    assertNotNull(context.getService(QueryIndexProvider.class));
    assertNotNull(context.getService(Observer.class));
    assertNotNull(context.getService(IndexEditorProvider.class));

    LuceneIndexEditorProvider editorProvider =
            (LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
    assertNotNull(editorProvider.getIndexCopier());

    IndexCopier indexCopier = service.getIndexCopier();
    assertNotNull("IndexCopier should be initialized as CopyOnRead is enabled by default", indexCopier);
    assertTrue(indexCopier.isPrefetchEnabled());

    assertNotNull("CopyOnRead should be enabled by default", context.getService(CopyOnReadStatsMBean.class));
    assertNotNull(context.getService(CacheStatsMBean.class));
    assertTrue(context.getService(Observer.class) instanceof BackgroundObserver);
    assertEquals(InfoStream.NO_OUTPUT, InfoStream.getDefault());
    assertEquals(1024, BooleanQuery.getMaxClauseCount());

    MockOsgi.deactivate(service);
}
From source file: org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProviderServiceTest.java
License: Apache License
@Test
public void booleanQuerySize() throws Exception {
    Map<String, Object> config = getDefaultConfig();
    config.put("booleanClauseLimit", 4000);
    MockOsgi.activate(service, context.bundleContext(), config);

    assertEquals(4000, BooleanQuery.getMaxClauseCount());
}
From source file: org.apache.solr.core.SolrConfig.java
License: Apache License
/**
 * Creates a configuration instance from a resource loader, a configuration name and a stream.
 * If the stream is null, the resource loader will open the configuration stream.
 * If the stream is not null, no attempt to load the resource will occur (the name is not used).
 *
 * @param loader the resource loader
 * @param name   the configuration name
 * @param is     the configuration stream
 */
public SolrConfig(SolrResourceLoader loader, String name, InputSource is)
        throws ParserConfigurationException, IOException, SAXException {
    super(loader, name, is, "/config/");
    initLibs();
    luceneMatchVersion = getLuceneVersion("luceneMatchVersion");
    String indexConfigPrefix;

    // Old indexDefaults and mainIndex sections are deprecated and fail fast for luceneMatchVersion >= LUCENE_40.
    // For older solrconfig.xml's we allow the old sections, but never mixed with the new <indexConfig>
    boolean hasDeprecatedIndexConfig = (getNode("indexDefaults", false) != null)
            || (getNode("mainIndex", false) != null);
    boolean hasNewIndexConfig = getNode("indexConfig", false) != null;
    if (hasDeprecatedIndexConfig) {
        if (luceneMatchVersion.onOrAfter(Version.LUCENE_40)) {
            throw new SolrException(ErrorCode.FORBIDDEN,
                    "<indexDefaults> and <mainIndex> configuration sections are discontinued. Use <indexConfig> instead.");
        } else {
            // Still allow the old sections for older LuceneMatchVersion's
            if (hasNewIndexConfig) {
                throw new SolrException(ErrorCode.FORBIDDEN,
                        "Cannot specify both <indexDefaults>, <mainIndex> and <indexConfig> at the same time. Please use <indexConfig> only.");
            }
            log.warn("<indexDefaults> and <mainIndex> configuration sections are deprecated and will fail for luceneMatchVersion=LUCENE_40 and later. Please use <indexConfig> instead.");
            defaultIndexConfig = new SolrIndexConfig(this, "indexDefaults", null);
            mainIndexConfig = new SolrIndexConfig(this, "mainIndex", defaultIndexConfig);
            indexConfigPrefix = "mainIndex";
        }
    } else {
        defaultIndexConfig = mainIndexConfig = null;
        indexConfigPrefix = "indexConfig";
    }
    nrtMode = getBool(indexConfigPrefix + "/nrtMode", true);
    // Parse indexConfig section, using mainIndex as backup in case old config is used
    indexConfig = new SolrIndexConfig(this, "indexConfig", mainIndexConfig);

    booleanQueryMaxClauseCount = getInt("query/maxBooleanClauses", BooleanQuery.getMaxClauseCount());
    log.info("Using Lucene MatchVersion: " + luceneMatchVersion);

    // Warn about deprecated / discontinued parameters
    // boolToFilterOptimizer has had no effect since 3.1
    if (get("query/boolTofilterOptimizer", null) != null)
        log.warn("solrconfig.xml: <boolTofilterOptimizer> is currently not implemented and has no effect.");
    if (get("query/HashDocSet", null) != null)
        log.warn("solrconfig.xml: <HashDocSet> is deprecated and no longer recommended used.");

    // TODO: Old code - in case somebody wants to re-enable. Also see SolrIndexSearcher#search()
    // filtOptEnabled = getBool("query/boolTofilterOptimizer/@enabled", false);
    // filtOptCacheSize = getInt("query/boolTofilterOptimizer/@cacheSize",32);
    // filtOptThreshold = getFloat("query/boolTofilterOptimizer/@threshold",.05f);

    useFilterForSortedQuery = getBool("query/useFilterForSortedQuery", false);
    queryResultWindowSize = Math.max(1, getInt("query/queryResultWindowSize", 1));
    queryResultMaxDocsCached = getInt("query/queryResultMaxDocsCached", Integer.MAX_VALUE);
    enableLazyFieldLoading = getBool("query/enableLazyFieldLoading", false);

    filterCacheConfig = CacheConfig.getConfig(this, "query/filterCache");
    queryResultCacheConfig = CacheConfig.getConfig(this, "query/queryResultCache");
    documentCacheConfig = CacheConfig.getConfig(this, "query/documentCache");
    CacheConfig conf = CacheConfig.getConfig(this, "query/fieldValueCache");
    if (conf == null) {
        Map<String, String> args = new HashMap<String, String>();
        args.put("name", "fieldValueCache");
        args.put("size", "10000");
        args.put("initialSize", "10");
        args.put("showItems", "-1");
        conf = new CacheConfig(FastLRUCache.class, args, null);
    }
    fieldValueCacheConfig = conf;
    unlockOnStartup = getBool(indexConfigPrefix + "/unlockOnStartup", false);
    useColdSearcher = getBool("query/useColdSearcher", false);
    dataDir = get("dataDir", null);
    if (dataDir != null && dataDir.length() == 0)
        dataDir = null;

    userCacheConfigs = CacheConfig.getMultipleConfigs(this, "query/cache");

    org.apache.solr.search.SolrIndexSearcher.initRegenerators(this);

    hashSetInverseLoadFactor = 1.0f / getFloat("//HashDocSet/@loadFactor", 0.75f);
    hashDocSetMaxSize = getInt("//HashDocSet/@maxSize", 3000);

    httpCachingConfig = new HttpCachingConfig(this);

    Node jmx = getNode("jmx", false);
    if (jmx != null) {
        jmxConfig = new JmxConfiguration(true, get("jmx/@agentId", null), get("jmx/@serviceUrl", null),
                get("jmx/@rootName", null));
    } else {
        jmxConfig = new JmxConfiguration(false, null, null, null);
    }

    maxWarmingSearchers = getInt("query/maxWarmingSearchers", Integer.MAX_VALUE);

    loadPluginInfo(SolrRequestHandler.class, "requestHandler", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
    loadPluginInfo(QParserPlugin.class, "queryParser", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
    loadPluginInfo(QueryResponseWriter.class, "queryResponseWriter", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
    loadPluginInfo(ValueSourceParser.class, "valueSourceParser", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
    loadPluginInfo(TransformerFactory.class, "transformer", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
    loadPluginInfo(SearchComponent.class, "searchComponent", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);

    // TODO: WTF is up with queryConverter???
    // it apparently *only* works as a singleton? - SOLR-4304
    // and even then -- only if there is a single SpellCheckComponent
    // because of queryConverter.setAnalyzer
    loadPluginInfo(QueryConverter.class, "queryConverter", REQUIRE_NAME, REQUIRE_CLASS);

    // this is hackish, since it picks up all SolrEventListeners,
    // regardless of when/how/why they are used (or even if they are
    // declared outside of the appropriate context) but there's no nice
    // way around that in the PluginInfo framework
    loadPluginInfo(SolrEventListener.class, "//listener", REQUIRE_CLASS, MULTI_OK);

    loadPluginInfo(DirectoryFactory.class, "directoryFactory", REQUIRE_CLASS);
    loadPluginInfo(IndexDeletionPolicy.class, indexConfigPrefix + "/deletionPolicy", REQUIRE_CLASS);
    loadPluginInfo(CodecFactory.class, "codecFactory", REQUIRE_CLASS);
    loadPluginInfo(IndexReaderFactory.class, "indexReaderFactory", REQUIRE_CLASS);
    loadPluginInfo(UpdateRequestProcessorChain.class, "updateRequestProcessorChain", MULTI_OK);
    loadPluginInfo(UpdateLog.class, "updateHandler/updateLog");
    loadPluginInfo(IndexSchemaFactory.class, "schemaFactory", REQUIRE_CLASS);

    updateHandlerInfo = loadUpdatehandlerInfo();

    Config.log.info("Loaded SolrConfig: " + name);
}
From source file: org.compass.core.lucene.engine.LuceneSettings.java
License: Apache License
public void configure(CompassSettings settings) throws SearchEngineException {
    this.settings = settings;
    connection = settings.getSetting(CompassEnvironment.CONNECTION);
    if (connection == null) {
        throw new SearchEngineException("Lucene connection must be set in the settings. Please set ["
                + CompassEnvironment.CONNECTION + "]");
    }
    subContext = settings.getSetting(CompassEnvironment.CONNECTION_SUB_CONTEXT);
    if (log.isDebugEnabled()) {
        log.debug("Using connection [" + connection + "][" + subContext + "]");
    }

    // the alias property
    aliasProperty = settings.getSetting(CompassEnvironment.Alias.NAME, CompassEnvironment.Alias.DEFAULT_NAME);
    if (log.isDebugEnabled()) {
        log.debug("Using alias property [" + aliasProperty + "]");
    }
    extendedAliasProperty = settings.getSetting(CompassEnvironment.Alias.EXTENDED_ALIAS_NAME,
            CompassEnvironment.Alias.DEFAULT_EXTENDED_ALIAS_NAME);
    if (log.isDebugEnabled()) {
        log.debug("Using extended alias property [" + extendedAliasProperty + "]");
    }

    // get the all property
    allProperty = settings.getSetting(CompassEnvironment.All.NAME, CompassEnvironment.All.DEFAULT_NAME);
    if (log.isDebugEnabled()) {
        log.debug("Using default all property [" + allProperty + "]");
    }
    String allPropertyTermVectorSettings = settings.getSetting(CompassEnvironment.All.TERM_VECTOR, "no");
    if (log.isDebugEnabled()) {
        log.debug("Using all property term vector [" + allPropertyTermVectorSettings + "]");
    }
    allPropertyBoostSupport = settings.getSettingAsBoolean(CompassEnvironment.All.BOOST_SUPPORT, true);
    if (log.isDebugEnabled()) {
        log.debug("All property boost support is [" + allPropertyBoostSupport + "]");
    }
    if ("no".equals(allPropertyTermVectorSettings)) {
        allPropertyTermVector = Property.TermVector.NO;
    } else if ("yes".equals(allPropertyTermVectorSettings)) {
        allPropertyTermVector = Property.TermVector.YES;
    } else if ("positions".equals(allPropertyTermVectorSettings)) {
        allPropertyTermVector = Property.TermVector.WITH_POSITIONS;
    } else if ("offsets".equals(allPropertyTermVectorSettings)) {
        allPropertyTermVector = Property.TermVector.WITH_OFFSETS;
    } else if ("positions_offsets".equals(allPropertyTermVectorSettings)) {
        allPropertyTermVector = Property.TermVector.WITH_POSITIONS_OFFSETS;
    } else {
        throw new SearchEngineException("Unrecognized term vector setting for the all property ["
                + allPropertyTermVectorSettings + "]");
    }

    // get the default search term, defaults to the all property
    defaultSearchPropery = settings.getSetting(LuceneEnvironment.DEFAULT_SEARCH, allProperty);
    if (log.isDebugEnabled()) {
        log.debug("Using default search property [" + defaultSearchPropery + "]");
    }

    // Lucene-specific parameters
    transactionLockTimout = settings.getSettingAsTimeInSeconds(LuceneEnvironment.Transaction.LOCK_TIMEOUT, 10) * 1000;
    if (log.isDebugEnabled()) {
        log.debug("Using transaction lock timeout [" + transactionLockTimout + "ms]");
    }
    IndexWriter.setDefaultWriteLockTimeout(transactionLockTimout);
    Lock.LOCK_POLL_INTERVAL = settings.getSettingAsLong(LuceneEnvironment.Transaction.LOCK_POLL_INTERVAL, 100);
    if (log.isDebugEnabled()) {
        log.debug("Using lock poll interval [" + Lock.LOCK_POLL_INTERVAL + "ms]");
    }

    lockDir = settings.getSetting("compass.transaction.lockDir");
    if (lockDir != null) {
        throw new IllegalArgumentException("compass.transaction.lockDir setting is no longer supported. "
                + "The lock by default is stored in the index directory now, and can be controlled by using LockFactory");
    }

    maxMergeDocs = settings.getSettingAsInt(LuceneEnvironment.SearchEngineIndex.MAX_MERGE_DOCS, Integer.MAX_VALUE);

    // pure Lucene transaction settings
    mergeFactor = settings.getSettingAsInt(LuceneEnvironment.SearchEngineIndex.MERGE_FACTOR, 10);
    maxBufferedDocs = settings.getSettingAsInt(LuceneEnvironment.SearchEngineIndex.MAX_BUFFERED_DOCS,
            IndexWriter.DISABLE_AUTO_FLUSH);
    maxBufferedDeletedTerms = settings.getSettingAsInt(
            LuceneEnvironment.SearchEngineIndex.MAX_BUFFERED_DELETED_TERMS, IndexWriter.DISABLE_AUTO_FLUSH);
    termIndexInterval = settings.getSettingAsInt(LuceneEnvironment.SearchEngineIndex.TERM_INDEX_INTERVAL,
            IndexWriter.DEFAULT_TERM_INDEX_INTERVAL);
    maxFieldLength = settings.getSettingAsInt(LuceneEnvironment.SearchEngineIndex.MAX_FIELD_LENGTH,
            IndexWriter.DEFAULT_MAX_FIELD_LENGTH);
    ramBufferSize = settings.getSettingAsDouble(LuceneEnvironment.SearchEngineIndex.RAM_BUFFER_SIZE,
            IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB);

    // cache invalidation settings
    cacheInvalidationInterval = settings.getSettingAsTimeInMillis(
            LuceneEnvironment.SearchEngineIndex.CACHE_INTERVAL_INVALIDATION,
            LuceneEnvironment.SearchEngineIndex.DEFAULT_CACHE_INTERVAL_INVALIDATION);
    if (log.isDebugEnabled()) {
        log.debug("Using cache invalidation interval [" + cacheInvalidationInterval + "ms]");
    }
    waitForCacheInvalidationOnIndexOperation = settings.getSettingAsBoolean(
            LuceneEnvironment.SearchEngineIndex.WAIT_FOR_CACHE_INVALIDATION_ON_INDEX_OPERATION, false);
    if (log.isDebugEnabled()) {
        log.debug("Wait for cache invalidation on index operation is set to ["
                + waitForCacheInvalidationOnIndexOperation + "]");
    }

    BooleanQuery.setMaxClauseCount(settings.getSettingAsInt(LuceneEnvironment.Query.MAX_CLAUSE_COUNT,
            BooleanQuery.getMaxClauseCount()));
    if (log.isDebugEnabled()) {
        log.debug("Setting *static* Lucene BooleanQuery maxClauseCount to ["
                + BooleanQuery.getMaxClauseCount() + "]");
    }
}
From source file: org.easynet.resource.queryparser.QueryParserTestBase.java
License: Apache License
@Before
public void setUp() throws Exception {
    originalMaxClauses = BooleanQuery.getMaxClauseCount();
}
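The setUp above captures the current limit because setMaxClauseCount mutates shared static state; a matching tearDown should restore it so one test cannot leak a changed limit into the next. The rest of the base class is not shown here, so the following save/restore pair is a sketch of the presumed intent (JUnit 4):

import org.apache.lucene.search.BooleanQuery;
import org.junit.After;
import org.junit.Before;

public abstract class MaxClausesSaveRestoreSketch {

    private int originalMaxClauses;

    @Before
    public void setUp() throws Exception {
        originalMaxClauses = BooleanQuery.getMaxClauseCount();
    }

    @After
    public void tearDown() throws Exception {
        // Restore the static limit so later tests see the original value again.
        BooleanQuery.setMaxClauseCount(originalMaxClauses);
    }
}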
From source file: org.elasticsearch.index.query.BoolQueryParser.java
License: Apache License
@Inject
public BoolQueryParser(Settings settings) {
    BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count",
            settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount())));
}
From source file: org.elasticsearch.index.query.TermsSetQueryBuilder.java
License: Apache License
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
    if (values.isEmpty()) {
        return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query.");
    }
    // Fail before we attempt to create the term queries:
    if (values.size() > BooleanQuery.getMaxClauseCount()) {
        throw new BooleanQuery.TooManyClauses();
    }

    final MappedFieldType fieldType = context.fieldMapper(fieldName);
    final List<Query> queries = new ArrayList<>(values.size());
    for (Object value : values) {
        if (fieldType != null) {
            queries.add(fieldType.termQuery(value, context));
        } else {
            queries.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(value))));
        }
    }

    final LongValuesSource longValuesSource;
    if (minimumShouldMatchField != null) {
        MappedFieldType msmFieldType = context.fieldMapper(minimumShouldMatchField);
        if (msmFieldType == null) {
            throw new QueryShardException(context,
                    "failed to find minimum_should_match field [" + minimumShouldMatchField + "]");
        }
        IndexNumericFieldData fieldData = context.getForField(msmFieldType);
        longValuesSource = new FieldValuesSource(fieldData);
    } else if (minimumShouldMatchScript != null) {
        SearchScript.Factory factory = context.getScriptService().compile(minimumShouldMatchScript,
                SearchScript.CONTEXT);
        Map<String, Object> params = new HashMap<>();
        params.putAll(minimumShouldMatchScript.getParams());
        params.put("num_terms", queries.size());
        SearchScript.LeafFactory leafFactory = factory.newFactory(params, context.lookup());
        longValuesSource = new ScriptLongValueSource(minimumShouldMatchScript, leafFactory);
    } else {
        throw new IllegalStateException("No minimum should match has been specified");
    }
    return new CoveringQuery(queries, longValuesSource);
}
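The fail-fast check at the top of doToQuery generalizes: validate the prospective clause count against the shared limit before doing any per-term work. A minimal sketch with the Elasticsearch-specific types stripped out (the field name and value handling are illustrative):

import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class FailFastTermQueries {

    /** Builds one TermQuery per value, refusing early if the clause limit would be exceeded. */
    static List<Query> toTermQueries(String field, List<?> values) {
        if (values.size() > BooleanQuery.getMaxClauseCount()) {
            // Cheaper than building thousands of queries only to fail later.
            throw new BooleanQuery.TooManyClauses();
        }
        List<Query> queries = new ArrayList<>(values.size());
        for (Object value : values) {
            queries.add(new TermQuery(new Term(field, String.valueOf(value))));
        }
        return queries;
    }
}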