Usage examples for org.apache.lucene.search.BooleanQuery#clauses()
BooleanQuery.clauses() returns the List<BooleanClause> that make up a boolean query. The examples below show how real projects iterate, inspect, and rebuild these clauses.
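Before the project examples, a minimal sketch of the basic pattern, assuming a Lucene 5+ style immutable BooleanQuery built via BooleanQuery.Builder (several of the older examples below still use the deprecated mutable BooleanQuery constructor); the class name ClausesExample is illustrative only:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

public class ClausesExample {
    public static void main(String[] args) {
        // Build a boolean query with one required and one excluded term.
        BooleanQuery query = new BooleanQuery.Builder()
                .add(new TermQuery(new Term("body", "lucene")), Occur.MUST)
                .add(new TermQuery(new Term("body", "solr")), Occur.MUST_NOT)
                .build();

        // clauses() exposes the underlying BooleanClause list for inspection.
        for (BooleanClause clause : query.clauses()) {
            System.out.println(clause.getOccur() + " -> " + clause.getQuery());
        }
    }
}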
From source file:org.hibernate.search.backend.elasticsearch.impl.ToElasticsearch.java
License:LGPL
private static JsonObject convertBooleanQuery(BooleanQuery booleanQuery) {
    JsonArray musts = new JsonArray();
    JsonArray shoulds = new JsonArray();
    JsonArray mustNots = new JsonArray();
    JsonArray filters = new JsonArray();

    for (BooleanClause clause : booleanQuery.clauses()) {
        switch (clause.getOccur()) {
        case MUST:
            musts.add(fromLuceneQuery(clause.getQuery()));
            break;
        case FILTER:
            filters.add(fromLuceneQuery(clause.getQuery()));
            break;
        case MUST_NOT:
            mustNots.add(fromLuceneQuery(clause.getQuery()));
            break;
        case SHOULD:
            shoulds.add(fromLuceneQuery(clause.getQuery()));
            break;
        }
    }

    JsonObject clauses = new JsonObject();
    if (musts.size() > 1) {
        clauses.add("must", musts);
    } else if (musts.size() == 1) {
        clauses.add("must", musts.iterator().next());
    }
    if (shoulds.size() > 1) {
        clauses.add("should", shoulds);
    } else if (shoulds.size() == 1) {
        clauses.add("should", shoulds.iterator().next());
    }
    if (mustNots.size() > 1) {
        clauses.add("must_not", mustNots);
    } else if (mustNots.size() == 1) {
        clauses.add("must_not", mustNots.iterator().next());
    }
    if (filters.size() > 1) {
        clauses.add("filter", filters);
    } else if (filters.size() == 1) {
        clauses.add("filter", filters.iterator().next());
    }

    JsonObject bool = new JsonObject();
    bool.add("bool", clauses);
    return bool;
}
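The size checks above exist so that a single clause is emitted as a bare JSON object while multiple clauses of the same occur become a JSON array. A minimal, self-contained sketch of that convention, assuming only Gson on the classpath; BoolShapeSketch and addOccur are hypothetical names that mirror the logic above, and the string placeholder stands in for whatever fromLuceneQuery would produce:

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;

public class BoolShapeSketch {
    // Hypothetical helper mirroring the size checks in convertBooleanQuery.
    static void addOccur(JsonObject clauses, String name, JsonArray items) {
        if (items.size() > 1) {
            clauses.add(name, items);        // e.g. "must": [ {...}, {...} ]
        } else if (items.size() == 1) {
            clauses.add(name, items.get(0)); // e.g. "must": {...}
        }                                    // empty: key omitted entirely
    }

    public static void main(String[] args) {
        JsonArray musts = new JsonArray();
        musts.add(new JsonPrimitive("match-1")); // placeholder for fromLuceneQuery output

        JsonObject clauses = new JsonObject();
        addOccur(clauses, "must", musts);

        JsonObject bool = new JsonObject();
        bool.add("bool", clauses);
        System.out.println(bool); // {"bool":{"must":"match-1"}}
    }
}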
From source file:org.hibernate.search.test.dsl.DSLTest.java
License:LGPL
@Test @TestForIssue(jiraKey = "HSEARCH-2034") public void testBooleanWithoutScoring() throws Exception { final QueryBuilder monthQb = helper.queryBuilder(Month.class); //must + disable scoring Query query = monthQb.bool().must(monthQb.keyword().onField("mythology").matching("colder").createQuery()) .disableScoring().createQuery(); helper.assertThat(query).from(Month.class).matchesExactlyIds(1); assertTrue(query instanceof BooleanQuery); BooleanQuery bq = (BooleanQuery) query; BooleanClause firstBooleanClause = bq.clauses().get(0); assertFalse(firstBooleanClause.isScoring()); }
From source file:org.hibernate.search.test.dsl.DSLTest.java
License:LGPL
@Test @TestForIssue(jiraKey = "HSEARCH-2037") public void testBooleanWithOnlyNegationQueries() throws Exception { final QueryBuilder monthQb = helper.queryBuilder(Month.class); //must + disable scoring Query query = monthQb.bool().must(monthQb.keyword().onField("mythology").matching("colder").createQuery()) .not() //expectation: exclude January .must(monthQb.keyword().onField("mythology").matching("snowboarding").createQuery()).not() //expectation: exclude February .createQuery();/*from w w w .j av a 2s . c o m*/ helper.assertThat(query).from(Month.class).matchesExactlyIds(3); assertTrue(query instanceof BooleanQuery); BooleanQuery bq = (BooleanQuery) query; BooleanClause firstBooleanClause = bq.clauses().get(0); assertFalse(firstBooleanClause.isScoring()); }
From source file:org.hibernate.search.test.dsl.MoreLikeThisTest.java
License:LGPL
@Test
public void testMoreLikeThisBasicBehavior() throws Exception {
    QueryBuilder qb = getCoffeeQueryBuilder();
    Query mltQuery = qb.moreLikeThis().favorSignificantTermsWithFactor(1).comparingAllFields()
            .toEntityWithId(decaffInstance.getId()).createQuery();
    List<Object[]> results = doQuery(mltQuery);
    assertThat(results).isNotEmpty();

    Set<Term> terms = extractTerms(mltQuery, Coffee.class);
    assertThat(terms).describedAs("internalDescription should be ignored")
            .doesNotSatisfy(new Condition<Collection<?>>() {
                @SuppressWarnings("unchecked")
                @Override
                public boolean matches(Collection<?> value) {
                    for (Term term : (Collection<Term>) value) {
                        if ("internalDescription".equals(term.field())) {
                            return true;
                        }
                    }
                    return false;
                }
            });
    outputQueryAndResults(decaffInstance, mltQuery, results);

    // custom fields
    mltQuery = qb.moreLikeThis().comparingField("summary").boostedTo(10f).andField("description")
            .toEntityWithId(decaffInstance.getId()).createQuery();
    results = doQuery(mltQuery);
    assertThat(results).isNotEmpty();
    assertThat(mltQuery instanceof BooleanQuery);
    BooleanQuery topMltQuery = (BooleanQuery) mltQuery;
    // FIXME: I'd prefer a test that uses data instead of how the query is actually built
    assertThat(topMltQuery.clauses()).onProperty("query.boost").contains(1f, 10f);
    outputQueryAndResults(decaffInstance, mltQuery, results);

    // using non compatible field
    try {
        qb.moreLikeThis().comparingField("summary").andField("internalDescription")
                .toEntityWithId(decaffInstance.getId()).createQuery();
    } catch (SearchException e) {
        assertThat(e.getMessage()).as("Internal description is neither stored nor store termvectors")
                .contains("internalDescription");
    }
}
From source file:org.hippoecm.repository.FacetedNavigationEngineImpl.java
License:Apache License
public Result doView(String queryName, QueryImpl initialQuery, ContextImpl contextImpl,
        List<KeyValue<String, String>> facetsQueryList, List<FacetRange> rangeQuery, QueryImpl openQuery,
        Map<String, Map<String, Count>> resultset, Map<String, String> inheritedFilter,
        HitsRequested hitsRequested) throws UnsupportedOperationException, IllegalArgumentException {

    NamespaceMappings nsMappings = getNamespaceMappings();
    IndexReader indexReader = null;
    try {
        indexReader = getIndexReader(false);
        IndexSearcher searcher = new IndexSearcher(indexReader);
        SetDocIdSetBuilder matchingDocsSetBuilder = new SetDocIdSetBuilder();

        BooleanQuery facetsQuery = new FacetsQuery(facetsQueryList, nsMappings).getQuery();
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(facetsQuery, indexReader));

        BooleanQuery facetRangeQuery = new FacetRangeQuery(rangeQuery, nsMappings, this).getQuery();
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(facetRangeQuery, indexReader));

        BooleanQuery inheritedFilterQuery = new InheritedFilterQuery(inheritedFilter, nsMappings).getQuery();
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(inheritedFilterQuery, indexReader));

        org.apache.lucene.search.Query initialLuceneQuery = null;
        if (initialQuery != null && initialQuery.scopes != null && initialQuery.scopes.length > 0) {
            if (initialQuery.scopes.length == 1) {
                initialLuceneQuery = new TermQuery(
                        new Term(ServicingFieldNames.HIPPO_PATH, initialQuery.scopes[0]));
            } else {
                initialLuceneQuery = new BooleanQuery(true);
                for (String scope : initialQuery.scopes) {
                    ((BooleanQuery) initialLuceneQuery)
                            .add(new TermQuery(new Term(ServicingFieldNames.HIPPO_PATH, scope)), Occur.SHOULD);
                }
            }
        }
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(initialLuceneQuery, indexReader));

        FacetFiltersQuery facetFiltersQuery = null;
        if (initialQuery != null && initialQuery.facetFilters != null) {
            facetFiltersQuery = new FacetFiltersQuery(initialQuery.facetFilters, nsMappings,
                    this.getTextAnalyzer(), this.getSynonymProvider());
        }

        final BooleanQuery authorizationQuery = contextImpl.getAuthorizationQuery();
        if (authorizationQuery != null) {
            final DocIdSet authorisationIdSet = contextImpl.getAuthorisationIdSet(indexReader);
            if (authorisationIdSet != null) {
                matchingDocsSetBuilder.add(authorisationIdSet);
            }
        }

        if (resultset != null) {
            // If there is more than one facet in the 'resultset' we return an empty result as this is not allowed
            if (resultset.size() > 1) {
                log.error("The resultset cannot contain multiple facets");
                return new ResultImpl(0, null);
            }

            int cardinality = 0;
            for (String namespacedFacet : resultset.keySet()) {
                // Not a search involving scoring, thus compute bitsets for facetFiltersQuery & freeSearchInjectedSort
                if (facetFiltersQuery != null) {
                    if (facetFiltersQuery.isPlainLuceneQuery()) {
                        matchingDocsSetBuilder
                                .add(filterDocIdSetPlainLuceneQuery(facetFiltersQuery.getQuery(), indexReader));
                    } else {
                        matchingDocsSetBuilder
                                .add(filterDocIdSetJackRabbitQuery(facetFiltersQuery.getQuery(), indexReader));
                    }
                }
                if (openQuery != null) {
                    QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                    // open query is always a jackrabbit query
                    matchingDocsSetBuilder.add(filterDocIdSetJackRabbitQuery(queryAndSort.query, indexReader));
                }

                OpenBitSet matchingDocs = matchingDocsSetBuilder.toBitSet();
                cardinality = (int) matchingDocs.cardinality();

                /*
                 * Nodes not having this facet still should be counted if they are a hit
                 * in the query without this facet. Therefore, first get the count query
                 * without FacetPropExistsQuery.
                 */
                int numHits = 0;
                if (hitsRequested.isFixedDrillPath()) {
                    // only in the case of the fixed drillpath we use the count where the facet does not need to exist
                    numHits = (int) matchingDocs.cardinality();
                }

                ParsedFacet parsedFacet;
                try {
                    parsedFacet = ParsedFacet.getInstance(namespacedFacet);
                } catch (Exception e) {
                    log.error("Error parsing facet: ", e);
                    return new ResultImpl(0, null);
                }

                String propertyName = ServicingNameFormat.getInteralPropertyPathName(nsMappings,
                        parsedFacet.getNamespacedProperty());

                /*
                 * facetPropExists: the node must have the property as facet
                 */
                matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(
                        new FacetPropExistsQuery(propertyName).getQuery(), indexReader));

                matchingDocs = matchingDocsSetBuilder.toBitSet();
                cardinality = (int) matchingDocs.cardinality();

                // this method populates the facetValueCountMap for the current facet
                // index reader is instance of JackrabbitIndexReader : we need the wrapped multi-index reader as
                // cache key : since during deletes only, the backing index reader can stay the same, we
                // also need to use numDocs to be sure we get the right cached values
                Object[] keyObjects = { matchingDocs, propertyName, parsedFacet, indexReader.getCoreCacheKey(),
                        indexReader.numDocs() };
                FVCKey fvcKey = new FVCKey(keyObjects);

                Map<String, Count> facetValueCountMap = facetValueCountCache.getIfPresent(fvcKey);
                if (facetValueCountMap == null) {
                    facetValueCountMap = new HashMap<String, Count>();
                    populateFacetValueCountMap(propertyName, parsedFacet, facetValueCountMap, matchingDocs,
                            indexReader);
                    facetValueCountCache.put(fvcKey, facetValueCountMap);
                    log.debug("Caching new facet value count map");
                } else {
                    log.debug("Reusing previously cached facet value count map");
                }

                Map<String, Count> resultFacetValueCountMap = resultset.get(namespacedFacet);
                resultFacetValueCountMap.putAll(facetValueCountMap);

                // set the numHits value
                if (hitsRequested.isFixedDrillPath()) {
                    return new ResultImpl(numHits, null);
                }
            }
            return new ResultImpl(cardinality, null);

        } else {
            // resultset is null, so search for HippoNodeType.HIPPO_RESULTSET
            if (!hitsRequested.isResultRequested()) {
                // No search with SCORING involved, thus everything can be done with BitSets
                if (facetFiltersQuery != null && facetFiltersQuery.getQuery().clauses().size() > 0) {
                    matchingDocsSetBuilder
                            .add(filterDocIdSetPlainLuceneQuery(facetFiltersQuery.getQuery(), indexReader));
                }
                if (openQuery != null) {
                    QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                    matchingDocsSetBuilder.add(filterDocIdSetJackRabbitQuery(queryAndSort.query, indexReader));
                }
                int size = (int) matchingDocsSetBuilder.toBitSet().cardinality();
                return new ResultImpl(size, null);

            } else {
                BooleanQuery searchQuery = new BooleanQuery(false);
                Sort freeSearchInjectedSort = null;

                if (facetFiltersQuery != null && facetFiltersQuery.getQuery().clauses().size() > 0) {
                    searchQuery.add(facetFiltersQuery.getQuery(), Occur.MUST);
                }
                if (openQuery != null) {
                    QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                    if (queryAndSort.query != null) {
                        searchQuery.add(queryAndSort.query, Occur.MUST);
                    }
                    freeSearchInjectedSort = queryAndSort.sort;
                }

                Set<String> fieldNames = new HashSet<String>();
                fieldNames.add(FieldNames.UUID);
                FieldSelector fieldSelector = new SetBasedFieldSelector(fieldNames, new HashSet<String>());

                int fetchTotal = hitsRequested.getOffset() + hitsRequested.getLimit();

                Sort sort = null;
                if (freeSearchInjectedSort != null) {
                    // we already have a sort from the xpath or sql free search. Use this one
                    sort = freeSearchInjectedSort;
                } else if (hitsRequested.getOrderByList().size() > 0) {
                    List<Path> orderPropertiesList = new ArrayList<Path>();
                    List<Boolean> ascSpecsList = new ArrayList<Boolean>();
                    for (OrderBy orderBy : hitsRequested.getOrderByList()) {
                        try {
                            Name orderByProp = NameFactoryImpl.getInstance().create(orderBy.getName());
                            boolean isAscending = !orderBy.isDescending();
                            orderPropertiesList.add(createPath(orderByProp));
                            ascSpecsList.add(isAscending);
                        } catch (IllegalArgumentException e) {
                            log.warn("Skip property '{}' because cannot create a Name for it: {}",
                                    orderBy.getName(), e.toString());
                        }
                    }
                    if (orderPropertiesList.size() > 0) {
                        Path[] orderProperties = orderPropertiesList
                                .toArray(new Path[orderPropertiesList.size()]);
                        boolean[] ascSpecs = new boolean[ascSpecsList.size()];
                        int i = 0;
                        for (Boolean b : ascSpecsList) {
                            ascSpecs[i] = b;
                            i++;
                        }
                        sort = new Sort(
                                createSortFields(orderProperties, ascSpecs, new String[orderProperties.length]));
                    }
                }

                boolean sortScoreAscending = false;
                // if the sort is on score descending, we can set it to null as this is the default and more efficient
                if (sort != null && sort.getSort().length == 1 && sort.getSort()[0].getType() == SortField.SCORE) {
                    if (sort.getSort()[0].getReverse()) {
                        sortScoreAscending = true;
                    } else {
                        // we can skip sort as it is on score descending
                        sort = null;
                    }
                }

                TopDocs tfDocs;
                org.apache.lucene.search.Query query = searchQuery;
                if (searchQuery.clauses().size() == 0) {
                    // add a match all query
                    // searchQuery.add(new MatchAllDocsQuery(), Occur.MUST);
                    query = new MatchAllDocsQuery();
                }

                if (sort == null) {
                    // when sort == null, run the search without a sort as that is more efficient
                    Filter filterToApply = new DocIdSetFilter(matchingDocsSetBuilder.toBitSet());
                    tfDocs = searcher.search(query, filterToApply, fetchTotal);
                } else {
                    if (sortScoreAscending) {
                        // we need the entire searchQuery because scoring is involved
                        Filter filterToApply = new DocIdSetFilter(matchingDocsSetBuilder.toBitSet());
                        tfDocs = searcher.search(query, filterToApply, fetchTotal, sort);
                    } else {
                        // because we have at least one explicit sort, scoring can be skipped.
                        // We can use cached bitsets combined with a match all query
                        if (facetFiltersQuery != null) {
                            matchingDocsSetBuilder.add(
                                    filterDocIdSetPlainLuceneQuery(facetFiltersQuery.getQuery(), indexReader));
                        }
                        if (openQuery != null) {
                            QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                            matchingDocsSetBuilder
                                    .add(filterDocIdSetJackRabbitQuery(queryAndSort.query, indexReader));
                        }
                        Filter filterToApply = new DocIdSetFilter(matchingDocsSetBuilder.toBitSet());
                        // set query to MatchAllDocsQuery because we have everything as filter now
                        query = new MatchAllDocsQuery();
                        tfDocs = searcher.search(query, filterToApply, fetchTotal, sort);
                    }
                }

                ScoreDoc[] hits = tfDocs.scoreDocs;
                int position = hitsRequested.getOffset();

                // LinkedHashSet because ordering should be kept!
                Set<NodeId> nodeIdHits = new LinkedHashSet<NodeId>();
                while (position < hits.length) {
                    Document d = indexReader.document(hits[position].doc, fieldSelector);
                    Field uuidField = d.getField(FieldNames.UUID);
                    if (uuidField != null) {
                        nodeIdHits.add(NodeId.valueOf(uuidField.stringValue()));
                    }
                    position++;
                }
                return new ResultImpl(nodeIdHits.size(), nodeIdHits);
            }
        }

    } catch (IllegalNameException e) {
        log.error("Error during creating view: ", e);
    } catch (IOException e) {
        log.error("Error during creating view: ", e);
    } finally {
        if (indexReader != null) {
            try {
                // do not call indexReader.close() as ref counting is taken care of by
                // org.apache.jackrabbit.core.query.lucene.Util#closeOrRelease
                Util.closeOrRelease(indexReader);
            } catch (IOException e) {
                log.error("Exception while closing index reader", e);
            }
        }
    }
    return new ResultImpl(0, null);
}
From source file:org.jahia.services.search.facets.SimpleJahiaJcrFacets.java
License:Open Source License
/**
 * Returns the original query if it was already a positive query, otherwise
 * return the negative of the query (i.e., a positive query).
 * <p>
 * Example: both id:10 and id:-10 will return id:10
 * <p>
 * The caller can tell the sign of the original by a reference comparison between
 * the original and returned query.
 * @param q
 * @return
 */
static Query getAbs(Query q) {
    if (!(q instanceof BooleanQuery))
        return q;
    BooleanQuery bq = (BooleanQuery) q;

    List<BooleanClause> clauses = bq.clauses();
    if (clauses.size() == 0)
        return q;

    for (BooleanClause clause : clauses) {
        if (!clause.isProhibited())
            return q;
    }

    if (clauses.size() == 1) {
        // if only one clause, dispense with the wrapping BooleanQuery
        Query negClause = clauses.get(0).getQuery();
        // we shouldn't need to worry about adjusting the boosts since the negative
        // clause would have never been selected in a positive query, and hence would
        // not contribute to a score.
        return negClause;
    } else {
        BooleanQuery newBq = new BooleanQuery(bq.isCoordDisabled());
        newBq.setBoost(bq.getBoost());
        // ignore minNrShouldMatch... it doesn't make sense for a negative query
        // the inverse of -a -b is a OR b
        for (BooleanClause clause : clauses) {
            newBq.add(clause.getQuery(), BooleanClause.Occur.SHOULD);
        }
        return newBq;
    }
}
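A hedged usage sketch of the idea: a query made purely of prohibited clauses is flipped into a SHOULD query, everything else is returned by reference. The caller method absExample is hypothetical and assumed to live inside SimpleJahiaJcrFacets; it uses the same pre-5.x mutable BooleanQuery API as the code above.

// Illustrative only: assumes the deprecated mutable BooleanQuery API used by this class.
static void absExample() {
    BooleanQuery negOnly = new BooleanQuery();
    negOnly.add(new TermQuery(new Term("id", "10")), BooleanClause.Occur.MUST_NOT);
    negOnly.add(new TermQuery(new Term("id", "11")), BooleanClause.Occur.MUST_NOT);

    Query abs = getAbs(negOnly);
    // abs is a fresh BooleanQuery holding id:10 and id:11 as SHOULD clauses
    // (the inverse of "-a -b" is "a OR b"); abs != negOnly tells the caller
    // that the original was a pure negation.

    Query positive = new TermQuery(new Term("id", "10"));
    // getAbs(positive) returns the same instance, signalling it was already positive.
}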
From source file:org.languagetool.dev.index.PatternRuleQueryBuilder.java
License:Open Source License
/**
 * Iterate over all elements, ignore those not supported, add the other ones to a BooleanQuery.
 * @throws UnsupportedPatternRuleException if no query could be created for the rule
 */
public Query buildRelaxedQuery(PatternRule rule) throws UnsupportedPatternRuleException {
    final BooleanQuery booleanQuery = new BooleanQuery();
    for (Element element : rule.getElements()) {
        try {
            final BooleanClause clause = makeQuery(element);
            booleanQuery.add(clause);
        } catch (UnsupportedPatternRuleException e) {
            //System.out.println("Ignoring because it's not supported: " + element + ": " + e);
            // cannot handle - okay to ignore, as we may return too broad matches
        } catch (Exception e) {
            throw new RuntimeException("Could not create query for rule " + rule.getId(), e);
        }
    }
    if (booleanQuery.clauses().size() == 0) {
        throw new UnsupportedPatternRuleException(
                "No items found in rule that can be used to build a search query: " + rule);
    }
    return booleanQuery;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.Search.query.SpanWildcardQuery.java
License:Open Source License
public Query rewrite(IndexReader reader) throws IOException {
    WildcardQuery orig = new WildcardQuery(term);
    orig.setRewriteMethod(SpanMultiTermQueryWrapper.SCORING_SPAN_QUERY_REWRITE);
    Query rewritten = orig.rewrite(reader);

    SpanQuery[] spanQueries = null;
    SpanOrQuery termQuery = null;
    if (rewritten instanceof BooleanQuery) {
        BooleanQuery booleanQuery = (BooleanQuery) rewritten;
        List<BooleanClause> clauses = booleanQuery.clauses();
        spanQueries = new SpanQuery[clauses.size()];
        for (int i = 0; i < clauses.size(); i++) {
            BooleanClause clause = clauses.get(i);
            // Clauses from RegexQuery.rewrite are always TermQuery's
            TermQuery tq = (TermQuery) clause.getQuery();
            spanQueries[i] = new SpanTermQuery(tq.getTerm());
            spanQueries[i].setBoost(tq.getBoost());
        }
        return new SpanOrQuery(spanQueries);
    } else {
        termQuery = (SpanOrQuery) rewritten;
    }
    termQuery.setBoost(orig.getBoost());
    return termQuery;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.Search.SpellingErrorTolerantSubStringSearch.java
License:Open Source License
/**
 * Builds the span near query.
 *
 * @param tokens the tokens
 * @param luceneSearchField the lucene search field
 * @param slop the slop
 * @param inOrder the in order
 *
 * @return the query
 */
protected Query buildSpanNearQuery(String[] tokens, String luceneSearchField, int slop, boolean boostLiteral) {
    PhraseQuery.Builder builder = new PhraseQuery.Builder();

    for (int i = 0; i < tokens.length; i++) {
        Query parsedQuery = null;
        try {
            parsedQuery = super.getQueryParser().parse(luceneSearchField + ":( " + tokens[i] + ")");
            parsedQuery.toString();
        } catch (ParseException e) {
            e.printStackTrace();
        }

        if (parsedQuery instanceof BooleanQuery) {
            BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
            TermQuery tq = null;
            for (BooleanClause clause : booleanQuery.clauses()) {
                if (clause.getQuery() instanceof BooleanQuery) {
                    tq = (TermQuery) clause.getQuery();
                } else {
                    tq = (TermQuery) clause.getQuery();
                }
                builder.add(tq.getTerm());
            }
        } else {
            TermQuery tq = (TermQuery) parsedQuery;
            builder.add(tq.getTerm());
        }
    }

    builder.setSlop(slop);
    PhraseQuery returnQuery = builder.build();
    if (boostLiteral) {
        returnQuery.setBoost(0.5f);
    }
    return returnQuery;
}
From source file:org.modeshape.jcr.query.lucene.basic.BasicLuceneQueryFactory.java
License:Open Source License
@Override
protected Query findNodesWith(SelectorName selectorName, PropertyValue propertyValue, Operator operator,
        Object value, CaseOperation caseOperation) {
    if (caseOperation == null)
        caseOperation = CaseOperations.AS_IS;
    String field = propertyValue.getPropertyName();
    Schemata.Column metadata = getMetadataFor(selectorName, field);
    if (metadata != null) {
        PropertyType requiredType = metadata.getRequiredType();
        PropertyType valueType = PropertyType.discoverType(value);
        // The supplied value might not match the required type. If it doesn't, then the client issuing the query
        // has different expectations on what values are stored in the index. If the types are different, then
        // we should compute a query based upon the required type (which converts the supplied value) *and*
        // a query based upon the actual type; and we can OR these together.
        Query query1 = findNodesWith(selectorName, propertyValue, operator, value, caseOperation, requiredType,
                metadata);
        if (requiredType == valueType) {
            return query1;
        }
        // Otherwise the types are different, so build the same query using the actual type ...
        Query query2 = findNodesWith(selectorName, propertyValue, operator, value, caseOperation, valueType,
                metadata);
        if (query1.equals(query2))
            return query1;
        if (operator == Operator.NOT_EQUAL_TO) {
            // We actually want to AND the negated results ...
            BooleanQuery result = new BooleanQuery();
            result.add(new BooleanClause(query1, Occur.MUST));
            result.add(new BooleanClause(query2, Occur.MUST));
            return result;
        }
        BooleanQuery result = new BooleanQuery();
        result.add(new BooleanClause(query1, Occur.SHOULD));
        result.add(new BooleanClause(query2, Occur.SHOULD));
        return result;
    }
    assert metadata == null;
    if (!(value instanceof String)) {
        // This is due to an explicit cast, so treat it as the actual value ...
        PropertyType type = PropertyType.discoverType(value);
        return findNodesWith(selectorName, propertyValue, operator, value, caseOperation, type, metadata);
    }
    if (NodeInfoIndex.FieldName.WORKSPACE.equals(field)) {
        String strValue = stringFactory.create(value);
        return findNodesWith(selectorName, propertyValue, operator, strValue, caseOperation,
                PropertyType.STRING, null);
    }
    // Otherwise, the metadata is null and the value is a string. We can't find metadata if the property is residual,
    // and since the value is a string, we may be able to represent the value using different types. So rather than
    // determining the type from the string value, we can try converting the value to the different types and see
    // which ones work. If there are multiple conversions (including string), then we can OR them together.
    BooleanQuery orOfValues = new BooleanQuery();
    boolean checkBoolean = false;
    boolean checkDate = true;
    try {
        Long lValue = factories.getLongFactory().create(value);
        Query query = findNodesWith(selectorName, propertyValue, operator, lValue, caseOperation,
                PropertyType.LONG, null);
        if (query != null) {
            orOfValues.add(query, Occur.SHOULD);
        }
        checkBoolean = lValue.longValue() == 1L || lValue.longValue() == 0L;
        checkDate = false; // no need to check the date, as we'd just convert it to a long and we've already added that
    } catch (ValueFormatException e) {
        // Not a long value ...
    }
    try {
        Double dValue = factories.getDoubleFactory().create(value);
        Query query = findNodesWith(selectorName, propertyValue, operator, dValue, caseOperation,
                PropertyType.DOUBLE, null);
        if (query != null) {
            orOfValues.add(query, Occur.SHOULD);
        }
    } catch (ValueFormatException e) {
        // Not a double value ...
    }
    if (checkBoolean) {
        try {
            Boolean b = factories.getBooleanFactory().create(value);
            Query query = findNodesWith(selectorName, propertyValue, operator, b, caseOperation,
                    PropertyType.BOOLEAN, null);
            if (query != null) {
                orOfValues.add(query, Occur.SHOULD);
            }
        } catch (ValueFormatException e) {
            // Not a boolean value ...
        }
    }
    if (checkDate) {
        try {
            DateTime date = factories.getDateFactory().create(value);
            Query query = findNodesWith(selectorName, propertyValue, operator, date, caseOperation,
                    PropertyType.DATE, null);
            if (query != null) {
                orOfValues.add(query, Occur.SHOULD);
            }
        } catch (ValueFormatException e) {
            // Not a date value ...
        }
    }
    // Finally treat it as a string ...
    String strValue = stringFactory.create(value);
    Query strQuery = findNodesWith(selectorName, propertyValue, operator, strValue, caseOperation,
            PropertyType.STRING, null);
    if (orOfValues.clauses().isEmpty()) {
        return strQuery;
    }
    orOfValues.add(strQuery, Occur.SHOULD);
    return orOfValues;
}