Usage examples for org.apache.lucene.queryparser.flexible.standard.StandardQueryParser.setAllowLeadingWildcard
@Override public void setAllowLeadingWildcard(boolean allowLeadingWildcard)
Set to true to allow leading wildcard characters.
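Leading wildcards are rejected by default because a pattern such as *foo can force the index to scan every term in the field. Below is a minimal sketch of enabling them, assuming a Lucene release where StandardAnalyzer has a no-argument constructor; the field name "content" is only for illustration.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
import org.apache.lucene.search.Query;

public class LeadingWildcardExample {
    public static void main(String[] args) throws QueryNodeException {
        StandardQueryParser parser = new StandardQueryParser(new StandardAnalyzer());

        // Leading wildcards are disabled by default; enable them explicitly.
        parser.setAllowLeadingWildcard(true);

        // Without the call above, a query text starting with '*' or '?'
        // is rejected by the parser.
        Query query = parser.parse("*wildcard", "content");
        System.out.println(query);
    }
}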
From source file: com.tuplejump.stargate.lucene.query.LuceneCondition.java
License: Apache License
/**
 * {@inheritDoc}
 */
@Override
public Query query(Options schema) {
    if (query == null) {
        throw new IllegalArgumentException("Query statement required");
    }
    try {
        StandardQueryParser parser = new StandardQueryParser(schema.analyzer);
        parser.setNumericConfigMap(schema.numericFieldOptions);
        parser.setAllowLeadingWildcard(true);
        Query luceneQuery = parser.parse(query, getDefaultField(schema));
        luceneQuery.setBoost(boost);
        logger.debug("Lucene query is {}", luceneQuery);
        return luceneQuery;
    } catch (Exception e) {
        throw new RuntimeException("Error while parsing lucene syntax query", e);
    }
}
From source file: de.walware.statet.r.internal.core.rhelp.index.SearchQuery.java
License: Open Source License
static Query createMainQuery(final String fields[], final String queryText) throws QueryNodeException {
    final StandardQueryParser p = new StandardQueryParser(QUERY_ANALYZER);
    p.setDefaultOperator(Operator.AND);
    p.setAllowLeadingWildcard(true);
    p.setMultiFields(fields);
    return p.parse(queryText, null);
}
From source file: org.apache.jackrabbit.oak.plugins.index.lucene.LucenePropertyIndex.java
License: Apache License
static Query tokenToQuery(String text, String fieldName, Analyzer analyzer) {
    if (analyzer == null) {
        return null;
    }
    StandardQueryParser parserHelper = new StandardQueryParser(analyzer);
    parserHelper.setAllowLeadingWildcard(true);
    parserHelper.setDefaultOperator(StandardQueryConfigHandler.Operator.AND);

    text = rewriteQueryText(text);

    try {
        return parserHelper.parse(text, fieldName);
    } catch (QueryNodeException e) {
        throw new RuntimeException(e);
    }
}
From source file: org.efaps.admin.index.Searcher.java
License: Apache License
/**
 * Search.
 *
 * @param _search the search
 * @return the search result
 * @throws EFapsException on error
 */
protected SearchResult executeSearch(final ISearch _search) throws EFapsException {
    final SearchResult ret = new SearchResult();
    try {
        LOG.debug("Starting search with: {}", _search.getQuery());
        final StandardQueryParser queryParser = new StandardQueryParser(Index.getAnalyzer());
        queryParser.setAllowLeadingWildcard(true);
        if (EFapsSystemConfiguration.get().containsAttributeValue(KernelSettings.INDEXDEFAULTOP)) {
            queryParser.setDefaultOperator(EnumUtils.getEnum(StandardQueryConfigHandler.Operator.class,
                    EFapsSystemConfiguration.get().getAttributeValue(KernelSettings.INDEXDEFAULTOP)));
        } else {
            queryParser.setDefaultOperator(StandardQueryConfigHandler.Operator.AND);
        }
        final Query query = queryParser.parse(_search.getQuery(), "ALL");
        final IndexReader reader = DirectoryReader.open(Index.getDirectory());
        Sort sort = _search.getSort();
        if (sort == null) {
            sort = new Sort(new SortField(Key.CREATED.name(), SortField.Type.LONG, true));
        }
        final FacetsConfig facetConfig = Index.getFacetsConfig();
        final DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(Index.getTaxonomyDirectory());
        final IndexSearcher searcher = new IndexSearcher(reader);
        final FacetsCollector fc = new FacetsCollector();
        final TopFieldDocs topFieldDocs = FacetsCollector.search(searcher, query, _search.getNumHits(), sort, fc);
        if (_search.getConfigs().contains(SearchConfig.ACTIVATE_DIMENSION)) {
            final Facets facets = new FastTaxonomyFacetCounts(taxoReader, facetConfig, fc);
            for (final FacetResult result : facets.getAllDims(1000)) {
                LOG.debug("FacetResult {}.", result);
                final DimConfig dimConfig = facetConfig.getDimConfig(result.dim);
                final Dimension retDim = new Dimension().setKey(result.dim);
                ret.getDimensions().add(retDim);
                for (final LabelAndValue labelValue : result.labelValues) {
                    final DimValue dimValue = new DimValue().setLabel(labelValue.label)
                            .setValue(labelValue.value.intValue());
                    dimValue.setPath(new String[] { retDim.getKey() });
                    retDim.getValues().add(dimValue);
                    if (dimConfig.hierarchical) {
                        addSubDimension(facets, dimValue, result.dim, labelValue.label);
                    }
                }
            }
        }
        ret.setHitCount(topFieldDocs.totalHits);
        if (ret.getHitCount() > 0) {
            final ScoreDoc[] hits = topFieldDocs.scoreDocs;
            LOG.debug("Found {} hits.", hits.length);
            for (int i = 0; i < hits.length; ++i) {
                final Document doc = searcher.doc(hits[i].doc);
                final String oid = doc.get(Key.OID.name());
                final String text = doc.get(Key.MSGPHRASE.name());
                LOG.debug("{}. {}\t {}", i + 1, oid, text);
                final Instance instance = Instance.get(oid);
                final List<Instance> list;
                if (this.typeMapping.containsKey(instance.getType())) {
                    list = this.typeMapping.get(instance.getType());
                } else {
                    list = new ArrayList<Instance>();
                    this.typeMapping.put(instance.getType(), list);
                }
                list.add(instance);
                final Element element = new Element().setOid(oid).setText(text);
                for (final Entry<String, Collection<String>> entry : _search.getResultFields().entrySet()) {
                    for (final String name : entry.getValue()) {
                        final String value = doc.get(name);
                        if (value != null) {
                            element.addField(name, value);
                        }
                    }
                }
                this.elements.put(instance, element);
            }
        }
        reader.close();
        checkAccess();
        ret.getElements().addAll(this.elements.values());
    } catch (final IOException | QueryNodeException e) {
        LOG.error("Catched Exception", e);
    }
    return ret;
}
From source file: org.scilab.modules.xcos.palette.PaletteSearcher.java
/**
 * @param str Query
 * @return paths to the found blocks
 */
public List<Document> search(String str) {
    List<Document> found = new ArrayList<>();
    try (IndexReader reader = DirectoryReader.open(mgr.getDirectory())) {
        IndexSearcher searcher = new IndexSearcher(reader);
        StandardQueryParser queryParserHelper = new StandardQueryParser();
        queryParserHelper.setAllowLeadingWildcard(true);
        queryParserHelper.setLowercaseExpandedTerms(true);
        queryParserHelper.setAnalyzer(mgr.getAnalyzer());
        queryParserHelper.setMultiFields(new String[] { "refname", "refpurpose", "content" });
        Query query = queryParserHelper.parse(str, null);
        TopDocs results = searcher.search(query, XcosConstants.MAX_HITS);
        ScoreDoc[] hits = results.scoreDocs;
        if (hits.length == 0) {
            // No exact hits: retry with the text wrapped in wildcards; the leading '*'
            // is only legal because of setAllowLeadingWildcard(true) above.
            query = queryParserHelper.parse("*" + str + "*", null);
            results = searcher.search(query, XcosConstants.MAX_HITS);
            hits = results.scoreDocs;
        }
        for (int i = 0; i < hits.length; i++) {
            Document doc = searcher.doc(hits[i].doc);
            found.add(doc);
        }
    } catch (IOException | QueryNodeException e) {
        Logger.getLogger(PaletteSearcher.class.getName()).log(Level.SEVERE, null, e);
    }
    return found;
}
From source file: stroom.search.server.SearchExpressionQueryBuilder.java
License: Apache License
private Query getSubQuery(final Version matchVersion, final IndexField field, final String value,
        final Set<String> terms, final boolean in) {
    Query query = null;

    // Store terms for hit highlighting.
    String highlight = value;
    highlight = NON_WORD.matcher(highlight).replaceAll(" ");
    highlight = highlight.trim();
    highlight = MULTIPLE_SPACE.matcher(highlight).replaceAll(" ");
    final String[] highlights = highlight.split(" ");
    Collections.addAll(terms, highlights);

    // If we have omitted term frequencies and positions for this field then
    // we can't expect to do a sentence match. In this case we need to
    // modify the query so that each word becomes a new term in a boolean
    // query.
    String val = value.trim();
    if (in || !AnalyzerType.KEYWORD.equals(field.getAnalyzerType())) {
        // If the field has been analysed then we need to analyse the search
        // query to create matching terms.
        final Analyzer analyzer = AnalyzerFactory.create(field.getAnalyzerType(), field.isCaseSensitive());
        if (!field.isTermPositions()) {
            val = NON_WORD_OR_WILDCARD.matcher(val).replaceAll(" ");
            val = val.trim();
            val = MULTIPLE_SPACE.matcher(val).replaceAll(" +");
            val = MULTIPLE_WILDCARD.matcher(val).replaceAll("+");
        }
        if (val.length() > 0) {
            final StandardQueryParser queryParser = new StandardQueryParser(analyzer);
            queryParser.setAllowLeadingWildcard(true);
            queryParser.setLowercaseExpandedTerms(!field.isCaseSensitive());
            try {
                query = queryParser.parse(val, field.getFieldName());
            } catch (final QueryNodeException e) {
                throw new SearchException("Unable to parse query term '" + val + "'", e);
            }
        }
    } else {
        if (val.length() > 0) {
            // As this is just indexed as a keyword we only want to search
            // for the term.
            if (!field.isCaseSensitive()) {
                val = value.toLowerCase();
            }
            final Term term = new Term(field.getFieldName(), val);
            final boolean termContainsWildcard = (val.indexOf('*') != -1) || (val.indexOf('?') != -1);
            if (termContainsWildcard) {
                query = new WildcardQuery(new Term(field.getFieldName(), val));
            } else {
                query = new TermQuery(term);
            }
        }
    }
    return query;
}