List of usage examples for the org.apache.lucene.search.IndexSearcher constructor
public IndexSearcher(IndexReader r)
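Before the extracted examples, a minimal self-contained sketch of the constructor in use (Lucene 5/6-style API; the index path, field name and term value are placeholders):

import java.nio.file.Paths;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class IndexSearcherBasics {
    public static void main(String[] args) throws Exception {
        // Open an existing index; the path is a placeholder.
        try (Directory dir = FSDirectory.open(Paths.get("/path/to/index"));
                DirectoryReader reader = DirectoryReader.open(dir)) {
            // The searcher wraps the reader; it has no close() of its own in this API
            // generation, so closing the reader is sufficient.
            IndexSearcher searcher = new IndexSearcher(reader);
            TopDocs topDocs = searcher.search(new TermQuery(new Term("title", "lucene")), 10);
            for (ScoreDoc hit : topDocs.scoreDocs) {
                Document doc = searcher.doc(hit.doc);
                System.out.println(doc.get("title"));
            }
        }
    }
}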
From source file:com.epam.catgenome.dao.index.FeatureIndexDao.java
License:Open Source License
/**
 * Returns a {@code List} of chromosome IDs for a project, specified by ID, where variations exist and satisfy a
 * specified query
 *
 * @param projectId an ID of a project whose index to query
 * @param query a query to filter variations
 * @return a {@code List} of chromosome IDs
 * @throws IOException
 */
public List<Long> getChromosomeIdsWhereVariationsPresentFacet(long projectId, Query query) throws IOException {
    List<Long> chromosomeIds = new ArrayList<>();

    try (Directory index = fileManager.getIndexForProject(projectId);
            IndexReader reader = DirectoryReader.open(index)) {
        if (reader.numDocs() == 0) {
            return Collections.emptyList();
        }
        FacetsCollector facetsCollector = new FacetsCollector();
        IndexSearcher searcher = new IndexSearcher(reader);
        searcher.search(query, facetsCollector);

        Facets facets = new SortedSetDocValuesFacetCounts(new DefaultSortedSetDocValuesReaderState(reader,
                FeatureIndexFields.FACET_CHR_ID.getFieldName()), facetsCollector);
        FacetResult res = facets.getTopChildren(FACET_LIMIT, FeatureIndexFields.CHR_ID.getFieldName());
        if (res == null) {
            return Collections.emptyList();
        }
        for (LabelAndValue labelAndValue : res.labelValues) {
            chromosomeIds.add(Long.parseLong(labelAndValue.label));
        }
    }
    return chromosomeIds;
}
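The facet counts above only work if the documents were indexed with SortedSetDocValuesFacetField values routed into the FACET_CHR_ID index field. A minimal indexing-side sketch follows; the dimension and field names mirror the enum names used in the example and are assumptions, not the project's actual string values (Lucene 5/6-style API).

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class FacetIndexingSketch {
    public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        FacetsConfig facetsConfig = new FacetsConfig();
        // Route the CHR_ID dimension into a dedicated index field; this is the field
        // name that is later handed to DefaultSortedSetDocValuesReaderState.
        facetsConfig.setIndexFieldName("CHR_ID", "FACET_CHR_ID");
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new StringField("featureId", "var_1", Field.Store.YES));
            doc.add(new SortedSetDocValuesFacetField("CHR_ID", "21"));
            // build() converts the facet field into sorted-set doc values.
            writer.addDocument(facetsConfig.build(doc));
        }
    }
}

On the read side this pairing corresponds to constructing DefaultSortedSetDocValuesReaderState over the dedicated index field and calling getTopChildren with the dimension name, as the method above does.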
From source file:com.epam.catgenome.dao.index.FeatureIndexDao.java
License:Open Source License
/**
 * Returns a {@code List} of chromosome IDs from specified files, where variations exist and satisfy a
 * specified query
 *
 * @param files a list of {@link FeatureFile}s to search chromosomes
 * @param query a query to filter variations
 * @return a {@code List} of chromosome IDs
 * @throws IOException
 */
public List<Long> getChromosomeIdsWhereVariationsPresentFacet(List<? extends FeatureFile> files, Query query)
        throws IOException {
    if (CollectionUtils.isEmpty(files)) {
        return Collections.emptyList();
    }
    List<Long> chromosomeIds = new ArrayList<>();

    SimpleFSDirectory[] indexes = fileManager.getIndexesForFiles(files);

    try (MultiReader reader = openMultiReader(indexes)) {
        if (reader.numDocs() == 0) {
            return Collections.emptyList();
        }
        FacetsCollector facetsCollector = new FacetsCollector();
        IndexSearcher searcher = new IndexSearcher(reader);
        searcher.search(query, facetsCollector);

        Facets facets = new SortedSetDocValuesFacetCounts(new DefaultSortedSetDocValuesReaderState(reader,
                FeatureIndexFields.FACET_CHR_ID.getFieldName()), facetsCollector);
        FacetResult res = facets.getTopChildren(FACET_LIMIT, FeatureIndexFields.CHR_ID.getFieldName());
        if (res == null) {
            return Collections.emptyList();
        }
        for (LabelAndValue labelAndValue : res.labelValues) {
            chromosomeIds.add(Long.parseLong(labelAndValue.label));
        }
    } finally {
        closeIndexes(indexes);
    }
    return chromosomeIds;
}
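The multi-file methods in this class delegate to an openMultiReader(...) helper that is not included in this listing. A plausible shape for it, offered purely as an assumption, wraps each per-file directory in a DirectoryReader and combines them into a single MultiReader:

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.store.SimpleFSDirectory;

// Hypothetical reconstruction of the helper; the real FeatureIndexDao code is not shown here.
private MultiReader openMultiReader(SimpleFSDirectory[] indexes) throws IOException {
    IndexReader[] readers = new IndexReader[indexes.length];
    for (int i = 0; i < indexes.length; i++) {
        readers[i] = DirectoryReader.open(indexes[i]);
    }
    // closeSubReaders = true lets MultiReader.close() close the per-file readers.
    return new MultiReader(readers, true);
}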
From source file:com.epam.catgenome.dao.index.FeatureIndexDao.java
License:Open Source License
/**
 * Searches for gene IDs affected by variations in the specified VCF files of a specified project
 *
 * @param projectId an ID of a project to search genes in
 * @param gene a prefix of a gene ID to search
 * @param vcfFileIds a {@code List} of IDs of VCF files in the project to search for gene IDs
 * @return a {@code Set} of gene IDs that are affected by variations in the specified VCF files
 * @throws IOException
 */
public Set<String> searchGenesInVcfFilesInProject(long projectId, String gene, List<Long> vcfFileIds)
        throws IOException {
    if (vcfFileIds == null || vcfFileIds.isEmpty()) {
        return Collections.emptySet();
    }

    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    PrefixQuery geneIdPrefixQuery = new PrefixQuery(
            new Term(FeatureIndexFields.GENE_ID.getFieldName(), gene.toLowerCase()));
    PrefixQuery geneNamePrefixQuery = new PrefixQuery(
            new Term(FeatureIndexFields.GENE_NAME.getFieldName(), gene.toLowerCase()));
    BooleanQuery.Builder geneIdOrNameQuery = new BooleanQuery.Builder();
    geneIdOrNameQuery.add(geneIdPrefixQuery, BooleanClause.Occur.SHOULD);
    geneIdOrNameQuery.add(geneNamePrefixQuery, BooleanClause.Occur.SHOULD);
    builder.add(geneIdOrNameQuery.build(), BooleanClause.Occur.MUST);

    List<Term> terms = vcfFileIds.stream()
            .map(vcfFileId -> new Term(FeatureIndexFields.FILE_ID.getFieldName(), vcfFileId.toString()))
            .collect(Collectors.toList());
    TermsQuery termsQuery = new TermsQuery(terms);
    builder.add(termsQuery, BooleanClause.Occur.MUST);
    BooleanQuery query = builder.build();

    Set<String> geneIds;

    try (Directory index = fileManager.getIndexForProject(projectId);
            IndexReader reader = DirectoryReader.open(index)) {
        if (reader.numDocs() == 0) {
            return Collections.emptySet();
        }
        IndexSearcher searcher = new IndexSearcher(reader);
        final TopDocs docs = searcher.search(query, reader.numDocs());
        final ScoreDoc[] hits = docs.scoreDocs;
        geneIds = fetchGeneIds(hits, searcher);
    } catch (IOException e) {
        LOGGER.error(MessageHelper.getMessage(MessagesConstants.ERROR_FEATURE_INDEX_SEARCH_FAILED), e);
        return Collections.emptySet();
    }

    return geneIds;
}
From source file:com.epam.catgenome.dao.index.FeatureIndexDao.java
License:Open Source License
/**
 * Searches the indexes of the specified VCF files for genes whose ID or name starts with the given prefix.
 */
public Set<String> searchGenesInVcfFiles(String gene, List<VcfFile> vcfFiles) throws IOException {
    if (CollectionUtils.isEmpty(vcfFiles)) {
        return Collections.emptySet();
    }

    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    PrefixQuery geneIdPrefixQuery = new PrefixQuery(
            new Term(FeatureIndexFields.GENE_ID.getFieldName(), gene.toLowerCase()));
    PrefixQuery geneNamePrefixQuery = new PrefixQuery(
            new Term(FeatureIndexFields.GENE_NAME.getFieldName(), gene.toLowerCase()));
    BooleanQuery.Builder geneIdOrNameQuery = new BooleanQuery.Builder();
    geneIdOrNameQuery.add(geneIdPrefixQuery, BooleanClause.Occur.SHOULD);
    geneIdOrNameQuery.add(geneNamePrefixQuery, BooleanClause.Occur.SHOULD);
    builder.add(geneIdOrNameQuery.build(), BooleanClause.Occur.MUST);
    BooleanQuery query = builder.build();

    Set<String> geneIds;

    SimpleFSDirectory[] indexes = fileManager.getIndexesForFiles(vcfFiles);

    try (MultiReader reader = openMultiReader(indexes)) {
        if (reader.numDocs() == 0) {
            return Collections.emptySet();
        }
        IndexSearcher searcher = new IndexSearcher(reader);
        final TopDocs docs = searcher.search(query, reader.numDocs());
        final ScoreDoc[] hits = docs.scoreDocs;
        geneIds = fetchGeneIds(hits, searcher);
    } catch (IOException e) {
        LOGGER.error(MessageHelper.getMessage(MessagesConstants.ERROR_FEATURE_INDEX_SEARCH_FAILED), e);
        return Collections.emptySet();
    }

    return geneIds;
}
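Both gene-search methods also delegate to a fetchGeneIds(...) helper that is not shown in this listing. A hedged sketch of what such a helper plausibly does; the stored field names are illustrative, not taken from the project:

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.document.Document;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;

// Hypothetical reconstruction; collects gene identifiers from the matched documents.
private Set<String> fetchGeneIds(ScoreDoc[] hits, IndexSearcher searcher) throws IOException {
    Set<String> geneIds = new HashSet<>();
    for (ScoreDoc hit : hits) {
        Document doc = searcher.doc(hit.doc);
        String geneId = doc.get("geneId");     // illustrative field name
        String geneName = doc.get("geneName"); // illustrative field name
        if (geneId != null) {
            geneIds.add(geneId);
        }
        if (geneName != null) {
            geneIds.add(geneName);
        }
    }
    return geneIds;
}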
From source file:com.epam.wilma.message.search.lucene.search.helper.IndexSearcherFactory.java
License:Open Source License
/**
 * Factory for creating a new {@link IndexSearcher} instance using the given {@link IndexReader}.
 * @param indexReader the {@link IndexReader} needed to create the {@link IndexSearcher}
 * @return the new {@link IndexSearcher} instance
 */
public IndexSearcher create(final IndexReader indexReader) {
    return new IndexSearcher(indexReader);
}
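A short, hedged usage sketch for this factory; how the IndexReader is obtained and managed is up to the caller, and the index path below is a placeholder:

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;

// Illustrative caller of IndexSearcherFactory.create(...).
public void searchWithFactory(IndexSearcherFactory factory) throws IOException {
    try (IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get("/path/to/index")))) {
        IndexSearcher searcher = factory.create(reader);
        TopDocs hits = searcher.search(new MatchAllDocsQuery(), 10);
        System.out.println("hits: " + hits.totalHits);
    }
}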
From source file:com.esri.gpt.catalog.lucene.LuceneIndexAdapter.java
License:Apache License
/**
 * Makes a searcher for catalog documents.
 * <p/>The searcher is created from the value returned by getCatalogIndexPath().
 * @return the searcher
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if the directory cannot be read/written to, or if it does not exist
 *   and create is false, or if there is any other low-level IO error
 */
public synchronized IndexSearcher newSearcher() throws CorruptIndexException, IOException {
    IndexSearcher searcher = null;
    if (!this.useSingleSearcher) {
        getLogger().finer("Opening Lucene IndexSearcher...");
        IndexReader reader = IndexReader.open(this.newDirectory(), true);
        searcher = new IndexSearcher(reader);
    } else {
        if (REFERENCED_SEARCHER == null) {
            REFERENCED_SEARCHER = new ReferencedSearcher(this.newDirectory());
            return REFERENCED_SEARCHER.get();
        } else {
            try {
                REFERENCED_SEARCHER.checkForReopen();
            } catch (InterruptedException e) {
                throw new IOException("Interrupted while opening single searcher.", e);
            }
            return REFERENCED_SEARCHER.get();
        }
        /*
        File fDir = new File(this.luceneConfig.getIndexLocation());
        String path = fDir.getCanonicalPath();
        synchronized (SEARCHERS) {
            searcher = SEARCHERS.get(path);
            if (searcher != null) {
                try {
                    if (!searcher.getIndexReader().isCurrent()) {
                        SEARCHERS.remove(path);
                        searcher.getIndexReader().close();
                        searcher.close();
                        searcher = null;
                    }
                } catch (AlreadyClosedException e) {
                    SEARCHERS.remove(path);
                    searcher = null;
                }
            }
            if (searcher == null) {
                IndexReader reader = IndexReader.open(this.newDirectory(), true);
                searcher = new IndexSearcher(reader);
                SEARCHERS.put(path, searcher);
            }
        }
        */
    }
    return searcher;
}
From source file:com.esri.gpt.catalog.lucene.ReferencedSearcher.java
License:Apache License
/**
 * Constructs with a supplied directory.
 * @param dir the directory
 */
public ReferencedSearcher(Directory dir) throws IOException {
    this.currentSearcher = new IndexSearcher(IndexReader.open(dir, true));
    this.warm(this.currentSearcher);
}
From source file:com.esri.gpt.catalog.lucene.ReferencedSearcher.java
License:Apache License
/**
 * Checks whether the index has changed and, if so, opens a new reader,
 * warms a new searcher and swaps it in for the current one.
 */
protected void checkForReopen() throws InterruptedException, IOException {
    this.onReopenBegin();
    try {
        final IndexSearcher searcher = get();
        try {
            IndexReader newReader = this.currentSearcher.getIndexReader().reopen();
            if (newReader != this.currentSearcher.getIndexReader()) {
                IndexSearcher newSearcher = new IndexSearcher(newReader);
                this.warm(newSearcher);
                this.swap(newSearcher);
            }
        } catch (IOException e) {
            LogUtil.getLogger().log(Level.SEVERE, "Error during index re-open.", e);
        } finally {
            this.release(searcher);
        }
    } finally {
        onReopenEnd();
    }
}
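ReferencedSearcher hand-rolls an acquire/reopen/warm/release cycle on top of the Lucene 3.x reader API. Later Lucene versions ship SearcherManager, which implements the same pattern; a hedged sketch of the equivalent (Lucene 4+ API, placeholder index path):

import java.nio.file.Paths;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class SearcherManagerSketch {
    public static void main(String[] args) throws Exception {
        Directory dir = FSDirectory.open(Paths.get("/path/to/index"));
        // null = default SearcherFactory; a custom factory could warm new searchers.
        SearcherManager manager = new SearcherManager(dir, null);

        manager.maybeRefresh();                 // re-open only if the index changed
        IndexSearcher searcher = manager.acquire();
        try {
            // ... run queries against the acquired searcher ...
        } finally {
            manager.release(searcher);          // never close the searcher directly
        }
        manager.close();
    }
}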
From source file:com.esri.gpt.server.assertion.handler.AsnCommentHandler.java
License:Apache License
/**
 * Queries comments.
 * @param context the assertion operation context
 * @throws Exception if an exception occurs
 */
private void query(AsnContext context) throws Exception {

    // initialize
    AsnOperation operation = context.getOperation();
    AsnAssertionSet asnSet = operation.getAssertionSet();
    AsnValueType vType = asnSet.getValueType();
    String subject = operation.getSubject().getURN();
    String predicate = vType.getRdfPredicate();

    // build a query to match all occurrences of the subject/predicate pair
    BooleanQuery query = new BooleanQuery();
    Query qSubject = new TermQuery(new Term(AsnConstants.FIELD_RDF_SUBJECT, subject));
    Query qPredicate = new TermQuery(new Term(AsnConstants.FIELD_RDF_PREDICATE, predicate));
    query.add(qSubject, BooleanClause.Occur.MUST);
    query.add(qPredicate, BooleanClause.Occur.MUST);

    // sort on descending timestamp
    String tsField = AsnConstants.FIELD_SYS_TIMESTAMP;
    Sort sortOption = new Sort(new SortField(tsField, SortField.STRING, true));

    // determine the start and end positions
    int startRecord = context.getRequestOptions().getStartRecord() - 1;
    int maxRecords = context.getRequestOptions().getMaxRecords();
    if (startRecord < 0) startRecord = 0;
    int recordsPerPage = maxRecords;
    if (recordsPerPage <= 0) recordsPerPage = 1;
    int hitsToReturn = startRecord + recordsPerPage;
    int nextRecord = 0;
    int numDocs = 0;

    IndexReader reader = null;
    IndexSearcher searcher = null;
    try {

        // make the reader and searcher, execute the search
        reader = this.getIndexAdapter().makeIndexReader();
        searcher = new IndexSearcher(reader);
        TopDocs topDocs = searcher.search(query, null, hitsToReturn, sortOption);
        ScoreDoc[] scoreDocs = null;
        int totalHits = topDocs.totalHits;
        if (maxRecords > 0) {
            scoreDocs = topDocs.scoreDocs;
            if ((scoreDocs != null) && (scoreDocs.length) > 0) {
                numDocs = scoreDocs.length;
                if (totalHits > numDocs) {
                    nextRecord = numDocs + 1;
                }
            }
        }

        // root property for the response
        String rootSubject = subject;
        String rootPredicate = operation.getPredicate().getURN() + "response";
        AsnProperty rootProp = new AsnProperty(rootSubject, rootPredicate, null);

        // hit count and next record
        String queryPfx = asnSet.getURNPrefix() + ":query";
        rootProp.getChildren().add(new AsnProperty(null, queryPfx + ":hits", "" + totalHits));
        if (nextRecord > 0) {
            rootProp.getChildren().add(new AsnProperty(null, queryPfx + ":nextRecord", "" + nextRecord));
        }

        // canCreate capability for the active user
        String canCreatePred = asnSet.getURNPrefix() + ":activeUser:canCreate";
        String canCreateVal = "" + context.getAuthorizer().canCreate(context, asnSet.getAuthPolicy());
        rootProp.getChildren().add(new AsnProperty(null, canCreatePred, canCreateVal));

        // process the documents, generate the response
        AsnAssertionRenderer renderer = new AsnAssertionRenderer();
        for (int i = startRecord; i < numDocs; i++) {
            Document document = reader.document(scoreDocs[i].doc);
            Assertion assertion = asnSet.newAssertion(context, false);
            assertion.load(document);
            rootProp.getChildren().add(renderer.makeProperty(context, assertion));
        }
        context.getOperationResponse().generateResponse(context, rootProp.getChildren());

    } finally {
        this.getIndexAdapter().closeReader(reader);
        this.getIndexAdapter().closeSearcher(searcher);
    }
}
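The handler above pages by requesting startRecord + recordsPerPage hits and skipping the first startRecord results on the application side. In Lucene 4+ the same effect can be obtained with searchAfter; a hedged sketch (the sort field and page size are placeholders):

import java.io.IOException;

import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;

// Page through sorted results without re-fetching and skipping earlier hits on every request.
static void pageThrough(IndexSearcher searcher) throws IOException {
    Query query = new MatchAllDocsQuery();
    Sort sort = new Sort(new SortField("sys.timestamp", SortField.Type.STRING, true)); // illustrative field
    int pageSize = 10;
    TopDocs page = searcher.search(query, pageSize, sort);
    while (page.scoreDocs.length > 0) {
        // ... render the current page ...
        ScoreDoc last = page.scoreDocs[page.scoreDocs.length - 1];
        // With a Sort, the hits are FieldDoc instances, so this cast is valid.
        page = searcher.searchAfter((FieldDoc) last, query, pageSize, sort);
    }
}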
From source file:com.esri.gpt.server.assertion.handler.AsnRatingHandler.java
License:Apache License
/**
 * Summarizes the ratings for a resource.
 * @param context the assertion operation context
 * @throws Exception if an exception occurs
 */
private void query(AsnContext context) throws Exception {

    // initialize
    AsnOperation operation = context.getOperation();
    AsnAssertionSet asnSet = operation.getAssertionSet();
    AsnValueType vType = asnSet.getValueType();
    String subject = operation.getSubject().getURN();
    String predicate = vType.getRdfPredicate();
    String valueField = vType.getRdfValueField();
    String upValue = "urn:esri:geoportal:rating:value:up";
    String downValue = "urn:esri:geoportal:rating:value:down";

    IndexReader reader = null;
    IndexSearcher searcher = null;
    try {

        // make the reader and searcher
        reader = this.getIndexAdapter().makeIndexReader();
        searcher = new IndexSearcher(reader);

        // count up votes
        long nUp = this.getIndexAdapter().count(context, searcher, valueField, subject, predicate, upValue);

        // count down votes
        long nDown = this.getIndexAdapter().count(context, searcher, valueField, subject, predicate, downValue);

        // root property for the response
        String rootSubject = subject;
        String rootPredicate = operation.getPredicate().getURN() + "response";
        AsnProperty rootProp = new AsnProperty(rootSubject, rootPredicate, null);

        // up, down and total counts
        rootProp.getChildren().add(new AsnProperty(null, upValue + ":count", "" + nUp));
        rootProp.getChildren().add(new AsnProperty(null, downValue + ":count", "" + nDown));
        rootProp.getChildren().add(new AsnProperty(null, asnSet.getURNPrefix() + ":count", "" + (nUp + nDown)));

        // canCreate capability for the active user
        String canCreatePred = asnSet.getURNPrefix() + ":activeUser:canCreate";
        String canCreateVal = "" + context.getAuthorizer().canCreate(context, asnSet.getAuthPolicy());
        rootProp.getChildren().add(new AsnProperty(null, canCreatePred, canCreateVal));

        // user's previous rating
        Assertion previous = this.getIndexAdapter().loadPreviousUserAssertion(context, searcher);
        if (previous != null) {
            String prevSubj = Val.chkStr(previous.getSystemPart().getAssertionId());
            prevSubj = asnSet.getAssertionIdPrefix() + ":" + prevSubj;
            String prevPred = asnSet.getURNPrefix() + ":activeUser:previousValue";
            String prevVal = previous.getRdfPart().getValue();
            rootProp.getChildren().add(new AsnProperty(prevSubj, prevPred, prevVal));
        }

        // generate the response
        context.getOperationResponse().generateResponse(context, rootProp.getChildren());

    } finally {
        this.getIndexAdapter().closeReader(reader);
        this.getIndexAdapter().closeSearcher(searcher);
    }
}
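The up/down vote counting goes through a count(...) helper on the index adapter that is not included in this listing. A hedged sketch of how such a count can be computed with a TotalHitCountCollector (Lucene 3.1+ API); the RDF field names are illustrative stand-ins for the adapter's real constants:

import java.io.IOException;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TotalHitCountCollector;

// Hypothetical reconstruction of the adapter's counting logic, not the real LuceneIndexAdapter code.
private long countAssertions(IndexSearcher searcher, String valueField,
        String subject, String predicate, String value) throws IOException {
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("rdf.subject", subject)), BooleanClause.Occur.MUST);     // illustrative field
    query.add(new TermQuery(new Term("rdf.predicate", predicate)), BooleanClause.Occur.MUST); // illustrative field
    query.add(new TermQuery(new Term(valueField, value)), BooleanClause.Occur.MUST);
    TotalHitCountCollector collector = new TotalHitCountCollector();
    searcher.search(query, collector);
    return collector.getTotalHits();
}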