Example usage for org.apache.lucene.search Sort getSort

Introduction

On this page you can find example usage of org.apache.lucene.search.Sort.getSort().

Prototype

public SortField[] getSort() 

Document

Representation of the sort criteria; returns the array of SortField objects used in this sort.
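
As a quick orientation before the usage examples below, here is a minimal, self-contained sketch (assuming a Lucene 4.x-era API where sort field types are the SortField.Type enum, and a hypothetical "price" field) that builds a Sort and iterates the array returned by getSort():

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

public class SortInspector {
    public static void main(String[] args) {
        // Sort by a hypothetical "price" field ascending, then by relevance score.
        Sort sort = new Sort(new SortField("price", SortField.Type.LONG), SortField.FIELD_SCORE);
        // getSort() exposes the individual sort criteria in order.
        for (SortField field : sort.getSort()) {
            System.out.println("field=" + field.getField()
                    + " type=" + field.getType()
                    + " reverse=" + field.getReverse());
        }
    }
}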

Usage

From source file:arena.lucene.LuceneIndexSearcherImpl.java

License:Open Source License

protected TopDocs executeSearch(IndexSearcher searcher, Query query, Filter filter, Sort sort,
        int collectorLimit) throws IOException {
    // Decide on how to search based on which elements of the lucene query model are available
    if (query != null) {
        // Full scoring search
        TopDocsCollector<? extends ScoreDoc> collector = null;
        if (sort == null) {
            collector = TopScoreDocCollector.create(collectorLimit, true);
        } else {
            SortField sortFields[] = sort.getSort();
            if (sortFields != null && sortFields.length > 0 && sortFields[0].getType() == SortField.SCORE
                    && !sortFields[0].getReverse()) {
                collector = TopScoreDocCollector.create(collectorLimit, true);
            } else {
                collector = TopFieldCollector.create(sort, collectorLimit, false, true, true, true);
            }
        }
        searcher.search(query, filter, collector);
        return collector.topDocs();

    } else if (filter != null) {
        // No query = no need for scoring, just dump the results into a hit collector that runs 
        // off the results in the order we want 
        DocIdSetIterator filterMatchesIterator = filter.getDocIdSet(searcher.getIndexReader()).iterator();
        if (sort == null) {
            // no sort available, so the natural iteration order is fine
            // if we have an iterator that means sorting is already handled, so just pull off the first n rows into the output
            ScoreDoc[] scoreDocs = new ScoreDoc[collectorLimit];
            int found = 0;
            int docId;
            while (found < collectorLimit
                    && (docId = filterMatchesIterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
                scoreDocs[found++] = new ScoreDoc(docId, 1f);
            }
            return new TopDocs(found, found < collectorLimit ? Arrays.copyOf(scoreDocs, found) : scoreDocs, 1f);
        } else {
            TopDocsCollector<? extends ScoreDoc> collector = TopFieldCollector.create(sort, collectorLimit,
                    false, true, true, true);
            int docId;
            while ((docId = filterMatchesIterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
                collector.collect(docId);
            }
            return collector.topDocs();

        }
    } else if (sort != null) {
        // no query and no filter so no score but add every doc in the index for non-score sorting            
        TopDocsCollector<? extends ScoreDoc> collector = TopFieldCollector.create(sort, collectorLimit, false,
                true, true, true);
        int numDocs = searcher.getIndexReader().numDocs();
        for (int n = 0; n < numDocs; n++) {
            collector.collect(n);
        }
        return collector.topDocs();
    } else {
        // no query filter or sort: return the top n docs
        ScoreDoc[] scoreDocs = new ScoreDoc[Math.min(collectorLimit, searcher.getIndexReader().numDocs())];

        for (int n = 0; n < scoreDocs.length; n++) {
            scoreDocs[n] = new ScoreDoc(n, 1f);
        }
        return new TopDocs(scoreDocs.length, scoreDocs, 1f);
    }
}
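
The branch above that inspects sort.getSort() decides whether a plain TopScoreDocCollector is sufficient. A minimal sketch of the same check as a standalone helper (assuming the Lucene 4.x SortField.Type enum rather than the integer constants used in the example):

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

public final class SortUtil {
    /**
     * Mirrors the check in the example above: a null sort, or a sort whose leading
     * criterion is plain non-reversed relevance, is treated as Lucene's default
     * ranking, so the cheaper TopScoreDocCollector can be used instead of a
     * TopFieldCollector.
     */
    public static boolean isDefaultRelevanceSort(Sort sort) {
        if (sort == null) {
            return true;
        }
        SortField[] fields = sort.getSort();
        return fields != null && fields.length > 0
                && fields[0].getType() == SortField.Type.SCORE
                && !fields[0].getReverse();
    }
}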

From source file:com.dasasian.chok.lucene.Hits.java

License:Apache License

public void fieldSort(Sort sort, WritableType[] fieldTypes, int count) {
    // TODO merge sort does not work due to CHOK-93
    final ArrayList<Hit> list = new ArrayList<>(count);
    final int size = _hitsList.size();
    for (int i = 0; i < size; i++) {
        list.addAll(_hitsList.remove(0));
    }
    _hitsList = new ArrayList<>();
    if (!list.isEmpty()) {
        Collections.sort(list, new FieldSortComparator(sort.getSort(), fieldTypes));
    }
    _sortedList = list.subList(0, Math.min(count, list.size()));
}

From source file:com.dasasian.chok.lucene.LuceneServer.java

License:Apache License

/**
 * Search in the given shards and return max hits for given query
 *
 * @param query the query
 * @param freqs document frequency writer
 * @param shards the shards
 * @param result the writable for the result
 * @param max max results
 * @param sort the sort order
 * @param timeout timeout value
 * @param filter filter to apply
 * @throws IOException when an error occurs
 */
protected final void search(final Query query, final DocumentFrequencyWritable freqs, final String[] shards,
        final HitsMapWritable result, final int max, Sort sort, long timeout, Filter filter)
        throws IOException {
    timeout = getCollectorTimeout(timeout);
    final Query rewrittenQuery = rewrite(query, shards);
    final int numDocs = freqs.getNumDocsAsInteger();
    final Weight weight = rewrittenQuery
            .weight(new CachedDfSource(freqs.getAll(), numDocs, new DefaultSimilarity()));
    int totalHits = 0;
    final int shardsCount = shards.length;

    // Run the search in parallel on the shards with a thread pool.
    CompletionService<SearchResult> csSearch = new ExecutorCompletionService<>(threadPool);

    for (int i = 0; i < shardsCount; i++) {
        SearchCall call = new SearchCall(shards[i], weight, max, sort, timeout, i, filter);
        csSearch.submit(call);
    }

    final ScoreDoc[][] scoreDocs = new ScoreDoc[shardsCount][];
    ScoreDoc scoreDocExample = null;
    for (int i = 0; i < shardsCount; i++) {
        try {
            final SearchResult searchResult = csSearch.take().get();
            final int callIndex = searchResult.getSearchCallIndex();

            totalHits += searchResult._totalHits;
            scoreDocs[callIndex] = searchResult._scoreDocs;
            if (scoreDocExample == null && scoreDocs[callIndex].length > 0) {
                scoreDocExample = scoreDocs[callIndex][0];
            }
        } catch (InterruptedException e) {
            throw new IOException("Multithread shard search interrupted:", e);
        } catch (ExecutionException e) {
            throw new IOException("Multithread shard search could not be executed:", e);
        }
    }

    result.addTotalHits(totalHits);

    final Iterable<Hit> finalHitList;
    // Limit the request to the number requested or the total number of
    // documents, whichever is smaller.
    int limit = Math.min(numDocs, max);
    if (sort == null || totalHits == 0) {
        final ChokHitQueue hq = new ChokHitQueue(limit);
        int pos = 0;
        BitSet done = new BitSet(shardsCount);
        while (done.cardinality() != shardsCount) {
            ScoreDoc scoreDoc = null;
            for (int i = 0; i < shardsCount; i++) {
                // only process this shard if it is not yet done.
                if (!done.get(i)) {
                    final ScoreDoc[] docs = scoreDocs[i];
                    if (pos < docs.length) {
                        scoreDoc = docs[pos];
                        final Hit hit = new Hit(shards[i], getNodeName(), scoreDoc.score, scoreDoc.doc);
                        if (!hq.insert(hit)) {
                            // no doc left that has a higher score than the lowest score in
                            // the queue
                            done.set(i, true);
                        }
                    } else {
                        // no docs left in this shard
                        done.set(i, true);
                    }
                }
            }
            // we always wait until we got all hits from this position in all
            // shards.

            pos++;
            if (scoreDoc == null) {
                // we do not have any more data
                break;
            }
        }
        finalHitList = hq;
    } else {
        WritableType[] sortFieldsTypes;
        FieldDoc fieldDoc = (FieldDoc) scoreDocExample;
        sortFieldsTypes = WritableType.detectWritableTypes(fieldDoc.fields);
        result.setSortFieldTypes(sortFieldsTypes);
        finalHitList = mergeFieldSort(new FieldSortComparator(sort.getSort(), sortFieldsTypes), limit,
                scoreDocs, shards, getNodeName());
    }

    for (Hit hit : finalHitList) {
        if (hit != null) {
            result.addHit(hit);
        }
    }
}

From source file:com.xiaomi.linden.core.search.LindenResultParser.java

License:Apache License

private int getSortScoreFieldPos(Sort sort) {
    int sortScoreField = -1;
    if (sort != null) {
        for (int i = 0; i < sort.getSort().length; ++i) {
            if (sort.getSort()[i].getType() == SortField.Type.SCORE) {
                sortScoreField = i;
                break;
            }
        }
    }
    return sortScoreField;
}
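
Once the position of the SCORE sort field is known, the relevance score of a hit can be read back out of the FieldDoc.fields array at that slot. A hedged follow-up sketch (the class and method names here are illustrative, not part of the Linden source above):

import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;

public class SortScoreLookup {
    /**
     * Reads the relevance score from a FieldDoc produced by a collector that
     * tracked scores via a SCORE sort field; falls back to ScoreDoc.score.
     * sortScoreFieldPos is an index such as the one computed by getSortScoreFieldPos above.
     */
    static float scoreOf(ScoreDoc hit, int sortScoreFieldPos) {
        if (sortScoreFieldPos != -1 && hit instanceof FieldDoc) {
            Object value = ((FieldDoc) hit).fields[sortScoreFieldPos];
            if (value instanceof Float) {
                return (Float) value;
            }
        }
        return hit.score;
    }
}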

From source file:net.sf.katta.lib.lucene.Hits.java

License:Apache License

public void fieldSort(Sort sort, WritableType[] fieldTypes, int count) {
    // TODO merge sort does not work due to KATTA-93
    final ArrayList<Hit> list = new ArrayList<Hit>(count);
    final int size = _hitsList.size();
    for (int i = 0; i < size; i++) {
        list.addAll(_hitsList.remove(0));
    }
    _hitsList = new ArrayList<List<Hit>>();
    if (!list.isEmpty()) {
        Collections.sort(list, new FieldSortComparator(sort.getSort(), fieldTypes));
    }
    _sortedList = list.subList(0, Math.min(count, list.size()));
}

From source file:net.sf.katta.lib.lucene.LuceneServer.java

License:Apache License

/**
 * Search in the given shards and return max hits for given query
 *
 * @param query the query
 * @param freqs document frequency writer
 * @param shards the shards
 * @param result the writable for the result
 * @param max max results
 * @param sort the sort order
 * @param timeout timeout value
 * @param filter filter to apply
 * @throws IOException when an error occurs
 */
protected final void search(final Query query, final DocumentFrequencyWritable freqs, final String[] shards,
        final HitsMapWritable result, final int max, Sort sort, long timeout, Filter filter)
        throws IOException {
    timeout = getCollectorTiemout(timeout);
    final Query rewrittenQuery = rewrite(query, shards);
    final int numDocs = freqs.getNumDocsAsInteger();

    final Weight weight = rewrittenQuery
            .weight(new CachedDfSource(freqs.getAll(), numDocs, new DefaultSimilarity()));
    int totalHits = 0;
    final int shardsCount = shards.length;

    // Run the search in parallel on the shards with a thread pool.
    CompletionService<SearchResult> csSearch = new ExecutorCompletionService<SearchResult>(_threadPool);

    for (int i = 0; i < shardsCount; i++) {
        SearchCall call = new SearchCall(shards[i], weight, max, sort, timeout, i, filter);
        csSearch.submit(call);
    }

    final ScoreDoc[][] scoreDocs = new ScoreDoc[shardsCount][];
    ScoreDoc scoreDocExample = null;
    for (int i = 0; i < shardsCount; i++) {
        try {
            final SearchResult searchResult = csSearch.take().get();
            final int callIndex = searchResult.getSearchCallIndex();

            totalHits += searchResult._totalHits;
            scoreDocs[callIndex] = searchResult._scoreDocs;
            if (scoreDocExample == null && scoreDocs[callIndex].length > 0) {
                scoreDocExample = scoreDocs[callIndex][0];
            }
        } catch (InterruptedException e) {
            throw new IOException("Multithread shard search interrupted:", e);
        } catch (ExecutionException e) {
            throw new IOException("Multithread shard search could not be executed:", e);
        }
    }

    result.addTotalHits(totalHits);

    final Iterable<Hit> finalHitList;
    // Limit the request to the number requested or the total number of
    // documents, whichever is smaller.
    int limit = Math.min(numDocs, max);
    if (sort == null || totalHits == 0) {
        final KattaHitQueue hq = new KattaHitQueue(limit);
        int pos = 0;
        BitSet done = new BitSet(shardsCount);
        while (done.cardinality() != shardsCount) {
            ScoreDoc scoreDoc = null;
            for (int i = 0; i < shardsCount; i++) {
                // only process this shard if it is not yet done.
                if (!done.get(i)) {
                    final ScoreDoc[] docs = scoreDocs[i];
                    if (pos < docs.length) {
                        scoreDoc = docs[pos];
                        final Hit hit = new Hit(shards[i], getNodeName(), scoreDoc.score, scoreDoc.doc);
                        if (!hq.insert(hit)) {
                            // no doc left that has a higher score than the lowest score in
                            // the queue
                            done.set(i, true);
                        }
                    } else {
                        // no docs left in this shard
                        done.set(i, true);
                    }
                }
            }
            // we always wait until we got all hits from this position in all
            // shards.

            pos++;
            if (scoreDoc == null) {
                // we do not have any more data
                break;
            }
        }
        finalHitList = hq;
    } else {
        WritableType[] sortFieldsTypes = null;
        FieldDoc fieldDoc = (FieldDoc) scoreDocExample;
        sortFieldsTypes = WritableType.detectWritableTypes(fieldDoc.fields);
        result.setSortFieldTypes(sortFieldsTypes);
        finalHitList = mergeFieldSort(new FieldSortComparator(sort.getSort(), sortFieldsTypes), limit,
                scoreDocs, shards, getNodeName());
    }

    for (Hit hit : finalHitList) {
        if (hit != null) {
            result.addHit(hit);
        }
    }
}

From source file:nl.strohalm.cyclos.utils.lucene.LuceneQueryHandler.java

License:Open Source License

private <E extends Entity & Indexable> List<E> iterator(final Class<E> entityType, final Query query,
        final Filter filter, final Sort sort, final PageParameters pageParameters,
        final Relationship... fetch) {
    IndexSearcher searcher = null;
    // Prepare the parameters
    IndexReader reader;
    try {
        reader = indexHandler.openReader(entityType);
    } catch (final DaoException e) {
        // Probably index files don't exist
        return new IteratorListImpl<E>(Collections.<E>emptyList().iterator());
    }
    final int firstResult = pageParameters == null ? 0 : pageParameters.getFirstResult();
    int maxResults = (pageParameters == null || pageParameters.getMaxResults() == 0) ? Integer.MAX_VALUE
            : pageParameters.getMaxResults() + firstResult;

    try {
        // Run the search
        searcher = new IndexSearcher(reader);
        TopDocs topDocs;
        if (sort == null || ArrayUtils.isEmpty(sort.getSort())) {
            topDocs = searcher.search(query, filter, maxResults);
        } else {
            topDocs = searcher.search(query, filter, maxResults, sort);
        }
        // Open the iterator
        Iterator<E> iterator = new DocsIterator<E>(this, entityType, reader, topDocs, firstResult, fetch);
        DataIteratorHelper.registerOpen(iterator, false);

        // Wrap the iterator
        return new IteratorListImpl<E>(iterator);

    } catch (final Exception e) {
        throw new DaoException(e);
    } finally {
        try {
            searcher.close();
        } catch (final Exception e) {
            // Silently ignore
        }
    }
}

From source file:nl.strohalm.cyclos.utils.lucene.LuceneQueryHandler.java

License:Open Source License

private <E extends Entity & Indexable> List<E> listOrPage(final Class<E> entityType, final Query query,
        final Filter filter, final Sort sort, final ResultType resultType, final PageParameters pageParameters,
        final Relationship... fetch) {
    IndexSearcher searcher = null;
    // Prepare the parameters
    IndexReader reader;
    try {
        reader = indexHandler.openReader(entityType);
    } catch (final DaoException e) {
        // Probably index files don't exist
        return Collections.emptyList();
    }
    final int firstResult = pageParameters == null ? 0 : pageParameters.getFirstResult();
    int maxResults = pageParameters == null ? Integer.MAX_VALUE : pageParameters.getMaxResults() + firstResult;
    try {
        searcher = new IndexSearcher(reader);
        if (maxResults == 0 && resultType == ResultType.PAGE) {
            // We just want the total hit count.
            TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(query, filter, collector);
            int totalHits = collector.getTotalHits();
            return new PageImpl<E>(pageParameters, totalHits, Collections.<E>emptyList());
        } else {
            if (maxResults == 0) {
                maxResults = Integer.MAX_VALUE;
            }
            // Run the search
            TopDocs topDocs;
            if (sort == null || ArrayUtils.isEmpty(sort.getSort())) {
                topDocs = searcher.search(query, filter, maxResults);
            } else {
                topDocs = searcher.search(query, filter, maxResults, sort);
            }

            // Build the list
            ScoreDoc[] scoreDocs = topDocs.scoreDocs;
            List<E> list = new ArrayList<E>(Math.min(firstResult, scoreDocs.length));
            for (int i = firstResult; i < scoreDocs.length; i++) {
                ScoreDoc scoreDoc = scoreDocs[i];
                E entity = toEntity(reader, scoreDoc.doc, entityType, fetch);
                if (entity != null) {
                    list.add(entity);
                }
            }

            // When result type is page, get the additional data
            if (resultType == ResultType.PAGE) {
                list = new PageImpl<E>(pageParameters, topDocs.totalHits, list);
            }
            return list;
        }
    } catch (final EntityNotFoundException e) {
        throw new ValidationException("general.error.indexedRecordNotFound");
    } catch (ApplicationException e) {
        throw e;
    } catch (final Exception e) {
        throw new DaoException(e);
    } finally {
        // Close resources
        try {
            searcher.close();
        } catch (final Exception e) {
            // Silently ignore
        }
        try {
            reader.close();
        } catch (final Exception e) {
            // Silently ignore
        }
    }
}

From source file:org.alfresco.repo.search.impl.querymodel.impl.lucene.LuceneQueryEngine.java

License:Open Source License

public QueryEngineResults executeQuery(Query query, QueryOptions options,
        FunctionEvaluationContext functionContext) {
    Set<String> selectorGroup = null;
    if (query.getSource() != null) {
        List<Set<String>> selectorGroups = query.getSource().getSelectorGroups(functionContext);

        if (selectorGroups.size() == 0) {
            throw new UnsupportedOperationException("No selectors");
        }

        if (selectorGroups.size() > 1) {
            throw new UnsupportedOperationException("Advanced join is not supported");
        }

        selectorGroup = selectorGroups.get(0);
    }

    SearchParameters searchParameters = new SearchParameters();
    if (options.getLocales().size() > 0) {
        for (Locale locale : options.getLocales()) {
            searchParameters.addLocale(locale);
        }
    }
    searchParameters.excludeDataInTheCurrentTransaction(!options.isIncludeInTransactionData());
    searchParameters.setSkipCount(options.getSkipCount());
    searchParameters.setMaxPermissionChecks(options.getMaxPermissionChecks());
    searchParameters.setMaxPermissionCheckTimeMillis(options.getMaxPermissionCheckTimeMillis());
    searchParameters.setDefaultFieldName(options.getDefaultFieldName());
    searchParameters.setMlAnalaysisMode(options.getMlAnalaysisMode());
    if (options.getMaxItems() >= 0) {
        searchParameters.setLimitBy(LimitBy.FINAL_SIZE);
        searchParameters.setLimit(options.getMaxItems());
        searchParameters.setMaxItems(options.getMaxItems());
    } else {
        searchParameters.setLimitBy(LimitBy.UNLIMITED);
    }
    searchParameters.setUseInMemorySort(options.getUseInMemorySort());
    searchParameters.setMaxRawResultSetSizeForInMemorySort(options.getMaxRawResultSetSizeForInMemorySort());
    searchParameters.setBulkFetchEnabled(options.isBulkFetchEnabled());
    searchParameters.setQueryConsistency(options.getQueryConsistency());

    try {
        StoreRef storeRef = options.getStores().get(0);
        searchParameters.addStore(storeRef);
        if (query instanceof LuceneQueryBuilder) {
            SearchService searchService = indexAndSearcher.getSearcher(storeRef,
                    options.isIncludeInTransactionData());
            if (searchService instanceof LuceneSearcher) {
                LuceneSearcher luceneSearcher = (LuceneSearcher) searchService;
                ClosingIndexSearcher searcher = luceneSearcher.getClosingIndexSearcher();
                LuceneQueryBuilderContext<org.apache.lucene.search.Query, Sort, ParseException> luceneContext = new LuceneQueryBuilderContextImpl(
                        dictionaryService, namespaceService, tenantService, searchParameters,
                        indexAndSearcher.getDefaultMLSearchAnalysisMode(), searcher.getIndexReader());

                @SuppressWarnings("unchecked")
                LuceneQueryBuilder<org.apache.lucene.search.Query, Sort, ParseException> builder = (LuceneQueryBuilder<org.apache.lucene.search.Query, Sort, ParseException>) query;
                org.apache.lucene.search.Query luceneQuery = builder.buildQuery(selectorGroup, luceneContext,
                        functionContext);

                if (logger.isDebugEnabled()) {
                    logger.debug("Executing lucene query: " + luceneQuery);
                }

                Sort sort = builder.buildSort(selectorGroup, luceneContext, functionContext);

                Hits hits = searcher.search(luceneQuery);

                boolean postSort = false;
                if (sort != null) {
                    postSort = searchParameters.usePostSort(hits.length(), useInMemorySort,
                            maxRawResultSetSizeForInMemorySort);
                    if (postSort == false) {
                        hits = searcher.search(luceneQuery, sort);
                    }
                }

                ResultSet answer;
                ResultSet result = new LuceneResultSet(hits, searcher, nodeService, tenantService,
                        searchParameters, indexAndSearcher);
                if (postSort) {
                    if (sort != null) {
                        for (SortField sf : sort.getSort()) {
                            searchParameters.addSort(sf.getField(), !sf.getReverse());
                        }
                    }

                    ResultSet sorted = new SortedResultSet(result, nodeService,
                            builder.buildSortDefinitions(selectorGroup, luceneContext, functionContext),
                            namespaceService, dictionaryService, searchParameters.getSortLocale());
                    answer = sorted;
                } else {
                    answer = result;
                }
                ResultSet rs = new PagingLuceneResultSet(answer, searchParameters, nodeService);

                Map<Set<String>, ResultSet> map = new HashMap<Set<String>, ResultSet>(1);
                map.put(selectorGroup, rs);
                return new QueryEngineResults(map);
            } else {
                throw new UnsupportedOperationException();
            }
        } else {
            throw new UnsupportedOperationException();
        }
    } catch (ParseException e) {
        throw new SearcherException("Failed to parse query: " + e);
    } catch (IOException e) {
        throw new SearcherException("IO exception during search", e);
    }
}

From source file:org.apache.jackrabbit.core.query.lucene.DescendantSelfAxisQuery.java

License:Apache License

/**
 * {@inheritDoc}
 */
public QueryHits execute(final JackrabbitIndexSearcher searcher, final SessionImpl session, final Sort sort)
        throws IOException {
    if (sort.getSort().length == 0 && subQueryMatchesAll()) {
        // maps path String to NodeId
        Map startingPoints = new TreeMap();
        QueryHits result = searcher.execute(getContextQuery(), sort);
        try {
            // minLevels 0 and 1 are handled with a series of
            // NodeTraversingQueryHits directly on result. For minLevels >= 2
            // intermediate ChildNodesQueryHits are required.
            for (int i = 2; i <= getMinLevels(); i++) {
                result = new ChildNodesQueryHits(result, session);
            }

            ScoreNode sn;
            try {
                while ((sn = result.nextScoreNode()) != null) {
                    Node node = session.getNodeById(sn.getNodeId());
                    startingPoints.put(node.getPath(), sn);
                }
            } catch (RepositoryException e) {
                IOException ex = new IOException(e.getMessage());
                ex.initCause(e);
                throw ex;
            }
        } finally {
            result.close();
        }

        // prune overlapping starting points
        String previousPath = null;
        for (Iterator it = startingPoints.keySet().iterator(); it.hasNext();) {
            String path = (String) it.next();
            // if the previous path is a prefix of this path then the
            // current path is obsolete
            if (previousPath != null && path.startsWith(previousPath)) {
                it.remove();
            } else {
                previousPath = path;
            }
        }

        final Iterator scoreNodes = startingPoints.values().iterator();
        return new AbstractQueryHits() {

            private NodeTraversingQueryHits currentTraversal;

            {
                fetchNextTraversal();
            }

            public void close() throws IOException {
                if (currentTraversal != null) {
                    currentTraversal.close();
                }
            }

            public ScoreNode nextScoreNode() throws IOException {
                while (currentTraversal != null) {
                    ScoreNode sn = currentTraversal.nextScoreNode();
                    if (sn != null) {
                        return sn;
                    } else {
                        fetchNextTraversal();
                    }
                }
                // if we get here there are no more score nodes
                return null;
            }

            private void fetchNextTraversal() throws IOException {
                if (currentTraversal != null) {
                    currentTraversal.close();
                }
                if (scoreNodes.hasNext()) {
                    ScoreNode sn = (ScoreNode) scoreNodes.next();
                    try {
                        Node node = session.getNodeById(sn.getNodeId());
                        currentTraversal = new NodeTraversingQueryHits(node, getMinLevels() == 0);
                    } catch (RepositoryException e) {
                        IOException ex = new IOException(e.getMessage());
                        ex.initCause(e);
                        throw ex;
                    }
                } else {
                    currentTraversal = null;
                }
            }
        };
    } else {
        return null;
    }
}